From c122bc09b49ae2304fa6c82b0222b22674b01472 Mon Sep 17 00:00:00 2001
From: Vibhu Pandey
Date: Thu, 16 Oct 2025 18:00:38 +0530
Subject: [PATCH] feat(tokenizer|sso): add tokenizer for session management and oidc sso support (#9183)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## 📄 Summary

- Instead of relying on JWTs for session management, we are adding a second token system: opaque tokens, which give us token expiration and revocation.
- Emails are now regex-checked throughout the backend.
- Support has been added for the OIDC protocol.
---
 .github/workflows/integrationci.yaml | 17 +- cmd/community/server.go | 10 +- cmd/config.go | 10 - cmd/enterprise/server.go | 31 +- conf/example.yaml | 25 + .../callbackauthn/oidccallbackauthn/authn.go | 191 ++ .../callbackauthn/samlcallbackauthn/authn.go | 155 +++ ee/query-service/app/api/api.go | 9 +- ee/query-service/app/api/auth.go | 107 -- ee/query-service/app/api/cloudIntegrations.go | 38 +- ee/query-service/app/server.go | 21 +- ee/query-service/constants/constants.go | 11 - frontend/src/AppRoutes/utils.ts | 3 - frontend/src/api/common/getQueryStats.ts | 62 -- frontend/src/api/index.ts | 42 +- frontend/src/api/utils.ts | 11 +- .../src/api/v1/domains/{ => id}/delete.ts | 5 +- frontend/src/api/v1/domains/id/put.ts | 25 + frontend/src/api/v1/domains/list.ts | 12 +- frontend/src/api/v1/domains/post.ts | 26 + frontend/src/api/v1/domains/update.ts | 23 - frontend/src/api/v1/invite/id/accept.ts | 9 +- frontend/src/api/v1/login/loginPrecheck.ts | 28 - frontend/src/api/v1/register/post.ts | 27 + frontend/src/api/v1/register/signup.ts | 22 - .../api/v1/{login/login.ts => user/me/get.ts} | 12 +- .../v1/{domains/create.ts => version/get.ts} | 11 +- frontend/src/api/v1/version/getVersion.ts | 25 - frontend/src/api/v2/sessions/context/get.ts | 27 + frontend/src/api/v2/sessions/delete.ts | 19 + .../api/v2/sessions/email_password/post.ts | 23 + frontend/src/api/v2/sessions/rotate/post.ts | 23 + frontend/src/constants/api.ts | 4 +- .../__tests__/EndPointDetails.test.tsx | 1 + .../__tests__/TopErrors.test.tsx | 1 + frontend/src/container/AppLayout/index.tsx | 10 +- .../__tests__/EntityEvents.test.tsx | 1 + .../__tests__/EntityMetrics.test.tsx | 1 + .../__tests__/EntityTraces.test.tsx | 1 + .../container/Login/__tests__/Login.test.tsx | 910 ++++++++++++++++-- frontend/src/container/Login/index.tsx | 457 +++++---- .../LogsActionsContainer.tsx | 20 - .../src/container/LogsExplorerViews/index.tsx | 16 - .../tests/LogsExplorerViews.test.tsx | 4 - .../container/MySettings/Password/index.tsx | 154 --- .../container/MySettings/UserInfo/index.tsx | 20 +- .../OnboardingContainer.tsx | 1 + .../AuthDomain/AuthDomain.styles.scss | 19 + .../CreateEdit/AuthnProviderSelector.tsx | 90 ++ .../CreateEdit/CreateEdit.styles.scss | 39 + .../AuthDomain/CreateEdit/CreateEdit.tsx | 134 +++ .../CreateEdit/Providers/AuthnGoogleAuth.tsx | 68 ++ .../CreateEdit/Providers/AuthnOIDC.tsx | 105 ++ .../CreateEdit/Providers/AuthnSAML.tsx | 77 ++ .../Providers/Providers.styles.scss | 67 ++ .../AuthDomain/Toggle.tsx | 45 + .../OrganizationSettings/AuthDomain/index.tsx | 148 +++ .../AuthDomains/AddDomain/index.tsx | 101 -- .../AuthDomains/Create/Row/index.tsx | 39 - .../AuthDomains/Create/Row/styles.ts | 16 - .../AuthDomains/Create/index.tsx | 118 --- .../AuthDomains/Create/styles.ts | 14 - .../AuthDomains/Edit/EditGoogleAuth.tsx | 49 - .../AuthDomains/Edit/EditSAML.tsx | 43 - .../AuthDomains/Edit/helpers.ts | 40 -
.../AuthDomains/Edit/index.tsx | 102 -- .../AuthDomains/Switch/index.tsx | 46 - .../AuthDomains/helpers.test.ts | 74 -- .../AuthDomains/helpers.ts | 45 - .../AuthDomains/index.tsx | 263 ----- .../AuthDomains/styles.ts | 15 - .../InviteUserModal/InviteUserModal.tsx | 92 +- .../OrganizationSettings/Members/index.tsx | 32 +- .../PendingInvitesContainer/index.tsx | 48 +- .../container/OrganizationSettings/index.tsx | 4 +- .../useActiveLicenseV3/useActiveLicenseV3.tsx | 1 + frontend/src/index.tsx | 11 +- frontend/src/pages/Login/index.tsx | 16 +- frontend/src/pages/ResetPassword/index.tsx | 48 +- frontend/src/pages/SignUp/SignUp.tsx | 124 +-- frontend/src/pages/SignUp/utils.ts | 13 - frontend/src/providers/App/App.tsx | 23 +- frontend/src/providers/App/types.ts | 4 +- frontend/src/providers/App/utils.ts | 3 +- frontend/src/providers/EventSource.tsx | 22 +- frontend/src/types/api/index.ts | 5 + frontend/src/types/api/user/accept.ts | 4 +- .../src/types/api/user/getInviteDetails.ts | 2 - frontend/src/types/api/user/login.ts | 18 - frontend/src/types/api/user/loginPrecheck.ts | 16 - frontend/src/types/api/v1/domains/list.ts | 44 + frontend/src/types/api/v1/domains/post.ts | 39 + frontend/src/types/api/v1/domains/put.ts | 37 + frontend/src/types/api/v1/register/post.ts | 10 + .../{user/getVersion.ts => v1/version/get.ts} | 2 +- .../src/types/api/v2/sessions/context/get.ts | 32 + .../api/v2/sessions/email_password/post.ts | 10 + .../src/types/api/v2/sessions/rotate/post.ts | 8 + go.mod | 14 +- go.sum | 53 +- pkg/authn/authn.go | 25 + pkg/authn/authnstore/sqlauthnstore/store.go | 65 ++ .../googlecallbackauthn/authn.go | 129 +++ .../passwordauthn/emailpasswordauthn/authn.go | 34 + pkg/cache/cache.go | 6 +- pkg/cache/memorycache/provider.go | 32 +- pkg/cache/rediscache/provider.go | 11 +- pkg/cache/rediscache/provider_test.go | 2 +- pkg/errors/code.go | 23 +- pkg/errors/errors.go | 5 +- pkg/errors/http.go | 60 ++ pkg/errors/type.go | 27 +- pkg/http/middleware/api_key.go | 4 +- pkg/http/middleware/auth.go | 66 -- pkg/http/middleware/authn.go | 150 +++ pkg/http/middleware/doc.go | 2 - pkg/http/render/render.go | 39 +- pkg/instrumentation/loghandler/correlation.go | 7 - pkg/instrumentation/sdk.go | 35 +- pkg/modules/authdomain/authdomain.go | 39 + .../authdomain/implauthdomain/handler.go | 144 +++ .../authdomain/implauthdomain/module.go | 45 + .../authdomain/implauthdomain/store.go | 159 +++ .../dashboard/impldashboard/handler.go | 2 +- pkg/modules/session/implsession/handler.go | 192 ++++ pkg/modules/session/implsession/module.go | 233 +++++ pkg/modules/session/session.go | 60 ++ pkg/modules/user/impluser/getter.go | 57 +- pkg/modules/user/impluser/handler.go | 358 +------ pkg/modules/user/impluser/module.go | 408 ++------ pkg/modules/user/impluser/store.go | 429 ++------- pkg/modules/user/option.go | 29 +- pkg/modules/user/user.go | 73 +- .../app/cloudintegrations/controller_test.go | 372 ------- pkg/query-service/app/http_handler.go | 95 +- .../app/integrations/manager_test.go | 105 -- .../app/integrations/test_utils.go | 223 ----- .../app/logparsingpipeline/pipelineBuilder.go | 2 +- .../config_provider_multitenancy_test.go | 1 - .../app/opamp/config_provider_test.go | 332 ------- pkg/query-service/app/server.go | 20 +- pkg/query-service/constants/constants.go | 4 - pkg/query-service/rules/manager_test.go | 818 ---------------- .../integration/filter_suggestions_test.go | 389 -------- .../integration/logparsingpipeline_test.go | 866 ----------------- .../signoz_cloud_integrations_test.go | 605 
------------ .../integration/signoz_integrations_test.go | 663 ------------- .../tests/integration/test_utils.go | 235 ----- pkg/query-service/utils/pass.go | 10 - pkg/query-service/utils/testutils.go | 119 --- pkg/signoz/authn.go | 26 + pkg/signoz/config.go | 10 + pkg/signoz/handler.go | 10 +- pkg/signoz/handler_test.go | 7 +- pkg/signoz/module.go | 18 +- pkg/signoz/module_test.go | 7 +- pkg/signoz/provider.go | 27 +- pkg/signoz/provider_test.go | 3 +- pkg/signoz/signoz.go | 46 +- pkg/sqlmigration/050_add_auth_token.go | 95 ++ pkg/sqlstore/sqlitesqlstore/provider.go | 1 + pkg/sqlstore/sqlstorehook/instrumentation.go | 40 + pkg/sqlstore/sqlstorehook/logging.go | 5 +- .../analyticsstatsreporter/provider.go | 31 +- pkg/tokenizer/config.go | 112 +++ pkg/tokenizer/jwttokenizer/claims.go | 34 + pkg/tokenizer/jwttokenizer/provider.go | 152 +++ pkg/tokenizer/opaquetokenizer/provider.go | 488 ++++++++++ pkg/tokenizer/opaquetokenizer/store.go | 258 +++++ pkg/tokenizer/tokenizer.go | 43 + pkg/tokenizer/tokenizertest/provider.go | 76 ++ pkg/tokenizer/wrapped.go | 135 +++ pkg/types/authtypes/authn.go | 128 +++ pkg/types/authtypes/claims.go | 53 +- pkg/types/authtypes/domain.go | 206 ++++ pkg/types/authtypes/email_password.go | 38 + pkg/types/authtypes/google.go | 38 + pkg/types/authtypes/jwt.go | 143 --- pkg/types/authtypes/jwt_test.go | 210 ---- pkg/types/authtypes/oidc.go | 65 ++ pkg/types/authtypes/saml.go | 47 + pkg/types/authtypes/session.go | 78 ++ pkg/types/authtypes/token.go | 263 +++++ pkg/types/cachetypes/cacheable.go | 9 + pkg/types/ctxtypes/auth.go | 16 +- pkg/types/ctxtypes/constants.go | 13 - pkg/types/domain.go | 187 ---- pkg/types/invite.go | 108 ++- pkg/types/licensetypes/license.go | 2 +- pkg/types/ssotypes/google.go | 90 -- pkg/types/ssotypes/saml.go | 107 -- pkg/types/ssotypes/sso.go | 91 -- pkg/types/tracefunneltypes/utils.go | 2 +- pkg/types/tracefunneltypes/utils_test.go | 51 +- pkg/types/user.go | 162 +--- pkg/valuer/email.go | 105 ++ pkg/valuer/email_test.go | 39 + pkg/valuer/uuid.go | 4 +- pkg/valuer/valuer.go | 1 + templates/email/update_role.gotmpl | 7 - tests/integration/conftest.py | 3 + tests/integration/fixtures/auth.py | 130 ++- tests/integration/fixtures/driver.py | 21 + tests/integration/fixtures/http.py | 8 +- tests/integration/fixtures/idp.py | 95 ++ tests/integration/fixtures/idputils.py | 305 ++++++ tests/integration/fixtures/network.py | 15 +- tests/integration/fixtures/postgres.py | 2 +- tests/integration/fixtures/signoz.py | 15 +- tests/integration/fixtures/types.py | 22 +- tests/integration/poetry.lock | 665 ++++++++++++- tests/integration/pyproject.toml | 7 +- tests/integration/src/bootstrap/setup.py | 8 +- .../src/{auth => callbackauthn}/__init__.py | 0 .../integration/src/callbackauthn/a_domain.py | 167 ++++ tests/integration/src/callbackauthn/b_saml.py | 108 +++ tests/integration/src/callbackauthn/c_oidc.py | 129 +++ .../integration/src/passwordauthn/__init__.py | 0 .../src/{auth => passwordauthn}/a_register.py | 58 +- .../src/{auth => passwordauthn}/b_license.py | 35 +- .../src/{auth => passwordauthn}/c_apikey.py | 5 +- .../src/{auth => passwordauthn}/d_password.py | 29 +- tests/integration/src/querier/a_logs.py | 43 +- tests/integration/src/querier/b_traces.py | 4 +- tests/integration/src/ttl/a_ttl.py | 71 +- 225 files changed, 9291 insertions(+), 9503 deletions(-) create mode 100644 ee/authn/callbackauthn/oidccallbackauthn/authn.go create mode 100644 ee/authn/callbackauthn/samlcallbackauthn/authn.go delete mode 100644 ee/query-service/app/api/auth.go 
delete mode 100644 frontend/src/api/common/getQueryStats.ts rename frontend/src/api/v1/domains/{ => id}/delete.ts (62%) create mode 100644 frontend/src/api/v1/domains/id/put.ts create mode 100644 frontend/src/api/v1/domains/post.ts delete mode 100644 frontend/src/api/v1/domains/update.ts delete mode 100644 frontend/src/api/v1/login/loginPrecheck.ts create mode 100644 frontend/src/api/v1/register/post.ts delete mode 100644 frontend/src/api/v1/register/signup.ts rename frontend/src/api/v1/{login/login.ts => user/me/get.ts} (57%) rename frontend/src/api/v1/{domains/create.ts => version/get.ts} (50%) delete mode 100644 frontend/src/api/v1/version/getVersion.ts create mode 100644 frontend/src/api/v2/sessions/context/get.ts create mode 100644 frontend/src/api/v2/sessions/delete.ts create mode 100644 frontend/src/api/v2/sessions/email_password/post.ts create mode 100644 frontend/src/api/v2/sessions/rotate/post.ts delete mode 100644 frontend/src/container/MySettings/Password/index.tsx create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/AuthDomain.styles.scss create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/AuthnProviderSelector.tsx create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/CreateEdit.styles.scss create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/CreateEdit.tsx create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnGoogleAuth.tsx create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnOIDC.tsx create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnSAML.tsx create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/Providers.styles.scss create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/Toggle.tsx create mode 100644 frontend/src/container/OrganizationSettings/AuthDomain/index.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/AddDomain/index.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Create/Row/index.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Create/Row/styles.ts delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Create/index.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Create/styles.ts delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Edit/EditGoogleAuth.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Edit/EditSAML.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Edit/helpers.ts delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Edit/index.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/Switch/index.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/helpers.test.ts delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/helpers.ts delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/index.tsx delete mode 100644 frontend/src/container/OrganizationSettings/AuthDomains/styles.ts delete mode 100644 frontend/src/pages/SignUp/utils.ts delete mode 100644 frontend/src/types/api/user/login.ts delete mode 100644 frontend/src/types/api/user/loginPrecheck.ts create mode 100644 frontend/src/types/api/v1/domains/list.ts create 
mode 100644 frontend/src/types/api/v1/domains/post.ts create mode 100644 frontend/src/types/api/v1/domains/put.ts create mode 100644 frontend/src/types/api/v1/register/post.ts rename frontend/src/types/api/{user/getVersion.ts => v1/version/get.ts} (65%) create mode 100644 frontend/src/types/api/v2/sessions/context/get.ts create mode 100644 frontend/src/types/api/v2/sessions/email_password/post.ts create mode 100644 frontend/src/types/api/v2/sessions/rotate/post.ts create mode 100644 pkg/authn/authn.go create mode 100644 pkg/authn/authnstore/sqlauthnstore/store.go create mode 100644 pkg/authn/callbackauthn/googlecallbackauthn/authn.go create mode 100644 pkg/authn/passwordauthn/emailpasswordauthn/authn.go create mode 100644 pkg/errors/http.go delete mode 100644 pkg/http/middleware/auth.go create mode 100644 pkg/http/middleware/authn.go delete mode 100644 pkg/http/middleware/doc.go create mode 100644 pkg/modules/authdomain/authdomain.go create mode 100644 pkg/modules/authdomain/implauthdomain/handler.go create mode 100644 pkg/modules/authdomain/implauthdomain/module.go create mode 100644 pkg/modules/authdomain/implauthdomain/store.go create mode 100644 pkg/modules/session/implsession/handler.go create mode 100644 pkg/modules/session/implsession/module.go create mode 100644 pkg/modules/session/session.go delete mode 100644 pkg/query-service/app/cloudintegrations/controller_test.go delete mode 100644 pkg/query-service/app/integrations/manager_test.go delete mode 100644 pkg/query-service/app/integrations/test_utils.go delete mode 100644 pkg/query-service/app/opamp/config_provider_multitenancy_test.go delete mode 100644 pkg/query-service/app/opamp/config_provider_test.go delete mode 100644 pkg/query-service/rules/manager_test.go delete mode 100644 pkg/query-service/tests/integration/filter_suggestions_test.go delete mode 100644 pkg/query-service/tests/integration/logparsingpipeline_test.go delete mode 100644 pkg/query-service/tests/integration/signoz_cloud_integrations_test.go delete mode 100644 pkg/query-service/tests/integration/signoz_integrations_test.go delete mode 100644 pkg/query-service/tests/integration/test_utils.go delete mode 100644 pkg/query-service/utils/pass.go delete mode 100644 pkg/query-service/utils/testutils.go create mode 100644 pkg/signoz/authn.go create mode 100644 pkg/sqlmigration/050_add_auth_token.go create mode 100644 pkg/sqlstore/sqlstorehook/instrumentation.go create mode 100644 pkg/tokenizer/config.go create mode 100644 pkg/tokenizer/jwttokenizer/claims.go create mode 100644 pkg/tokenizer/jwttokenizer/provider.go create mode 100644 pkg/tokenizer/opaquetokenizer/provider.go create mode 100644 pkg/tokenizer/opaquetokenizer/store.go create mode 100644 pkg/tokenizer/tokenizer.go create mode 100644 pkg/tokenizer/tokenizertest/provider.go create mode 100644 pkg/tokenizer/wrapped.go create mode 100644 pkg/types/authtypes/authn.go create mode 100644 pkg/types/authtypes/domain.go create mode 100644 pkg/types/authtypes/email_password.go create mode 100644 pkg/types/authtypes/google.go delete mode 100644 pkg/types/authtypes/jwt.go delete mode 100644 pkg/types/authtypes/jwt_test.go create mode 100644 pkg/types/authtypes/oidc.go create mode 100644 pkg/types/authtypes/saml.go create mode 100644 pkg/types/authtypes/session.go create mode 100644 pkg/types/authtypes/token.go delete mode 100644 pkg/types/ctxtypes/constants.go delete mode 100644 pkg/types/domain.go delete mode 100644 pkg/types/ssotypes/google.go delete mode 100644 pkg/types/ssotypes/saml.go delete mode 100644 
pkg/types/ssotypes/sso.go create mode 100644 pkg/valuer/email.go create mode 100644 pkg/valuer/email_test.go create mode 100644 tests/integration/fixtures/driver.py create mode 100644 tests/integration/fixtures/idp.py create mode 100644 tests/integration/fixtures/idputils.py rename tests/integration/src/{auth => callbackauthn}/__init__.py (100%) create mode 100644 tests/integration/src/callbackauthn/a_domain.py create mode 100644 tests/integration/src/callbackauthn/b_saml.py create mode 100644 tests/integration/src/callbackauthn/c_oidc.py create mode 100644 tests/integration/src/passwordauthn/__init__.py rename tests/integration/src/{auth => passwordauthn}/a_register.py (78%) rename tests/integration/src/{auth => passwordauthn}/b_license.py (86%) rename tests/integration/src/{auth => passwordauthn}/c_apikey.py (89%) rename tests/integration/src/{auth => passwordauthn}/d_password.py (87%) diff --git a/.github/workflows/integrationci.yaml b/.github/workflows/integrationci.yaml index b8356846e6..a1f5e7a1e7 100644 --- a/.github/workflows/integrationci.yaml +++ b/.github/workflows/integrationci.yaml @@ -15,7 +15,8 @@ jobs: matrix: src: - bootstrap - - auth + - passwordauthn + - callbackauthn - querier - ttl sqlstore-provider: @@ -43,6 +44,20 @@ jobs: python -m pip install poetry==2.1.2 python -m poetry config virtualenvs.in-project true cd tests/integration && poetry install --no-root + - name: webdriver + run: | + wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add - + echo "deb http://dl.google.com/linux/chrome/deb/ stable main" | sudo tee -a /etc/apt/sources.list.d/google-chrome.list + sudo apt-get update -qqy + sudo apt-get -qqy install google-chrome-stable + CHROME_VERSION=$(google-chrome-stable --version) + CHROME_FULL_VERSION=${CHROME_VERSION%%.*} + CHROME_MAJOR_VERSION=${CHROME_FULL_VERSION//[!0-9]} + sudo rm /etc/apt/sources.list.d/google-chrome.list + export CHROMEDRIVER_VERSION=`curl -s https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_${CHROME_MAJOR_VERSION%%.*}` + curl -L -O "https://storage.googleapis.com/chrome-for-testing-public/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip" + unzip chromedriver-linux64.zip && chmod +x chromedriver && sudo mv chromedriver /usr/local/bin + chromedriver -version - name: run run: | cd tests/integration && \ diff --git a/cmd/community/server.go b/cmd/community/server.go index a437b450c1..9d7909f4b7 100644 --- a/cmd/community/server.go +++ b/cmd/community/server.go @@ -3,11 +3,11 @@ package main import ( "context" "log/slog" - "time" "github.com/SigNoz/signoz/cmd" "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore" "github.com/SigNoz/signoz/pkg/analytics" + "github.com/SigNoz/signoz/pkg/authn" "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/licensing" "github.com/SigNoz/signoz/pkg/licensing/nooplicensing" @@ -56,12 +56,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e return err } - jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour) - signoz, err := signoz.New( ctx, config, - jwt, zeus.Config{}, noopzeus.NewProviderFactory(), licensing.Config{}, @@ -76,13 +73,16 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e }, signoz.NewSQLStoreProviderFactories(), signoz.NewTelemetryStoreProviderFactories(), + func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) 
(map[authtypes.AuthNProvider]authn.AuthN, error) { + return signoz.NewAuthNs(ctx, providerSettings, store, licensing) + }, ) if err != nil { logger.ErrorContext(ctx, "failed to create signoz", "error", err) return err } - server, err := app.NewServer(config, signoz, jwt) + server, err := app.NewServer(config, signoz) if err != nil { logger.ErrorContext(ctx, "failed to create server", "error", err) return err diff --git a/cmd/config.go b/cmd/config.go index 206d9b44d4..85524aba62 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -3,7 +3,6 @@ package cmd import ( "context" "log/slog" - "os" "github.com/SigNoz/signoz/pkg/config" "github.com/SigNoz/signoz/pkg/config/envprovider" @@ -30,12 +29,3 @@ func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.Depr return config, nil } - -func NewJWTSecret(ctx context.Context, logger *slog.Logger) string { - jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET") - if len(jwtSecret) == 0 { - logger.ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.") - } - - return jwtSecret -} diff --git a/cmd/enterprise/server.go b/cmd/enterprise/server.go index b513e9a744..87d3354d16 100644 --- a/cmd/enterprise/server.go +++ b/cmd/enterprise/server.go @@ -6,6 +6,8 @@ import ( "time" "github.com/SigNoz/signoz/cmd" + "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn" + "github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn" enterpriselicensing "github.com/SigNoz/signoz/ee/licensing" "github.com/SigNoz/signoz/ee/licensing/httplicensing" enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app" @@ -14,6 +16,7 @@ import ( enterprisezeus "github.com/SigNoz/signoz/ee/zeus" "github.com/SigNoz/signoz/ee/zeus/httpzeus" "github.com/SigNoz/signoz/pkg/analytics" + "github.com/SigNoz/signoz/pkg/authn" "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/licensing" "github.com/SigNoz/signoz/pkg/modules/organization" @@ -54,17 +57,14 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e // add enterprise sqlstore factories to the community sqlstore factories sqlstoreFactories := signoz.NewSQLStoreProviderFactories() - if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil { + if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory())); err != nil { logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err) return err } - jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour) - signoz, err := signoz.New( ctx, config, - jwt, enterprisezeus.Config(), httpzeus.NewProviderFactory(), enterpriselicensing.Config(24*time.Hour, 3), @@ -84,13 +84,34 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e }, sqlstoreFactories, signoz.NewTelemetryStoreProviderFactories(), + func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) { + samlCallbackAuthN, err := samlcallbackauthn.New(ctx, store, 
licensing) + if err != nil { + return nil, err + } + + oidcCallbackAuthN, err := oidccallbackauthn.New(store, licensing, providerSettings) + if err != nil { + return nil, err + } + + authNs, err := signoz.NewAuthNs(ctx, providerSettings, store, licensing) + if err != nil { + return nil, err + } + + authNs[authtypes.AuthNProviderSAML] = samlCallbackAuthN + authNs[authtypes.AuthNProviderOIDC] = oidcCallbackAuthN + + return authNs, nil + }, ) if err != nil { logger.ErrorContext(ctx, "failed to create signoz", "error", err) return err } - server, err := enterpriseapp.NewServer(config, signoz, jwt) + server, err := enterpriseapp.NewServer(config, signoz) if err != nil { logger.ErrorContext(ctx, "failed to create server", "error", err) return err diff --git a/conf/example.yaml b/conf/example.yaml index d22fa37cab..9ace9702b6 100644 --- a/conf/example.yaml +++ b/conf/example.yaml @@ -243,3 +243,28 @@ statsreporter: gateway: # The URL of the gateway's api. url: http://localhost:8080 + +##################### Tokenizer ##################### +tokenizer: + # Specifies the tokenizer provider to use. + provider: jwt + lifetime: + # The duration for which a user can be idle before being required to authenticate. + idle: 168h + # The duration for which a user can remain logged in before being asked to login. + max: 720h + rotation: + # The interval to rotate tokens in. + interval: 30m + # The duration for which the previous token pair remains valid after a token pair is rotated. + duration: 60s + jwt: + # The secret to sign the JWT tokens. + secret: secret + opaque: + gc: + # The interval to perform garbage collection. + interval: 1h + token: + # The maximum number of tokens a user can have. This limits the number of concurrent sessions a user can have. + max_per_user: 5 diff --git a/ee/authn/callbackauthn/oidccallbackauthn/authn.go b/ee/authn/callbackauthn/oidccallbackauthn/authn.go new file mode 100644 index 0000000000..b1a048fbb5 --- /dev/null +++ b/ee/authn/callbackauthn/oidccallbackauthn/authn.go @@ -0,0 +1,191 @@ +package oidccallbackauthn + +import ( + "context" + "net/url" + + "github.com/SigNoz/signoz/pkg/authn" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/http/client" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/coreos/go-oidc/v3/oidc" + "golang.org/x/oauth2" +) + +const ( + redirectPath string = "/api/v1/complete/oidc" +) + +var ( + scopes []string = []string{"email", oidc.ScopeOpenID} +) + +var _ authn.CallbackAuthN = (*AuthN)(nil) + +type AuthN struct { + store authtypes.AuthNStore + licensing licensing.Licensing + httpClient *client.Client +} + +func New(store authtypes.AuthNStore, licensing licensing.Licensing, providerSettings factory.ProviderSettings) (*AuthN, error) { + httpClient, err := client.New(providerSettings.Logger, providerSettings.TracerProvider, providerSettings.MeterProvider) + if err != nil { + return nil, err + } + + return &AuthN{ + store: store, + licensing: licensing, + httpClient: httpClient, + }, nil +} + +func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) { + if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderOIDC { + return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "domain type is not oidc") + } + + _, oauth2Config, err := a.oidcProviderAndoauth2Config(ctx, siteURL, authDomain) + if err 
!= nil { + return "", err + } + + return oauth2Config.AuthCodeURL(authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String()), nil +} + +func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtypes.CallbackIdentity, error) { + if err := query.Get("error"); err != "" { + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: error while authenticating").WithAdditional(query.Get("error_description")) + } + + state, err := authtypes.NewStateFromString(query.Get("state")) + if err != nil { + return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "oidc: invalid state").WithAdditional(err.Error()) + } + + authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID) + if err != nil { + return nil, err + } + + _, err = a.licensing.GetActive(ctx, authDomain.StorableAuthDomain().OrgID) + if err != nil { + return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error()) + } + + oidcProvider, oauth2Config, err := a.oidcProviderAndoauth2Config(ctx, state.URL, authDomain) + if err != nil { + return nil, err + } + + ctx = context.WithValue(ctx, oauth2.HTTPClient, a.httpClient.Client()) + token, err := oauth2Config.Exchange(ctx, query.Get("code")) + if err != nil { + var retrieveError *oauth2.RetrieveError + if errors.As(err, &retrieveError) { + return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "oidc: failed to get token").WithAdditional(retrieveError.ErrorDescription).WithAdditional(string(retrieveError.Body)) + } + + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to get token").WithAdditional(err.Error()) + } + + claims, err := a.claimsFromIDToken(ctx, authDomain, oidcProvider, token) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + return nil, err + } + + if claims == nil && authDomain.AuthDomainConfig().OIDC.GetUserInfo { + claims, err = a.claimsFromUserInfo(ctx, oidcProvider, token) + if err != nil { + return nil, err + } + } + + emailClaim, ok := claims[authDomain.AuthDomainConfig().OIDC.ClaimMapping.Email].(string) + if !ok { + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: missing email in claims") + } + + email, err := valuer.NewEmail(emailClaim) + if err != nil { + return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to parse email").WithAdditional(err.Error()) + } + + if !authDomain.AuthDomainConfig().OIDC.InsecureSkipEmailVerified { + emailVerifiedClaim, ok := claims["email_verified"].(bool) + if !ok { + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: missing email_verified in claims") + } + + if !emailVerifiedClaim { + return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "oidc: email is not verified") + } + } + + return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil +} + +func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) { + if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" { + ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias) + } + + oidcProvider, err := oidc.NewProvider(ctx, authDomain.AuthDomainConfig().OIDC.Issuer) + if err != nil { + return nil, nil, err + } + + return oidcProvider, 
&oauth2.Config{ + ClientID: authDomain.AuthDomainConfig().OIDC.ClientID, + ClientSecret: authDomain.AuthDomainConfig().OIDC.ClientSecret, + Endpoint: oidcProvider.Endpoint(), + Scopes: scopes, + RedirectURL: (&url.URL{ + Scheme: siteURL.Scheme, + Host: siteURL.Host, + Path: redirectPath, + }).String(), + }, nil +} + +func (a *AuthN) claimsFromIDToken(ctx context.Context, authDomain *authtypes.AuthDomain, provider *oidc.Provider, token *oauth2.Token) (map[string]any, error) { + rawIDToken, ok := token.Extra("id_token").(string) + if !ok { + return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, "oidc: no id_token in token response") + } + + verifier := provider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().OIDC.ClientID}) + idToken, err := verifier.Verify(ctx, rawIDToken) + if err != nil { + return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "oidc: failed to verify token").WithAdditional(err.Error()) + } + + var claims map[string]any + if err := idToken.Claims(&claims); err != nil { + return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to decode claims").WithAdditional(err.Error()) + } + + return claims, nil +} + +func (a *AuthN) claimsFromUserInfo(ctx context.Context, provider *oidc.Provider, token *oauth2.Token) (map[string]any, error) { + var claims map[string]any + + userInfo, err := provider.UserInfo(ctx, oauth2.StaticTokenSource(&oauth2.Token{ + AccessToken: token.AccessToken, + TokenType: "Bearer", // The UserInfo endpoint requires a bearer token as per RFC6750 + })) + if err != nil { + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to get user info").WithAdditional(err.Error()) + } + + if err := userInfo.Claims(&claims); err != nil { + return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to decode claims").WithAdditional(err.Error()) + } + + return claims, nil +} diff --git a/ee/authn/callbackauthn/samlcallbackauthn/authn.go b/ee/authn/callbackauthn/samlcallbackauthn/authn.go new file mode 100644 index 0000000000..1fc99d0744 --- /dev/null +++ b/ee/authn/callbackauthn/samlcallbackauthn/authn.go @@ -0,0 +1,155 @@ +package samlcallbackauthn + +import ( + "context" + "crypto/x509" + "encoding/base64" + "encoding/pem" + "net/url" + "strings" + + "github.com/SigNoz/signoz/pkg/authn" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" + saml2 "github.com/russellhaering/gosaml2" + dsig "github.com/russellhaering/goxmldsig" +) + +const ( + redirectPath string = "/api/v1/complete/saml" +) + +var _ authn.CallbackAuthN = (*AuthN)(nil) + +type AuthN struct { + store authtypes.AuthNStore + licensing licensing.Licensing +} + +func New(ctx context.Context, store authtypes.AuthNStore, licensing licensing.Licensing) (*AuthN, error) { + return &AuthN{ + store: store, + licensing: licensing, + }, nil +} + +func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) { + if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderSAML { + return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "saml: domain type is not saml") + } + + sp, err := a.serviceProvider(siteURL, authDomain) + if err != nil { + return "", err + } + + url, err := sp.BuildAuthURL(authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String()) + if err != nil { + 
return "", err + } + + return url, nil +} + +func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*authtypes.CallbackIdentity, error) { + state, err := authtypes.NewStateFromString(formValues.Get("RelayState")) + if err != nil { + return nil, errors.New(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "saml: invalid state").WithAdditional(err.Error()) + } + + authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID) + if err != nil { + return nil, err + } + + _, err = a.licensing.GetActive(ctx, authDomain.StorableAuthDomain().OrgID) + if err != nil { + return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error()) + } + + sp, err := a.serviceProvider(state.URL, authDomain) + if err != nil { + return nil, err + } + + assertionInfo, err := sp.RetrieveAssertionInfo(formValues.Get("SAMLResponse")) + if err != nil { + if errors.As(err, &saml2.ErrVerification{}) { + return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, err.Error()) + } + + if errors.As(err, &saml2.ErrMissingElement{}) { + return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, err.Error()) + } + + return nil, err + } + + if assertionInfo.WarningInfo.InvalidTime { + return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "saml: expired saml response") + } + + email, err := valuer.NewEmail(assertionInfo.NameID) + if err != nil { + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "saml: invalid email").WithAdditional("The nameID assertion is used to retrieve the email address, please check your IDP configuration and try again.") + } + + return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil +} + +func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) { + certStore, err := a.getCertificateStore(authDomain) + if err != nil { + return nil, err + } + + acsURL := &url.URL{Scheme: siteURL.Scheme, Host: siteURL.Host, Path: redirectPath} + + // Note: + // The ServiceProviderIssuer is the client id in case of keycloak. Since we set it to the host here, we need to set the client id == host in keycloak. + // For AWSSSO, this is the value of Application SAML audience. 
+ return &saml2.SAMLServiceProvider{ + IdentityProviderSSOURL: authDomain.AuthDomainConfig().SAML.SamlIdp, + IdentityProviderIssuer: authDomain.AuthDomainConfig().SAML.SamlEntity, + ServiceProviderIssuer: siteURL.Host, + AssertionConsumerServiceURL: acsURL.String(), + SignAuthnRequests: !authDomain.AuthDomainConfig().SAML.InsecureSkipAuthNRequestsSigned, + AllowMissingAttributes: true, + IDPCertificateStore: certStore, + SPKeyStore: dsig.RandomKeyStoreForTest(), + }, nil +} + +func (a *AuthN) getCertificateStore(authDomain *authtypes.AuthDomain) (dsig.X509CertificateStore, error) { + certStore := &dsig.MemoryX509CertificateStore{ + Roots: []*x509.Certificate{}, + } + + var certBytes []byte + if strings.Contains(authDomain.AuthDomainConfig().SAML.SamlCert, "-----BEGIN CERTIFICATE-----") { + block, _ := pem.Decode([]byte(authDomain.AuthDomainConfig().SAML.SamlCert)) + if block == nil { + return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no valid pem cert found") + } + + certBytes = block.Bytes + } else { + certData, err := base64.StdEncoding.DecodeString(authDomain.AuthDomainConfig().SAML.SamlCert) + if err != nil { + return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to read certificate: %s", err.Error()) + } + + certBytes = certData + } + + idpCert, err := x509.ParseCertificate(certBytes) + if err != nil { + return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to prepare saml request, invalid cert: %s", err.Error()) + } + + certStore.Roots = append(certStore.Roots, idpCert) + + return certStore, nil +} diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go index c5fcbf64f4..d3663c9108 100644 --- a/ee/query-service/app/api/api.go +++ b/ee/query-service/app/api/api.go @@ -20,7 +20,6 @@ import ( basemodel "github.com/SigNoz/signoz/pkg/query-service/model" rules "github.com/SigNoz/signoz/pkg/query-service/rules" "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/version" "github.com/gorilla/mux" ) @@ -35,10 +34,7 @@ type APIHandlerOptions struct { Gateway *httputil.ReverseProxy GatewayUrl string // Querier Influx Interval - FluxInterval time.Duration - UseLogsNewSchema bool - UseTraceNewSchema bool - JWT *authtypes.JWT + FluxInterval time.Duration } type APIHandler struct { @@ -93,7 +89,8 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet) // paid plans specific routes - router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.Signoz.Handlers.Session.CreateSessionBySAMLCallback)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/complete/oidc", am.OpenAccess(ah.Signoz.Handlers.Session.CreateSessionByOIDCCallback)).Methods(http.MethodGet) // base overrides router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet) diff --git a/ee/query-service/app/api/auth.go b/ee/query-service/app/api/auth.go deleted file mode 100644 index 3ffa247440..0000000000 --- a/ee/query-service/app/api/auth.go +++ /dev/null @@ -1,107 +0,0 @@ -package api - -import ( - "context" - "encoding/base64" - "fmt" - "net/http" - "net/url" - - "go.uber.org/zap" - - "github.com/SigNoz/signoz/pkg/query-service/constants" - "github.com/SigNoz/signoz/pkg/valuer" -) - -func 
handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) { - ssoError := []byte("Login failed. Please contact your system administrator") - dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError))) - base64.StdEncoding.Encode(dst, ssoError) - - http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther) -} - -// receiveSAML completes a SAML request and gets user logged in -func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) { - // this is the source url that initiated the login request - redirectUri := constants.GetDefaultSiteURL() - ctx := context.Background() - - err := r.ParseForm() - if err != nil { - zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) - handleSsoError(w, r, redirectUri) - return - } - - // the relay state is sent when a login request is submitted to - // Idp. - relayState := r.FormValue("RelayState") - zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState)) - - parsedState, err := url.Parse(relayState) - if err != nil || relayState == "" { - zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) - handleSsoError(w, r, redirectUri) - return - } - - // upgrade redirect url from the relay state for better accuracy - redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login") - - // fetch domain by parsing relay state. - domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState) - if err != nil { - handleSsoError(w, r, redirectUri) - return - } - - orgID, err := valuer.NewUUID(domain.OrgID) - if err != nil { - handleSsoError(w, r, redirectUri) - return - } - - _, err = ah.Signoz.Licensing.GetActive(ctx, orgID) - if err != nil { - zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain") - http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently) - return - } - - sp, err := domain.PrepareSamlRequest(parsedState) - if err != nil { - zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse")) - if err != nil { - zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - if assertionInfo.WarningInfo.InvalidTime { - zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - email := assertionInfo.NameID - if email == "" { - zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String())) - handleSsoError(w, r, redirectUri) - return - } - - nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email) - if err != nil { - zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - http.Redirect(w, r, nextPage, http.StatusSeeOther) -} diff --git a/ee/query-service/app/api/cloudIntegrations.go 
b/ee/query-service/app/api/cloudIntegrations.go index 101646e4ee..6b513fc9c1 100644 --- a/ee/query-service/app/api/cloudIntegrations.go +++ b/ee/query-service/app/api/cloudIntegrations.go @@ -168,38 +168,22 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId func (ah *APIHandler) getOrCreateCloudIntegrationUser( ctx context.Context, orgId string, cloudProvider string, ) (*types.User, *basemodel.ApiError) { - cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider) - email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser) + cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider) + email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName)) - integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err)) - } - - if integrationUserResult != nil { - return &integrationUserResult.User, nil - } - - zap.L().Info( - "cloud integration user not found. Attempting to create the user", - zap.String("cloudProvider", cloudProvider), - ) - - newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId) - if err != nil { - return nil, basemodel.InternalError(fmt.Errorf( - "couldn't create cloud integration user: %w", err, - )) - } - - password := types.MustGenerateFactorPassword(newUser.ID.StringValue()) - - err = ah.Signoz.Modules.User.CreateUser(ctx, newUser, user.WithFactorPassword(password)) + cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId)) if err != nil { return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err)) } - return newUser, nil + password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue()) + + cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password)) + if err != nil { + return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err)) + } + + return cloudIntegrationUser, nil } func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) ( diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index dab2b7f516..c9c6205c38 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -7,8 +7,11 @@ import ( "net" "net/http" _ "net/http/pprof" // http profiler + "slices" "github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore" + "go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux" + "go.opentelemetry.io/otel/propagation" "github.com/gorilla/handlers" @@ -25,7 +28,6 @@ import ( "github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/telemetrystore" - "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/web" "github.com/rs/cors" "github.com/soheilhy/cmux" @@ -50,7 +52,6 @@ import ( type Server struct { config signoz.Config signoz *signoz.SigNoz - jwt *authtypes.JWT ruleManager *baserules.Manager // public http router @@ -67,7 +68,7 @@ type Server struct { } // NewServer creates and initializes Server -func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) (*Server, error) { +func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) { gatewayProxy, err := 
gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix) if err != nil { return nil, err @@ -153,7 +154,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) FluxInterval: config.Querier.FluxInterval, Gateway: gatewayProxy, GatewayUrl: config.Gateway.URL.String(), - JWT: jwt, } apiHandler, err := api.NewAPIHandler(apiOpts, signoz) @@ -164,7 +164,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) s := &Server{ config: config, signoz: signoz, - jwt: jwt, ruleManager: rm, httpHostPort: baseconst.HTTPHostPort, unavailableChannel: make(chan healthcheck.Status), @@ -195,7 +194,17 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h r := baseapp.NewRouter() am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger()) - r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap) + r.Use(otelmux.Middleware( + "apiserver", + otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()), + otelmux.WithTracerProvider(s.signoz.Instrumentation.TracerProvider()), + otelmux.WithPropagators(propagation.NewCompositeTextMapPropagator(propagation.Baggage{}, propagation.TraceContext{})), + otelmux.WithFilter(func(r *http.Request) bool { + return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path) + }), + otelmux.WithPublicEndpoint(), + )) + r.Use(middleware.NewAuthN([]string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Tokenizer, s.signoz.Instrumentation.Logger()).Wrap) r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap) r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(), s.config.APIServer.Timeout.ExcludedRoutes, diff --git a/ee/query-service/constants/constants.go b/ee/query-service/constants/constants.go index b7c6d11fc5..0fb7078b5e 100644 --- a/ee/query-service/constants/constants.go +++ b/ee/query-service/constants/constants.go @@ -4,10 +4,6 @@ import ( "os" ) -const ( - DefaultSiteURL = "https://localhost:8080" -) - var LicenseSignozIo = "https://license.signoz.io/api/v1" var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "") var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "") @@ -27,13 +23,6 @@ func GetOrDefaultEnv(key string, fallback string) string { // constant functions that override env vars -// GetDefaultSiteURL returns default site url, primarily -// used to send saml request and allowing backend to -// handle http redirect -func GetDefaultSiteURL() string { - return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL) -} - const DotMetricsEnabled = "DOT_METRICS_ENABLED" var IsDotMetricsEnabled = false diff --git a/frontend/src/AppRoutes/utils.ts b/frontend/src/AppRoutes/utils.ts index 804740f7a5..17d51bbdcf 100644 --- a/frontend/src/AppRoutes/utils.ts +++ b/frontend/src/AppRoutes/utils.ts @@ -2,14 +2,12 @@ import setLocalStorageApi from 'api/browser/localstorage/set'; import { LOCALSTORAGE } from 'constants/localStorage'; const afterLogin = ( - userId: string, authToken: string, refreshToken: string, interceptorRejected?: boolean, ): void => { setLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN, authToken); setLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN, refreshToken); - setLocalStorageApi(LOCALSTORAGE.USER_ID, userId); setLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN, 'true'); if (!interceptorRejected) { @@ -18,7 +16,6 @@ const afterLogin = ( detail: { accessJWT: 
authToken, refreshJWT: refreshToken, - id: userId, }, }), ); diff --git a/frontend/src/api/common/getQueryStats.ts b/frontend/src/api/common/getQueryStats.ts deleted file mode 100644 index c7e8bd2b4b..0000000000 --- a/frontend/src/api/common/getQueryStats.ts +++ /dev/null @@ -1,62 +0,0 @@ -import getLocalStorageApi from 'api/browser/localstorage/get'; -import { ENVIRONMENT } from 'constants/env'; -import { LOCALSTORAGE } from 'constants/localStorage'; -import { isEmpty } from 'lodash-es'; - -export interface WsDataEvent { - read_rows: number; - read_bytes: number; - elapsed_ms: number; -} -interface GetQueryStatsProps { - queryId: string; - setData: React.Dispatch>; -} - -function getURL(baseURL: string, queryId: string): URL | string { - if (baseURL && !isEmpty(baseURL)) { - return `${baseURL}/ws/query_progress?q=${queryId}`; - } - const url = new URL(`/ws/query_progress?q=${queryId}`, window.location.href); - - if (window.location.protocol === 'http:') { - url.protocol = 'ws'; - } else { - url.protocol = 'wss'; - } - - return url; -} - -export function getQueryStats(props: GetQueryStatsProps): void { - const { queryId, setData } = props; - - const token = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) || ''; - - // https://github.com/whatwg/websockets/issues/20 reason for not using the relative URLs - const url = getURL(ENVIRONMENT.wsURL, queryId); - - const socket = new WebSocket(url, token); - - socket.addEventListener('message', (event) => { - try { - const parsedData = JSON.parse(event?.data); - setData(parsedData); - } catch { - setData(event?.data); - } - }); - - socket.addEventListener('error', (event) => { - console.error(event); - }); - - socket.addEventListener('close', (event) => { - // 1000 is a normal closure status code - if (event.code !== 1000) { - console.error('WebSocket closed with error:', event); - } else { - console.error('WebSocket closed normally.'); - } - }); -} diff --git a/frontend/src/api/index.ts b/frontend/src/api/index.ts index 9e78b90221..51964d9db2 100644 --- a/frontend/src/api/index.ts +++ b/frontend/src/api/index.ts @@ -2,7 +2,7 @@ /* eslint-disable no-param-reassign */ /* eslint-disable @typescript-eslint/no-explicit-any */ import getLocalStorageApi from 'api/browser/localstorage/get'; -import loginApi from 'api/v1/login/login'; +import post from 'api/v2/sessions/rotate/post'; import afterLogin from 'AppRoutes/utils'; import axios, { AxiosError, @@ -12,6 +12,7 @@ import axios, { import { ENVIRONMENT } from 'constants/env'; import { Events } from 'constants/events'; import { LOCALSTORAGE } from 'constants/localStorage'; +import { QueryClient } from 'react-query'; import { eventEmitter } from 'utils/getEventEmitter'; import apiV1, { @@ -26,6 +27,14 @@ import apiV1, { import { Logout } from './utils'; const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + refetchOnWindowFocus: false, + retry: false, + }, + }, +}); const interceptorsResponse = ( value: AxiosResponse, @@ -74,19 +83,24 @@ const interceptorRejected = async ( try { if (axios.isAxiosError(value) && value.response) { const { response } = value; - // reject the refresh token error - if (response.status === 401 && response.config.url !== '/login') { + + if ( + response.status === 401 && + // if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry! 
+ response.config.url !== '/sessions/rotate' && + !( + response.config.url === '/sessions' && response.config.method === 'delete' + ) + ) { try { - const response = await loginApi({ - refreshToken: getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN) || '', + const accessToken = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN); + const refreshToken = getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN); + const response = await queryClient.fetchQuery({ + queryFn: () => post({ refreshToken: refreshToken || '' }), + queryKey: ['/api/v2/sessions/rotate', accessToken, refreshToken], }); - afterLogin( - response.data.userId, - response.data.accessJwt, - response.data.refreshJwt, - true, - ); + afterLogin(response.data.accessToken, response.data.refreshToken, true); try { const reResponse = await axios( @@ -95,7 +109,7 @@ const interceptorRejected = async ( method: value.config.method, headers: { ...value.config.headers, - Authorization: `Bearer ${response.data.accessJwt}`, + Authorization: `Bearer ${response.data.accessToken}`, }, data: { ...JSON.parse(value.config.data || '{}'), @@ -113,8 +127,8 @@ const interceptorRejected = async ( Logout(); } } - // when refresh token is expired - if (response.status === 401 && response.config.url === '/login') { + + if (response.status === 401 && response.config.url === '/sessions/rotate') { Logout(); } } diff --git a/frontend/src/api/utils.ts b/frontend/src/api/utils.ts index 731b8e859a..0744e7f25e 100644 --- a/frontend/src/api/utils.ts +++ b/frontend/src/api/utils.ts @@ -3,7 +3,15 @@ import { LOCALSTORAGE } from 'constants/localStorage'; import ROUTES from 'constants/routes'; import history from 'lib/history'; -export const Logout = (): void => { +import deleteSession from './v2/sessions/delete'; + +export const Logout = async (): Promise => { + try { + await deleteSession(); + } catch (error) { + console.error(error); + } + deleteLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN); deleteLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN); deleteLocalStorageKey(LOCALSTORAGE.IS_IDENTIFIED_USER); @@ -14,7 +22,6 @@ export const Logout = (): void => { deleteLocalStorageKey(LOCALSTORAGE.USER_ID); deleteLocalStorageKey(LOCALSTORAGE.QUICK_FILTERS_SETTINGS_ANNOUNCEMENT); window.dispatchEvent(new CustomEvent('LOGOUT')); - history.push(ROUTES.LOGIN); }; diff --git a/frontend/src/api/v1/domains/delete.ts b/frontend/src/api/v1/domains/id/delete.ts similarity index 62% rename from frontend/src/api/v1/domains/delete.ts rename to frontend/src/api/v1/domains/id/delete.ts index 0c1f452248..2a432f4506 100644 --- a/frontend/src/api/v1/domains/delete.ts +++ b/frontend/src/api/v1/domains/id/delete.ts @@ -2,11 +2,10 @@ import axios from 'api'; import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; import { AxiosError } from 'axios'; import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; -import { PayloadProps, Props } from 'types/api/SAML/deleteDomain'; -const deleteDomain = async (props: Props): Promise> => { +const deleteDomain = async (id: string): Promise> => { try { - const response = await axios.delete(`/domains/${props.id}`); + const response = await axios.delete(`/domains/${id}`); return { httpStatusCode: response.status, diff --git a/frontend/src/api/v1/domains/id/put.ts b/frontend/src/api/v1/domains/id/put.ts new file mode 100644 index 0000000000..ea11b338a9 --- /dev/null +++ b/frontend/src/api/v1/domains/id/put.ts @@ -0,0 +1,25 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { 
ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api'; +import { UpdatableAuthDomain } from 'types/api/v1/domains/put'; + +const put = async ( + props: UpdatableAuthDomain, +): Promise> => { + try { + const response = await axios.put>( + `/domains/${props.id}`, + { config: props.config }, + ); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default put; diff --git a/frontend/src/api/v1/domains/list.ts b/frontend/src/api/v1/domains/list.ts index fc056873a0..e9fb369190 100644 --- a/frontend/src/api/v1/domains/list.ts +++ b/frontend/src/api/v1/domains/list.ts @@ -1,12 +1,16 @@ import axios from 'api'; import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; import { AxiosError } from 'axios'; -import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; -import { AuthDomain, PayloadProps } from 'types/api/SAML/listDomain'; +import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api'; +import { GettableAuthDomain } from 'types/api/v1/domains/list'; -const listAllDomain = async (): Promise> => { +const listAllDomain = async (): Promise< + SuccessResponseV2 +> => { try { - const response = await axios.get(`/domains`); + const response = await axios.get>( + `/domains`, + ); return { httpStatusCode: response.status, diff --git a/frontend/src/api/v1/domains/post.ts b/frontend/src/api/v1/domains/post.ts new file mode 100644 index 0000000000..dc8538ba1f --- /dev/null +++ b/frontend/src/api/v1/domains/post.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api'; +import { GettableAuthDomain } from 'types/api/v1/domains/list'; +import { PostableAuthDomain } from 'types/api/v1/domains/post'; + +const post = async ( + props: PostableAuthDomain, +): Promise> => { + try { + const response = await axios.post>( + `/domains`, + props, + ); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default post; diff --git a/frontend/src/api/v1/domains/update.ts b/frontend/src/api/v1/domains/update.ts deleted file mode 100644 index 701555a39d..0000000000 --- a/frontend/src/api/v1/domains/update.ts +++ /dev/null @@ -1,23 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; -import { AxiosError } from 'axios'; -import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; -import { AuthDomain } from 'types/api/SAML/listDomain'; -import { PayloadProps, Props } from 'types/api/SAML/updateDomain'; - -const updateDomain = async ( - props: Props, -): Promise> => { - try { - const response = await axios.put(`/domains/${props.id}`, props); - - return { - httpStatusCode: response.status, - data: response.data.data, - }; - } catch (error) { - ErrorResponseHandlerV2(error as AxiosError); - } -}; - -export default updateDomain; diff --git a/frontend/src/api/v1/invite/id/accept.ts b/frontend/src/api/v1/invite/id/accept.ts index 3c466fbbd2..68d17a080d 100644 --- a/frontend/src/api/v1/invite/id/accept.ts +++ b/frontend/src/api/v1/invite/id/accept.ts @@ -2,15 +2,12 @@ import axios from 'api'; import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; import { AxiosError } from 'axios'; import { ErrorV2Resp, SuccessResponseV2 } 
from 'types/api'; -import { - LoginPrecheckResponse, - PayloadProps, - Props, -} from 'types/api/user/accept'; +import { PayloadProps, Props } from 'types/api/user/accept'; +import { UserResponse } from 'types/api/user/getUser'; const accept = async ( props: Props, -): Promise> => { +): Promise> => { try { const response = await axios.post(`/invite/accept`, props); return { diff --git a/frontend/src/api/v1/login/loginPrecheck.ts b/frontend/src/api/v1/login/loginPrecheck.ts deleted file mode 100644 index eac00182cb..0000000000 --- a/frontend/src/api/v1/login/loginPrecheck.ts +++ /dev/null @@ -1,28 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { Props, Signup as PayloadProps } from 'types/api/user/loginPrecheck'; - -const loginPrecheck = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.get( - `/loginPrecheck?email=${encodeURIComponent( - props.email, - )}&ref=${encodeURIComponent(window.location.href)}`, - ); - - return { - statusCode: 200, - error: null, - message: response.statusText, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default loginPrecheck; diff --git a/frontend/src/api/v1/register/post.ts b/frontend/src/api/v1/register/post.ts new file mode 100644 index 0000000000..262c8a73c4 --- /dev/null +++ b/frontend/src/api/v1/register/post.ts @@ -0,0 +1,27 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api'; +import { Props } from 'types/api/user/signup'; +import { SignupResponse } from 'types/api/v1/register/post'; + +const post = async ( + props: Props, +): Promise> => { + try { + const response = await axios.post>( + `/register`, + { + ...props, + }, + ); + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default post; diff --git a/frontend/src/api/v1/register/signup.ts b/frontend/src/api/v1/register/signup.ts deleted file mode 100644 index 5838a8e7ad..0000000000 --- a/frontend/src/api/v1/register/signup.ts +++ /dev/null @@ -1,22 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; -import { AxiosError } from 'axios'; -import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; -import { PayloadProps, Signup } from 'types/api/user/loginPrecheck'; -import { Props } from 'types/api/user/signup'; - -const signup = async (props: Props): Promise> => { - try { - const response = await axios.post(`/register`, { - ...props, - }); - return { - httpStatusCode: response.status, - data: response.data.data, - }; - } catch (error) { - ErrorResponseHandlerV2(error as AxiosError); - } -}; - -export default signup; diff --git a/frontend/src/api/v1/login/login.ts b/frontend/src/api/v1/user/me/get.ts similarity index 57% rename from frontend/src/api/v1/login/login.ts rename to frontend/src/api/v1/user/me/get.ts index 46f17ad711..f5dd8d9fa1 100644 --- a/frontend/src/api/v1/login/login.ts +++ b/frontend/src/api/v1/user/me/get.ts @@ -2,15 +2,11 @@ import axios from 'api'; import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; import { AxiosError } from 'axios'; import { ErrorV2Resp, 
SuccessResponseV2 } from 'types/api'; -import { PayloadProps, Props, UserLoginResponse } from 'types/api/user/login'; +import { PayloadProps, UserResponse } from 'types/api/user/getUser'; -const login = async ( - props: Props, -): Promise> => { +const get = async (): Promise> => { try { - const response = await axios.post(`/login`, { - ...props, - }); + const response = await axios.get(`/user/me`); return { httpStatusCode: response.status, @@ -21,4 +17,4 @@ const login = async ( } }; -export default login; +export default get; diff --git a/frontend/src/api/v1/domains/create.ts b/frontend/src/api/v1/version/get.ts similarity index 50% rename from frontend/src/api/v1/domains/create.ts rename to frontend/src/api/v1/version/get.ts index 18fbc21b2b..d60b32ff36 100644 --- a/frontend/src/api/v1/domains/create.ts +++ b/frontend/src/api/v1/version/get.ts @@ -2,20 +2,19 @@ import axios from 'api'; import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; import { AxiosError } from 'axios'; import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; -import { AuthDomain } from 'types/api/SAML/listDomain'; -import { PayloadProps, Props } from 'types/api/SAML/postDomain'; +import { Info } from 'types/api/v1/version/get'; -const create = async (props: Props): Promise> => { +const get = async (): Promise> => { try { - const response = await axios.post(`/domains`, props); + const response = await axios.get(`/version`); return { httpStatusCode: response.status, - data: response.data.data, + data: response.data, }; } catch (error) { ErrorResponseHandlerV2(error as AxiosError); } }; -export default create; +export default get; diff --git a/frontend/src/api/v1/version/getVersion.ts b/frontend/src/api/v1/version/getVersion.ts deleted file mode 100644 index 0f3e7f8e83..0000000000 --- a/frontend/src/api/v1/version/getVersion.ts +++ /dev/null @@ -1,25 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { getVersion } from 'constants/api'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps } from 'types/api/user/getVersion'; - -const getVersionApi = async (): Promise< - SuccessResponse | ErrorResponse -> => { - try { - const response = await axios.get(`/${getVersion}`); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default getVersionApi; diff --git a/frontend/src/api/v2/sessions/context/get.ts b/frontend/src/api/v2/sessions/context/get.ts new file mode 100644 index 0000000000..2f08c74623 --- /dev/null +++ b/frontend/src/api/v2/sessions/context/get.ts @@ -0,0 +1,27 @@ +import { ApiV2Instance as axios } from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api'; +import { Props, SessionsContext } from 'types/api/v2/sessions/context/get'; + +const get = async ( + props: Props, +): Promise> => { + try { + const response = await axios.get>( + '/sessions/context', + { + params: props, + }, + ); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default get; diff --git a/frontend/src/api/v2/sessions/delete.ts b/frontend/src/api/v2/sessions/delete.ts new file mode 100644 index 
0000000000..339ea0639e --- /dev/null +++ b/frontend/src/api/v2/sessions/delete.ts @@ -0,0 +1,19 @@ +import { ApiV2Instance as axios } from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api'; + +const deleteSession = async (): Promise> => { + try { + const response = await axios.delete>('/sessions'); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default deleteSession; diff --git a/frontend/src/api/v2/sessions/email_password/post.ts b/frontend/src/api/v2/sessions/email_password/post.ts new file mode 100644 index 0000000000..643821a0a0 --- /dev/null +++ b/frontend/src/api/v2/sessions/email_password/post.ts @@ -0,0 +1,23 @@ +import { ApiV2Instance as axios } from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api'; +import { Props, Token } from 'types/api/v2/sessions/email_password/post'; + +const post = async (props: Props): Promise> => { + try { + const response = await axios.post>( + '/sessions/email_password', + props, + ); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default post; diff --git a/frontend/src/api/v2/sessions/rotate/post.ts b/frontend/src/api/v2/sessions/rotate/post.ts new file mode 100644 index 0000000000..7738aba0a8 --- /dev/null +++ b/frontend/src/api/v2/sessions/rotate/post.ts @@ -0,0 +1,23 @@ +import { ApiV2Instance as axios } from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api'; +import { Props, Token } from 'types/api/v2/sessions/rotate/post'; + +const post = async (props: Props): Promise> => { + try { + const response = await axios.post>( + '/sessions/rotate', + props, + ); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default post; diff --git a/frontend/src/constants/api.ts b/frontend/src/constants/api.ts index abb7d42ad6..fcf32e1292 100644 --- a/frontend/src/constants/api.ts +++ b/frontend/src/constants/api.ts @@ -1,5 +1,3 @@ const SOMETHING_WENT_WRONG = 'Something went wrong'; -const getVersion = 'version'; - -export { getVersion, SOMETHING_WENT_WRONG }; +export { SOMETHING_WENT_WRONG }; diff --git a/frontend/src/container/ApiMonitoring/__tests__/EndPointDetails.test.tsx b/frontend/src/container/ApiMonitoring/__tests__/EndPointDetails.test.tsx index 33bf79814b..1787abd963 100644 --- a/frontend/src/container/ApiMonitoring/__tests__/EndPointDetails.test.tsx +++ b/frontend/src/container/ApiMonitoring/__tests__/EndPointDetails.test.tsx @@ -22,6 +22,7 @@ import EndPointDetails from '../Explorer/Domains/DomainDetails/EndPointDetails'; // Mock dependencies jest.mock('react-query', () => ({ + ...jest.requireActual('react-query'), useQueries: jest.fn(), })); diff --git a/frontend/src/container/ApiMonitoring/__tests__/TopErrors.test.tsx b/frontend/src/container/ApiMonitoring/__tests__/TopErrors.test.tsx index 0d8732fbd8..5150cd319f 100644 --- 
a/frontend/src/container/ApiMonitoring/__tests__/TopErrors.test.tsx +++ b/frontend/src/container/ApiMonitoring/__tests__/TopErrors.test.tsx @@ -37,6 +37,7 @@ jest.mock( // Mock dependencies jest.mock('react-query', () => ({ + ...jest.requireActual('react-query'), useQueries: jest.fn(), })); diff --git a/frontend/src/container/AppLayout/index.tsx b/frontend/src/container/AppLayout/index.tsx index bdb2ebbcf2..805ba72a90 100644 --- a/frontend/src/container/AppLayout/index.tsx +++ b/frontend/src/container/AppLayout/index.tsx @@ -12,8 +12,8 @@ import getChangelogByVersion from 'api/changelog/getChangelogByVersion'; import logEvent from 'api/common/logEvent'; import manageCreditCardApi from 'api/v1/portal/create'; import updateUserPreference from 'api/v1/user/preferences/name/update'; +import getUserVersion from 'api/v1/version/get'; import getUserLatestVersion from 'api/v1/version/getLatestVersion'; -import getUserVersion from 'api/v1/version/getVersion'; import { AxiosError } from 'axios'; import cx from 'classnames'; import ChangelogModal from 'components/ChangelogModal/ChangelogModal'; @@ -317,14 +317,14 @@ function AppLayout(props: AppLayoutProps): JSX.Element { getUserVersionResponse.isFetched && getUserVersionResponse.isSuccess && getUserVersionResponse.data && - getUserVersionResponse.data.payload + getUserVersionResponse.data.data ) { dispatch({ type: UPDATE_CURRENT_VERSION, payload: { - currentVersion: getUserVersionResponse.data.payload.version, - ee: getUserVersionResponse.data.payload.ee, - setupCompleted: getUserVersionResponse.data.payload.setupCompleted, + currentVersion: getUserVersionResponse.data.data.version, + ee: getUserVersionResponse.data.data.ee, + setupCompleted: getUserVersionResponse.data.data.setupCompleted, }, }); } diff --git a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents/__tests__/EntityEvents.test.tsx b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents/__tests__/EntityEvents.test.tsx index bda94ab5d7..797eb7acff 100644 --- a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents/__tests__/EntityEvents.test.tsx +++ b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents/__tests__/EntityEvents.test.tsx @@ -23,6 +23,7 @@ jest.mock('container/TopNav/DateTimeSelectionV2', () => ({ const mockUseQuery = jest.fn(); jest.mock('react-query', () => ({ + ...jest.requireActual('react-query'), useQuery: (queryKey: any, queryFn: any, options: any): any => mockUseQuery(queryKey, queryFn, options), })); diff --git a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics/__tests__/EntityMetrics.test.tsx b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics/__tests__/EntityMetrics.test.tsx index ad488460e9..21ed2ecb55 100644 --- a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics/__tests__/EntityMetrics.test.tsx +++ b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics/__tests__/EntityMetrics.test.tsx @@ -52,6 +52,7 @@ jest.mock('container/InfraMonitoringK8s/commonUtils', () => ({ const mockUseQueries = jest.fn(); const mockUseQuery = jest.fn(); jest.mock('react-query', () => ({ + ...jest.requireActual('react-query'), useQueries: (queryConfigs: any[]): any[] => mockUseQueries(queryConfigs), useQuery: (config: any): any => mockUseQuery(config), })); diff --git a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityTraces/__tests__/EntityTraces.test.tsx 
b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityTraces/__tests__/EntityTraces.test.tsx index 40977f9ea2..98f71ce427 100644 --- a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityTraces/__tests__/EntityTraces.test.tsx +++ b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityTraces/__tests__/EntityTraces.test.tsx @@ -22,6 +22,7 @@ jest.mock('container/TopNav/DateTimeSelectionV2', () => ({ const mockUseQuery = jest.fn(); jest.mock('react-query', () => ({ + ...jest.requireActual('react-query'), useQuery: (queryKey: any, queryFn: any, options: any): any => mockUseQuery(queryKey, queryFn, options), })); diff --git a/frontend/src/container/Login/__tests__/Login.test.tsx b/frontend/src/container/Login/__tests__/Login.test.tsx index bfa5a17440..3fda443371 100644 --- a/frontend/src/container/Login/__tests__/Login.test.tsx +++ b/frontend/src/container/Login/__tests__/Login.test.tsx @@ -1,118 +1,830 @@ -import Login from 'container/Login'; -import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils'; +/* eslint-disable sonarjs/no-identical-functions */ +import ROUTES from 'constants/routes'; +import history from 'lib/history'; +import { rest, server } from 'mocks-server/server'; +import { render, screen, userEvent, waitFor } from 'tests/test-utils'; +import { ErrorV2 } from 'types/api'; +import { Info } from 'types/api/v1/version/get'; +import { SessionsContext } from 'types/api/v2/sessions/context/get'; +import { Token } from 'types/api/v2/sessions/email_password/post'; -const errorNotification = jest.fn(); -jest.mock('hooks/useNotifications', () => ({ +import Login from '../index'; + +const VERSION_ENDPOINT = '*/api/v1/version'; +const SESSIONS_CONTEXT_ENDPOINT = '*/api/v2/sessions/context'; +const CALLBACK_AUTHN_ORG = 'callback_authn_org'; +const CALLBACK_AUTHN_URL = 'https://sso.example.com/auth'; +const PASSWORD_AUTHN_ORG = 'password_authn_org'; +const PASSWORD_AUTHN_EMAIL = 'jest.test@signoz.io'; + +jest.mock('lib/history', () => ({ __esModule: true, - useNotifications: jest.fn(() => ({ - notifications: { - error: errorNotification, + default: { + push: jest.fn(), + location: { + search: '', }, - })), + }, })); -describe('Login Flow', () => { - test('Login form is rendered correctly', async () => { - render(); +const mockHistoryPush = history.push as jest.MockedFunction< + typeof history.push +>; - // Check for the main description - expect( - screen.getByText( - 'Sign in to monitor, trace, and troubleshoot your applications effortlessly.', - ), - ).toBeInTheDocument(); +// Mock data +const mockVersionSetupCompleted: Info = { + setupCompleted: true, + ee: 'Y', + version: '0.25.0', +}; - // Email input - const emailInput = screen.getByTestId('email'); - expect(emailInput).toBeInTheDocument(); - expect(emailInput).toHaveAttribute('type', 'email'); +const mockVersionSetupIncomplete: Info = { + setupCompleted: false, + ee: 'Y', + version: '0.25.0', +}; - // Next button - const nextButton = screen.getByRole('button', { name: /next/i }); - expect(nextButton).toBeInTheDocument(); +const mockSingleOrgPasswordAuth: SessionsContext = { + exists: true, + orgs: [ + { + id: 'org-1', + name: 'Test Organization', + authNSupport: { + password: [{ provider: 'email_password' }], + callback: [], + }, + }, + ], +}; - // No account prompt (default: canSelfRegister is false) - expect( - screen.getByText( - "Don't have an account? 
Contact your admin to send you an invite link.", - ), - ).toBeInTheDocument(); - }); +const mockSingleOrgCallbackAuth: SessionsContext = { + exists: true, + orgs: [ + { + id: 'org-1', + name: 'Test Organization', + authNSupport: { + password: [], + callback: [{ provider: 'google', url: CALLBACK_AUTHN_URL }], + }, + }, + ], +}; - test('Display error if email is not provided', async () => { - render(); +const mockMultiOrgMixedAuth: SessionsContext = { + exists: true, + orgs: [ + { + id: 'org-1', + name: PASSWORD_AUTHN_ORG, + authNSupport: { + password: [{ provider: 'email_password' }], + callback: [], + }, + }, + { + id: 'org-2', + name: CALLBACK_AUTHN_ORG, + authNSupport: { + password: [], + callback: [{ provider: 'google', url: CALLBACK_AUTHN_URL }], + }, + }, + ], +}; - const nextButton = screen.getByRole('button', { name: /next/i }); - fireEvent.click(nextButton); +const mockOrgWithWarning: SessionsContext = { + exists: true, + orgs: [ + { + id: 'org-1', + name: 'Warning Organization', + authNSupport: { + password: [{ provider: 'email_password' }], + callback: [], + }, + warning: { + code: 'ORG_WARNING', + message: 'Organization has limited access', + url: 'https://example.com/warning', + errors: [{ message: 'Contact admin for full access' }], + } as ErrorV2, + }, + ], +}; - await waitFor(() => - expect(errorNotification).toHaveBeenCalledWith({ - message: 'Please enter a valid email address', - }), - ); - }); +const mockEmailPasswordResponse: Token = { + accessToken: 'mock-access-token', + refreshToken: 'mock-refresh-token', +}; - test('Display error if invalid email is provided and next clicked', async () => { - render(); +describe('Login Component', () => { + beforeEach(() => { + jest.clearAllMocks(); - const emailInput = screen.getByTestId('email'); - fireEvent.change(emailInput, { - target: { value: 'failEmail@signoz.io' }, - }); - - const nextButton = screen.getByRole('button', { name: /next/i }); - fireEvent.click(nextButton); - - await waitFor(() => - expect(errorNotification).toHaveBeenCalledWith({ - message: - 'Invalid configuration detected, please contact your administrator', - }), - ); - }); - - test('providing shaheer@signoz.io as email and pressing next, should make the Login with SSO button visible', async () => { - render(); - act(() => { - fireEvent.change(screen.getByTestId('email'), { - target: { value: 'shaheer@signoz.io' }, - }); - fireEvent.click(screen.getByTestId('initiate_login')); - }); - - await waitFor(() => { - expect(screen.getByText(/login with sso/i)).toBeInTheDocument(); - }); - }); - - test('Display email, password, forgot password if password=Y', () => { - render(); - - const emailInput = screen.getByTestId('email'); - expect(emailInput).toBeInTheDocument(); - - const passwordInput = screen.getByTestId('password'); - expect(passwordInput).toBeInTheDocument(); - - const forgotPasswordLink = screen.getByText('Forgot password?'); - expect(forgotPasswordLink).toBeInTheDocument(); - }); - - test('Display tooltip with correct message if forgot password is hovered while password=Y', async () => { - render(); - const forgotPasswordLink = screen.getByText('Forgot password?'); - - act(() => { - fireEvent.mouseOver(forgotPasswordLink); - }); - - await waitFor(() => { - // Tooltip text is static in the new UI - expect( - screen.getByText( - 'Ask your admin to reset your password and send you a new invite link', + server.use( + rest.get(VERSION_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ data: mockVersionSetupCompleted, status: 'success' }), ), 
+ ), + ); + }); + + afterEach(() => { + server.resetHandlers(); + }); + + describe('Initial Render', () => { + it('renders login form with email input and next button', () => { + const { getByTestId, getByPlaceholderText } = render(); + + expect( + screen.getByText(/sign in to monitor, trace, and troubleshoot/i), ).toBeInTheDocument(); + expect(getByTestId('email')).toBeInTheDocument(); + expect(getByTestId('initiate_login')).toBeInTheDocument(); + expect(getByPlaceholderText('name@yourcompany.com')).toBeInTheDocument(); + }); + + it('shows loading state when version data is being fetched', () => { + server.use( + rest.get(VERSION_ENDPOINT, (_, res, ctx) => + res( + ctx.delay(100), + ctx.status(200), + ctx.json({ data: mockVersionSetupCompleted, status: 'success' }), + ), + ), + ); + + const { getByTestId } = render(); + + expect(getByTestId('initiate_login')).toBeDisabled(); + }); + }); + + describe('Setup Check', () => { + it('redirects to signup when setup is not completed', async () => { + server.use( + rest.get(VERSION_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ data: mockVersionSetupIncomplete, status: 'success' }), + ), + ), + ); + + render(); + + await waitFor(() => { + expect(mockHistoryPush).toHaveBeenCalledWith(ROUTES.SIGN_UP); + }); + }); + + it('stays on login page when setup is completed', async () => { + render(); + + await waitFor(() => { + expect(mockHistoryPush).not.toHaveBeenCalled(); + }); + }); + + it('handles version API error gracefully', async () => { + server.use( + rest.get(VERSION_ENDPOINT, (req, res, ctx) => + res(ctx.status(500), ctx.json({ error: 'Server error' })), + ), + ); + + render(); + + await waitFor(() => { + expect(mockHistoryPush).not.toHaveBeenCalled(); + }); + }); + }); + + describe('Session Context Fetching', () => { + it('fetches session context on next button click and enables password', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockSingleOrgPasswordAuth }), + ), + ), + ); + + const { getByTestId } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(getByTestId('password')).toBeInTheDocument(); + }); + }); + + it('handles session context API errors', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(500), + ctx.json({ + error: { + code: 'internal_server', + message: 'couldnt fetch the sessions context', + url: '', + }, + }), + ), + ), + ); + + const { getByTestId, getByText } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(getByText('couldnt fetch the sessions context')).toBeInTheDocument(); + }); + }); + + it('auto-selects organization when only one exists', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (req, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockSingleOrgPasswordAuth }), + ), + ), + ); + + const { getByTestId } = render(); + + const emailInput = getByTestId('email'); + 
const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + // Should show password field directly (no org selection needed) + expect(getByTestId('password')).toBeInTheDocument(); + expect(screen.queryByText(/organization name/i)).not.toBeInTheDocument(); + }); + }); + }); + + describe('Organization Selection', () => { + it('shows organization dropdown when multiple orgs exist', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockMultiOrgMixedAuth }), + ), + ), + ); + + const { getByTestId, getByText } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(getByText('Organization Name')).toBeInTheDocument(); + expect(screen.getByRole('combobox')).toBeInTheDocument(); + }); + + // Click on the dropdown to reveal the options + await user.click(screen.getByRole('combobox')); + + await waitFor(() => { + expect(screen.getByText(PASSWORD_AUTHN_ORG)).toBeInTheDocument(); + expect(screen.getByText(CALLBACK_AUTHN_ORG)).toBeInTheDocument(); + }); + }); + + it('updates selected organization on dropdown change', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (req, res, ctx) => + res(ctx.status(200), ctx.json({ data: mockMultiOrgMixedAuth })), + ), + ); + + render(); + + const emailInput = screen.getByTestId('email'); + const nextButton = screen.getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(screen.getByRole('combobox')).toBeInTheDocument(); + }); + + // Select CALLBACK_AUTHN_ORG + await user.click(screen.getByRole('combobox')); + await user.click(screen.getByText(CALLBACK_AUTHN_ORG)); + + await waitFor(() => { + expect( + screen.getByRole('button', { name: /login with callback/i }), + ).toBeInTheDocument(); + }); + }); + }); + + describe('Password Authentication', () => { + it('shows password field when password auth is supported', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockSingleOrgPasswordAuth }), + ), + ), + ); + + const { getByTestId, getByText } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(getByTestId('password')).toBeInTheDocument(); + expect(getByText(/forgot password/i)).toBeInTheDocument(); + expect(getByTestId('password_authn_submit')).toBeInTheDocument(); + }); + }); + + it('enables password auth when URL parameter password=Y', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockSingleOrgCallbackAuth }), + ), + ), + ); + + const { getByTestId } = render(, undefined, { + initialRoute: '/login?password=Y', + }); + + const emailInput = getByTestId('email'); + const 
nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + // Should show password field even for SSO org due to password=Y override + expect(getByTestId('password')).toBeInTheDocument(); + }); + }); + }); + + describe('Callback Authentication', () => { + it('shows callback login button when callback auth is supported', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockSingleOrgCallbackAuth }), + ), + ), + ); + + const { getByTestId, queryByTestId } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(getByTestId('callback_authn_submit')).toBeInTheDocument(); + expect(queryByTestId('password')).not.toBeInTheDocument(); + }); + }); + + it('redirects to callback URL on button click', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + // Mock window.location.href + const mockLocation = { + href: 'http://localhost/', + }; + Object.defineProperty(window, 'location', { + value: mockLocation, + writable: true, + }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockSingleOrgCallbackAuth }), + ), + ), + ); + + const { getByTestId, queryByTestId } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(getByTestId('callback_authn_submit')).toBeInTheDocument(); + expect(queryByTestId('password')).not.toBeInTheDocument(); + }); + + const callbackButton = getByTestId('callback_authn_submit'); + await user.click(callbackButton); + + // Check that window.location.href was set to the callback URL + await waitFor(() => { + expect(window.location.href).toBe(CALLBACK_AUTHN_URL); + }); + }); + }); + + describe('Password Authentication Execution', () => { + it('calls email/password API with correct parameters', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockSingleOrgPasswordAuth }), + ), + ), + rest.post('*/api/v2/sessions/email_password', async (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockEmailPasswordResponse }), + ), + ), + ); + + const { getByTestId } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(getByTestId('password')).toBeInTheDocument(); + }); + + const passwordInput = getByTestId('password'); + const loginButton = getByTestId('password_authn_submit'); + + await user.type(passwordInput, 'testpassword'); + await user.click(loginButton); + + // do not test for the request paramters here. 
Reference: https://mswjs.io/docs/best-practices/avoid-request-assertions + // rather test for the effects of the request + await waitFor(() => { + expect(localStorage.getItem('AUTH_TOKEN')).toBe('mock-access-token'); + }); + }); + + it('shows error modal on authentication failure', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockSingleOrgPasswordAuth }), + ), + ), + rest.post('*/api/v2/sessions/email_password', (_, res, ctx) => + res( + ctx.status(401), + ctx.json({ + error: { + code: 'invalid_input', + message: 'invalid password', + url: '', + }, + }), + ), + ), + ); + + const { getByTestId, getByText } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(getByTestId('password')).toBeInTheDocument(); + }); + + const passwordInput = getByTestId('password'); + const loginButton = getByTestId('password_authn_submit'); + + await user.type(passwordInput, 'wrongpassword'); + await user.click(loginButton); + + await waitFor(() => { + expect(getByText('invalid password')).toBeInTheDocument(); + }); + }); + }); + + describe('URL Parameter Handling', () => { + it('calls afterLogin when accessToken and refreshToken are in URL', async () => { + render(, undefined, { + initialRoute: '/login?accessToken=test-token&refreshToken=test-refresh', + }); + + await waitFor(() => { + expect(localStorage.getItem('AUTH_TOKEN')).toBe('test-token'); + expect(localStorage.getItem('REFRESH_AUTH_TOKEN')).toBe('test-refresh'); + }); + }); + + it('shows error modal when callbackauthnerr parameter exists', async () => { + const { getByText } = render(, undefined, { + initialRoute: + '/login?callbackauthnerr=true&code=AUTH_ERROR&message=Authentication failed&url=https://example.com/error&errors=[{"message":"Invalid token"}]', + }); + + await waitFor(() => { + expect(getByText('AUTH_ERROR')).toBeInTheDocument(); + }); + }); + + it('handles malformed error JSON gracefully', async () => { + const { queryByText, getByText } = render(, undefined, { + initialRoute: + '/login?callbackauthnerr=true&code=AUTH_ERROR&message=Authentication failed&errors=invalid-json', + }); + + await waitFor(() => { + expect(queryByText('invalid-json')).not.toBeInTheDocument(); + expect(getByText('AUTH_ERROR')).toBeInTheDocument(); + }); + }); + }); + + describe('Session Organization Warnings', () => { + it('shows warning modal when org has warning', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (req, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockOrgWithWarning }), + ), + ), + ); + + render(); + + const emailInput = screen.getByTestId('email'); + const nextButton = screen.getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect( + screen.getByText(/organization has limited access/i), + ).toBeInTheDocument(); + }); + }); + + it('shows warning modal when a warning org is selected among multiple orgs', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + // Mock multiple orgs including one with a warning + const mockMultiOrgWithWarning = { + orgs: [ + { id: 'org1', name: 'Org 1' }, + { 
+ id: 'org2', + name: 'Org 2', + warning: { + code: 'ORG_WARNING', + message: 'Organization has limited access', + url: 'https://example.com/warning', + errors: [{ message: 'Contact admin for full access' }], + } as ErrorV2, + }, + ], + }; + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (_, res, ctx) => + res( + ctx.status(200), + ctx.json({ status: 'success', data: mockMultiOrgWithWarning }), + ), + ), + ); + + const { getByTestId } = render(); + + const emailInput = getByTestId('email'); + const nextButton = getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + expect(screen.getByRole('combobox')).toBeInTheDocument(); + }); + + // Select the organization with a warning + await user.click(screen.getByRole('combobox')); + await user.click(screen.getByText('Org 2')); + + await waitFor(() => { + expect( + screen.getByText(/organization has limited access/i), + ).toBeInTheDocument(); + }); + }); + }); + + describe('Form State Management', () => { + it('disables form fields during loading states', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (req, res, ctx) => + res( + ctx.delay(100), + ctx.status(200), + ctx.json({ data: mockSingleOrgPasswordAuth }), + ), + ), + ); + + render(); + + const emailInput = screen.getByTestId('email'); + const nextButton = screen.getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + // Button should be disabled during API call + expect(nextButton).toBeDisabled(); + }); + + it('shows correct button text for each auth method', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (req, res, ctx) => + res(ctx.status(200), ctx.json({ data: mockSingleOrgPasswordAuth })), + ), + ); + + render(); + + // Initially shows "Next" button + expect(screen.getByTestId('initiate_login')).toBeInTheDocument(); + + const emailInput = screen.getByTestId('email'); + const nextButton = screen.getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + // Should show "Login" button for password auth + expect(screen.getByTestId('password_authn_submit')).toBeInTheDocument(); + expect(screen.queryByTestId('initiate_login')).not.toBeInTheDocument(); + }); + }); + }); + + describe('Edge Cases', () => { + it('handles user with no organizations', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + const mockNoOrgs: SessionsContext = { + exists: false, + orgs: [], + }; + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (req, res, ctx) => + res(ctx.status(200), ctx.json({ data: mockNoOrgs })), + ), + ); + + render(); + + const emailInput = screen.getByTestId('email'); + const nextButton = screen.getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + // Should not show any auth method buttons + expect( + screen.queryByTestId('password_authn_submit'), + ).not.toBeInTheDocument(); + expect( + screen.queryByTestId('callback_authn_submit'), + ).not.toBeInTheDocument(); + }); + }); + + it('handles organization with no auth support', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + + const mockNoAuthSupport: SessionsContext = { + exists: true, + orgs: [ + { + 
id: 'org-1', + name: 'No Auth Organization', + authNSupport: { + password: [], + callback: [], + }, + }, + ], + }; + + server.use( + rest.get(SESSIONS_CONTEXT_ENDPOINT, (req, res, ctx) => + res(ctx.status(200), ctx.json({ data: mockNoAuthSupport })), + ), + ); + + render(); + + const emailInput = screen.getByTestId('email'); + const nextButton = screen.getByTestId('initiate_login'); + + await user.type(emailInput, PASSWORD_AUTHN_EMAIL); + await user.click(nextButton); + + await waitFor(() => { + // Should not show any auth method buttons + expect( + screen.queryByTestId('password_authn_submit'), + ).not.toBeInTheDocument(); + expect( + screen.queryByTestId('callback_authn_submit'), + ).not.toBeInTheDocument(); + }); }); }); }); diff --git a/frontend/src/container/Login/index.tsx b/frontend/src/container/Login/index.tsx index 33e8f0edac..ddcece8c5a 100644 --- a/frontend/src/container/Login/index.tsx +++ b/frontend/src/container/Login/index.tsx @@ -1,213 +1,255 @@ import './Login.styles.scss'; -import { Button, Form, Input, Space, Tooltip, Typography } from 'antd'; -import getLocalStorageApi from 'api/browser/localstorage/get'; -import setLocalStorageApi from 'api/browser/localstorage/set'; -import loginApi from 'api/v1/login/login'; -import loginPrecheckApi from 'api/v1/login/loginPrecheck'; -import getUserVersion from 'api/v1/version/getVersion'; +import { Button, Form, Input, Select, Space, Tooltip, Typography } from 'antd'; +import getVersion from 'api/v1/version/get'; +import get from 'api/v2/sessions/context/get'; +import post from 'api/v2/sessions/email_password/post'; import afterLogin from 'AppRoutes/utils'; -import { LOCALSTORAGE } from 'constants/localStorage'; import ROUTES from 'constants/routes'; -import { useNotifications } from 'hooks/useNotifications'; +import useUrlQuery from 'hooks/useUrlQuery'; import history from 'lib/history'; import { ArrowRight } from 'lucide-react'; -import { useAppContext } from 'providers/App/App'; -import { useEffect, useState } from 'react'; +import { useErrorModal } from 'providers/ErrorModalProvider'; +import { useEffect, useMemo, useState } from 'react'; import { useQuery } from 'react-query'; +import { ErrorV2 } from 'types/api'; import APIError from 'types/api/error'; -import { Signup as PrecheckResultType } from 'types/api/user/loginPrecheck'; +import { SessionsContext } from 'types/api/v2/sessions/context/get'; import { FormContainer, Label, ParentContainer } from './styles'; -interface LoginProps { - jwt: string; - refreshjwt: string; - userId: string; - ssoerror: string; - withPassword: string; +function parseErrors(errors: string): { message: string }[] { + try { + const parsedErrors = JSON.parse(errors); + return parsedErrors.map((error: { message: string }) => ({ + message: error.message, + })); + } catch (e) { + console.error('Failed to parse errors:', e); + return []; + } } -type FormValues = { email: string; password: string }; +type FormValues = { + email: string; + password: string; + orgId: string; + url: string; +}; -function Login({ - jwt, - refreshjwt, - userId, - ssoerror = '', - withPassword = '0', -}: LoginProps): JSX.Element { - const [isLoading, setIsLoading] = useState(false); - const { user } = useAppContext(); +function Login(): JSX.Element { + const urlQueryParams = useUrlQuery(); + // override for callbackAuthN in case of some misconfiguration + const isPasswordAuthNEnabled = (urlQueryParams.get('password') || 'N') === 'Y'; - const [precheckResult, setPrecheckResult] = useState({ - sso: false, - ssoUrl: '', - 
canSelfRegister: false, - isUser: true, - }); + // callbackAuthN handling + const accessToken = urlQueryParams.get('accessToken') || ''; + const refreshToken = urlQueryParams.get('refreshToken') || ''; - const [precheckInProcess, setPrecheckInProcess] = useState(false); - const [precheckComplete, setPrecheckComplete] = useState(false); + // callbackAuthN error handling + const callbackAuthError = urlQueryParams.get('callbackauthnerr') || ''; + const callbackAuthErrorCode = urlQueryParams.get('code') || ''; + const callbackAuthErrorMessage = urlQueryParams.get('message') || ''; + const callbackAuthErrorURL = urlQueryParams.get('url') || ''; + const callbackAuthErrorAdditional = urlQueryParams.get('errors') || ''; - const { notifications } = useNotifications(); + const [sessionsContext, setSessionsContext] = useState(); + const [isSubmitting, setIsSubmitting] = useState(false); + const [sessionsOrgId, setSessionsOrgId] = useState(''); + const [ + sessionsContextLoading, + setIsLoadingSessionsContext, + ] = useState(false); + const [form] = Form.useForm(); + const { showErrorModal } = useErrorModal(); - const getUserVersionResponse = useQuery({ - queryFn: getUserVersion, - queryKey: ['getUserVersion', user?.accessJwt], + // setupCompleted information to route to signup page in case setup is incomplete + const { + data: versionData, + isLoading: versionLoading, + error: versionError, + } = useQuery({ + queryFn: getVersion, + queryKey: ['api/v1/version/get'], enabled: true, }); + // in case of error do not route to signup page as it may lead to double registration useEffect(() => { if ( - getUserVersionResponse.isFetched && - getUserVersionResponse.data && - getUserVersionResponse.data.payload + versionData && + !versionLoading && + !versionError && + !versionData.data.setupCompleted ) { - const { setupCompleted } = getUserVersionResponse.data.payload; - if (!setupCompleted) { - // no org account registered yet, re-route user to sign up first - history.push(ROUTES.SIGN_UP); - } + history.push(ROUTES.SIGN_UP); } - }, [getUserVersionResponse]); - - const [form] = Form.useForm(); - - useEffect(() => { - if (withPassword === 'Y') { - setPrecheckComplete(true); - } - }, [withPassword]); - - useEffect(() => { - async function processJwt(): Promise { - if (jwt && jwt !== '') { - setIsLoading(true); - await afterLogin(userId, jwt, refreshjwt); - setIsLoading(false); - const fromPathname = getLocalStorageApi( - LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, - ); - if (fromPathname) { - history.push(fromPathname); - setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, ''); - } else { - history.push(ROUTES.APPLICATION); - } - } - } - processJwt(); - }, [jwt, refreshjwt, userId]); - - useEffect(() => { - if (ssoerror !== '') { - notifications.error({ - message: 'sorry, failed to login', - }); - } - }, [ssoerror, notifications]); + }, [versionData, versionLoading, versionError]); + // fetch the sessions context post user entering the email const onNextHandler = async (): Promise => { const email = form.getFieldValue('email'); - if (!email) { - notifications.error({ - message: 'Please enter a valid email address', - }); - return; - } - setPrecheckInProcess(true); + setIsLoadingSessionsContext(true); + try { - const response = await loginPrecheckApi({ + const sessionsContextResponse = await get({ email, + ref: window.location.href, }); - if (response.statusCode === 200) { - setPrecheckResult({ ...precheckResult, ...response.payload }); - - const { isUser } = response.payload; - if (isUser) { - 
setPrecheckComplete(true); - } else { - notifications.error({ - message: - 'This account does not exist. To create a new account, contact your admin to get an invite link', - }); - } - } else { - notifications.error({ - message: - 'Invalid configuration detected, please contact your administrator', - }); + setSessionsContext(sessionsContextResponse.data); + if (sessionsContextResponse.data.orgs.length === 1) { + setSessionsOrgId(sessionsContextResponse.data.orgs[0].id); } - } catch (e) { - console.log('failed to call precheck Api', e); - notifications.error({ message: 'Sorry, something went wrong' }); - } - setPrecheckInProcess(false); - }; - - const { sso, canSelfRegister } = precheckResult; - - const onSubmitHandler: () => Promise = async () => { - try { - const { email, password } = form.getFieldsValue(); - if (!precheckComplete) { - onNextHandler(); - return; - } - - if (precheckComplete && sso) { - window.location.href = precheckResult.ssoUrl || ''; - return; - } - - setIsLoading(true); - - const response = await loginApi({ - email, - password, - }); - - afterLogin( - response.data.userId, - response.data.accessJwt, - response.data.refreshJwt, - ); - setIsLoading(false); } catch (error) { - setIsLoading(false); - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); + showErrorModal(error as APIError); } + setIsLoadingSessionsContext(false); }; - const renderSAMLAction = (): JSX.Element => ( - - ); + // post selection of email and session org decide on the authN mechanism to use + const isPasswordAuthN = useMemo((): boolean => { + if (!sessionsContext) { + return false; + } - const renderOnSsoError = (): JSX.Element | null => { - if (!ssoerror) { + if (!sessionsOrgId) { + return false; + } + + let isPasswordAuthN = false; + sessionsContext.orgs.forEach((orgSession) => { + if ( + orgSession.id === sessionsOrgId && + orgSession.authNSupport?.password?.length > 0 + ) { + isPasswordAuthN = true; + } + }); + + return isPasswordAuthN || isPasswordAuthNEnabled; + }, [sessionsContext, sessionsOrgId, isPasswordAuthNEnabled]); + + const isCallbackAuthN = useMemo((): boolean => { + if (!sessionsContext) { + return false; + } + + if (!sessionsOrgId) { + return false; + } + + let isCallbackAuthN = false; + sessionsContext.orgs.forEach((orgSession) => { + if ( + orgSession.id === sessionsOrgId && + orgSession.authNSupport?.callback?.length > 0 + ) { + isCallbackAuthN = true; + form.setFieldValue('url', orgSession.authNSupport.callback[0].url); + } + }); + + return isCallbackAuthN && !isPasswordAuthNEnabled; + }, [sessionsContext, sessionsOrgId, isPasswordAuthNEnabled, form]); + + const sessionsOrgWarning = useMemo((): ErrorV2 | null => { + if (!sessionsContext) { return null; } - return ( - - Are you trying to resolve SSO configuration issue?{' '} - Login with password. 
- - ); + if (!sessionsOrgId) { + return null; + } + + let sessionsOrgWarning; + sessionsContext.orgs.forEach((orgSession) => { + if (orgSession.id === sessionsOrgId && orgSession.warning) { + sessionsOrgWarning = orgSession.warning; + } + }); + + return sessionsOrgWarning || null; + }, [sessionsContext, sessionsOrgId]); + + // once the callback authN redirects to the login screen with access_token and refresh_token navigate them to homepage + useEffect(() => { + if (accessToken && refreshToken) { + afterLogin(accessToken, refreshToken); + } + }, [accessToken, refreshToken]); + + const onSubmitHandler: () => Promise = async () => { + setIsSubmitting(true); + + try { + if (isPasswordAuthN) { + const email = form.getFieldValue('email'); + + const password = form.getFieldValue('password'); + + const createSessionEmailPasswordResponse = await post({ + email, + password, + orgId: sessionsOrgId, + }); + + afterLogin( + createSessionEmailPasswordResponse.data.accessToken, + createSessionEmailPasswordResponse.data.refreshToken, + ); + } + if (isCallbackAuthN) { + const url = form.getFieldValue('url'); + + window.location.href = url; + } + } catch (error) { + showErrorModal(error as APIError); + } finally { + setIsSubmitting(false); + } }; + useEffect(() => { + if (callbackAuthError) { + showErrorModal( + new APIError({ + httpStatusCode: 500, + error: { + code: callbackAuthErrorCode, + message: callbackAuthErrorMessage, + url: callbackAuthErrorURL, + errors: parseErrors(callbackAuthErrorAdditional), + }, + }), + ); + } + }, [ + callbackAuthError, + callbackAuthErrorAdditional, + callbackAuthErrorCode, + callbackAuthErrorMessage, + callbackAuthErrorURL, + showErrorModal, + ]); + + useEffect(() => { + if (sessionsOrgWarning) { + showErrorModal( + new APIError({ + error: { + code: sessionsOrgWarning.code, + message: sessionsOrgWarning.message, + url: sessionsOrgWarning.url, + errors: sessionsOrgWarning.errors, + }, + httpStatusCode: 400, + }), + ); + } + }, [sessionsOrgWarning, showErrorModal]); + return (
@@ -225,17 +267,39 @@ function Login({ - {precheckComplete && !sso && ( + + {sessionsContext && sessionsContext.orgs.length > 1 && ( + + + + + + + + + + + + + + + +
+ ); +} + +export default ConfigureGoogleAuthAuthnProvider; diff --git a/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnOIDC.tsx b/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnOIDC.tsx new file mode 100644 index 0000000000..3bcbd553bb --- /dev/null +++ b/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnOIDC.tsx @@ -0,0 +1,105 @@ +import './Providers.styles.scss'; + +import { Callout } from '@signozhq/callout'; +import { Checkbox, Form, Input, Typography } from 'antd'; + +function ConfigureOIDCAuthnProvider({ + isCreate, +}: { + isCreate: boolean; +}): JSX.Element { + return ( +
+
+ + Edit OIDC Authentication + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ ); +} + +export default ConfigureOIDCAuthnProvider; diff --git a/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnSAML.tsx b/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnSAML.tsx new file mode 100644 index 0000000000..5ef8344b55 --- /dev/null +++ b/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/AuthnSAML.tsx @@ -0,0 +1,77 @@ +import './Providers.styles.scss'; + +import { Callout } from '@signozhq/callout'; +import { Checkbox, Form, Input, Typography } from 'antd'; + +function ConfigureSAMLAuthnProvider({ + isCreate, +}: { + isCreate: boolean; +}): JSX.Element { + return ( +
+
+ + Edit SAML Authentication + +
+ + + + + + `, + }} + > + + + + `, + }} + > + + + + {samlCert}`, + }} + > + + + + + For providers like jumpcloud, this should be set to true.Note: This is the reverse of WantAuthnRequestsSigned. If WantAuthnRequestsSigned is false, then InsecureSkipAuthNRequestsSigned should be true.`, + }} + > + + + + +
+ ); +} + +export default ConfigureSAMLAuthnProvider; diff --git a/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/Providers.styles.scss b/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/Providers.styles.scss new file mode 100644 index 0000000000..4bbd9da228 --- /dev/null +++ b/frontend/src/container/OrganizationSettings/AuthDomain/CreateEdit/Providers/Providers.styles.scss @@ -0,0 +1,67 @@ +.google-auth { + display: flex; + flex-direction: column; + + .ant-form-item { + margin-bottom: 12px !important; + } + + .header { + display: flex; + flex-direction: column; + gap: 4px; + margin-bottom: 12px; + + .title { + font-weight: bold; + } + + .description { + margin-bottom: 0px !important; + } + } + + .callout { + margin-top: 16px; + } +} + +.saml { + display: flex; + flex-direction: column; + + .ant-form-item { + margin-bottom: 12px !important; + } + + .header { + display: flex; + flex-direction: column; + gap: 4px; + margin-bottom: 12px; + + .title { + font-weight: bold; + } + } + + .field { + .ant-row { + display: flex; + flex-direction: row; + align-items: center; + flex-flow: nowrap; + + .ant-col { + display: flex; + flex-grow: 1; + width: 100%; + padding: 0px; + } + } + } + + .callout { + margin-top: 16px; + } +} diff --git a/frontend/src/container/OrganizationSettings/AuthDomain/Toggle.tsx b/frontend/src/container/OrganizationSettings/AuthDomain/Toggle.tsx new file mode 100644 index 0000000000..0ca5d1db10 --- /dev/null +++ b/frontend/src/container/OrganizationSettings/AuthDomain/Toggle.tsx @@ -0,0 +1,45 @@ +import { Switch } from 'antd'; +import put from 'api/v1/domains/id/put'; +import { useErrorModal } from 'providers/ErrorModalProvider'; +import { useState } from 'react'; +import APIError from 'types/api/error'; +import { GettableAuthDomain } from 'types/api/v1/domains/list'; + +function Toggle({ isDefaultChecked, record }: ToggleProps): JSX.Element { + const [isChecked, setIsChecked] = useState(isDefaultChecked); + const [isLoading, setIsLoading] = useState(false); + const { showErrorModal } = useErrorModal(); + + const onChangeHandler = async (checked: boolean): Promise => { + setIsLoading(true); + + try { + await put({ + id: record.id, + config: { + ssoEnabled: checked, + ssoType: record.ssoType, + googleAuthConfig: record.googleAuthConfig, + oidcConfig: record.oidcConfig, + samlConfig: record.samlConfig, + }, + }); + setIsChecked(checked); + } catch (error) { + showErrorModal(error as APIError); + } + + setIsLoading(false); + }; + + return ( + + ); +} + +interface ToggleProps { + isDefaultChecked: boolean; + record: GettableAuthDomain; +} + +export default Toggle; diff --git a/frontend/src/container/OrganizationSettings/AuthDomain/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomain/index.tsx new file mode 100644 index 0000000000..31cc72aae0 --- /dev/null +++ b/frontend/src/container/OrganizationSettings/AuthDomain/index.tsx @@ -0,0 +1,148 @@ +import './AuthDomain.styles.scss'; + +import { PlusOutlined } from '@ant-design/icons'; +import { Button, Table, Typography } from 'antd'; +import { ColumnsType } from 'antd/lib/table'; +import deleteDomain from 'api/v1/domains/id/delete'; +import listAllDomain from 'api/v1/domains/list'; +import ErrorContent from 'components/ErrorModal/components/ErrorContent'; +import { useErrorModal } from 'providers/ErrorModalProvider'; +import { useState } from 'react'; +import { useQuery } from 'react-query'; +import APIError from 'types/api/error'; +import { GettableAuthDomain, 
SSOType } from 'types/api/v1/domains/list'; + +import CreateEdit from './CreateEdit/CreateEdit'; +import Toggle from './Toggle'; + +const columns: ColumnsType = [ + { + title: 'Domain', + dataIndex: 'name', + key: 'name', + width: 100, + render: (val): JSX.Element => {val}, + }, + { + title: 'Enforce SSO', + dataIndex: 'ssoEnabled', + key: 'ssoEnabled', + width: 80, + render: (value: boolean, record: GettableAuthDomain): JSX.Element => ( + + ), + }, + { + title: 'Action', + dataIndex: 'action', + key: 'action', + width: 100, + render: (_, record: GettableAuthDomain): JSX.Element => ( +
+ + Configure {SSOType.get(record.ssoType)} + + + Delete + +
+ ), + }, +]; + +async function deleteDomainById( + id: string, + showErrorModal: (error: APIError) => void, + refetchAuthDomainListResponse: () => void, +): Promise { + try { + await deleteDomain(id); + refetchAuthDomainListResponse(); + } catch (error) { + showErrorModal(error as APIError); + } +} + +function AuthDomain(): JSX.Element { + const [record, setRecord] = useState(); + const [addDomain, setAddDomain] = useState(false); + const { showErrorModal } = useErrorModal(); + const { + data: authDomainListResponse, + isLoading: isLoadingAuthDomainListResponse, + isFetching: isFetchingAuthDomainListResponse, + error: errorFetchingAuthDomainListResponse, + refetch: refetchAuthDomainListResponse, + } = useQuery({ + queryFn: listAllDomain, + queryKey: ['/api/v1/domains', 'list'], + enabled: true, + }); + + return ( +
+
+ Authenticated Domains + +
+ {(errorFetchingAuthDomainListResponse as APIError) && ( + + )} + {!(errorFetchingAuthDomainListResponse as APIError) && ( + ({ + onClick: ( + event: React.SyntheticEvent, + ): void => { + const target = event.target as HTMLLinkElement; + const { columnAction } = target.dataset; + switch (columnAction) { + case 'configure': + setRecord(record); + + break; + case 'delete': + deleteDomainById( + record.id, + showErrorModal, + refetchAuthDomainListResponse, + ); + break; + default: + console.error('Unknown action:', columnAction); + } + }, + })} + loading={ + isLoadingAuthDomainListResponse || isFetchingAuthDomainListResponse + } + className="auth-domain-list" + /> + )} + {(addDomain || record) && ( + { + setAddDomain(false); + setRecord(undefined); + refetchAuthDomainListResponse(); + }} + /> + )} + + ); +} + +export default AuthDomain; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/AddDomain/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/AddDomain/index.tsx deleted file mode 100644 index 9d0ccedb8e..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/AddDomain/index.tsx +++ /dev/null @@ -1,101 +0,0 @@ -/* eslint-disable prefer-regex-literals */ -import { PlusOutlined } from '@ant-design/icons'; -import { Button, Form, Input, Modal, Typography } from 'antd'; -import { useForm } from 'antd/es/form/Form'; -import createDomainApi from 'api/v1/domains/create'; -import { useNotifications } from 'hooks/useNotifications'; -import { useAppContext } from 'providers/App/App'; -import { useState } from 'react'; -import { useTranslation } from 'react-i18next'; -import APIError from 'types/api/error'; - -function AddDomain({ refetch }: Props): JSX.Element { - const { t } = useTranslation(['common', 'organizationsettings']); - const [isAddDomains, setIsDomain] = useState(false); - const [form] = useForm(); - const { org } = useAppContext(); - - const { notifications } = useNotifications(); - - const onCreateHandler = async (): Promise => { - try { - await createDomainApi({ - name: form.getFieldValue('domain'), - orgId: (org || [])[0].id, - }); - - notifications.success({ - message: 'Your domain has been added successfully.', - duration: 15, - }); - setIsDomain(false); - refetch(); - } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); - } - }; - - return ( - <> -
- - {t('authenticated_domains', { - ns: 'organizationsettings', - })} - - -
- setIsDomain(false)} - > -
- - - - - - - -
- - ); -} - -interface FormProps { - domain: string; -} - -interface Props { - refetch: () => void; -} - -export default AddDomain; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Create/Row/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/Create/Row/index.tsx deleted file mode 100644 index 8f2679af79..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Create/Row/index.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import { Button, Space, Typography } from 'antd'; -import { ReactNode } from 'react'; - -import { IconContainer, TitleContainer, TitleText } from './styles'; - -function Row({ - onClickHandler, - Icon, - buttonText, - subTitle, - title, - isDisabled, -}: RowProps): JSX.Element { - return ( - - {Icon} - - - {title} - {subTitle} - - - - - ); -} - -export interface RowProps { - onClickHandler: VoidFunction; - Icon: ReactNode; - title: string; - subTitle: ReactNode; - buttonText: string; - isDisabled: boolean; -} - -export default Row; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Create/Row/styles.ts b/frontend/src/container/OrganizationSettings/AuthDomains/Create/Row/styles.ts deleted file mode 100644 index 12b058a0bb..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Create/Row/styles.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Typography } from 'antd'; -import styled from 'styled-components'; - -export const TitleContainer = styled.div` - display: flex; - flex-direction: column; - gap: 0.25rem; -`; - -export const IconContainer = styled.div` - min-width: 70px; -`; - -export const TitleText = styled(Typography)` - font-weight: bold; -`; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Create/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/Create/index.tsx deleted file mode 100644 index 430935de99..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Create/index.tsx +++ /dev/null @@ -1,118 +0,0 @@ -import { GoogleSquareFilled, KeyOutlined } from '@ant-design/icons'; -import { Typography } from 'antd'; -import { FeatureKeys } from 'constants/features'; -import { useAppContext } from 'providers/App/App'; -import { useCallback, useMemo } from 'react'; -import { AuthDomain, GOOGLE_AUTH, SAML } from 'types/api/SAML/listDomain'; - -import Row, { RowProps } from './Row'; -import { RowContainer, RowSpace } from './styles'; - -function Create({ - ssoMethod, - assignSsoMethod, - setIsSettingsOpen, - setIsEditModalOpen, -}: CreateProps): JSX.Element { - const { featureFlags } = useAppContext(); - const SSOFlag = - featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false; - - const onGoogleAuthClickHandler = useCallback(() => { - assignSsoMethod(GOOGLE_AUTH); - setIsSettingsOpen(false); - setIsEditModalOpen(true); - }, [assignSsoMethod, setIsSettingsOpen, setIsEditModalOpen]); - - const onEditSAMLHandler = useCallback(() => { - assignSsoMethod(SAML); - setIsSettingsOpen(false); - setIsEditModalOpen(true); - }, [assignSsoMethod, setIsSettingsOpen, setIsEditModalOpen]); - - const ConfigureButtonText = useMemo(() => { - switch (ssoMethod) { - case GOOGLE_AUTH: - return 'Edit Google Auth'; - case SAML: - return 'Edit SAML'; - default: - return 'Get Started'; - } - }, [ssoMethod]); - - const data: RowProps[] = SSOFlag - ? 
[ - { - buttonText: ConfigureButtonText, - Icon: , - title: 'Google Apps Authentication', - subTitle: 'Let members sign-in with a Google workspace account', - onClickHandler: onGoogleAuthClickHandler, - isDisabled: false, - }, - { - buttonText: ConfigureButtonText, - Icon: , - onClickHandler: onEditSAMLHandler, - subTitle: ( - <> - Azure, Active Directory, Okta or your custom SAML 2.0 solution{' '} - - (Unsupported SAMLs) - - - ), - title: 'SAML Authentication', - isDisabled: false, - }, - ] - : [ - { - buttonText: ConfigureButtonText, - Icon: , - title: 'Google Apps Authentication', - subTitle: 'Let members sign-in with a Google account', - onClickHandler: onGoogleAuthClickHandler, - isDisabled: false, - }, - ]; - - return ( -
- - SigNoz supports the following single sign-on services (SSO). Get started - with setting your project’s SSO below - - - - - {data.map((rowData) => ( - - ))} - - -
- ); -} - -interface CreateProps { - ssoMethod: AuthDomain['ssoType']; - assignSsoMethod: (value: AuthDomain['ssoType']) => void; - setIsSettingsOpen: (value: boolean) => void; - setIsEditModalOpen: (value: boolean) => void; -} - -export default Create; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Create/styles.ts b/frontend/src/container/OrganizationSettings/AuthDomains/Create/styles.ts deleted file mode 100644 index f0a5cd503f..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Create/styles.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Space } from 'antd'; -import styled from 'styled-components'; - -export const RowContainer = styled.div` - display: flex; - flex-direction: column; - margin-top: 1rem; -`; - -export const RowSpace = styled(Space)` - &&& { - row-gap: 1.5rem !important; - } -`; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Edit/EditGoogleAuth.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/Edit/EditGoogleAuth.tsx deleted file mode 100644 index 2939b8d272..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Edit/EditGoogleAuth.tsx +++ /dev/null @@ -1,49 +0,0 @@ -import { InfoCircleFilled } from '@ant-design/icons'; -import { Card, Form, Input, Space, Typography } from 'antd'; - -function EditGoogleAuth(): JSX.Element { - return ( - <> - - Enter OAuth 2.0 credentials obtained from the Google API Console below. Read - the{' '} - - docs - {' '} - for more information. - - - - - - - - - - - - - - Google OAuth2 won’t be enabled unless you enter all the attributes above - - - - - ); -} - -export default EditGoogleAuth; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Edit/EditSAML.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/Edit/EditSAML.tsx deleted file mode 100644 index b445e5cae9..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Edit/EditSAML.tsx +++ /dev/null @@ -1,43 +0,0 @@ -import { InfoCircleFilled } from '@ant-design/icons'; -import { Card, Form, Input, Space, Typography } from 'antd'; - -function EditSAML(): JSX.Element { - return ( - <> - - - - - - - - - - - - - - - - - SAML won’t be enabled unless you enter all the attributes above - - - - - ); -} - -export default EditSAML; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Edit/helpers.ts b/frontend/src/container/OrganizationSettings/AuthDomains/Edit/helpers.ts deleted file mode 100644 index 38b973537a..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Edit/helpers.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { - AuthDomain, - GOOGLE_AUTH, - GoogleAuthConfig, - isGoogleAuthConfig, - isSAMLConfig, - SAML, - SAMLConfig, -} from 'types/api/SAML/listDomain'; - -export function parseSamlForm( - current: AuthDomain, - formValues: AuthDomain, -): SAMLConfig | undefined { - if (current?.ssoType === SAML && isSAMLConfig(formValues?.samlConfig)) { - return { - ...current.samlConfig, - ...formValues?.samlConfig, - }; - } - - return current.samlConfig; -} - -export function parseGoogleAuthForm( - current: AuthDomain, - formValues: AuthDomain, -): GoogleAuthConfig | undefined { - if ( - current?.ssoType === GOOGLE_AUTH && - isGoogleAuthConfig(formValues?.googleAuthConfig) - ) { - return { - ...current.googleAuthConfig, - ...formValues?.googleAuthConfig, - }; - } - - return current.googleAuthConfig; -} diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Edit/index.tsx 
b/frontend/src/container/OrganizationSettings/AuthDomains/Edit/index.tsx deleted file mode 100644 index dc47de5faa..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Edit/index.tsx +++ /dev/null @@ -1,102 +0,0 @@ -import { Button, Form, Space } from 'antd'; -import { useForm } from 'antd/lib/form/Form'; -import { useNotifications } from 'hooks/useNotifications'; -import { useCallback } from 'react'; -import { useTranslation } from 'react-i18next'; -import { AuthDomain, GOOGLE_AUTH, SAML } from 'types/api/SAML/listDomain'; - -import EditGoogleAuth from './EditGoogleAuth'; -import EditSAML from './EditSAML'; -import { parseGoogleAuthForm, parseSamlForm } from './helpers'; - -// renderFormInputs selectively renders form fields depending upon -// sso type -const renderFormInputs = ( - record: AuthDomain | undefined, -): JSX.Element | undefined => { - switch (record?.ssoType) { - case GOOGLE_AUTH: - return ; - case SAML: - default: - return ; - } -}; - -function EditSSO({ - onRecordUpdateHandler, - record, - setEditModalOpen, -}: EditFormProps): JSX.Element { - const [form] = useForm(); - - const { t } = useTranslation(['common']); - - const { notifications } = useNotifications(); - - const onFinishHandler = useCallback(() => { - form - .validateFields() - .then(async (values) => { - await onRecordUpdateHandler({ - ...record, - ssoEnabled: true, - ssoType: record.ssoType, - samlConfig: parseSamlForm(record, values), - googleAuthConfig: parseGoogleAuthForm(record, values), - }); - }) - .catch(() => { - notifications.error({ - message: t('something_went_wrong', { ns: 'common' }), - }); - }); - }, [form, onRecordUpdateHandler, record, t, notifications]); - - const onResetHandler = useCallback(() => { - form.resetFields(); - setEditModalOpen(false); - }, [setEditModalOpen, form]); - - return ( -
{ - error.errorFields.forEach(({ errors }) => { - notifications.error({ - message: - errors[0].toString() || t('something_went_wrong', { ns: 'common' }), - }); - }); - form.resetFields(); - }} - layout="vertical" - onFinish={onFinishHandler} - autoComplete="off" - form={form} - > - {renderFormInputs(record)} - - - - - - ); -} - -interface EditFormProps { - onRecordUpdateHandler: (record: AuthDomain) => Promise; - record: AuthDomain; - setEditModalOpen: (open: boolean) => void; -} - -export default EditSSO; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Switch/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/Switch/index.tsx deleted file mode 100644 index ca4b3a539e..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Switch/index.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import { Switch } from 'antd'; -import { useMemo, useState } from 'react'; -import { AuthDomain } from 'types/api/SAML/listDomain'; - -import { isSSOConfigValid } from '../helpers'; - -function SwitchComponent({ - isDefaultChecked, - onRecordUpdateHandler, - record, -}: SwitchComponentProps): JSX.Element { - const [isChecked, setIsChecked] = useState(isDefaultChecked); - const [isLoading, setIsLoading] = useState(false); - - const onChangeHandler = async (checked: boolean): Promise => { - setIsLoading(true); - const response = await onRecordUpdateHandler({ - ...record, - ssoEnabled: checked, - }); - - if (response) { - setIsChecked(checked); - } - setIsLoading(false); - }; - - const isInValidVerificate = useMemo(() => !isSSOConfigValid(record), [record]); - - return ( - - ); -} - -interface SwitchComponentProps { - isDefaultChecked: boolean; - onRecordUpdateHandler: (record: AuthDomain) => Promise; - record: AuthDomain; -} - -export default SwitchComponent; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/helpers.test.ts b/frontend/src/container/OrganizationSettings/AuthDomains/helpers.test.ts deleted file mode 100644 index 9a9c7bb41d..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/helpers.test.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { AuthDomain, SAML } from 'types/api/SAML/listDomain'; - -import { isSSOConfigValid } from './helpers'; - -const inValidCase: AuthDomain['samlConfig'][] = [ - { - samlCert: '', - samlEntity: '', - samlIdp: '', - }, - { - samlCert: '', - samlEntity: '', - samlIdp: 'asd', - }, - { - samlCert: 'sample certificate', - samlEntity: '', - samlIdp: '', - }, - { - samlCert: 'sample cert', - samlEntity: 'sample entity', - samlIdp: '', - }, -]; - -const validCase: AuthDomain['samlConfig'][] = [ - { - samlCert: 'sample cert', - samlEntity: 'sample entity', - samlIdp: 'sample idp', - }, -]; - -describe('Utils', () => { - inValidCase.forEach((config) => { - it('should return invalid saml config', () => { - expect( - isSSOConfigValid({ - id: 'test-0', - name: 'test', - orgId: '32ed234', - ssoEnabled: true, - ssoType: SAML, - samlConfig: { - samlCert: config?.samlCert || '', - samlEntity: config?.samlEntity || '', - samlIdp: config?.samlIdp || '', - }, - }), - ).toBe(false); - }); - }); - - validCase.forEach((config) => { - it('should return invalid saml config', () => { - expect( - isSSOConfigValid({ - id: 'test-0', - name: 'test', - orgId: '32ed234', - ssoEnabled: true, - ssoType: SAML, - samlConfig: { - samlCert: config?.samlCert || '', - samlEntity: config?.samlEntity || '', - samlIdp: config?.samlIdp || '', - }, - }), - ).toBe(true); - }); - }); -}); diff --git 
a/frontend/src/container/OrganizationSettings/AuthDomains/helpers.ts b/frontend/src/container/OrganizationSettings/AuthDomains/helpers.ts deleted file mode 100644 index bde4ed2d44..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/helpers.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { AuthDomain, GOOGLE_AUTH, SAML } from 'types/api/SAML/listDomain'; - -export const ConfigureSsoButtonText = ( - ssoType: AuthDomain['ssoType'], -): string => { - switch (ssoType) { - case SAML: - return 'Edit SAML'; - case GOOGLE_AUTH: - return 'Edit Google Auth'; - default: - return 'Configure SSO'; - } -}; - -export const EditModalTitleText = ( - ssoType: AuthDomain['ssoType'] | undefined, -): string => { - switch (ssoType) { - case SAML: - return 'Edit SAML Configuration'; - case GOOGLE_AUTH: - return 'Edit Google Authentication'; - default: - return 'Configure SSO'; - } -}; - -export const isSSOConfigValid = (domain: AuthDomain): boolean => { - switch (domain.ssoType) { - case SAML: - return ( - domain.samlConfig?.samlCert?.length !== 0 && - domain.samlConfig?.samlEntity?.length !== 0 && - domain.samlConfig?.samlIdp?.length !== 0 - ); - case GOOGLE_AUTH: - return ( - domain.googleAuthConfig?.clientId?.length !== 0 && - domain.googleAuthConfig?.clientSecret?.length !== 0 - ); - default: - return false; - } -}; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx deleted file mode 100644 index 8b9f68a98a..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx +++ /dev/null @@ -1,263 +0,0 @@ -import { Button, Modal, Space, Typography } from 'antd'; -import { ColumnsType } from 'antd/lib/table'; -import deleteDomain from 'api/v1/domains/delete'; -import listAllDomain from 'api/v1/domains/list'; -import updateDomain from 'api/v1/domains/update'; -import { ResizeTable } from 'components/ResizeTable'; -import TextToolTip from 'components/TextToolTip'; -import { useNotifications } from 'hooks/useNotifications'; -import { useAppContext } from 'providers/App/App'; -import { Dispatch, SetStateAction, useCallback, useState } from 'react'; -import { useTranslation } from 'react-i18next'; -import { useQuery } from 'react-query'; -import APIError from 'types/api/error'; -import { AuthDomain } from 'types/api/SAML/listDomain'; -import { v4 } from 'uuid'; - -import AddDomain from './AddDomain'; -import Create from './Create'; -import EditSSO from './Edit'; -import { ConfigureSsoButtonText, EditModalTitleText } from './helpers'; -import { ColumnWithTooltip } from './styles'; -import SwitchComponent from './Switch'; - -function AuthDomains(): JSX.Element { - const { t } = useTranslation(['common', 'organizationsettings']); - const [isSettingsOpen, setIsSettingsOpen] = useState(false); - const { org } = useAppContext(); - const [currentDomain, setCurrentDomain] = useState(); - const [isEditModalOpen, setIsEditModalOpen] = useState(false); - - const { data, isLoading, refetch } = useQuery(['saml'], { - queryFn: () => listAllDomain(), - enabled: org !== null, - }); - - const { notifications } = useNotifications(); - - const assignSsoMethod = useCallback( - (typ: AuthDomain['ssoType']): void => { - setCurrentDomain({ ...currentDomain, ssoType: typ } as AuthDomain); - }, - [currentDomain, setCurrentDomain], - ); - - const onCloseHandler = useCallback( - (func: Dispatch>) => (): void => { - func(false); - }, - [], - ); - - const onRecordUpdateHandler = useCallback( - async (record: 
AuthDomain): Promise => { - try { - await updateDomain(record); - notifications.success({ - message: t('saml_settings', { - ns: 'organizationsettings', - }), - }); - refetch(); - onCloseHandler(setIsEditModalOpen)(); - return true; - } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); - return false; - } - }, - [refetch, t, onCloseHandler, notifications], - ); - - const onOpenHandler = useCallback( - (func: Dispatch>) => (): void => { - func(true); - }, - [], - ); - - const onEditHandler = useCallback( - (record: AuthDomain) => (): void => { - if (!record.ssoType) { - onOpenHandler(setIsSettingsOpen)(); - } else { - onOpenHandler(setIsEditModalOpen)(); - } - - setCurrentDomain(record); - }, - [onOpenHandler], - ); - - const onDeleteHandler = useCallback( - (record: AuthDomain) => (): void => { - Modal.confirm({ - centered: true, - title: t('delete_domain', { - ns: 'organizationsettings', - }), - content: t('delete_domain_message', { - ns: 'organizationsettings', - }), - onOk: async () => { - try { - await deleteDomain({ - ...record, - }); - - notifications.success({ - message: t('common:success'), - }); - refetch(); - } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); - } - }, - }); - }, - [refetch, t, notifications], - ); - - const columns: ColumnsType = [ - { - title: 'Domain', - dataIndex: 'name', - key: 'name', - width: 100, - }, - { - title: ( - - Enforce SSO - {' '} - - ), - dataIndex: 'ssoEnabled', - key: 'ssoEnabled', - width: 80, - render: (value: boolean, record: AuthDomain): JSX.Element => ( - - ), - }, - { - title: '', - dataIndex: 'description', - key: 'description', - width: 100, - render: (_, record: AuthDomain): JSX.Element => ( - - ), - }, - { - title: 'Action', - dataIndex: 'action', - key: 'action', - width: 50, - render: (_, record): JSX.Element => ( - - ), - }, - ]; - - if (!isLoading && data?.data?.length === 0) { - return ( - - - - - - - record.name + v4()} - dataSource={[]} - tableLayout="fixed" - bordered - /> - - ); - } - - const tableData = data?.data || []; - return ( - <> - - - - - - - - -
- - - record.name + v4()} - bordered - /> -
- - ); -} - -export default AuthDomains; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/styles.ts b/frontend/src/container/OrganizationSettings/AuthDomains/styles.ts deleted file mode 100644 index 6958f06569..0000000000 --- a/frontend/src/container/OrganizationSettings/AuthDomains/styles.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Row } from 'antd'; -import styled from 'styled-components'; - -export const Container = styled.div` - display: flex; - justify-content: space-between; - align-items: center; - gap: 8px; -`; - -export const ColumnWithTooltip = styled(Row)` - &&& > article { - margin-right: 0.5rem; - } -`; diff --git a/frontend/src/container/OrganizationSettings/InviteUserModal/InviteUserModal.tsx b/frontend/src/container/OrganizationSettings/InviteUserModal/InviteUserModal.tsx index 896da7fe3b..91f8699372 100644 --- a/frontend/src/container/OrganizationSettings/InviteUserModal/InviteUserModal.tsx +++ b/frontend/src/container/OrganizationSettings/InviteUserModal/InviteUserModal.tsx @@ -1,23 +1,10 @@ import { Button, Form, Modal } from 'antd'; import { FormInstance } from 'antd/lib'; import sendInvite from 'api/v1/invite/create'; -import get from 'api/v1/invite/get'; -import ROUTES from 'constants/routes'; import { useNotifications } from 'hooks/useNotifications'; -import { useAppContext } from 'providers/App/App'; -import { - Dispatch, - SetStateAction, - useCallback, - useEffect, - useState, -} from 'react'; +import { useCallback, useState } from 'react'; import { useTranslation } from 'react-i18next'; -import { useQuery } from 'react-query'; -import { SuccessResponseV2 } from 'types/api'; import APIError from 'types/api/error'; -import { PendingInvite } from 'types/api/user/getPendingInvites'; -import { ROLES } from 'types/roles'; import InviteTeamMembers from '../InviteTeamMembers'; import { InviteMemberFormValues } from '../PendingInvitesContainer'; @@ -26,17 +13,7 @@ export interface InviteUserModalProps { isInviteTeamMemberModalOpen: boolean; toggleModal: (value: boolean) => void; form: FormInstance; - setDataSource?: Dispatch>; - shouldCallApi?: boolean; -} - -interface DataProps { - key: number; - name: string; - id: string; - email: string; - accessLevel: ROLES; - inviteLink: string; + onClose: () => void; } function InviteUserModal(props: InviteUserModalProps): JSX.Element { @@ -44,54 +21,15 @@ function InviteUserModal(props: InviteUserModalProps): JSX.Element { isInviteTeamMemberModalOpen, toggleModal, form, - setDataSource, - shouldCallApi = false, + + onClose, } = props; const { notifications } = useNotifications(); const { t } = useTranslation(['organizationsettings', 'common']); - const { user } = useAppContext(); + const [isInvitingMembers, setIsInvitingMembers] = useState(false); const [modalForm] = Form.useForm(form); - const getPendingInvitesResponse = useQuery< - SuccessResponseV2, - APIError - >({ - queryFn: get, - queryKey: ['getPendingInvites', user?.accessJwt], - enabled: shouldCallApi, - }); - - const getParsedInviteData = useCallback( - (payload: PendingInvite[] = []) => - payload?.map((data) => ({ - key: data.createdAt, - name: data?.name, - id: data.id, - email: data.email, - accessLevel: data.role, - inviteLink: `${window.location.origin}${ROUTES.SIGN_UP}?token=${data.token}`, - })), - [], - ); - - useEffect(() => { - if ( - getPendingInvitesResponse.status === 'success' && - getPendingInvitesResponse?.data?.data - ) { - const data = getParsedInviteData( - getPendingInvitesResponse?.data?.data || [], - ); - setDataSource?.(data); - 
} - }, [ - getParsedInviteData, - getPendingInvitesResponse?.data?.data, - getPendingInvitesResponse.status, - setDataSource, - ]); - const onInviteClickHandler = useCallback( async (values: InviteMemberFormValues): Promise => { try { @@ -119,10 +57,7 @@ function InviteUserModal(props: InviteUserModalProps): JSX.Element { ); setTimeout(async () => { - const { data, status } = await getPendingInvitesResponse.refetch(); - if (status === 'success' && data.data) { - setDataSource?.(getParsedInviteData(data?.data || [])); - } + onClose(); setIsInvitingMembers?.(false); toggleModal(false); }, 2000); @@ -134,15 +69,7 @@ function InviteUserModal(props: InviteUserModalProps): JSX.Element { }); } }, - [ - getParsedInviteData, - getPendingInvitesResponse, - notifications, - setDataSource, - setIsInvitingMembers, - t, - toggleModal, - ], + [notifications, onClose, t, toggleModal], ); return ( @@ -177,9 +104,4 @@ function InviteUserModal(props: InviteUserModalProps): JSX.Element { ); } -InviteUserModal.defaultProps = { - setDataSource: (): void => {}, - shouldCallApi: false, -}; - export default InviteUserModal; diff --git a/frontend/src/container/OrganizationSettings/Members/index.tsx b/frontend/src/container/OrganizationSettings/Members/index.tsx index 2255f099a8..3bba43a66c 100644 --- a/frontend/src/container/OrganizationSettings/Members/index.tsx +++ b/frontend/src/container/OrganizationSettings/Members/index.tsx @@ -3,6 +3,7 @@ import { ColumnsType } from 'antd/lib/table'; import getAll from 'api/v1/user/get'; import deleteUser from 'api/v1/user/id/delete'; import update from 'api/v1/user/id/update'; +import ErrorContent from 'components/ErrorModal/components/ErrorContent'; import { ResizeTable } from 'components/ResizeTable'; import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats'; import dayjs from 'dayjs'; @@ -11,9 +12,7 @@ import { useAppContext } from 'providers/App/App'; import { Dispatch, SetStateAction, useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useQuery } from 'react-query'; -import { SuccessResponseV2 } from 'types/api'; import APIError from 'types/api/error'; -import { UserResponse } from 'types/api/user/getUsers'; import { ROLES } from 'types/roles'; import DeleteMembersDetails from '../DeleteMembersDetails'; @@ -210,10 +209,8 @@ function UserFunction({ function Members(): JSX.Element { const { org } = useAppContext(); - const { status, data, isLoading } = useQuery< - SuccessResponseV2, - APIError - >({ + + const { data, isLoading, error } = useQuery({ queryFn: () => getAll(), queryKey: ['getOrgUser', org?.[0].id], }); @@ -221,7 +218,7 @@ function Members(): JSX.Element { const [dataSource, setDataSource] = useState([]); useEffect(() => { - if (status === 'success' && data?.data && Array.isArray(data.data)) { + if (data?.data && Array.isArray(data.data)) { const updatedData: DataType[] = data?.data?.map((e) => ({ accessLevel: e.role, email: e.email, @@ -231,7 +228,7 @@ function Members(): JSX.Element { })); setDataSource(updatedData); } - }, [data?.data, status]); + }, [data]); const columns: ColumnsType = [ { @@ -293,14 +290,17 @@ function Members(): JSX.Element {
({dataSource.length})
)} - + {!(error as APIError) && ( + + )} + {(error as APIError) && } ); } diff --git a/frontend/src/container/OrganizationSettings/PendingInvitesContainer/index.tsx b/frontend/src/container/OrganizationSettings/PendingInvitesContainer/index.tsx index c2c9ca6802..3b58ca9ef2 100644 --- a/frontend/src/container/OrganizationSettings/PendingInvitesContainer/index.tsx +++ b/frontend/src/container/OrganizationSettings/PendingInvitesContainer/index.tsx @@ -3,6 +3,7 @@ import { Button, Form, Space, Typography } from 'antd'; import { ColumnsType } from 'antd/lib/table'; import get from 'api/v1/invite/get'; import deleteInvite from 'api/v1/invite/id/delete'; +import ErrorContent from 'components/ErrorModal/components/ErrorContent'; import { ResizeTable } from 'components/ResizeTable'; import { INVITE_MEMBERS_HASH } from 'constants/app'; import ROUTES from 'constants/routes'; @@ -13,7 +14,6 @@ import { useTranslation } from 'react-i18next'; import { useQuery } from 'react-query'; import { useLocation } from 'react-router-dom'; import { useCopyToClipboard } from 'react-use'; -import { SuccessResponseV2 } from 'types/api'; import APIError from 'types/api/error'; import { PendingInvite } from 'types/api/user/getPendingInvites'; import { ROLES } from 'types/roles'; @@ -48,10 +48,7 @@ function PendingInvitesContainer(): JSX.Element { } }, [state.error, state.value, t, notifications]); - const getPendingInvitesResponse = useQuery< - SuccessResponseV2, - APIError - >({ + const { data, isLoading, error, isError, refetch } = useQuery({ queryFn: get, queryKey: ['getPendingInvites', user?.accessJwt], }); @@ -90,20 +87,11 @@ function PendingInvitesContainer(): JSX.Element { }, [hash, toggleModal]); useEffect(() => { - if ( - getPendingInvitesResponse.status === 'success' && - getPendingInvitesResponse?.data?.data - ) { - const data = getParsedInviteData( - getPendingInvitesResponse?.data?.data || [], - ); - setDataSource(data); + if (data?.data) { + const parsedData = getParsedInviteData(data?.data || []); + setDataSource(parsedData); } - }, [ - getParsedInviteData, - getPendingInvitesResponse?.data?.data, - getPendingInvitesResponse.status, - ]); + }, [data, getParsedInviteData]); const onRevokeHandler = async (id: string): Promise => { try { @@ -184,16 +172,15 @@ function PendingInvitesContainer(): JSX.Element {
{t('pending_invites')} - {getPendingInvitesResponse.status !== 'loading' && dataSource && ( + {dataSource && (
({dataSource.length})
)}
@@ -210,14 +197,17 @@ function PendingInvitesContainer(): JSX.Element {
- + {!isError && ( + + )} + {isError && }
); diff --git a/frontend/src/container/OrganizationSettings/index.tsx b/frontend/src/container/OrganizationSettings/index.tsx index 014b88c1bd..5de2daa3ef 100644 --- a/frontend/src/container/OrganizationSettings/index.tsx +++ b/frontend/src/container/OrganizationSettings/index.tsx @@ -3,7 +3,7 @@ import './OrganizationSettings.styles.scss'; import { Space } from 'antd'; import { useAppContext } from 'providers/App/App'; -import AuthDomains from './AuthDomains'; +import AuthDomain from './AuthDomain'; import DisplayName from './DisplayName'; import Members from './Members'; import PendingInvitesContainer from './PendingInvitesContainer'; @@ -26,7 +26,7 @@ function OrganizationSettings(): JSX.Element { - + ); } diff --git a/frontend/src/hooks/useActiveLicenseV3/useActiveLicenseV3.tsx b/frontend/src/hooks/useActiveLicenseV3/useActiveLicenseV3.tsx index 9948a7a195..d6e0801278 100644 --- a/frontend/src/hooks/useActiveLicenseV3/useActiveLicenseV3.tsx +++ b/frontend/src/hooks/useActiveLicenseV3/useActiveLicenseV3.tsx @@ -10,6 +10,7 @@ const useActiveLicenseV3 = (isLoggedIn: boolean): UseLicense => queryFn: getActive, queryKey: [REACT_QUERY_KEY.GET_ACTIVE_LICENSE_V3], enabled: !!isLoggedIn, + retry: false, }); type UseLicense = UseQueryResult, APIError>; diff --git a/frontend/src/index.tsx b/frontend/src/index.tsx index 25cfbfb5ab..4ed5800fb1 100644 --- a/frontend/src/index.tsx +++ b/frontend/src/index.tsx @@ -11,6 +11,7 @@ import { HelmetProvider } from 'react-helmet-async'; import { QueryClient, QueryClientProvider } from 'react-query'; import { Provider } from 'react-redux'; import store from 'store'; +import APIError from 'types/api/error'; const queryClient = new QueryClient({ defaultOptions: { @@ -19,9 +20,13 @@ const queryClient = new QueryClient({ retry(failureCount, error): boolean { if ( // in case of manually throwing errors please make sure to send error.response.status - error instanceof AxiosError && - error.response?.status && - (error.response?.status >= 400 || error.response?.status <= 499) + (error instanceof AxiosError && + error.response?.status && + error.response?.status >= 400 && + error.response?.status <= 499) || + (error instanceof APIError && + error.getHttpStatusCode() >= 400 && + error.getHttpStatusCode() <= 499) ) { return false; } diff --git a/frontend/src/pages/Login/index.tsx b/frontend/src/pages/Login/index.tsx index 8a78de4e29..5c69c5efd5 100644 --- a/frontend/src/pages/Login/index.tsx +++ b/frontend/src/pages/Login/index.tsx @@ -1,16 +1,8 @@ import './Login.styles.scss'; import LoginContainer from 'container/Login'; -import useURLQuery from 'hooks/useUrlQuery'; function Login(): JSX.Element { - const urlQueryParams = useURLQuery(); - const jwt = urlQueryParams.get('jwt') || ''; - const refreshJwt = urlQueryParams.get('refreshjwt') || ''; - const userId = urlQueryParams.get('usr') || ''; - const ssoerror = urlQueryParams.get('ssoerror') || ''; - const withPassword = urlQueryParams.get('password') || ''; - return (
@@ -25,13 +17,7 @@ function Login(): JSX.Element {
SigNoz
- +
); diff --git a/frontend/src/pages/ResetPassword/index.tsx b/frontend/src/pages/ResetPassword/index.tsx index 03eb173ca2..d00c509b14 100644 --- a/frontend/src/pages/ResetPassword/index.tsx +++ b/frontend/src/pages/ResetPassword/index.tsx @@ -1,45 +1,33 @@ -import { Typography } from 'antd'; -import getUserVersion from 'api/v1/version/getVersion'; +import getUserVersion from 'api/v1/version/get'; import Spinner from 'components/Spinner'; import ResetPasswordContainer from 'container/ResetPassword'; import { useAppContext } from 'providers/App/App'; -import { useTranslation } from 'react-i18next'; -import { useQueries } from 'react-query'; +import { useErrorModal } from 'providers/ErrorModalProvider'; +import { useEffect } from 'react'; +import { useQuery } from 'react-query'; +import APIError from 'types/api/error'; function ResetPassword(): JSX.Element { - const { t } = useTranslation('common'); const { user, isLoggedIn } = useAppContext(); + const { showErrorModal } = useErrorModal(); - const [versionResponse] = useQueries([ - { - queryFn: getUserVersion, - queryKey: ['getUserVersion', user?.accessJwt], - enabled: !isLoggedIn, - }, - ]); + const { data, isLoading, error } = useQuery({ + queryFn: getUserVersion, + queryKey: ['getUserVersion', user?.accessJwt], + enabled: !isLoggedIn, + }); - if ( - versionResponse.status === 'error' || - (versionResponse.status === 'success' && - versionResponse.data?.statusCode !== 200) - ) { - return ( - - {versionResponse.data?.error || t('something_went_wrong')} - - ); - } + useEffect(() => { + if (error) { + showErrorModal(error as APIError); + } + }, [error, showErrorModal]); - if ( - versionResponse.status === 'loading' || - !(versionResponse.data && versionResponse.data.payload) - ) { + if (isLoading) { return ; } - const { version } = versionResponse.data.payload; - - return ; + return ; } export default ResetPassword; diff --git a/frontend/src/pages/SignUp/SignUp.tsx b/frontend/src/pages/SignUp/SignUp.tsx index 370955fd52..2942289e69 100644 --- a/frontend/src/pages/SignUp/SignUp.tsx +++ b/frontend/src/pages/SignUp/SignUp.tsx @@ -4,12 +4,10 @@ import { Button, Form, Input, Typography } from 'antd'; import logEvent from 'api/common/logEvent'; import accept from 'api/v1/invite/id/accept'; import getInviteDetails from 'api/v1/invite/id/get'; -import loginApi from 'api/v1/login/login'; -import signUpApi from 'api/v1/register/signup'; +import signUpApi from 'api/v1/register/post'; +import passwordAuthNContext from 'api/v2/sessions/email_password/post'; import afterLogin from 'AppRoutes/utils'; -import ROUTES from 'constants/routes'; import { useNotifications } from 'hooks/useNotifications'; -import history from 'lib/history'; import { useErrorModal } from 'providers/ErrorModalProvider'; import { useEffect, useState } from 'react'; import { useQuery } from 'react-query'; @@ -17,10 +15,8 @@ import { useLocation } from 'react-router-dom'; import { SuccessResponseV2 } from 'types/api'; import APIError from 'types/api/error'; import { InviteDetails } from 'types/api/user/getInviteDetails'; -import { Signup as LoginPrecheckPayloadProps } from 'types/api/user/loginPrecheck'; import { FormContainer, Label } from './styles'; -import { isPasswordNotValidMessage, isPasswordValid } from './utils'; type FormValues = { email: string; @@ -34,17 +30,9 @@ type FormValues = { function SignUp(): JSX.Element { const [loading, setLoading] = useState(false); - const [precheck, setPrecheck] = useState({ - sso: false, - isUser: false, - }); - const [confirmPasswordError, 
setConfirmPasswordError] = useState( false, ); - const [isPasswordPolicyError, setIsPasswordPolicyError] = useState( - false, - ); const { search } = useLocation(); const params = new URLSearchParams(search); const token = params.get('token'); @@ -71,7 +59,6 @@ function SignUp(): JSX.Element { getInviteDetailsResponse.data.data ) { const responseDetails = getInviteDetailsResponse.data.data; - if (responseDetails.precheck) setPrecheck(responseDetails.precheck); form.setFieldValue('firstName', responseDetails.name); form.setFieldValue('email', responseDetails.email); form.setFieldValue('organizationName', responseDetails.organization); @@ -115,20 +102,20 @@ function SignUp(): JSX.Element { const signUp = async (values: FormValues): Promise => { try { const { organizationName, password, email } = values; - await signUpApi({ + const user = await signUpApi({ email, orgDisplayName: organizationName, password, token: params.get('token') || undefined, }); - const loginResponse = await loginApi({ + const token = await passwordAuthNContext({ email, password, + orgId: user.data.orgId, }); - const { data } = loginResponse; - await afterLogin(data.userId, data.accessJwt, data.refreshJwt); + await afterLogin(token.data.accessToken, token.data.refreshToken); } catch (error) { showErrorModal(error as APIError); } @@ -137,16 +124,17 @@ function SignUp(): JSX.Element { const acceptInvite = async (values: FormValues): Promise => { try { const { password, email } = values; - await accept({ + const user = await accept({ password, token: params.get('token') || '', }); - const loginResponse = await loginApi({ + const token = await passwordAuthNContext({ email, password, + orgId: user.data.orgId, }); - const { data } = loginResponse; - await afterLogin(data.userId, data.accessJwt, data.refreshJwt); + + await afterLogin(token.data.accessToken, token.data.refreshToken); } catch (error) { notifications.error({ message: (error as APIError).getErrorCode(), @@ -155,42 +143,6 @@ function SignUp(): JSX.Element { } }; - const handleSubmitSSO = async (): Promise => { - if (!params.get('token')) { - notifications.error({ - message: - 'Invite token is required for signup, please request one from your admin', - }); - return; - } - setLoading(true); - try { - const response = await accept({ - password: '', - token: params.get('token') || '', - sourceUrl: encodeURIComponent(window.location.href), - }); - - if (response.data?.sso) { - if (response.data?.ssoUrl) { - window.location.href = response.data?.ssoUrl; - } else { - notifications.error({ - message: 'Signup completed but failed to initiate login', - }); - // take user to login page as there is nothing to do here - history.push(ROUTES.LOGIN); - } - } - } catch (error) { - notifications.error({ - message: 'Something went wrong', - }); - } - - setLoading(false); - }; - // eslint-disable-next-line sonarjs/cognitive-complexity const handleSubmit = (): void => { (async (): Promise => { @@ -198,15 +150,6 @@ function SignUp(): JSX.Element { const values = form.getFieldsValue(); setLoading(true); - if (!isPasswordValid(values.password)) { - logEvent('Account Creation Page - Invalid Password', { - email: values.email, - }); - setIsPasswordPolicyError(true); - setLoading(false); - return; - } - if (isSignUp) { await signUp(values); logEvent('Account Created Successfully', { @@ -232,9 +175,6 @@ function SignUp(): JSX.Element { if ('password' in changedValues || 'confirmPassword' in changedValues) { const { password, confirmPassword } = form.getFieldsValue(); - const isInvalidPassword 
= !isPasswordValid(password) && password.length > 0; - setIsPasswordPolicyError(isInvalidPassword); - const isSamePassword = password === confirmPassword; setConfirmPasswordError(!isSamePassword); } @@ -245,9 +185,9 @@ function SignUp(): JSX.Element { return ( loading || !values.email || - (!precheck.sso && (!values.password || !values.confirmPassword)) || - confirmPasswordError || - isPasswordPolicyError + !values.password || + !values.confirmPassword || + confirmPasswordError ); }; @@ -266,7 +206,7 @@ function SignUp(): JSX.Element { - {!precheck.sso && ( - <> -
- - - - -
+
+ + + + +
-
- - - - -
- - )} +
+ + + + +
{confirmPasswordError && ( @@ -319,12 +255,6 @@ function SignUp(): JSX.Element { Passwords don’t match. Please try again )} - - {isPasswordPolicyError && ( - - {isPasswordNotValidMessage} - - )}
{isSignUp && ( diff --git a/frontend/src/pages/SignUp/utils.ts b/frontend/src/pages/SignUp/utils.ts deleted file mode 100644 index b6c746fcd6..0000000000 --- a/frontend/src/pages/SignUp/utils.ts +++ /dev/null @@ -1,13 +0,0 @@ -/** - * @function - * @description to check whether password is valid or not - * @reference stackoverflow.com/a/69807687 - * @returns Boolean - */ -export const isPasswordValid = (value: string): boolean => { - // eslint-disable-next-line prefer-regex-literals - const pattern = new RegExp('^.{8,}$'); - return pattern.test(value); -}; - -export const isPasswordNotValidMessage = `Password must a have minimum of 8 characters`; diff --git a/frontend/src/providers/App/App.tsx b/frontend/src/providers/App/App.tsx index 9b01f1a5bf..e517b4902a 100644 --- a/frontend/src/providers/App/App.tsx +++ b/frontend/src/providers/App/App.tsx @@ -1,14 +1,13 @@ import getLocalStorageApi from 'api/browser/localstorage/get'; -import { Logout } from 'api/utils'; import listOrgPreferences from 'api/v1/org/preferences/list'; +import get from 'api/v1/user/me/get'; import listUserPreferences from 'api/v1/user/preferences/list'; -import getUserVersion from 'api/v1/version/getVersion'; +import getUserVersion from 'api/v1/version/get'; import { LOCALSTORAGE } from 'constants/localStorage'; import dayjs from 'dayjs'; import useActiveLicenseV3 from 'hooks/useActiveLicenseV3/useActiveLicenseV3'; import { useGetFeatureFlag } from 'hooks/useGetFeatureFlag'; import { useGlobalEventListener } from 'hooks/useGlobalEventListener'; -import useGetUser from 'hooks/user/useGetUser'; import { createContext, PropsWithChildren, @@ -40,7 +39,7 @@ import { getUserDefaults } from './utils'; export const AppContext = createContext(undefined); export function AppProvider({ children }: PropsWithChildren): JSX.Element { - // on load of the provider set the user defaults with access jwt , refresh jwt and user id from local storage + // on load of the provider set the user defaults with access token , refresh token from local storage const [user, setUser] = useState(() => getUserDefaults()); const [activeLicense, setActiveLicense] = useState( null, @@ -63,13 +62,6 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { const [showChangelogModal, setShowChangelogModal] = useState(false); - // if the user.id is not present, for migration older cases then we need to logout only for current logged in users! 
- useEffect(() => { - if (!user.id && isLoggedIn) { - Logout(); - } - }, [isLoggedIn, user]); - // fetcher for user // user will only be fetched if the user id and token is present // if logged out and trying to hit any route none of these calls will trigger @@ -77,7 +69,12 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { data: userData, isFetching: isFetchingUser, error: userFetchError, - } = useGetUser(user.id, isLoggedIn); + } = useQuery({ + queryFn: get, + queryKey: ['/api/v1/user/me'], + enabled: isLoggedIn, + }); + useEffect(() => { if (!isFetchingUser && userData && userData.data) { setUser((prev) => ({ @@ -320,7 +317,7 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { updateOrg, updateChangelog, toggleChangelogModal, - versionData: versionData?.payload || null, + versionData: versionData?.data || null, hasEditPermission: user?.role === USER_ROLES.ADMIN || user?.role === USER_ROLES.EDITOR, }), diff --git a/frontend/src/providers/App/types.ts b/frontend/src/providers/App/types.ts index 40fdd1ac50..3ab26b8608 100644 --- a/frontend/src/providers/App/types.ts +++ b/frontend/src/providers/App/types.ts @@ -8,7 +8,7 @@ import { } from 'types/api/preferences/preference'; import { Organization } from 'types/api/user/getOrganization'; import { UserResponse as User } from 'types/api/user/getUser'; -import { PayloadProps } from 'types/api/user/getVersion'; +import { Info } from 'types/api/v1/version/get'; export interface IAppContext { user: IUser; @@ -36,7 +36,7 @@ export interface IAppContext { updateOrg(orgId: string, updatedOrgName: string): void; updateChangelog(payload: ChangelogSchema): void; toggleChangelogModal(): void; - versionData: PayloadProps | null; + versionData: Info | null; hasEditPermission: boolean; } diff --git a/frontend/src/providers/App/utils.ts b/frontend/src/providers/App/utils.ts index 8b919569d3..ddd7ba3a58 100644 --- a/frontend/src/providers/App/utils.ts +++ b/frontend/src/providers/App/utils.ts @@ -10,12 +10,11 @@ function getUserDefaults(): IUser { getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN), '', ); - const userId = defaultTo(getLocalStorageApi(LOCALSTORAGE.USER_ID), ''); return { accessJwt, refreshJwt, - id: userId, + id: '', email: '', displayName: '', createdAt: 0, diff --git a/frontend/src/providers/EventSource.tsx b/frontend/src/providers/EventSource.tsx index 0c54c966e4..85ddcd7e01 100644 --- a/frontend/src/providers/EventSource.tsx +++ b/frontend/src/providers/EventSource.tsx @@ -1,7 +1,7 @@ import { apiV3 } from 'api/apiV1'; import getLocalStorageApi from 'api/browser/localstorage/get'; import { Logout } from 'api/utils'; -import loginApi from 'api/v1/login/login'; +import post from 'api/v2/sessions/rotate/post'; import afterLogin from 'AppRoutes/utils'; import { ENVIRONMENT } from 'constants/env'; import { LIVE_TAIL_HEARTBEAT_TIMEOUT } from 'constants/liveTail'; @@ -18,6 +18,7 @@ import { useRef, useState, } from 'react'; +import { useQueryClient } from 'react-query'; import APIError from 'types/api/error'; interface IEventSourceContext { @@ -58,6 +59,7 @@ export function EventSourceProvider({ const eventSourceRef = useRef(null); const { notifications } = useNotifications(); + const queryClient = useQueryClient(); const handleSetInitialLoading = useCallback((value: boolean) => { setInitialLoading(value); @@ -75,15 +77,15 @@ export function EventSourceProvider({ setInitialLoading(false); try { - const response = await loginApi({ - refreshToken: 
getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN) || '', + const accessToken = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN); + const refreshToken = getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN); + + const response = await queryClient.fetchQuery({ + queryFn: () => post({ refreshToken: refreshToken || '' }), + queryKey: ['/api/v2/sessions/rotate', accessToken, refreshToken], }); - afterLogin( - response.data.userId, - response.data.accessJwt, - response.data.refreshJwt, - true, - ); + afterLogin(response.data.accessToken, response.data.refreshToken, true); + // If token refresh was successful, we'll let the component // handle reconnection through the reconnectDueToError state setReconnectDueToError(true); @@ -101,7 +103,7 @@ export function EventSourceProvider({ eventSourceRef.current.close(); Logout(); } - }, [notifications]); + }, [notifications, queryClient]); const destroyEventSourceSession = useCallback(() => { if (!eventSourceRef.current) return; diff --git a/frontend/src/types/api/index.ts b/frontend/src/types/api/index.ts index 04a10444ff..46be28207d 100644 --- a/frontend/src/types/api/index.ts +++ b/frontend/src/types/api/index.ts @@ -55,3 +55,8 @@ export interface Warning { url: string; warnings: AdditionalWarnings[]; } + +export interface RawSuccessResponse { + status: string; + data: T; +} diff --git a/frontend/src/types/api/user/accept.ts b/frontend/src/types/api/user/accept.ts index dcbcf73761..71da965ad9 100644 --- a/frontend/src/types/api/user/accept.ts +++ b/frontend/src/types/api/user/accept.ts @@ -1,3 +1,5 @@ +import { UserResponse } from './getUser'; + export interface Props { token: string; password: string; @@ -13,6 +15,6 @@ export interface LoginPrecheckResponse { } export interface PayloadProps { - data: LoginPrecheckResponse; + data: UserResponse; status: string; } diff --git a/frontend/src/types/api/user/getInviteDetails.ts b/frontend/src/types/api/user/getInviteDetails.ts index 6bc3754061..1d84f59812 100644 --- a/frontend/src/types/api/user/getInviteDetails.ts +++ b/frontend/src/types/api/user/getInviteDetails.ts @@ -2,7 +2,6 @@ import { User } from 'types/reducer/app'; import { ROLES } from 'types/roles'; import { Organization } from './getOrganization'; -import { Signup as LoginPrecheckPayloadProps } from './loginPrecheck'; export interface Props { inviteId: string; @@ -20,5 +19,4 @@ export interface InviteDetails { role: ROLES; token: string; organization: Organization['displayName']; - precheck?: LoginPrecheckPayloadProps; } diff --git a/frontend/src/types/api/user/login.ts b/frontend/src/types/api/user/login.ts deleted file mode 100644 index db2d03f42d..0000000000 --- a/frontend/src/types/api/user/login.ts +++ /dev/null @@ -1,18 +0,0 @@ -export interface PayloadProps { - data: UserLoginResponse; - status: string; -} - -export interface Props { - email?: string; - password?: string; - refreshToken?: UserLoginResponse['refreshJwt']; -} - -export interface UserLoginResponse { - accessJwt: string; - accessJwtExpiry: number; - refreshJwt: string; - refreshJwtExpiry: number; - userId: string; -} diff --git a/frontend/src/types/api/user/loginPrecheck.ts b/frontend/src/types/api/user/loginPrecheck.ts deleted file mode 100644 index 2082013f07..0000000000 --- a/frontend/src/types/api/user/loginPrecheck.ts +++ /dev/null @@ -1,16 +0,0 @@ -export interface PayloadProps { - data: Signup; - status: string; -} - -export interface Signup { - sso: boolean; - ssoUrl?: string; - canSelfRegister?: boolean; - isUser: boolean; -} - -export interface Props { - email: string; 
- path?: string; -} diff --git a/frontend/src/types/api/v1/domains/list.ts b/frontend/src/types/api/v1/domains/list.ts new file mode 100644 index 0000000000..14dd3e3617 --- /dev/null +++ b/frontend/src/types/api/v1/domains/list.ts @@ -0,0 +1,44 @@ +export const SSOType = new Map([ + ['google_auth', 'Google Auth'], + ['saml', 'SAML'], + ['email_password', 'Email Password'], + ['oidc', 'OIDC'], +]); + +export interface GettableAuthDomain { + id: string; + name: string; + orgId: string; + ssoEnabled: boolean; + ssoType: string; + samlConfig?: SAMLConfig; + googleAuthConfig?: GoogleAuthConfig; + oidcConfig?: OIDCConfig; +} + +export interface SAMLConfig { + samlEntity: string; + samlIdp: string; + samlCert: string; + insecureSkipAuthNRequestsSigned: boolean; +} + +export interface GoogleAuthConfig { + clientId: string; + clientSecret: string; + redirectURI: string; +} + +export interface OIDCConfig { + issuer: string; + issuerAlias: string; + clientId: string; + clientSecret: string; + claimMapping: ClaimMapping; + insecureSkipEmailVerified: boolean; + getUserInfo: boolean; +} + +export interface ClaimMapping { + email: string; +} diff --git a/frontend/src/types/api/v1/domains/post.ts b/frontend/src/types/api/v1/domains/post.ts new file mode 100644 index 0000000000..a2046dd2d2 --- /dev/null +++ b/frontend/src/types/api/v1/domains/post.ts @@ -0,0 +1,39 @@ +export interface PostableAuthDomain { + name: string; + config: Config; +} + +export interface Config { + ssoEnabled: boolean; + ssoType: string; + samlConfig?: SAMLConfig; + googleAuthConfig?: GoogleAuthConfig; + oidcConfig?: OIDCConfig; +} + +export interface SAMLConfig { + samlEntity: string; + samlIdp: string; + samlCert: string; + insecureSkipAuthNRequestsSigned: boolean; +} + +export interface GoogleAuthConfig { + clientId: string; + clientSecret: string; + redirectURI: string; +} + +export interface OIDCConfig { + issuer: string; + issuerAlias: string; + clientId: string; + clientSecret: string; + claimMapping: ClaimMapping; + insecureSkipEmailVerified: boolean; + getUserInfo: boolean; +} + +export interface ClaimMapping { + email: string; +} diff --git a/frontend/src/types/api/v1/domains/put.ts b/frontend/src/types/api/v1/domains/put.ts new file mode 100644 index 0000000000..986f427a3e --- /dev/null +++ b/frontend/src/types/api/v1/domains/put.ts @@ -0,0 +1,37 @@ +export interface UpdatableAuthDomain { + config: { + ssoEnabled: boolean; + ssoType: string; + samlConfig?: SAMLConfig; + googleAuthConfig?: GoogleAuthConfig; + oidcConfig?: OIDCConfig; + }; + id: string; +} + +export interface SAMLConfig { + samlEntity: string; + samlIdp: string; + samlCert: string; + insecureSkipAuthNRequestsSigned: boolean; +} + +export interface GoogleAuthConfig { + clientId: string; + clientSecret: string; + redirectURI: string; +} + +export interface OIDCConfig { + issuer: string; + issuerAlias: string; + clientId: string; + clientSecret: string; + claimMapping: ClaimMapping; + insecureSkipEmailVerified: boolean; + getUserInfo: boolean; +} + +export interface ClaimMapping { + email: string; +} diff --git a/frontend/src/types/api/v1/register/post.ts b/frontend/src/types/api/v1/register/post.ts new file mode 100644 index 0000000000..e8063fce5e --- /dev/null +++ b/frontend/src/types/api/v1/register/post.ts @@ -0,0 +1,10 @@ +import { ROLES } from 'types/roles'; + +export interface SignupResponse { + createdAt: number; + email: string; + id: string; + displayName: string; + orgId: string; + role: ROLES; +} diff --git 
a/frontend/src/types/api/user/getVersion.ts b/frontend/src/types/api/v1/version/get.ts similarity index 65% rename from frontend/src/types/api/user/getVersion.ts rename to frontend/src/types/api/v1/version/get.ts index 5419a038c1..77fd28829e 100644 --- a/frontend/src/types/api/user/getVersion.ts +++ b/frontend/src/types/api/v1/version/get.ts @@ -1,4 +1,4 @@ -export interface PayloadProps { +export interface Info { version: string; ee: 'Y' | 'N'; setupCompleted: boolean; diff --git a/frontend/src/types/api/v2/sessions/context/get.ts b/frontend/src/types/api/v2/sessions/context/get.ts new file mode 100644 index 0000000000..62d5e47aa7 --- /dev/null +++ b/frontend/src/types/api/v2/sessions/context/get.ts @@ -0,0 +1,32 @@ +import { ErrorV2 } from 'types/api'; + +export interface Props { + email: string; + ref: string; +} + +export interface PasswordAuthN { + provider: string; +} + +export interface CallbackAuthN { + provider: string; + url: string; +} + +export interface AuthNSupport { + password: PasswordAuthN[]; + callback: CallbackAuthN[]; +} + +export interface OrgSessionContext { + id: string; + name: string; + authNSupport: AuthNSupport; + warning?: ErrorV2; +} + +export interface SessionsContext { + exists: boolean; + orgs: OrgSessionContext[]; +} diff --git a/frontend/src/types/api/v2/sessions/email_password/post.ts b/frontend/src/types/api/v2/sessions/email_password/post.ts new file mode 100644 index 0000000000..6957d72887 --- /dev/null +++ b/frontend/src/types/api/v2/sessions/email_password/post.ts @@ -0,0 +1,10 @@ +export interface Props { + email: string; + password: string; + orgId: string; +} + +export interface Token { + accessToken: string; + refreshToken: string; +} diff --git a/frontend/src/types/api/v2/sessions/rotate/post.ts b/frontend/src/types/api/v2/sessions/rotate/post.ts new file mode 100644 index 0000000000..64ec1414d0 --- /dev/null +++ b/frontend/src/types/api/v2/sessions/rotate/post.ts @@ -0,0 +1,8 @@ +export interface Props { + refreshToken: string; +} + +export interface Token { + accessToken: string; + refreshToken: string; +} diff --git a/go.mod b/go.mod index 0b70e74e0a..bf72bda027 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd github.com/SigNoz/signoz-otel-collector v0.129.4 + github.com/allegro/bigcache/v3 v3.1.0 github.com/antlr4-go/antlr/v4 v4.13.1 github.com/antonmedv/expr v1.15.3 github.com/cespare/xxhash/v2 v2.3.0 @@ -17,8 +18,7 @@ require ( github.com/go-co-op/gocron v1.30.1 github.com/go-openapi/runtime v0.28.0 github.com/go-openapi/strfmt v0.23.0 - github.com/go-redis/redis/v8 v8.11.5 - github.com/go-redis/redismock/v8 v8.11.5 + github.com/go-redis/redismock/v9 v9.2.0 github.com/go-viper/mapstructure/v2 v2.4.0 github.com/gojek/heimdall/v7 v7.0.3 github.com/golang-jwt/jwt/v5 v5.3.0 @@ -44,6 +44,8 @@ require ( github.com/prometheus/client_golang v1.23.2 github.com/prometheus/common v0.66.1 github.com/prometheus/prometheus v0.304.1 + github.com/redis/go-redis/extra/redisotel/v9 v9.15.1 + github.com/redis/go-redis/v9 v9.15.1 github.com/rs/cors v1.11.1 github.com/russellhaering/gosaml2 v0.9.0 github.com/russellhaering/goxmldsig v1.2.0 @@ -59,10 +61,12 @@ require ( github.com/uptrace/bun v1.2.9 github.com/uptrace/bun/dialect/pgdialect v1.2.9 github.com/uptrace/bun/dialect/sqlitedialect v1.2.9 + github.com/uptrace/bun/extra/bunotel v1.2.9 go.opentelemetry.io/collector/confmap v1.34.0 go.opentelemetry.io/collector/otelcol v0.128.0 
go.opentelemetry.io/collector/pdata v1.34.0 go.opentelemetry.io/contrib/config v0.10.0 + go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0 go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 go.opentelemetry.io/otel v1.38.0 go.opentelemetry.io/otel/metric v1.38.0 @@ -85,7 +89,9 @@ require ( require ( github.com/mattn/go-isatty v0.0.20 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect + github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect + github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect go.yaml.in/yaml/v2 v2.4.2 // indirect modernc.org/libc v1.66.3 // indirect modernc.org/mathutil v1.7.1 // indirect @@ -297,7 +303,7 @@ require ( go.opentelemetry.io/collector/receiver/receiverhelper v0.128.0 // indirect go.opentelemetry.io/collector/receiver/receivertest v0.128.0 // indirect go.opentelemetry.io/collector/receiver/xreceiver v0.128.0 // indirect - go.opentelemetry.io/collector/semconv v0.128.0 // indirect + go.opentelemetry.io/collector/semconv v0.128.0 go.opentelemetry.io/collector/service v0.128.0 // indirect go.opentelemetry.io/collector/service/hostcapabilities v0.128.0 // indirect go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 // indirect @@ -314,7 +320,7 @@ require ( go.opentelemetry.io/otel/exporters/prometheus v0.58.0 go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2 // indirect go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0 // indirect - go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.36.0 // indirect + go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 // indirect go.opentelemetry.io/otel/log v0.12.2 // indirect go.opentelemetry.io/otel/sdk/log v0.12.2 // indirect go.opentelemetry.io/otel/sdk/metric v1.38.0 diff --git a/go.sum b/go.sum index f6c6a857f3..804ffc42a2 100644 --- a/go.sum +++ b/go.sum @@ -118,6 +118,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b h1:mimo19zliBX/vSQ6PWWSL9lK8qwHozUj03+zLoEB8O0= github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b/go.mod h1:fvzegU4vN3H1qMT+8wDmzjAcDONcgo2/SZ/TyfdUOFs= +github.com/allegro/bigcache/v3 v3.1.0 h1:H2Vp8VOvxcrB91o86fUSVJFqeuz8kpyyB02eH3bSzwk= +github.com/allegro/bigcache/v3 v3.1.0/go.mod h1:aPyh7jEvrog9zAwx5N7+JUQX5dZTSGpxF1LAR4dr35I= github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ= github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= @@ -158,6 +160,10 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega 
v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= @@ -166,7 +172,6 @@ github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F9 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= @@ -267,7 +272,6 @@ github.com/foxboron/go-tpm-keyfiles v0.0.0-20250323135004-b31fac66206e/go.mod h1 github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= @@ -325,16 +329,13 @@ github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvSc github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= -github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= -github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= -github.com/go-redis/redismock/v8 v8.11.5 h1:RJFIiua58hrBrSpXhnGX3on79AU3S271H4ZhRI1wyVo= -github.com/go-redis/redismock/v8 v8.11.5/go.mod h1:UaAU9dEe1C+eGr+FHV5prCWIt0hafyPWbGMEWE0UWdA= +github.com/go-redis/redismock/v9 v9.2.0 h1:ZrMYQeKPECZPjOj5u9eyOjg8Nnb0BS9lkVIZ6IpsKLw= +github.com/go-redis/redismock/v9 v9.2.0/go.mod h1:18KHfGDK4Y6c2R0H38EUGWAdc7ZQS9gfYxc94k7rWT0= github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM= github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA= github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo= github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-viper/mapstructure/v2 v2.4.0 
h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= @@ -439,7 +440,6 @@ github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= @@ -562,7 +562,6 @@ github.com/hetznercloud/hcloud-go/v2 v2.21.0 h1:wUpQT+fgAxIcdMtFvuCJ78ziqc/VARub github.com/hetznercloud/hcloud-go/v2 v2.21.0/go.mod h1:WSM7w+9tT86sJTNcF8a/oHljC3HUmQfcLxYsgx6PpSc= github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs= github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huandu/go-assert v1.1.6 h1:oaAfYxq9KNDi9qswn/6aE0EydfxSa+tWZC1KabNitYs= github.com/huandu/go-assert v1.1.6/go.mod h1:JuIfbmYG9ykwvuxoJ3V8TB5QP+3+ajIA54Y44TmkMxs= github.com/huandu/go-sqlbuilder v1.35.0 h1:ESvxFHN8vxCTudY1Vq63zYpU5yJBESn19sf6k4v2T5Q= @@ -736,7 +735,6 @@ github.com/natefinch/wrap v0.2.0/go.mod h1:6gMHlAl12DwYEfKP3TkuykYUfLSEAvHw67itm github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= github.com/npillmayer/nestext v0.1.3/go.mod h1:h2lrijH8jpicr25dFY+oAJLyzlya6jhnuG+zWp9L0Uk= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= @@ -746,16 +744,8 @@ github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/oklog/ulid/v2 v2.1.1 h1:suPZ4ARWLOJLegGFiZZ1dFAkqzhMjL3J1TzI+5wHz8s= github.com/oklog/ulid/v2 v2.1.1/go.mod h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/onsi/ginkgo/v2 v2.0.0/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.17.0/go.mod 
h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= github.com/open-telemetry/opamp-go v0.19.0 h1:8LvQKDwqi+BU3Yy159SU31e2XB0vgnk+PN45pnKilPs= @@ -869,8 +859,12 @@ github.com/prometheus/sigv4 v0.1.2/go.mod h1:GF9fwrvLgkQwDdQ5BXeV9XUSCH/IPNqzvAo github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg= github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= -github.com/redis/go-redis/v9 v9.9.0 h1:URbPQ4xVQSQhZ27WMQVmZSo3uT3pL+4IdHVcYq2nVfM= -github.com/redis/go-redis/v9 v9.9.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw= +github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 h1:G3pzZlMvMX9VX9TBB8zr03CAkeyMtbyW2D59PdyaGkM= +github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1/go.mod h1:JiJ4f0bngycE8LQqzY/4TB23witBbFnlUS6hPvHn6Zc= +github.com/redis/go-redis/extra/redisotel/v9 v9.15.1 h1:gvNK57rhjwIjAiGTSZH2+XO37mcLyYCsJC1qlNUnBjs= +github.com/redis/go-redis/extra/redisotel/v9 v9.15.1/go.mod h1:O41kV1OVBXIT0Tipo902iT8+rbqF0zL5v5paLxp5/7s= +github.com/redis/go-redis/v9 v9.15.1 h1:BVn5z3pdIKIr5WI4Yv1MRXslB616gqBLBgVmhykiHIw= +github.com/redis/go-redis/v9 v9.15.1/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA= @@ -1005,6 +999,10 @@ github.com/uptrace/bun/dialect/pgdialect v1.2.9 h1:caf5uFbOGiXvadV6pA5gn87k0awFF github.com/uptrace/bun/dialect/pgdialect v1.2.9/go.mod h1:m7L9JtOp/Lt8HccET70ULxplMweE/u0S9lNUSxz2duo= github.com/uptrace/bun/dialect/sqlitedialect v1.2.9 h1:HLzGWXBh07sT8zhVPy6veYbbGrAtYq0KzyRHXBj+GjA= github.com/uptrace/bun/dialect/sqlitedialect v1.2.9/go.mod h1:dUR+ecoCWA0FIa9vhQVRnGtYYPpuCLJoEEtX9E1aiBU= +github.com/uptrace/bun/extra/bunotel v1.2.9 h1:BGGrBga+iVL78SGiMpLt2N9MAKvrG3f8wLk8zCLwFJg= +github.com/uptrace/bun/extra/bunotel v1.2.9/go.mod h1:6dVl5Ko6xOhuoqUPWHpfFrntBDwmOnq0OMiR/SGwAC8= +github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 h1:ZjUj9BLYf9PEqBn8W/OapxhPjVRdC6CsXTdULHsyk5c= +github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2/go.mod h1:O8bHQfyinKwTXKkiKNGmLQS7vRsqRxIQTFZpYpHK3IQ= github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ= github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY= github.com/vjeantet/grok v1.0.1 h1:2rhIR7J4gThTgcZ1m2JY4TrJZNgjn985U28kT2wQrJ4= @@ -1164,6 +1162,8 @@ go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 h1:u2E32P7j1a/gRgZDWhIXC+Shd go.opentelemetry.io/contrib/bridges/otelzap v0.11.0/go.mod h1:pJPCLM8gzX4ASqLlyAXjHBEYxgbOQJ/9bidWxD6PEPQ= go.opentelemetry.io/contrib/config v0.10.0 h1:2JknAzMaYjxrHkTnZh3eOme/Y2P5eHE2SWfhfV6Xd6c= go.opentelemetry.io/contrib/config v0.10.0/go.mod h1:aND2M6/KfNkntI5cyvHriR/zvZgPf8j9yETdSmvpfmc= +go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0 h1:rATLgFjv0P9qyXQR/aChJ6JVbMtXOQjt49GgT36cBbk= 
+go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0/go.mod h1:34csimR1lUhdT5HH4Rii9aKPrvBcnFRwxLwcevsU+Kk= go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 h1:0tY123n7CdWMem7MOVdKOt0YfshufLCwfE5Bob+hQuM= go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0/go.mod h1:CosX/aS4eHnG9D7nESYpV753l4j9q5j3SL/PUYd2lR8= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG0FI8OiXhBfcRtqqHcZcka+gU3cskNuf05R18= @@ -1196,8 +1196,8 @@ go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2 h1:12vMqzLLNZtXuXbJh go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2/go.mod h1:ZccPZoPOoq8x3Trik/fCsba7DEYDUnN6yX79pgp2BUQ= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0 h1:rixTyDGXFxRy1xzhKrotaHy3/KXdPhlWARrCgK+eqUY= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0/go.mod h1:dowW6UsM9MKbJq5JTz2AMVp3/5iW5I/TStsk8S+CfHw= -go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.36.0 h1:G8Xec/SgZQricwWBJF/mHZc7A02YHedfFDENwJEdRA0= -go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.36.0/go.mod h1:PD57idA/AiFD5aqoxGxCvT/ILJPeHy3MjqU/NS7KogY= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 h1:kJxSDN4SgWWTjG/hPp3O7LCGLcHXFlvS2/FFOrwL+SE= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0/go.mod h1:mgIOzS7iZeKJdeB8/NYHrJ48fdGc71Llo5bJ1J4DWUE= go.opentelemetry.io/otel/log v0.12.2 h1:yob9JVHn2ZY24byZeaXpTVoPS6l+UrrxmxmPKohXTwc= go.opentelemetry.io/otel/log v0.12.2/go.mod h1:ShIItIxSYxufUMt+1H5a2wbckGli3/iCfuEbVZi/98E= go.opentelemetry.io/otel/log/logtest v0.0.0-20250526142609-aa5bd0e64989 h1:4JF7oY9CcHrPGfBLijDcXZyCzGckVEyOjuat5ktmQRg= @@ -1291,7 +1291,6 @@ golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1315,7 +1314,6 @@ golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= @@ -1331,7 +1329,6 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net 
v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= -golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= @@ -1381,7 +1378,6 @@ golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1395,7 +1391,6 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1403,7 +1398,6 @@ golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1430,7 +1424,6 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1539,7 +1532,6 @@ golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82u golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= @@ -1752,7 +1744,6 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EV gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/evanphx/json-patch.v4 v4.12.0 h1:n6jtcsulIzXPJaxegRbvFNNrZDjbij7ny3gmSPG+6V4= gopkg.in/evanphx/json-patch.v4 v4.12.0/go.mod h1:p8EYWUEYMpynmqDbY58zCKCFZw8pRWMG4EsWvDvM72M= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= diff --git a/pkg/authn/authn.go b/pkg/authn/authn.go new file mode 100644 index 0000000000..b9aca5989e --- /dev/null +++ b/pkg/authn/authn.go @@ -0,0 +1,25 @@ +package authn + +import ( + "context" + "net/url" + + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +// This can either be a password authn or a callback authn. +type AuthN interface{} + +type PasswordAuthN interface { + // Authenticate a user using email, password and orgID + Authenticate(context.Context, string, string, valuer.UUID) (*authtypes.Identity, error) +} + +type CallbackAuthN interface { + // The initial URL to redirect the user to. Takes the site url and org domain to be used in the callback. + LoginURL(context.Context, *url.URL, *authtypes.AuthDomain) (string, error) + + // Handle the callback from the provider. 
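+	// Implementations receive the raw callback query values, resolve the auth
+	// domain from the state carried through the redirect, and return the
+	// identity asserted by the provider (see googlecallbackauthn below).
+	//
+	// A typical flow, sketched against this interface only (names are illustrative):
+	//
+	//	loginURL, _ := callbackAuthN.LoginURL(ctx, siteURL, domain) // redirect the browser here
+	//	identity, _ := callbackAuthN.HandleCallback(ctx, req.URL.Query())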
+ HandleCallback(context.Context, url.Values) (*authtypes.CallbackIdentity, error) +} diff --git a/pkg/authn/authnstore/sqlauthnstore/store.go b/pkg/authn/authnstore/sqlauthnstore/store.go new file mode 100644 index 0000000000..c4d2b795c7 --- /dev/null +++ b/pkg/authn/authnstore/sqlauthnstore/store.go @@ -0,0 +1,65 @@ +package sqlauthnstore + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type store struct { + sqlstore sqlstore.SQLStore +} + +func NewStore(sqlstore sqlstore.SQLStore) authtypes.AuthNStore { + return &store{sqlstore: sqlstore} +} + +func (store *store) GetUserAndFactorPasswordByEmailAndOrgID(ctx context.Context, email string, orgID valuer.UUID) (*types.User, *types.FactorPassword, error) { + user := new(types.User) + factorPassword := new(types.FactorPassword) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(user). + Where("email = ?", email). + Where("org_id = ?", orgID). + Scan(ctx) + if err != nil { + return nil, nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with email %s in org %s not found", email, orgID) + } + + err = store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(factorPassword). + Where("user_id = ?", user.ID). + Scan(ctx) + if err != nil { + return nil, nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodePasswordNotFound, "user with email %s in org %s does not have password", email, orgID) + } + + return user, factorPassword, nil +} + +func (store *store) GetAuthDomainFromID(ctx context.Context, domainID valuer.UUID) (*authtypes.AuthDomain, error) { + storableAuthDomain := new(authtypes.StorableAuthDomain) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(storableAuthDomain). + Where("id = ?", domainID). 
+ Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeAuthDomainNotFound, "auth domain with id %s does not exist", domainID) + } + + return authtypes.NewAuthDomainFromStorableAuthDomain(storableAuthDomain) +} diff --git a/pkg/authn/callbackauthn/googlecallbackauthn/authn.go b/pkg/authn/callbackauthn/googlecallbackauthn/authn.go new file mode 100644 index 0000000000..d9ffee902a --- /dev/null +++ b/pkg/authn/callbackauthn/googlecallbackauthn/authn.go @@ -0,0 +1,129 @@ +package googlecallbackauthn + +import ( + "context" + "net/url" + + "github.com/SigNoz/signoz/pkg/authn" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/coreos/go-oidc/v3/oidc" + "golang.org/x/oauth2" +) + +const ( + issuerURL string = "https://accounts.google.com" + redirectPath string = "/api/v1/complete/google" +) + +var ( + scopes []string = []string{"email"} +) + +var _ authn.CallbackAuthN = (*AuthN)(nil) + +type AuthN struct { + oidcProvider *oidc.Provider + store authtypes.AuthNStore +} + +func New(ctx context.Context, store authtypes.AuthNStore) (*AuthN, error) { + oidcProvider, err := oidc.NewProvider(ctx, issuerURL) + if err != nil { + return nil, err + } + + return &AuthN{ + oidcProvider: oidcProvider, + store: store, + }, nil +} + +func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) { + if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderGoogleAuth { + return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "domain type is not google") + } + + oauth2Config := a.oauth2Config(siteURL, authDomain) + + return oauth2Config.AuthCodeURL( + authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String(), + oauth2.SetAuthURLParam("hd", authDomain.StorableAuthDomain().Name), + ), nil +} + +func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtypes.CallbackIdentity, error) { + if err := query.Get("error"); err != "" { + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: error while authenticating").WithAdditional(query.Get("error_description")) + } + + state, err := authtypes.NewStateFromString(query.Get("state")) + if err != nil { + return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "google: invalid state").WithAdditional(err.Error()) + } + + authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID) + if err != nil { + return nil, err + } + + oauth2Config := a.oauth2Config(state.URL, authDomain) + token, err := oauth2Config.Exchange(ctx, query.Get("code")) + if err != nil { + var retrieveError *oauth2.RetrieveError + if errors.As(err, &retrieveError) { + return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to get token").WithAdditional(retrieveError.ErrorDescription).WithAdditional(string(retrieveError.Body)) + } + + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token").WithAdditional(err.Error()) + } + + rawIDToken, ok := token.Extra("id_token").(string) + if !ok { + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "google: no id_token in token response") + } + + verifier := a.oidcProvider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().Google.ClientID}) + idToken, err := verifier.Verify(ctx, rawIDToken) + if err != nil { + return nil, errors.Newf(errors.TypeForbidden, 
errors.CodeForbidden, "google: failed to verify token").WithAdditional(err.Error()) + } + + var claims struct { + Name string `json:"name"` + Email string `json:"email"` + EmailVerified bool `json:"email_verified"` + HostedDomain string `json:"hd"` + } + + if err := idToken.Claims(&claims); err != nil { + return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: missing or invalid claims").WithAdditional(err.Error()) + } + + if claims.HostedDomain != authDomain.StorableAuthDomain().Name { + return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: unexpected hd claim %s", claims.HostedDomain) + } + + email, err := valuer.NewEmail(claims.Email) + if err != nil { + return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "google: failed to parse email").WithAdditional(err.Error()) + } + + return authtypes.NewCallbackIdentity(claims.Name, email, authDomain.StorableAuthDomain().OrgID, state), nil + +} + +func (a *AuthN) oauth2Config(siteURL *url.URL, authDomain *authtypes.AuthDomain) *oauth2.Config { + return &oauth2.Config{ + ClientID: authDomain.AuthDomainConfig().Google.ClientID, + ClientSecret: authDomain.AuthDomainConfig().Google.ClientSecret, + Endpoint: a.oidcProvider.Endpoint(), + Scopes: scopes, + RedirectURL: (&url.URL{ + Scheme: siteURL.Scheme, + Host: siteURL.Host, + Path: redirectPath, + }).String(), + } +} diff --git a/pkg/authn/passwordauthn/emailpasswordauthn/authn.go b/pkg/authn/passwordauthn/emailpasswordauthn/authn.go new file mode 100644 index 0000000000..bf78b80213 --- /dev/null +++ b/pkg/authn/passwordauthn/emailpasswordauthn/authn.go @@ -0,0 +1,34 @@ +package emailpasswordauthn + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/authn" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +var _ authn.PasswordAuthN = (*AuthN)(nil) + +type AuthN struct { + store authtypes.AuthNStore +} + +func New(store authtypes.AuthNStore) *AuthN { + return &AuthN{store: store} +} + +func (a *AuthN) Authenticate(ctx context.Context, email string, password string, orgID valuer.UUID) (*authtypes.Identity, error) { + user, factorPassword, err := a.store.GetUserAndFactorPasswordByEmailAndOrgID(ctx, email, orgID) + if err != nil { + return nil, err + } + + if !factorPassword.Equals(password) { + return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email orpassword") + } + + return authtypes.NewIdentity(user.ID, orgID, user.Email, user.Role), nil +} diff --git a/pkg/cache/cache.go b/pkg/cache/cache.go index 7133fb2321..45d13e36b6 100644 --- a/pkg/cache/cache.go +++ b/pkg/cache/cache.go @@ -12,10 +12,14 @@ import ( type Cache interface { // Set sets the cacheable entity in cache. Set(ctx context.Context, orgID valuer.UUID, cacheKey string, data cachetypes.Cacheable, ttl time.Duration) error - // Get gets the cacheble entity in the dest entity passed + + // Get gets the cacheble entity in the dest entity passed. + // TODO: Remove allowExpired from Get. 
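+	// Providers may copy the cached value directly when it implements
+	// cachetypes.Cloneable; otherwise the stored bytes are unmarshalled into
+	// dest (see the memory provider below).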
Get(ctx context.Context, orgID valuer.UUID, cacheKey string, dest cachetypes.Cacheable, allowExpired bool) error + // Delete deletes the cacheable entity from cache Delete(ctx context.Context, orgID valuer.UUID, cacheKey string) + // DeleteMany deletes multiple cacheble entities from cache DeleteMany(ctx context.Context, orgID valuer.UUID, cacheKeys []string) } diff --git a/pkg/cache/memorycache/provider.go b/pkg/cache/memorycache/provider.go index 6deb4eb0e3..5ddb80d01d 100644 --- a/pkg/cache/memorycache/provider.go +++ b/pkg/cache/memorycache/provider.go @@ -12,6 +12,9 @@ import ( "github.com/SigNoz/signoz/pkg/types/cachetypes" "github.com/SigNoz/signoz/pkg/valuer" gocache "github.com/patrickmn/go-cache" + semconv "go.opentelemetry.io/collector/semconv/v1.6.1" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" ) type provider struct { @@ -35,17 +38,26 @@ func New(ctx context.Context, settings factory.ProviderSettings, config cache.Co } func (provider *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey string, data cachetypes.Cacheable, ttl time.Duration) error { + _, span := provider.settings.Tracer().Start(ctx, "memory.set", trace.WithAttributes( + attribute.String(semconv.AttributeDBSystem, "memory"), + attribute.String(semconv.AttributeDBStatement, "set "+strings.Join([]string{orgID.StringValue(), cacheKey}, "::")), + attribute.String(semconv.AttributeDBOperation, "SET"), + )) + defer span.End() + err := cachetypes.CheckCacheablePointer(data) if err != nil { return err } if cloneable, ok := data.(cachetypes.Cloneable); ok { + span.SetAttributes(attribute.Bool("db.cloneable", true)) toCache := cloneable.Clone() provider.cc.Set(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), toCache, ttl) return nil } + span.SetAttributes(attribute.Bool("db.cloneable", false)) toCache, err := data.MarshalBinary() if err != nil { return err @@ -55,7 +67,14 @@ func (provider *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey s return nil } -func (provider *provider) Get(_ context.Context, orgID valuer.UUID, cacheKey string, dest cachetypes.Cacheable, allowExpired bool) error { +func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, cacheKey string, dest cachetypes.Cacheable, allowExpired bool) error { + _, span := provider.settings.Tracer().Start(ctx, "memory.get", trace.WithAttributes( + attribute.String(semconv.AttributeDBSystem, "memory"), + attribute.String(semconv.AttributeDBStatement, "get "+strings.Join([]string{orgID.StringValue(), cacheKey}, "::")), + attribute.String(semconv.AttributeDBOperation, "GET"), + )) + defer span.End() + err := cachetypes.CheckCacheablePointer(dest) if err != nil { return err @@ -67,6 +86,7 @@ func (provider *provider) Get(_ context.Context, orgID valuer.UUID, cacheKey str } if cloneable, ok := cachedData.(cachetypes.Cloneable); ok { + span.SetAttributes(attribute.Bool("db.cloneable", true)) // check if the destination value is settable dstv := reflect.ValueOf(dest) if !dstv.Elem().CanSet() { @@ -87,6 +107,7 @@ func (provider *provider) Get(_ context.Context, orgID valuer.UUID, cacheKey str } if fromCache, ok := cachedData.([]byte); ok { + span.SetAttributes(attribute.Bool("db.cloneable", false)) if err = dest.UnmarshalBinary(fromCache); err != nil { return err } @@ -97,7 +118,14 @@ func (provider *provider) Get(_ context.Context, orgID valuer.UUID, cacheKey str return errors.NewInternalf(errors.CodeInternal, "unrecognized: (value: \"%s\")", reflect.TypeOf(cachedData).String()) } -func (provider 
*provider) Delete(_ context.Context, orgID valuer.UUID, cacheKey string) { +func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, cacheKey string) { + _, span := provider.settings.Tracer().Start(ctx, "memory.delete", trace.WithAttributes( + attribute.String(semconv.AttributeDBSystem, "memory"), + attribute.String(semconv.AttributeDBStatement, "delete "+strings.Join([]string{orgID.StringValue(), cacheKey}, "::")), + attribute.String(semconv.AttributeDBOperation, "DELETE"), + )) + defer span.End() + provider.cc.Delete(strings.Join([]string{orgID.StringValue(), cacheKey}, "::")) } diff --git a/pkg/cache/rediscache/provider.go b/pkg/cache/rediscache/provider.go index 3557b73ba3..c3880f9243 100644 --- a/pkg/cache/rediscache/provider.go +++ b/pkg/cache/rediscache/provider.go @@ -12,7 +12,8 @@ import ( "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/types/cachetypes" "github.com/SigNoz/signoz/pkg/valuer" - "github.com/go-redis/redis/v8" + "github.com/redis/go-redis/extra/redisotel/v9" + "github.com/redis/go-redis/v9" ) type provider struct { @@ -36,6 +37,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config return nil, err } + if err := redisotel.InstrumentTracing(client, redisotel.WithTracerProvider(providerSettings.TracerProvider), redisotel.WithDBStatement(true)); err != nil { + return nil, err + } + + if err := redisotel.InstrumentMetrics(client, redisotel.WithMeterProvider(providerSettings.MeterProvider)); err != nil { + return nil, err + } + return &provider{client: client, settings: settings}, nil } diff --git a/pkg/cache/rediscache/provider_test.go b/pkg/cache/rediscache/provider_test.go index 6359fc2d8c..9ab3f0ec33 100644 --- a/pkg/cache/rediscache/provider_test.go +++ b/pkg/cache/rediscache/provider_test.go @@ -11,7 +11,7 @@ import ( "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/types/cachetypes" "github.com/SigNoz/signoz/pkg/valuer" - "github.com/go-redis/redismock/v8" + "github.com/go-redis/redismock/v9" "github.com/stretchr/testify/assert" ) diff --git a/pkg/errors/code.go b/pkg/errors/code.go index aeecbd000b..9b7e74cdd9 100644 --- a/pkg/errors/code.go +++ b/pkg/errors/code.go @@ -6,17 +6,18 @@ import ( ) var ( - CodeInvalidInput Code = Code{"invalid_input"} - CodeInternal = Code{"internal"} - CodeUnsupported = Code{"unsupported"} - CodeNotFound = Code{"not_found"} - CodeMethodNotAllowed = Code{"method_not_allowed"} - CodeAlreadyExists = Code{"already_exists"} - CodeUnauthenticated = Code{"unauthenticated"} - CodeForbidden = Code{"forbidden"} - CodeCanceled = Code{"canceled"} - CodeTimeout = Code{"timeout"} - CodeUnknown = Code{"unknown"} + CodeInvalidInput Code = Code{"invalid_input"} + CodeInternal = Code{"internal"} + CodeUnsupported = Code{"unsupported"} + CodeNotFound = Code{"not_found"} + CodeMethodNotAllowed = Code{"method_not_allowed"} + CodeAlreadyExists = Code{"already_exists"} + CodeUnauthenticated = Code{"unauthenticated"} + CodeForbidden = Code{"forbidden"} + CodeCanceled = Code{"canceled"} + CodeTimeout = Code{"timeout"} + CodeUnknown = Code{"unknown"} + CodeLicenseUnavailable = Code{"license_unavailable"} ) var ( diff --git a/pkg/errors/errors.go b/pkg/errors/errors.go index 64d4266674..a118fd9816 100644 --- a/pkg/errors/errors.go +++ b/pkg/errors/errors.go @@ -85,9 +85,8 @@ func Wrap(cause error, t typ, code Code, message string) *base { } } -// WithAdditional wraps an existing base error with a new formatted message. 
-// It is used when the original error already contains type and code. -func WithAdditional(cause error, format string, args ...any) *base { +// WithAdditionalf adds an additional error message to the existing error. +func WithAdditionalf(cause error, format string, args ...any) *base { t, c, m, e, u, a := Unwrapb(cause) b := &base{ t: t, diff --git a/pkg/errors/http.go b/pkg/errors/http.go new file mode 100644 index 0000000000..adc2fff22e --- /dev/null +++ b/pkg/errors/http.go @@ -0,0 +1,60 @@ +package errors + +import ( + "encoding/json" + "net/url" +) + +type JSON struct { + Code string `json:"code"` + Message string `json:"message"` + Url string `json:"url,omitempty"` + Errors []responseerroradditional `json:"errors,omitempty"` +} + +type responseerroradditional struct { + Message string `json:"message"` +} + +func AsJSON(cause error) *JSON { + // See if this is an instance of the base error or not + _, c, m, _, u, a := Unwrapb(cause) + + rea := make([]responseerroradditional, len(a)) + for k, v := range a { + rea[k] = responseerroradditional{v} + } + + return &JSON{ + Code: c.String(), + Message: m, + Url: u, + Errors: rea, + } +} + +func AsURLValues(cause error) url.Values { + // See if this is an instance of the base error or not + _, c, m, _, u, a := Unwrapb(cause) + + rea := make([]responseerroradditional, len(a)) + for k, v := range a { + rea[k] = responseerroradditional{v} + } + + errors, err := json.Marshal(rea) + if err != nil { + return url.Values{ + "code": {c.String()}, + "message": {m}, + "url": {u}, + } + } + + return url.Values{ + "code": {c.String()}, + "message": {m}, + "url": {u}, + "errors": {string(errors)}, + } +} diff --git a/pkg/errors/type.go b/pkg/errors/type.go index 80d0dbbefa..5a4c452848 100644 --- a/pkg/errors/type.go +++ b/pkg/errors/type.go @@ -1,18 +1,23 @@ package errors var ( - TypeInvalidInput typ = typ{"invalid-input"} - TypeInternal = typ{"internal"} - TypeUnsupported = typ{"unsupported"} - TypeNotFound = typ{"not-found"} - TypeMethodNotAllowed = typ{"method-not-allowed"} - TypeAlreadyExists = typ{"already-exists"} - TypeUnauthenticated = typ{"unauthenticated"} - TypeForbidden = typ{"forbidden"} - TypeCanceled = typ{"canceled"} - TypeTimeout = typ{"timeout"} - TypeUnexpected = typ{"unexpected"} // Generic mismatch of expectations + TypeInvalidInput typ = typ{"invalid-input"} + TypeInternal = typ{"internal"} + TypeUnsupported = typ{"unsupported"} + TypeNotFound = typ{"not-found"} + TypeMethodNotAllowed = typ{"method-not-allowed"} + TypeAlreadyExists = typ{"already-exists"} + TypeUnauthenticated = typ{"unauthenticated"} + TypeForbidden = typ{"forbidden"} + TypeCanceled = typ{"canceled"} + TypeTimeout = typ{"timeout"} + TypeUnexpected = typ{"unexpected"} // Generic mismatch of expectations + TypeLicenseUnavailable = typ{"license-unavailable"} ) // Defines custom error types type typ struct{ s string } + +func (t typ) String() string { + return t.s +} diff --git a/pkg/http/middleware/api_key.go b/pkg/http/middleware/api_key.go index 107237a429..055d25a074 100644 --- a/pkg/http/middleware/api_key.go +++ b/pkg/http/middleware/api_key.go @@ -80,8 +80,8 @@ func (a *APIKey) Wrap(next http.Handler) http.Handler { jwt := authtypes.Claims{ UserID: user.ID.String(), Role: apiKey.Role, - Email: user.Email, - OrgID: user.OrgID, + Email: user.Email.String(), + OrgID: user.OrgID.String(), } ctx = authtypes.NewContextWithClaims(ctx, jwt) diff --git a/pkg/http/middleware/auth.go b/pkg/http/middleware/auth.go deleted file mode 100644 index 19feeb41f7..0000000000 --- 
a/pkg/http/middleware/auth.go +++ /dev/null @@ -1,66 +0,0 @@ -package middleware - -import ( - "log/slog" - "net/http" - - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/SigNoz/signoz/pkg/types/ctxtypes" - "github.com/SigNoz/signoz/pkg/valuer" -) - -const ( - authCrossOrgMessage string = "::AUTH-CROSS-ORG::" -) - -type Auth struct { - jwt *authtypes.JWT - headers []string - sharder sharder.Sharder - logger *slog.Logger -} - -func NewAuth(jwt *authtypes.JWT, headers []string, sharder sharder.Sharder, logger *slog.Logger) *Auth { - return &Auth{jwt: jwt, headers: headers, sharder: sharder, logger: logger} -} - -func (a *Auth) Wrap(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var values []string - for _, header := range a.headers { - values = append(values, r.Header.Get(header)) - } - - ctx, err := a.jwt.ContextFromRequest(r.Context(), values...) - if err != nil { - next.ServeHTTP(w, r) - return - } - - claims, err := authtypes.ClaimsFromContext(ctx) - if err != nil { - next.ServeHTTP(w, r) - return - } - - if err := a.sharder.IsMyOwnedKey(r.Context(), types.NewOrganizationKey(valuer.MustNewUUID(claims.OrgID))); err != nil { - a.logger.ErrorContext(r.Context(), authCrossOrgMessage, "claims", claims, "error", err) - next.ServeHTTP(w, r) - return - } - - ctx = ctxtypes.SetAuthType(ctx, ctxtypes.AuthTypeJWT) - - comment := ctxtypes.CommentFromContext(ctx) - comment.Set("auth_type", ctxtypes.AuthTypeJWT.StringValue()) - comment.Set("user_id", claims.UserID) - comment.Set("org_id", claims.OrgID) - - r = r.WithContext(ctxtypes.NewContextWithComment(ctx, comment)) - - next.ServeHTTP(w, r) - }) - -} diff --git a/pkg/http/middleware/authn.go b/pkg/http/middleware/authn.go new file mode 100644 index 0000000000..f2ae10d261 --- /dev/null +++ b/pkg/http/middleware/authn.go @@ -0,0 +1,150 @@ +package middleware + +import ( + "context" + "log/slog" + "net/http" + "strings" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/sharder" + "github.com/SigNoz/signoz/pkg/tokenizer" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/types/ctxtypes" + "github.com/SigNoz/signoz/pkg/valuer" + "golang.org/x/sync/singleflight" +) + +const ( + authCrossOrgMessage string = "::AUTH-CROSS-ORG::" +) + +type AuthN struct { + tokenizer tokenizer.Tokenizer + headers []string + sharder sharder.Sharder + logger *slog.Logger + sfGroup *singleflight.Group +} + +func NewAuthN(headers []string, sharder sharder.Sharder, tokenizer tokenizer.Tokenizer, logger *slog.Logger) *AuthN { + return &AuthN{ + headers: headers, + sharder: sharder, + tokenizer: tokenizer, + logger: logger, + sfGroup: &singleflight.Group{}, + } +} + +func (a *AuthN) Wrap(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + var values []string + for _, header := range a.headers { + values = append(values, r.Header.Get(header)) + } + + ctx, err := a.contextFromRequest(r.Context(), values...) 
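+		// If no access token could be resolved, or the tokenizer rejects it,
+		// the request is forwarded unauthenticated and downstream handlers
+		// decide whether to reject it.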
+ if err != nil { + r = r.WithContext(ctx) + next.ServeHTTP(w, r) + return + } + + r = r.WithContext(ctx) + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + next.ServeHTTP(w, r) + return + } + + if err := a.sharder.IsMyOwnedKey(r.Context(), types.NewOrganizationKey(valuer.MustNewUUID(claims.OrgID))); err != nil { + a.logger.ErrorContext(r.Context(), authCrossOrgMessage, "claims", claims, "error", err) + next.ServeHTTP(w, r) + return + } + + ctx = ctxtypes.SetAuthType(ctx, ctxtypes.AuthTypeTokenizer) + + comment := ctxtypes.CommentFromContext(ctx) + comment.Set("auth_type", ctxtypes.AuthTypeTokenizer.StringValue()) + comment.Set("tokenizer_provider", a.tokenizer.Config().Provider) + comment.Set("user_id", claims.UserID) + comment.Set("org_id", claims.OrgID) + + r = r.WithContext(ctxtypes.NewContextWithComment(ctx, comment)) + + next.ServeHTTP(w, r) + + accessToken, err := authtypes.AccessTokenFromContext(r.Context()) + if err != nil { + next.ServeHTTP(w, r) + return + } + + lastObservedAtCtx := context.WithoutCancel(r.Context()) + _, _, _ = a.sfGroup.Do(accessToken, func() (any, error) { + if err := a.tokenizer.SetLastObservedAt(lastObservedAtCtx, accessToken, time.Now()); err != nil { + a.logger.ErrorContext(lastObservedAtCtx, "failed to set last observed at", "error", err) + return false, err + } + + return true, nil + }) + }) +} + +func (a *AuthN) contextFromRequest(ctx context.Context, values ...string) (context.Context, error) { + ctx, err := a.contextFromAccessToken(ctx, values...) + if err != nil { + return ctx, err + } + + accessToken, err := authtypes.AccessTokenFromContext(ctx) + if err != nil { + return ctx, err + } + + authenticatedUser, err := a.tokenizer.GetIdentity(ctx, accessToken) + if err != nil { + return ctx, err + } + + return authtypes.NewContextWithClaims(ctx, authenticatedUser.ToClaims()), nil +} + +func (a *AuthN) contextFromAccessToken(ctx context.Context, values ...string) (context.Context, error) { + var value string + for _, v := range values { + if v != "" { + value = v + break + } + } + + if value == "" { + return ctx, errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "missing authorization header") + } + + // parse from + bearerToken, ok := parseBearerAuth(value) + if !ok { + // this will take care that if the value is not of type bearer token, directly use it + bearerToken = value + } + + return authtypes.NewContextWithAccessToken(ctx, bearerToken), nil +} + +func parseBearerAuth(auth string) (string, bool) { + const prefix = "Bearer " + // Case insensitive prefix match + if len(auth) < len(prefix) || !strings.EqualFold(auth[:len(prefix)], prefix) { + return "", false + } + + return auth[len(prefix):], true +} diff --git a/pkg/http/middleware/doc.go b/pkg/http/middleware/doc.go deleted file mode 100644 index 911746777c..0000000000 --- a/pkg/http/middleware/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// package middleware contains an implementation of all middlewares. 
-package middleware diff --git a/pkg/http/render/render.go b/pkg/http/render/render.go index a9231d6bd6..f78c4a4d43 100644 --- a/pkg/http/render/render.go +++ b/pkg/http/render/render.go @@ -16,20 +16,9 @@ const ( var json = jsoniter.ConfigCompatibleWithStandardLibrary type response struct { - Status string `json:"status"` - Data interface{} `json:"data,omitempty"` - Error *responseerror `json:"error,omitempty"` -} - -type responseerror struct { - Code string `json:"code"` - Message string `json:"message"` - Url string `json:"url,omitempty"` - Errors []responseerroradditional `json:"errors,omitempty"` -} - -type responseerroradditional struct { - Message string `json:"message"` + Status string `json:"status"` + Data interface{} `json:"data,omitempty"` + Error *errors.JSON `json:"error,omitempty"` } func Success(rw http.ResponseWriter, httpCode int, data interface{}) { @@ -50,10 +39,9 @@ func Success(rw http.ResponseWriter, httpCode int, data interface{}) { } func Error(rw http.ResponseWriter, cause error) { - // See if this is an instance of the base error or not - t, c, m, _, u, a := errors.Unwrapb(cause) - // Derive the http code from the error type + t, _, _, _, _, _ := errors.Unwrapb(cause) + httpCode := http.StatusInternalServerError switch t { case errors.TypeInvalidInput: @@ -72,22 +60,11 @@ func Error(rw http.ResponseWriter, cause error) { httpCode = statusClientClosedConnection case errors.TypeTimeout: httpCode = http.StatusGatewayTimeout + case errors.TypeLicenseUnavailable: + httpCode = http.StatusUnavailableForLegalReasons } - rea := make([]responseerroradditional, len(a)) - for k, v := range a { - rea[k] = responseerroradditional{v} - } - - body, err := json.Marshal(&response{ - Status: StatusError.s, - Error: &responseerror{ - Code: c.String(), - Url: u, - Message: m, - Errors: rea, - }, - }) + body, err := json.Marshal(&response{Status: StatusError.s, Error: errors.AsJSON(cause)}) if err != nil { // this should never be the case http.Error(rw, err.Error(), http.StatusInternalServerError) diff --git a/pkg/instrumentation/loghandler/correlation.go b/pkg/instrumentation/loghandler/correlation.go index e24a48b94b..a07a3c7948 100644 --- a/pkg/instrumentation/loghandler/correlation.go +++ b/pkg/instrumentation/loghandler/correlation.go @@ -4,7 +4,6 @@ import ( "context" "log/slog" - "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/trace" ) @@ -33,12 +32,6 @@ func (h *correlation) Wrap(next LogHandler) LogHandler { record.AddAttrs(slog.String("span_id", spanID)) } - // Setting span status if the log is an error. - // Purposely leaving as codes.Unset (default) otherwise. - if record.Level >= slog.LevelError { - span.SetStatus(codes.Error, record.Message) - } - return next.Handle(ctx, record) }) } diff --git a/pkg/instrumentation/sdk.go b/pkg/instrumentation/sdk.go index ec3317cc2e..d0349b7f83 100644 --- a/pkg/instrumentation/sdk.go +++ b/pkg/instrumentation/sdk.go @@ -4,7 +4,9 @@ import ( "context" "log/slog" + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/instrumentation/loghandler" "github.com/SigNoz/signoz/pkg/version" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/collectors" @@ -20,11 +22,12 @@ var _ Instrumentation = (*SDK)(nil) // SDK holds the core components for application instrumentation. 
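+// The meter provider is constructed against a custom Prometheus registry, so
+// its shutdown hook is kept separately in meterProviderShutdownFunc and joined
+// with the SDK shutdown in Stop.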
type SDK struct { - logger *slog.Logger - sdk contribsdkconfig.SDK - meterProvider sdkmetric.MeterProvider - prometheusRegistry *prometheus.Registry - startCh chan struct{} + logger *slog.Logger + sdk contribsdkconfig.SDK + meterProvider sdkmetric.MeterProvider + prometheusRegistry *prometheus.Registry + meterProviderShutdownFunc func(context.Context) error + startCh chan struct{} } // New creates a new Instrumentation instance with configured providers. @@ -75,6 +78,7 @@ func New(ctx context.Context, cfg Config, build version.Build, serviceName strin // Use contrib config approach but with custom Prometheus registry var meterProvider sdkmetric.MeterProvider + var meterProviderShutdownFunc func(context.Context) error if cfg.Metrics.Enabled { meterProviderConfig := &contribsdkconfig.MeterProvider{ Readers: []contribsdkconfig.MetricReader{ @@ -82,11 +86,10 @@ func New(ctx context.Context, cfg Config, build version.Build, serviceName strin }, } - mp, _, err := meterProviderWithCustomRegistry(ctx, meterProviderConfig, resource, prometheusRegistry) + meterProvider, meterProviderShutdownFunc, err = meterProviderWithCustomRegistry(ctx, meterProviderConfig, resource, prometheusRegistry) if err != nil { return nil, err } - meterProvider = mp } sdk, err := contribsdkconfig.NewSDK( @@ -101,11 +104,12 @@ func New(ctx context.Context, cfg Config, build version.Build, serviceName strin } return &SDK{ - sdk: sdk, - meterProvider: meterProvider, - prometheusRegistry: prometheusRegistry, - logger: NewLogger(cfg), - startCh: make(chan struct{}), + sdk: sdk, + meterProvider: meterProvider, + meterProviderShutdownFunc: meterProviderShutdownFunc, + prometheusRegistry: prometheusRegistry, + logger: NewLogger(cfg, loghandler.NewCorrelation()), + startCh: make(chan struct{}), }, nil } @@ -116,7 +120,10 @@ func (i *SDK) Start(ctx context.Context) error { func (i *SDK) Stop(ctx context.Context) error { close(i.startCh) - return i.sdk.Shutdown(ctx) + return errors.Join( + i.sdk.Shutdown(ctx), + i.meterProviderShutdownFunc(ctx), + ) } func (i *SDK) Logger() *slog.Logger { @@ -124,7 +131,7 @@ func (i *SDK) Logger() *slog.Logger { } func (i *SDK) MeterProvider() sdkmetric.MeterProvider { - return i.sdk.MeterProvider() + return i.meterProvider } func (i *SDK) TracerProvider() sdktrace.TracerProvider { diff --git a/pkg/modules/authdomain/authdomain.go b/pkg/modules/authdomain/authdomain.go new file mode 100644 index 0000000000..ebebfbe9ca --- /dev/null +++ b/pkg/modules/authdomain/authdomain.go @@ -0,0 +1,39 @@ +package authdomain + +import ( + "context" + "net/http" + + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type Module interface { + // List all auth domains for an organization. + ListByOrgID(context.Context, valuer.UUID) ([]*authtypes.AuthDomain, error) + + // Get an auth domain by id. + Get(context.Context, valuer.UUID) (*authtypes.AuthDomain, error) + + // Get an auth domain by orgID and id. + GetByOrgIDAndID(context.Context, valuer.UUID, valuer.UUID) (*authtypes.AuthDomain, error) + + // Get an auth domain by name and orgID. + GetByNameAndOrgID(context.Context, string, valuer.UUID) (*authtypes.AuthDomain, error) + + // Create a new auth domain for an organization. + Create(context.Context, *authtypes.AuthDomain) error + + // Update an existing auth domain. + Update(context.Context, *authtypes.AuthDomain) error + + // Delete an existing auth domain by id. 
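+	// The first UUID is the organization ID and the second is the domain ID.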
+ Delete(context.Context, valuer.UUID, valuer.UUID) error +} + +type Handler interface { + List(http.ResponseWriter, *http.Request) + Create(http.ResponseWriter, *http.Request) + Update(http.ResponseWriter, *http.Request) + Delete(http.ResponseWriter, *http.Request) +} diff --git a/pkg/modules/authdomain/implauthdomain/handler.go b/pkg/modules/authdomain/implauthdomain/handler.go new file mode 100644 index 0000000000..cd271dff78 --- /dev/null +++ b/pkg/modules/authdomain/implauthdomain/handler.go @@ -0,0 +1,144 @@ +package implauthdomain + +import ( + "context" + "net/http" + "time" + + "github.com/SigNoz/signoz/pkg/http/binding" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/modules/authdomain" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/gorilla/mux" +) + +type handler struct { + module authdomain.Module +} + +func NewHandler(module authdomain.Module) authdomain.Handler { + return &handler{module: module} +} + +func (handler *handler) Create(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + body := new(authtypes.PostableAuthDomain) + if err := binding.JSON.BindBody(req.Body, body); err != nil { + render.Error(rw, err) + return + } + + authDomain, err := authtypes.NewAuthDomainFromConfig(body.Name, &body.Config, valuer.MustNewUUID(claims.OrgID)) + if err != nil { + render.Error(rw, err) + return + } + + err = handler.module.Create(ctx, authDomain) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusCreated, types.Identifiable{ID: authDomain.StorableAuthDomain().ID}) +} + +func (handler *handler) Delete(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + domainId, err := valuer.NewUUID(mux.Vars(req)["id"]) + if err != nil { + render.Error(rw, err) + return + } + err = handler.module.Delete(ctx, valuer.MustNewUUID(claims.OrgID), domainId) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusNoContent, nil) +} + +func (handler *handler) List(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + domains, err := handler.module.ListByOrgID(ctx, valuer.MustNewUUID(claims.OrgID)) + if err != nil { + render.Error(rw, err) + return + } + + authDomains := make([]*authtypes.GettableAuthDomain, len(domains)) + for i, domain := range domains { + authDomains[i] = authtypes.NewGettableAuthDomainFromAuthDomain(domain) + } + + render.Success(rw, http.StatusOK, authDomains) +} + +func (handler *handler) Update(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + domainID, err := valuer.NewUUID(mux.Vars(r)["id"]) + if err != nil { + render.Error(rw, err) + return + } + + body := new(authtypes.UpdateableAuthDomain) + if err := binding.JSON.BindBody(r.Body, body); err != nil { + 
render.Error(rw, err) + return + } + + authDomain, err := handler.module.GetByOrgIDAndID(ctx, valuer.MustNewUUID(claims.OrgID), domainID) + if err != nil { + render.Error(rw, err) + return + } + + err = authDomain.Update(&body.Config) + if err != nil { + render.Error(rw, err) + return + } + + if err := handler.module.Update(ctx, authDomain); err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusNoContent, nil) +} diff --git a/pkg/modules/authdomain/implauthdomain/module.go b/pkg/modules/authdomain/implauthdomain/module.go new file mode 100644 index 0000000000..2532ac0f5b --- /dev/null +++ b/pkg/modules/authdomain/implauthdomain/module.go @@ -0,0 +1,45 @@ +package implauthdomain + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/modules/authdomain" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type module struct { + store authtypes.AuthDomainStore +} + +func NewModule(store authtypes.AuthDomainStore) authdomain.Module { + return &module{store: store} +} + +func (module *module) Create(ctx context.Context, domain *authtypes.AuthDomain) error { + return module.store.Create(ctx, domain) +} + +func (module *module) Get(ctx context.Context, id valuer.UUID) (*authtypes.AuthDomain, error) { + return module.store.Get(ctx, id) +} + +func (module *module) GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.AuthDomain, error) { + return module.store.GetByOrgIDAndID(ctx, orgID, id) +} + +func (module *module) GetByNameAndOrgID(ctx context.Context, name string, orgID valuer.UUID) (*authtypes.AuthDomain, error) { + return module.store.GetByNameAndOrgID(ctx, name, orgID) +} + +func (module *module) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error { + return module.store.Delete(ctx, orgID, id) +} + +func (module *module) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*authtypes.AuthDomain, error) { + return module.store.ListByOrgID(ctx, orgID) +} + +func (module *module) Update(ctx context.Context, domain *authtypes.AuthDomain) error { + return module.store.Update(ctx, domain) +} diff --git a/pkg/modules/authdomain/implauthdomain/store.go b/pkg/modules/authdomain/implauthdomain/store.go new file mode 100644 index 0000000000..93941d8d97 --- /dev/null +++ b/pkg/modules/authdomain/implauthdomain/store.go @@ -0,0 +1,159 @@ +package implauthdomain + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type store struct { + sqlstore sqlstore.SQLStore +} + +func NewStore(sqlstore sqlstore.SQLStore) authtypes.AuthDomainStore { + return &store{sqlstore: sqlstore} +} + +func (store *store) Create(ctx context.Context, domain *authtypes.AuthDomain) error { + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewInsert(). + Model(domain.StorableAuthDomain()). + Exec(ctx) + if err != nil { + return store.sqlstore.WrapAlreadyExistsErrf(err, authtypes.ErrCodeAuthDomainAlreadyExists, "domain with name %s already exists", domain.StorableAuthDomain().Name) + } + + return nil +} + +func (store *store) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error { + authDomain := new(authtypes.StorableAuthDomain) + + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewDelete(). + Model(authDomain). + Where("id = ?", id). + Where("org_id = ?", orgID). 
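+ // Scoping the delete on both id and org_id ensures a domain can only be removed by its own organization.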
+ Exec(ctx) + if err != nil { + return err + } + + return nil +} + +func (store *store) Get(ctx context.Context, id valuer.UUID) (*authtypes.AuthDomain, error) { + authDomain := new(authtypes.StorableAuthDomain) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(authDomain). + Where("id = ?", id). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeAuthDomainNotFound, "auth domain with id %s does not exist", id) + } + + return authtypes.NewAuthDomainFromStorableAuthDomain(authDomain) +} + +func (store *store) GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.AuthDomain, error) { + authDomain := new(authtypes.StorableAuthDomain) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(authDomain). + Where("org_id = ?", orgID). + Where("id = ?", id). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeAuthDomainNotFound, "auth domain with org id %s and id %s does not exist", orgID, id) + } + + return authtypes.NewAuthDomainFromStorableAuthDomain(authDomain) +} + +func (store *store) GetByName(ctx context.Context, name string) (*authtypes.AuthDomain, error) { + authDomain := new(authtypes.StorableAuthDomain) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(authDomain). + Where("name = ?", name). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeAuthDomainNotFound, "auth domain with name %s does not exist", name) + } + + return authtypes.NewAuthDomainFromStorableAuthDomain(authDomain) +} + +func (store *store) GetByNameAndOrgID(ctx context.Context, name string, orgID valuer.UUID) (*authtypes.AuthDomain, error) { + authDomain := new(authtypes.StorableAuthDomain) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(authDomain). + Where("name = ?", name). + Where("org_id = ?", orgID). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeAuthDomainNotFound, "auth domain with name %s and org id %s does not exist", name, orgID) + } + + return authtypes.NewAuthDomainFromStorableAuthDomain(authDomain) +} + +func (store *store) ListByOrgID(ctx context.Context, orgId valuer.UUID) ([]*authtypes.AuthDomain, error) { + var storableAuthDomains []*authtypes.StorableAuthDomain + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(&storableAuthDomains). + Where("org_id = ?", orgId). + Scan(ctx) + if err != nil { + return nil, err + } + + authDomains := make([]*authtypes.AuthDomain, len(storableAuthDomains)) + for i, storableAuthDomain := range storableAuthDomains { + authDomains[i], err = authtypes.NewAuthDomainFromStorableAuthDomain(storableAuthDomain) + if err != nil { + return nil, err + } + } + + return authDomains, nil +} + +func (store *store) Update(ctx context.Context, domain *authtypes.AuthDomain) error { + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewUpdate(). + Model(domain.StorableAuthDomain()). + WherePK().
+ Exec(ctx) + if err != nil { + return err + } + + return nil +} diff --git a/pkg/modules/dashboard/impldashboard/handler.go b/pkg/modules/dashboard/impldashboard/handler.go index 139f4c3a6a..21b600a04a 100644 --- a/pkg/modules/dashboard/impldashboard/handler.go +++ b/pkg/modules/dashboard/impldashboard/handler.go @@ -106,7 +106,7 @@ func (handler *handler) Update(rw http.ResponseWriter, r *http.Request) { diff := 0 // Allow multiple deletions for API key requests; enforce for others - if authType, ok := ctxtypes.AuthTypeFromContext(ctx); ok && authType == ctxtypes.AuthTypeJWT { + if authType, ok := ctxtypes.AuthTypeFromContext(ctx); ok && authType == ctxtypes.AuthTypeTokenizer { diff = 1 } diff --git a/pkg/modules/session/implsession/handler.go b/pkg/modules/session/implsession/handler.go new file mode 100644 index 0000000000..efb6ff6b97 --- /dev/null +++ b/pkg/modules/session/implsession/handler.go @@ -0,0 +1,192 @@ +package implsession + +import ( + "context" + "net/http" + "net/url" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/http/binding" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/modules/session" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type handler struct { + module session.Module +} + +func NewHandler(module session.Module) session.Handler { + return &handler{module: module} +} + +func (handler *handler) GetSessionContext(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 10*time.Second) + defer cancel() + + email, err := valuer.NewEmail(req.URL.Query().Get("email")) + if err != nil { + render.Error(rw, err) + return + } + + siteURL, err := url.Parse(req.URL.Query().Get("ref")) + if err != nil { + render.Error(rw, err) + return + } + + sessionContext, err := handler.module.GetSessionContext(ctx, email, siteURL) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusOK, sessionContext) +} + +func (handler *handler) DeprecatedCreateSessionByEmailPassword(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 15*time.Second) + defer cancel() + + body := new(authtypes.DeprecatedPostableLogin) + if err := binding.JSON.BindBody(req.Body, body); err != nil { + render.Error(rw, err) + return + } + + token, err := handler.module.DeprecatedCreateSessionByEmailPassword(ctx, body.Email, body.Password) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusOK, &authtypes.DeprecatedGettableLogin{ + AccessJWT: token.AccessToken, + UserID: token.UserID.String(), + }) +} + +func (handler *handler) CreateSessionByEmailPassword(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 15*time.Second) + defer cancel() + + body := new(authtypes.PostableEmailPasswordSession) + if err := binding.JSON.BindBody(req.Body, body); err != nil { + render.Error(rw, err) + return + } + + token, err := handler.module.CreatePasswordAuthNSession(ctx, authtypes.AuthNProviderEmailPassword, body.Email, body.Password, body.OrgID) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusOK, authtypes.NewGettableTokenFromToken(token, handler.module.GetRotationInterval(ctx))) +} + +func (handler *handler) CreateSessionByGoogleCallback(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 15*time.Second) + defer cancel() + + values := 
req.URL.Query() + + redirectURL, err := handler.module.CreateCallbackAuthNSession(ctx, authtypes.AuthNProviderGoogleAuth, values) + if err != nil { + http.Redirect(rw, req, handler.getRedirectURLFromErr(err), http.StatusSeeOther) + return + } + + http.Redirect(rw, req, redirectURL, http.StatusSeeOther) +} + +func (handler *handler) CreateSessionBySAMLCallback(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 15*time.Second) + defer cancel() + + err := req.ParseForm() + if err != nil { + render.Error(rw, err) + return + } + + redirectURL, err := handler.module.CreateCallbackAuthNSession(ctx, authtypes.AuthNProviderSAML, req.Form) + if err != nil { + http.Redirect(rw, req, handler.getRedirectURLFromErr(err), http.StatusSeeOther) + return + } + + http.Redirect(rw, req, redirectURL, http.StatusSeeOther) +} + +func (handler *handler) CreateSessionByOIDCCallback(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 15*time.Second) + defer cancel() + + values := req.URL.Query() + redirectURL, err := handler.module.CreateCallbackAuthNSession(ctx, authtypes.AuthNProviderOIDC, values) + if err != nil { + http.Redirect(rw, req, handler.getRedirectURLFromErr(err), http.StatusSeeOther) + return + } + + http.Redirect(rw, req, redirectURL, http.StatusSeeOther) +} + +func (handler *handler) RotateSession(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 10*time.Second) + defer cancel() + + body := new(authtypes.PostableRotateToken) + if err := binding.JSON.BindBody(req.Body, body); err != nil { + render.Error(rw, err) + return + } + + accessToken, err := authtypes.AccessTokenFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + token, err := handler.module.RotateSession(ctx, accessToken, body.RefreshToken) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusOK, authtypes.NewGettableTokenFromToken(token, handler.module.GetRotationInterval(ctx))) +} + +func (handler *handler) DeleteSession(rw http.ResponseWriter, req *http.Request) { + ctx, cancel := context.WithTimeout(req.Context(), 10*time.Second) + defer cancel() + + accessToken, err := authtypes.AccessTokenFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + err = handler.module.DeleteSession(ctx, accessToken) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusNoContent, nil) +} + +func (*handler) getRedirectURLFromErr(err error) string { + values := errors.AsURLValues(err) + values.Add("callbackauthnerr", "true") + + return (&url.URL{ + // When UI is being served on a prefix, we need to redirect to the login page on the prefix. 
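+ // The error details are carried to the login page as query parameters, with callbackauthnerr=true marking a failed callback.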
+ Path: "/login", + RawQuery: values.Encode(), + }).String() +} diff --git a/pkg/modules/session/implsession/module.go b/pkg/modules/session/implsession/module.go new file mode 100644 index 0000000000..c0712b8ed6 --- /dev/null +++ b/pkg/modules/session/implsession/module.go @@ -0,0 +1,233 @@ +package implsession + +import ( + "context" + "net/url" + "slices" + "strings" + "time" + + "github.com/SigNoz/signoz/pkg/authn" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/modules/authdomain" + "github.com/SigNoz/signoz/pkg/modules/organization" + "github.com/SigNoz/signoz/pkg/modules/session" + "github.com/SigNoz/signoz/pkg/modules/user" + "github.com/SigNoz/signoz/pkg/tokenizer" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type module struct { + settings factory.ScopedProviderSettings + authNs map[authtypes.AuthNProvider]authn.AuthN + user user.Module + userGetter user.Getter + authDomain authdomain.Module + tokenizer tokenizer.Tokenizer + orgGetter organization.Getter +} + +func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.AuthNProvider]authn.AuthN, user user.Module, userGetter user.Getter, authDomain authdomain.Module, tokenizer tokenizer.Tokenizer, orgGetter organization.Getter) session.Module { + return &module{ + settings: factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/session/implsession"), + authNs: authNs, + user: user, + userGetter: userGetter, + authDomain: authDomain, + tokenizer: tokenizer, + orgGetter: orgGetter, + } +} + +func (module *module) GetSessionContext(ctx context.Context, email valuer.Email, siteURL *url.URL) (*authtypes.SessionContext, error) { + context := authtypes.NewSessionContext() + + orgs, err := module.orgGetter.ListByOwnedKeyRange(ctx) + if err != nil { + return nil, err + } + + if len(orgs) == 0 { + context.Exists = false + return context, nil + } + + var orgIDs []valuer.UUID + for _, org := range orgs { + orgIDs = append(orgIDs, org.ID) + } + + users, err := module.userGetter.ListUsersByEmailAndOrgIDs(ctx, email, orgIDs) + if err != nil { + return nil, err + } + + // Since email is a valuer, we can be sure that it is a valid email and we can split it to get the domain name. + name := strings.Split(email.String(), "@")[1] + + if len(users) == 0 { + context.Exists = false + + for _, org := range orgs { + orgContext, err := module.getOrgSessionContext(ctx, org, name, siteURL) + if err != nil { + // For some reason, there was an error in getting the org session context. Instead of failing the context call, we create a PasswordAuthNSupport for the org and add a warning. + orgContext = authtypes.NewOrgSessionContext(org.ID, org.Name).AddPasswordAuthNSupport(authtypes.AuthNProviderEmailPassword).AddWarning(err) + } + + context = context.AddOrgContext(orgContext) + } + + return context, nil + } + + context.Exists = true + for _, user := range users { + idx := slices.IndexFunc(orgs, func(org *types.Organization) bool { + return org.ID == user.OrgID + }) + + if idx == -1 { + continue + } + + org := orgs[idx] + orgContext, err := module.getOrgSessionContext(ctx, org, name, siteURL) + if err != nil { + // For some reason, there was an error in getting the org session context. Instead of failing the context call, we create a PasswordAuthNSupport for the org and add a warning. 
+ orgContext = authtypes.NewOrgSessionContext(org.ID, org.Name).AddPasswordAuthNSupport(authtypes.AuthNProviderEmailPassword).AddWarning(err) + } + + context = context.AddOrgContext(orgContext) + } + + return context, nil +} + +func (module *module) DeprecatedCreateSessionByEmailPassword(ctx context.Context, email valuer.Email, password string) (*authtypes.Token, error) { + users, err := module.userGetter.GetUsersByEmail(ctx, email) + if err != nil { + return nil, err + } + + if len(users) == 0 { + return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email or password") + } + + factorPassword, err := module.userGetter.GetFactorPasswordByUserID(ctx, users[0].ID) + if err != nil { + return nil, err + } + + if !factorPassword.Equals(password) { + return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email or password") + } + + identity := authtypes.NewIdentity(users[0].ID, users[0].OrgID, users[0].Email, users[0].Role) + + return module.tokenizer.CreateToken(ctx, identity, map[string]string{}) +} + +func (module *module) CreatePasswordAuthNSession(ctx context.Context, authNProvider authtypes.AuthNProvider, email valuer.Email, password string, orgID valuer.UUID) (*authtypes.Token, error) { + passwordAuthN, err := getProvider[authn.PasswordAuthN](authNProvider, module.authNs) + if err != nil { + return nil, err + } + + identity, err := passwordAuthN.Authenticate(ctx, email.String(), password, orgID) + if err != nil { + return nil, err + } + + return module.tokenizer.CreateToken(ctx, identity, map[string]string{}) +} + +func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvider authtypes.AuthNProvider, values url.Values) (string, error) { + callbackAuthN, err := getProvider[authn.CallbackAuthN](authNProvider, module.authNs) + if err != nil { + return "", err + } + + callbackIdentity, err := callbackAuthN.HandleCallback(ctx, values) + if err != nil { + module.settings.Logger().ErrorContext(ctx, "failed to handle callback", "error", err, "authn_provider", authNProvider) + return "", err + } + + user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, types.RoleViewer, callbackIdentity.OrgID) + if err != nil { + return "", err + } + + user, err = module.user.GetOrCreateUser(ctx, user) + if err != nil { + return "", err + } + + token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{}) + if err != nil { + return "", err + } + + redirectURL := &url.URL{ + Scheme: callbackIdentity.State.URL.Scheme, + Host: callbackIdentity.State.URL.Host, + Path: callbackIdentity.State.URL.Path, + RawQuery: authtypes.NewURLValuesFromToken(token, module.GetRotationInterval(ctx)).Encode(), + } + + return redirectURL.String(), nil +} + +func (module *module) RotateSession(ctx context.Context, accessToken string, refreshToken string) (*authtypes.Token, error) { + return module.tokenizer.RotateToken(ctx, accessToken, refreshToken) +} + +func (module *module) DeleteSession(ctx context.Context, accessToken string) error { + return module.tokenizer.DeleteToken(ctx, accessToken) +} + +func (module *module) GetRotationInterval(context.Context) time.Duration { + return module.tokenizer.Config().Rotation.Interval +} + +func (module *module) getOrgSessionContext(ctx context.Context, org *types.Organization, name string, siteURL *url.URL) (*authtypes.OrgSessionContext, error) { + authDomain, err := module.authDomain.GetByNameAndOrgID(ctx,
name, org.ID) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + return nil, err + } + + if authDomain == nil { + return authtypes.NewOrgSessionContext(org.ID, org.Name).AddPasswordAuthNSupport(authtypes.AuthNProviderEmailPassword), nil + } + + if !authDomain.AuthDomainConfig().SSOEnabled { + return authtypes.NewOrgSessionContext(org.ID, org.Name).AddPasswordAuthNSupport(authtypes.AuthNProviderEmailPassword), nil + } + + provider, err := getProvider[authn.CallbackAuthN](authDomain.AuthDomainConfig().AuthNProvider, module.authNs) + if err != nil { + return nil, err + } + + loginURL, err := provider.LoginURL(ctx, siteURL, authDomain) + if err != nil { + return nil, err + } + + return authtypes.NewOrgSessionContext(org.ID, org.Name).AddCallbackAuthNSupport(authDomain.AuthDomainConfig().AuthNProvider, loginURL), nil +} + +func getProvider[T authn.AuthN](authNProvider authtypes.AuthNProvider, authNs map[authtypes.AuthNProvider]authn.AuthN) (T, error) { + var provider T + + provider, ok := authNs[authNProvider].(T) + if !ok { + return provider, errors.New(errors.TypeNotFound, errors.CodeNotFound, "authn provider not found") + } + + return provider, nil +} diff --git a/pkg/modules/session/session.go b/pkg/modules/session/session.go new file mode 100644 index 0000000000..3b3dc40872 --- /dev/null +++ b/pkg/modules/session/session.go @@ -0,0 +1,60 @@ +package session + +import ( + "context" + "net/http" + "net/url" + "time" + + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type Module interface { + // This will soon be removed. + DeprecatedCreateSessionByEmailPassword(ctx context.Context, email valuer.Email, password string) (*authtypes.Token, error) + + // Gets the session context for the user. The context contains information on what the user has to do in order to create a session. + GetSessionContext(ctx context.Context, email valuer.Email, siteURL *url.URL) (*authtypes.SessionContext, error) + + // Create a session for a user using password authn provider. + CreatePasswordAuthNSession(ctx context.Context, authNProvider authtypes.AuthNProvider, email valuer.Email, password string, orgID valuer.UUID) (*authtypes.Token, error) + + // Create a session for a user using callback authn providers. + CreateCallbackAuthNSession(ctx context.Context, authNProvider authtypes.AuthNProvider, values url.Values) (string, error) + + // Rotate a token. + RotateSession(ctx context.Context, accessToken string, refreshToken string) (*authtypes.Token, error) + + // Delete a session. + DeleteSession(ctx context.Context, accessToken string) error + + // Get the rotation interval for the session. + GetRotationInterval(ctx context.Context) time.Duration +} + +type Handler interface { + // Get the session context for the user. + GetSessionContext(http.ResponseWriter, *http.Request) + + // Deprecated: create a session for a user using email and password. + DeprecatedCreateSessionByEmailPassword(http.ResponseWriter, *http.Request) + + // Create a session for a user using email and password. + CreateSessionByEmailPassword(http.ResponseWriter, *http.Request) + + // Create a session for a user using google callback. + CreateSessionByGoogleCallback(http.ResponseWriter, *http.Request) + + // Create a session for a user using saml callback. + CreateSessionBySAMLCallback(http.ResponseWriter, *http.Request) + + // Create a session for a user using oidc callback. + CreateSessionByOIDCCallback(http.ResponseWriter, *http.Request) + + // Rotate a token.
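+ // The access token is read from the request context and the refresh token from the request body.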
+ RotateSession(http.ResponseWriter, *http.Request) + + // Delete a session. + DeleteSession(http.ResponseWriter, *http.Request) +} diff --git a/pkg/modules/user/impluser/getter.go b/pkg/modules/user/impluser/getter.go index d91e946e8b..eb3d8d7446 100644 --- a/pkg/modules/user/impluser/getter.go +++ b/pkg/modules/user/impluser/getter.go @@ -17,15 +17,64 @@ func NewGetter(store types.UserStore) user.Getter { } func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) { - gettableUsers, err := module.store.ListUsers(ctx, orgID.StringValue()) + users, err := module.store.ListUsersByOrgID(ctx, orgID) if err != nil { return nil, err } - users := make([]*types.User, len(gettableUsers)) - for i, user := range gettableUsers { - users[i] = &user.User + return users, nil +} + +func (module *getter) GetUsersByEmail(ctx context.Context, email valuer.Email) ([]*types.User, error) { + users, err := module.store.GetUsersByEmail(ctx, email) + if err != nil { + return nil, err } return users, nil } + +func (module *getter) GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*types.User, error) { + user, err := module.store.GetByOrgIDAndID(ctx, orgID, id) + if err != nil { + return nil, err + } + + return user, nil +} + +func (module *getter) Get(ctx context.Context, id valuer.UUID) (*types.User, error) { + user, err := module.store.GetUser(ctx, id) + if err != nil { + return nil, err + } + + return user, nil +} + +func (module *getter) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) { + users, err := module.store.ListUsersByEmailAndOrgIDs(ctx, email, orgIDs) + if err != nil { + return nil, err + } + + return users, nil +} + +func (module *getter) CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) { + count, err := module.store.CountByOrgID(ctx, orgID) + if err != nil { + return 0, err + } + + return count, nil +} + +func (module *getter) GetFactorPasswordByUserID(ctx context.Context, userID valuer.UUID) (*types.FactorPassword, error) { + factorPassword, err := module.store.GetPasswordByUserID(ctx, userID) + if err != nil { + return nil, err + } + + return factorPassword, nil +} diff --git a/pkg/modules/user/impluser/handler.go b/pkg/modules/user/impluser/handler.go index e3b995f66e..63e9d44baf 100644 --- a/pkg/modules/user/impluser/handler.go +++ b/pkg/modules/user/impluser/handler.go @@ -14,16 +14,16 @@ import ( "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/valuer" - "github.com/google/uuid" "github.com/gorilla/mux" ) type handler struct { module root.Module + getter root.Getter } -func NewHandler(module root.Module) root.Handler { - return &handler{module: module} +func NewHandler(module root.Module, getter root.Getter) root.Handler { + return &handler{module: module, getter: getter} } func (h *handler) AcceptInvite(w http.ResponseWriter, r *http.Request) { @@ -36,72 +36,13 @@ func (h *handler) AcceptInvite(w http.ResponseWriter, r *http.Request) { return } - // get invite object - invite, err := h.module.GetInviteByToken(ctx, req.InviteToken) + user, err := h.module.AcceptInvite(ctx, req.InviteToken, req.Password) if err != nil { render.Error(w, err) return } - orgDomain, err := h.module.GetAuthDomainByEmail(ctx, invite.Email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - render.Error(w, err) - return - } - - precheckResp := &types.GettableLoginPrecheck{ - SSO: false, - IsUser: 
false, - } - - if invite.Name == "" && req.DisplayName != "" { - invite.Name = req.DisplayName - } - - user, err := types.NewUser(invite.Name, invite.Email, invite.Role, invite.OrgID) - if err != nil { - render.Error(w, err) - return - } - - if orgDomain != nil && orgDomain.SsoEnabled { - // sso is enabled, create user and respond precheck data - err = h.module.CreateUser(ctx, user) - if err != nil { - render.Error(w, err) - return - } - - // check if sso is enforced for the org - precheckResp, err = h.module.LoginPrecheck(ctx, invite.OrgID, user.Email, req.SourceURL) - if err != nil { - render.Error(w, err) - return - } - - } else { - password, err := types.NewFactorPassword(req.Password, user.ID.StringValue()) - if err != nil { - render.Error(w, err) - return - } - - err = h.module.CreateUser(ctx, user, root.WithFactorPassword(password)) - if err != nil { - render.Error(w, err) - return - } - - precheckResp.IsUser = true - } - - // delete the invite - if err := h.module.DeleteInvite(ctx, invite.OrgID, invite.ID); err != nil { - render.Error(w, err) - return - } - - render.Success(w, http.StatusOK, precheckResp) + render.Success(w, http.StatusCreated, user) } func (h *handler) CreateInvite(rw http.ResponseWriter, r *http.Request) { @@ -120,7 +61,7 @@ func (h *handler) CreateInvite(rw http.ResponseWriter, r *http.Request) { return } - invites, err := h.module.CreateBulkInvite(ctx, claims.OrgID, claims.UserID, &types.PostableBulkInviteRequest{ + invites, err := h.module.CreateBulkInvite(ctx, valuer.MustNewUUID(claims.OrgID), valuer.MustNewUUID(claims.UserID), &types.PostableBulkInviteRequest{ Invites: []types.PostableInvite{req}, }) if err != nil { @@ -153,7 +94,7 @@ func (h *handler) CreateBulkInvite(rw http.ResponseWriter, r *http.Request) { return } - _, err = h.module.CreateBulkInvite(ctx, claims.OrgID, claims.UserID, &req) + _, err = h.module.CreateBulkInvite(ctx, valuer.MustNewUUID(claims.OrgID), valuer.MustNewUUID(claims.UserID), &req) if err != nil { render.Error(rw, err) return @@ -167,26 +108,13 @@ func (h *handler) GetInvite(w http.ResponseWriter, r *http.Request) { defer cancel() token := mux.Vars(r)["token"] - sourceUrl := r.URL.Query().Get("ref") invite, err := h.module.GetInviteByToken(ctx, token) if err != nil { render.Error(w, err) return } - // precheck the user - precheckResp, err := h.module.LoginPrecheck(ctx, invite.OrgID, invite.Email, sourceUrl) - if err != nil { - render.Error(w, err) - return - } - - gettableInvite := &types.GettableEEInvite{ - GettableInvite: *invite, - PreCheck: precheckResp, - } - - render.Success(w, http.StatusOK, gettableInvite) + render.Success(w, http.StatusOK, invite) } func (h *handler) ListInvite(w http.ResponseWriter, r *http.Request) { @@ -232,6 +160,7 @@ func (h *handler) DeleteInvite(w http.ResponseWriter, r *http.Request) { } render.Success(w, http.StatusNoContent, nil) } + func (h *handler) GetUser(w http.ResponseWriter, r *http.Request) { ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) defer cancel() @@ -244,7 +173,26 @@ func (h *handler) GetUser(w http.ResponseWriter, r *http.Request) { return } - user, err := h.module.GetUserByID(ctx, claims.OrgID, id) + user, err := h.getter.GetByOrgIDAndID(ctx, valuer.MustNewUUID(claims.OrgID), valuer.MustNewUUID(id)) + if err != nil { + render.Error(w, err) + return + } + + render.Success(w, http.StatusOK, user) +} + +func (h *handler) GetMyUser(w http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + 
claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(w, err) + return + } + + user, err := h.getter.GetByOrgIDAndID(ctx, valuer.MustNewUUID(claims.OrgID), valuer.MustNewUUID(claims.UserID)) if err != nil { render.Error(w, err) return @@ -263,7 +211,7 @@ func (h *handler) ListUsers(w http.ResponseWriter, r *http.Request) { return } - users, err := h.module.ListUsers(ctx, claims.OrgID) + users, err := h.getter.ListByOrgID(ctx, valuer.MustNewUUID(claims.OrgID)) if err != nil { render.Error(w, err) return @@ -290,7 +238,7 @@ func (h *handler) UpdateUser(w http.ResponseWriter, r *http.Request) { return } - updatedUser, err := h.module.UpdateUser(ctx, claims.OrgID, id, &user, claims.UserID) + updatedUser, err := h.module.UpdateUser(ctx, valuer.MustNewUUID(claims.OrgID), id, &user, claims.UserID) if err != nil { render.Error(w, err) return @@ -311,7 +259,7 @@ func (h *handler) DeleteUser(w http.ResponseWriter, r *http.Request) { return } - if err := h.module.DeleteUser(ctx, claims.OrgID, id, claims.UserID); err != nil { + if err := h.module.DeleteUser(ctx, valuer.MustNewUUID(claims.OrgID), id, claims.UserID); err != nil { render.Error(w, err) return } @@ -319,23 +267,6 @@ func (h *handler) DeleteUser(w http.ResponseWriter, r *http.Request) { render.Success(w, http.StatusNoContent, nil) } -func (h *handler) LoginPrecheck(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - email := r.URL.Query().Get("email") - sourceUrl := r.URL.Query().Get("ref") - orgID := r.URL.Query().Get("orgID") - - resp, err := h.module.LoginPrecheck(ctx, orgID, email, sourceUrl) - if err != nil { - render.Error(w, err) - return - } - - render.Success(w, http.StatusOK, resp) -} - func (handler *handler) GetResetPasswordToken(w http.ResponseWriter, r *http.Request) { ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) defer cancel() @@ -348,8 +279,7 @@ func (handler *handler) GetResetPasswordToken(w http.ResponseWriter, r *http.Req return } - // check if the id lies in the same org as the claims - user, err := handler.module.GetUserByID(ctx, claims.OrgID, id) + user, err := handler.getter.GetByOrgIDAndID(ctx, valuer.MustNewUUID(claims.OrgID), valuer.MustNewUUID(id)) if err != nil { render.Error(w, err) return @@ -402,64 +332,6 @@ func (handler *handler) ChangePassword(w http.ResponseWriter, r *http.Request) { render.Success(w, http.StatusNoContent, nil) } -func (h *handler) Login(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - var req types.PostableLoginRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - render.Error(w, err) - return - } - - if req.RefreshToken == "" { - _, err := h.module.CanUsePassword(ctx, req.Email) - if err != nil { - render.Error(w, err) - return - } - } - - user, err := h.module.GetAuthenticatedUser(ctx, req.OrgID, req.Email, req.Password, req.RefreshToken) - if err != nil { - render.Error(w, err) - return - } - - jwt, err := h.module.GetJWTForUser(ctx, user) - if err != nil { - render.Error(w, err) - return - } - - gettableLoginResponse := &types.GettableLoginResponse{ - GettableUserJwt: jwt, - UserID: user.ID.String(), - } - - render.Success(w, http.StatusOK, gettableLoginResponse) -} - -func (h *handler) GetCurrentUserFromJWT(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - claims, err := 
authtypes.ClaimsFromContext(ctx) - if err != nil { - render.Error(w, err) - return - } - - user, err := h.module.GetUserByID(ctx, claims.OrgID, claims.UserID) - if err != nil { - render.Error(w, err) - return - } - - render.Success(w, http.StatusOK, user) - -} - func (h *handler) CreateAPIKey(w http.ResponseWriter, r *http.Request) { ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) defer cancel() @@ -470,18 +342,6 @@ func (h *handler) CreateAPIKey(w http.ResponseWriter, r *http.Request) { return } - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) - return - } - - userID, err := valuer.NewUUID(claims.UserID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) - return - } - req := new(types.PostableAPIKey) if err := json.NewDecoder(r.Body).Decode(req); err != nil { render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode api key")) @@ -490,7 +350,7 @@ func (h *handler) CreateAPIKey(w http.ResponseWriter, r *http.Request) { apiKey, err := types.NewStorableAPIKey( req.Name, - userID, + valuer.MustNewUUID(claims.UserID), req.Role, req.ExpiresInDays, ) @@ -505,7 +365,7 @@ func (h *handler) CreateAPIKey(w http.ResponseWriter, r *http.Request) { return } - createdApiKey, err := h.module.GetAPIKey(ctx, orgID, apiKey.ID) + createdApiKey, err := h.module.GetAPIKey(ctx, valuer.MustNewUUID(claims.OrgID), apiKey.ID) if err != nil { render.Error(w, err) return @@ -525,13 +385,7 @@ func (h *handler) ListAPIKeys(w http.ResponseWriter, r *http.Request) { return } - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) - return - } - - apiKeys, err := h.module.ListAPIKeys(ctx, orgID) + apiKeys, err := h.module.ListAPIKeys(ctx, valuer.MustNewUUID(claims.OrgID)) if err != nil { render.Error(w, err) return @@ -562,18 +416,6 @@ func (h *handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) { return } - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) - return - } - - userID, err := valuer.NewUUID(claims.UserID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) - return - } - req := types.StorableAPIKey{} if err := json.NewDecoder(r.Body).Decode(&req); err != nil { render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode api key")) @@ -588,25 +430,25 @@ func (h *handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) { } //get the API Key - existingAPIKey, err := h.module.GetAPIKey(ctx, orgID, id) + existingAPIKey, err := h.module.GetAPIKey(ctx, valuer.MustNewUUID(claims.OrgID), id) if err != nil { render.Error(w, err) return } // get the user - createdByUser, err := h.module.GetUserByID(ctx, orgID.String(), existingAPIKey.UserID.String()) + createdByUser, err := h.getter.Get(ctx, existingAPIKey.UserID) if err != nil { render.Error(w, err) return } - if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) { + if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) { 
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked")) return } - err = h.module.UpdateAPIKey(ctx, id, &req, userID) + err = h.module.UpdateAPIKey(ctx, id, &req, valuer.MustNewUUID(claims.UserID)) if err != nil { render.Error(w, err) return @@ -632,141 +474,29 @@ func (h *handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) { return } - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) - return - } - - userID, err := valuer.NewUUID(claims.UserID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) - return - } - //get the API Key - existingAPIKey, err := h.module.GetAPIKey(ctx, orgID, id) + existingAPIKey, err := h.module.GetAPIKey(ctx, valuer.MustNewUUID(claims.OrgID), id) if err != nil { render.Error(w, err) return } // get the user - createdByUser, err := h.module.GetUserByID(ctx, orgID.String(), existingAPIKey.UserID.String()) + createdByUser, err := h.getter.Get(ctx, existingAPIKey.UserID) if err != nil { render.Error(w, err) return } - if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) { + if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) { render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked")) return } - if err := h.module.RevokeAPIKey(ctx, id, userID); err != nil { + if err := h.module.RevokeAPIKey(ctx, id, valuer.MustNewUUID(claims.UserID)); err != nil { render.Error(w, err) return } render.Success(w, http.StatusNoContent, nil) } - -func (h *handler) CreateDomain(rw http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - req := types.GettableOrgDomain{} - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - render.Error(rw, err) - return - } - - if err := req.ValidNew(); err != nil { - render.Error(rw, err) - return - } - - err := h.module.CreateDomain(ctx, &req) - if err != nil { - render.Error(rw, err) - return - } - - render.Success(rw, http.StatusAccepted, req) -} - -func (h *handler) DeleteDomain(rw http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - domainIdStr := mux.Vars(r)["id"] - domainId, err := uuid.Parse(domainIdStr) - if err != nil { - render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid domain id")) - return - } - - err = h.module.DeleteDomain(ctx, domainId) - if err != nil { - render.Error(rw, err) - return - } - - render.Success(rw, http.StatusNoContent, nil) -} - -func (h *handler) ListDomains(rw http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - claims, err := authtypes.ClaimsFromContext(ctx) - if err != nil { - render.Error(rw, err) - return - } - - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid")) - return - } - - domains, err := h.module.ListDomains(r.Context(), orgID) - if err != nil { - render.Error(rw, err) - return - } - - render.Success(rw, http.StatusOK, domains) -} - -func (h *handler) UpdateDomain(rw 
http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - domainIdStr := mux.Vars(r)["id"] - domainId, err := uuid.Parse(domainIdStr) - if err != nil { - render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid domain id")) - return - } - - req := types.GettableOrgDomain{StorableOrgDomain: types.StorableOrgDomain{ID: domainId}} - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - render.Error(rw, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "unable to unmarshal the payload")) - return - } - - req.ID = domainId - if err := req.Valid(nil); err != nil { - render.Error(rw, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request")) - } - - err = h.module.UpdateDomain(ctx, &req) - if err != nil { - render.Error(rw, err) - return - } - - render.Success(rw, http.StatusNoContent, nil) -} diff --git a/pkg/modules/user/impluser/module.go b/pkg/modules/user/impluser/module.go index dd7a782ebc..b83a4e136b 100644 --- a/pkg/modules/user/impluser/module.go +++ b/pkg/modules/user/impluser/module.go @@ -3,7 +3,6 @@ package impluser import ( "context" "fmt" - "net/url" "slices" "strings" "time" @@ -14,19 +13,17 @@ import ( "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/modules/organization" root "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/query-service/constants" + "github.com/SigNoz/signoz/pkg/tokenizer" "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/emailtypes" "github.com/SigNoz/signoz/pkg/valuer" - "github.com/google/uuid" "golang.org/x/text/cases" "golang.org/x/text/language" ) type Module struct { store types.UserStore - jwt *authtypes.JWT + tokenizer tokenizer.Tokenizer emailing emailing.Emailing settings factory.ScopedProviderSettings orgSetter organization.Setter @@ -34,11 +31,11 @@ type Module struct { } // This module is a WIP, don't take inspiration from this. 
-func NewModule(store types.UserStore, jwt *authtypes.JWT, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, analytics analytics.Analytics) root.Module { +func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, analytics analytics.Analytics) root.Module { settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser") return &Module{ store: store, - jwt: jwt, + tokenizer: tokenizer, emailing: emailing, settings: settings, orgSetter: orgSetter, @@ -46,9 +43,46 @@ func NewModule(store types.UserStore, jwt *authtypes.JWT, emailing emailing.Emai } } +func (m *Module) AcceptInvite(ctx context.Context, token string, password string) (*types.User, error) { + invite, err := m.store.GetInviteByToken(ctx, token) + if err != nil { + return nil, err + } + + user, err := types.NewUser(invite.Name, invite.Email, invite.Role, invite.OrgID) + if err != nil { + return nil, err + } + + factorPassword, err := types.NewFactorPassword(password, user.ID.StringValue()) + if err != nil { + return nil, err + } + + err = m.CreateUser(ctx, user, root.WithFactorPassword(factorPassword)) + if err != nil { + return nil, err + } + + if err := m.DeleteInvite(ctx, invite.OrgID.String(), invite.ID); err != nil { + return nil, err + } + + return user, nil +} + +func (m *Module) GetInviteByToken(ctx context.Context, token string) (*types.Invite, error) { + invite, err := m.store.GetInviteByToken(ctx, token) + if err != nil { + return nil, err + } + + return invite, nil +} + // CreateBulk implements invite.Module. -func (m *Module) CreateBulkInvite(ctx context.Context, orgID, userID string, bulkInvites *types.PostableBulkInviteRequest) ([]*types.Invite, error) { - creator, err := m.GetUserByID(ctx, orgID, userID) +func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, bulkInvites *types.PostableBulkInviteRequest) ([]*types.Invite, error) { + creator, err := m.store.GetUser(ctx, userID) if err != nil { return nil, err } @@ -57,16 +91,17 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID, userID string, bul for _, invite := range bulkInvites.Invites { // check if user exists - existingUser, err := m.GetUserByEmailInOrg(ctx, orgID, invite.Email) + existingUser, err := m.store.GetUserByEmailAndOrgID(ctx, invite.Email, orgID) if err != nil && !errors.Ast(err, errors.TypeNotFound) { return nil, err } + if existingUser != nil { return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email") } // Check if an invite already exists - existingInvite, err := m.GetInviteByEmailInOrg(ctx, orgID, invite.Email) + existingInvite, err := m.store.GetInviteByEmailAndOrgID(ctx, invite.Email, orgID) if err != nil && !errors.Ast(err, errors.TypeNotFound) { return nil, err } @@ -79,10 +114,11 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID, userID string, bul return nil, err } - newInvite, err := types.NewInvite(orgID, role.String(), invite.Name, invite.Email) + newInvite, err := types.NewInvite(invite.Name, role, orgID, invite.Email) if err != nil { return nil, err } + newInvite.InviteLink = fmt.Sprintf("%s/signup?token=%s", invite.FrontendBaseUrl, newInvite.Token) invites = append(invites, newInvite) } @@ -93,7 +129,7 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID, userID string, bul } for i := 
0; i < len(invites); i++ { - m.analytics.TrackUser(ctx, orgID, creator.ID.String(), "Invite Sent", map[string]any{"invitee_email": invites[i].Email, "invitee_role": invites[i].Role}) + m.analytics.TrackUser(ctx, orgID.String(), creator.ID.String(), "Invite Sent", map[string]any{"invitee_email": invites[i].Email, "invitee_role": invites[i].Role}) // if the frontend base url is not provided, we don't send the email if bulkInvites.Invites[i].FrontendBaseUrl == "" { @@ -101,7 +137,7 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID, userID string, bul continue } - if err := m.emailing.SendHTML(ctx, invites[i].Email, "You are invited to join a team in SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{ + if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You are invited to join a team in SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{ "CustomerName": invites[i].Name, "InviterName": creator.DisplayName, "InviterEmail": creator.Email, @@ -123,14 +159,6 @@ func (m *Module) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) return m.store.DeleteInvite(ctx, orgID, id) } -func (m *Module) GetInviteByToken(ctx context.Context, token string) (*types.GettableInvite, error) { - return m.store.GetInviteByToken(ctx, token) -} - -func (m *Module) GetInviteByEmailInOrg(ctx context.Context, orgID string, email string) (*types.Invite, error) { - return m.store.GetInviteByEmailInOrg(ctx, orgID, email) -} - func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error { createUserOpts := root.NewCreateUserOptions(opts...) @@ -151,39 +179,19 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts .. } traitsOrProperties := types.NewTraitsFromUser(input) - module.analytics.IdentifyUser(ctx, input.OrgID, input.ID.String(), traitsOrProperties) - module.analytics.TrackUser(ctx, input.OrgID, input.ID.String(), "User Created", traitsOrProperties) + module.analytics.IdentifyUser(ctx, input.OrgID.String(), input.ID.String(), traitsOrProperties) + module.analytics.TrackUser(ctx, input.OrgID.String(), input.ID.String(), "User Created", traitsOrProperties) return nil } -func (m *Module) GetUserByID(ctx context.Context, orgID string, id string) (*types.GettableUser, error) { - return m.store.GetUserByID(ctx, orgID, id) -} - -func (m *Module) GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*types.GettableUser, error) { - return m.store.GetUserByEmailInOrg(ctx, orgID, email) -} - -func (m *Module) GetUsersByEmail(ctx context.Context, email string) ([]*types.GettableUser, error) { - return m.store.GetUsersByEmail(ctx, email) -} - -func (m *Module) GetUsersByRoleInOrg(ctx context.Context, orgID string, role types.Role) ([]*types.GettableUser, error) { - return m.store.GetUsersByRoleInOrg(ctx, orgID, role) -} - -func (m *Module) ListUsers(ctx context.Context, orgID string) ([]*types.GettableUser, error) { - return m.store.ListUsers(ctx, orgID) -} - -func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error) { - existingUser, err := m.GetUserByID(ctx, orgID, id) +func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error) { + existingUser, err := m.store.GetUser(ctx, valuer.MustNewUUID(id)) if err != nil { return nil, err } - requestor, err := m.GetUserByID(ctx, orgID, updatedBy) + requestor, err := m.store.GetUser(ctx, 
valuer.MustNewUUID(updatedBy)) if err != nil { return nil, err } @@ -197,14 +205,14 @@ func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user * user.Role = existingUser.Role } - if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin.String() { + if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin { return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles") } // Make sure that th e request is not demoting the last admin user. // also an admin user can only change role of their own or other user - if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin.String() { - adminUsers, err := m.GetUsersByRoleInOrg(ctx, orgID, types.RoleAdmin) + if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin { + adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID) if err != nil { return nil, err } @@ -222,51 +230,55 @@ func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user * } traits := types.NewTraitsFromUser(updatedUser) - m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traits) + m.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits) traits["updated_by"] = updatedBy - m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Updated", traits) + m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits) // if the role is updated then send an email if existingUser.Role != updatedUser.Role { - if err := m.emailing.SendHTML(ctx, existingUser.Email, "Your Role Has Been Updated in SigNoz", emailtypes.TemplateNameUpdateRole, map[string]any{ + if err := m.emailing.SendHTML(ctx, existingUser.Email.String(), "Your Role Has Been Updated in SigNoz", emailtypes.TemplateNameUpdateRole, map[string]any{ "CustomerName": existingUser.DisplayName, "UpdatedByEmail": requestor.Email, - "OldRole": cases.Title(language.English).String(strings.ToLower(existingUser.Role)), - "NewRole": cases.Title(language.English).String(strings.ToLower(updatedUser.Role)), + "OldRole": cases.Title(language.English).String(strings.ToLower(existingUser.Role.String())), + "NewRole": cases.Title(language.English).String(strings.ToLower(updatedUser.Role.String())), }); err != nil { m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err) } } + if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil { + return nil, err + } + return updatedUser, nil } -func (m *Module) DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error { - user, err := m.store.GetUserByID(ctx, orgID, id) +func (m *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error { + user, err := m.store.GetUser(ctx, valuer.MustNewUUID(id)) if err != nil { return err } - if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email)) { + if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) { return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted") } // don't allow to delete the last admin user - adminUsers, err := m.GetUsersByRoleInOrg(ctx, orgID, types.RoleAdmin) + adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID) if err != nil { return err } - if len(adminUsers) == 1 && user.Role == types.RoleAdmin.String() { + if len(adminUsers) == 1 && user.Role == types.RoleAdmin { return 
errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin") } - if err := m.store.DeleteUser(ctx, orgID, user.ID.StringValue()); err != nil { + if err := m.store.DeleteUser(ctx, orgID.String(), user.ID.StringValue()); err != nil { return err } - m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Deleted", map[string]any{ + m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Deleted", map[string]any{ "deleted_by": deletedBy, }) @@ -343,247 +355,37 @@ func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, ol return err } - return module.store.UpdatePassword(ctx, password) + if err := module.store.UpdatePassword(ctx, password); err != nil { + return err + } + + return module.tokenizer.DeleteTokensByUserID(ctx, userID) } -func (m *Module) GetAuthenticatedUser(ctx context.Context, orgID, email, password, refreshToken string) (*types.User, error) { - if refreshToken != "" { - // parse the refresh token - claims, err := m.jwt.Claims(refreshToken) - if err != nil { +func (module *Module) GetOrCreateUser(ctx context.Context, user *types.User, opts ...root.CreateUserOption) (*types.User, error) { + existingUser, err := module.store.GetUserByEmailAndOrgID(ctx, user.Email, user.OrgID) + if err != nil { + if !errors.Ast(err, errors.TypeNotFound) { return nil, err } - - user, err := m.store.GetUserByID(ctx, claims.OrgID, claims.UserID) - if err != nil { - return nil, err - } - return &user.User, nil } - var dbUser *types.User - // when the orgID is not provided we login if the user exists in just one org - users, err := m.store.GetUsersByEmail(ctx, email) + if existingUser != nil { + return existingUser, nil + } + + newUser, err := types.NewUser(user.DisplayName, user.Email, user.Role, user.OrgID) if err != nil { return nil, err } - if len(users) == 0 { - return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "user with email: %s does not exist", email) - } else if len(users) == 1 { - dbUser = &users[0].User - } else { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "please provide an orgID") - } - - existingPassword, err := m.store.GetPasswordByUserID(ctx, dbUser.ID) + err = module.CreateUser(ctx, newUser, opts...) if err != nil { return nil, err } - if !existingPassword.Equals(password) { - return nil, errors.New(errors.TypeInvalidInput, types.ErrCodeIncorrectPassword, "password is incorrect") - } + return newUser, nil - return dbUser, nil -} - -func (m *Module) LoginPrecheck(ctx context.Context, orgID, email, sourceUrl string) (*types.GettableLoginPrecheck, error) { - // assume user is valid unless proven otherwise and assign default values for rest of the fields - resp := &types.GettableLoginPrecheck{IsUser: true, CanSelfRegister: false, SSO: false, SSOUrl: "", SSOError: ""} - - // check if email is a valid user - users, err := m.GetUsersByEmail(ctx, email) - if err != nil { - return nil, err - } - - if len(users) == 0 { - resp.IsUser = false - } - - if len(users) > 1 { - resp.SelectOrg = true - resp.Orgs = make([]string, len(users)) - for i, user := range users { - resp.Orgs[i] = user.OrgID - } - } - - // TODO(Nitya): in multitenancy this should use orgId as well. 
- orgDomain, err := m.GetAuthDomainByEmail(ctx, email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - return nil, err - } - - if orgDomain != nil && orgDomain.SsoEnabled { - // this is to allow self registration - resp.IsUser = true - - // saml is enabled for this domain, lets prepare sso url - if sourceUrl == "" { - sourceUrl = constants.GetDefaultSiteURL() - } - - // parse source url that generated the login request - var err error - escapedUrl, _ := url.QueryUnescape(sourceUrl) - siteUrl, err := url.Parse(escapedUrl) - if err != nil { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse referer") - } - - // build Idp URL that will authenticat the user - // the front-end will redirect user to this url - resp.SSOUrl, err = orgDomain.BuildSsoUrl(siteUrl) - if err != nil { - m.settings.Logger().ErrorContext(ctx, "failed to prepare saml request for domain", "domain", orgDomain.Name, "error", err) - return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to prepare saml request for domain") - } - - // set SSO to true, as the url is generated correctly - resp.SSO = true - } - - return resp, nil -} - -func (m *Module) GetJWTForUser(ctx context.Context, user *types.User) (types.GettableUserJwt, error) { - role, err := types.NewRole(user.Role) - if err != nil { - return types.GettableUserJwt{}, err - } - - accessJwt, accessClaims, err := m.jwt.AccessToken(user.OrgID, user.ID.String(), user.Email, role) - if err != nil { - return types.GettableUserJwt{}, err - } - - refreshJwt, refreshClaims, err := m.jwt.RefreshToken(user.OrgID, user.ID.String(), user.Email, role) - if err != nil { - return types.GettableUserJwt{}, err - } - - return types.GettableUserJwt{ - AccessJwt: accessJwt, - RefreshJwt: refreshJwt, - AccessJwtExpiry: accessClaims.ExpiresAt.Unix(), - RefreshJwtExpiry: refreshClaims.ExpiresAt.Unix(), - }, nil -} - -func (m *Module) CreateUserForSAMLRequest(ctx context.Context, email string) (*types.User, error) { - // get auth domain from email domain - _, err := m.GetAuthDomainByEmail(ctx, email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - return nil, err - } - - // get name from email - parts := strings.Split(email, "@") - if len(parts) < 2 { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format") - } - name := parts[0] - - defaultOrgID, err := m.store.GetDefaultOrgID(ctx) - if err != nil { - return nil, err - } - - user, err := types.NewUser(name, email, types.RoleViewer.String(), defaultOrgID) - if err != nil { - return nil, err - } - - err = m.CreateUser(ctx, user) - if err != nil { - return nil, err - } - - return user, nil - -} - -func (m *Module) PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (string, error) { - users, err := m.GetUsersByEmail(ctx, email) - if err != nil { - m.settings.Logger().ErrorContext(ctx, "failed to get user with email received from auth provider", "error", err) - return "", err - } - user := &types.User{} - - if len(users) == 0 { - newUser, err := m.CreateUserForSAMLRequest(ctx, email) - user = newUser - if err != nil { - m.settings.Logger().ErrorContext(ctx, "failed to create user with email received from auth provider", "error", err) - return "", err - } - } else { - user = &users[0].User - } - - tokenStore, err := m.GetJWTForUser(ctx, user) - if err != nil { - m.settings.Logger().ErrorContext(ctx, "failed to generate token for SSO login user", "error", err) - return "", err - } - - return 
fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s", - redirectUri, - tokenStore.AccessJwt, - user.ID, - tokenStore.RefreshJwt), nil -} - -func (m *Module) CanUsePassword(ctx context.Context, email string) (bool, error) { - domain, err := m.GetAuthDomainByEmail(ctx, email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - return false, err - } - - if domain != nil && domain.SsoEnabled { - // sso is enabled, check if the user has admin role - users, err := m.GetUsersByEmail(ctx, email) - if err != nil { - return false, err - } - - if len(users) == 0 { - return false, errors.New(errors.TypeNotFound, errors.CodeNotFound, "user not found") - } - - if users[0].Role != types.RoleAdmin.String() { - return false, errors.New(errors.TypeForbidden, errors.CodeForbidden, "auth method not supported") - } - - } - - return true, nil -} - -func (m *Module) GetAuthDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, error) { - - if email == "" { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required") - } - - components := strings.Split(email, "@") - if len(components) < 2 { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format") - } - - domain, err := m.store.GetDomainByName(ctx, components[1]) - if err != nil { - return nil, err - } - - gettableDomain := &types.GettableOrgDomain{StorableOrgDomain: *domain} - if err := gettableDomain.LoadConfig(domain.Data); err != nil { - return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to load domain config") - } - return gettableDomain, nil } func (m *Module) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error { @@ -606,28 +408,8 @@ func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UU return m.store.RevokeAPIKey(ctx, id, removedByUserID) } -func (m *Module) GetDomainFromSsoResponse(ctx context.Context, url *url.URL) (*types.GettableOrgDomain, error) { - return m.store.GetDomainFromSsoResponse(ctx, url) -} - -func (m *Module) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) error { - return m.store.CreateDomain(ctx, domain) -} - -func (m *Module) DeleteDomain(ctx context.Context, id uuid.UUID) error { - return m.store.DeleteDomain(ctx, id) -} - -func (m *Module) ListDomains(ctx context.Context, orgID valuer.UUID) ([]*types.GettableOrgDomain, error) { - return m.store.ListDomains(ctx, orgID) -} - -func (m *Module) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) error { - return m.store.UpdateDomain(ctx, domain) -} - -func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email string, passwd string) (*types.User, error) { - user, err := types.NewUser(name, email, types.RoleAdmin.String(), organization.ID.StringValue()) +func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, passwd string) (*types.User, error) { + user, err := types.NewUser(name, email, types.RoleAdmin, organization.ID) if err != nil { return nil, err } @@ -656,14 +438,14 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O return user, nil } -func (m *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) { +func (module *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) { stats := make(map[string]any) - count, err := m.store.CountByOrgID(ctx, orgID) + count, err := 
module.store.CountByOrgID(ctx, orgID) if err == nil { stats["user.count"] = count } - count, err = m.store.CountAPIKeyByOrgID(ctx, orgID) + count, err = module.store.CountAPIKeyByOrgID(ctx, orgID) if err == nil { stats["factor.api_key.count"] = count } diff --git a/pkg/modules/user/impluser/store.go b/pkg/modules/user/impluser/store.go index 02278787eb..9ba427d5de 100644 --- a/pkg/modules/user/impluser/store.go +++ b/pkg/modules/user/impluser/store.go @@ -3,18 +3,15 @@ package impluser import ( "context" "database/sql" - "encoding/json" - "net/url" "sort" - "strings" "time" "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/valuer" - "github.com/google/uuid" "github.com/uptrace/bun" ) @@ -52,17 +49,18 @@ func (store *store) DeleteInvite(ctx context.Context, orgID string, id valuer.UU return nil } -// GetInviteByEmailInOrg implements types.InviteStore. -func (store *store) GetInviteByEmailInOrg(ctx context.Context, orgID string, email string) (*types.Invite, error) { +func (store *store) GetInviteByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*types.Invite, error) { invite := new(types.Invite) - err := store.sqlstore.BunDB().NewSelect(). + + err := store. + sqlstore. + BunDBCtx(ctx).NewSelect(). Model(invite). Where("email = ?", email). Where("org_id = ?", orgID). Scan(ctx) - if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with email: %s does not exist in org: %s", email, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with email %s does not exist in org %s", email, orgID) } return invite, nil @@ -70,26 +68,19 @@ func (store *store) GetInviteByEmailInOrg(ctx context.Context, orgID string, ema func (store *store) GetInviteByToken(ctx context.Context, token string) (*types.GettableInvite, error) { invite := new(types.Invite) - err := store.sqlstore.BunDB().NewSelect(). + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). Model(invite). Where("token = ?", token). Scan(ctx) - if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with token: %s does not exist", token) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite does not exist", token) } - orgName, err := store.getOrgNameByID(ctx, invite.OrgID) - if err != nil { - return nil, err - } - - gettableInvite := &types.GettableInvite{ - Invite: *invite, - Organization: orgName, - } - - return gettableInvite, nil + return invite, nil } func (store *store) ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error) { @@ -131,117 +122,94 @@ func (store *store) CreateUser(ctx context.Context, user *types.User) error { return nil } -func (store *store) GetDefaultOrgID(ctx context.Context) (string, error) { - org := new(types.Organization) - err := store.sqlstore.BunDB().NewSelect(). - Model(org). - Limit(1). +func (store *store) GetUsersByEmail(ctx context.Context, email valuer.Email) ([]*types.User, error) { + var users []*types.User + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(&users). + Where("email = ?", email). 
Scan(ctx) if err != nil { - return "", store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "default org does not exist") + return nil, err } - return org.ID.String(), nil + + return users, nil } -// this is temporary function, we plan to remove this in the next PR. -func (store *store) getOrgNameByID(ctx context.Context, orgID string) (string, error) { - org := new(types.Organization) - err := store.sqlstore.BunDB().NewSelect(). - Model(org). - Where("id = ?", orgID). - Scan(ctx) - if err != nil { - return "", store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "org with id: %s does not exist", orgID) - } - return org.DisplayName, nil -} - -func (store *store) GetUserByID(ctx context.Context, orgID string, id string) (*types.GettableUser, error) { +func (store *store) GetUser(ctx context.Context, id valuer.UUID) (*types.User, error) { user := new(types.User) - err := store.sqlstore.BunDB().NewSelect(). + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(user). + Where("id = ?", id). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id %s does not exist", id) + } + + return user, nil +} + +func (store *store) GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*types.User, error) { + user := new(types.User) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). Model(user). Where("org_id = ?", orgID). Where("id = ?", id). Scan(ctx) if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with id: %s does not exist in org: %s", id, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id %s does not exist", id) } - // remove this in next PR - orgName, err := store.getOrgNameByID(ctx, orgID) - if err != nil { - return nil, err - } - - return &types.GettableUser{User: *user, Organization: orgName}, nil + return user, nil } -func (store *store) GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*types.GettableUser, error) { +func (store *store) GetUserByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*types.User, error) { user := new(types.User) - err := store.sqlstore.BunDB().NewSelect(). + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). Model(user). Where("org_id = ?", orgID). Where("email = ?", email). Scan(ctx) if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with email: %s does not exist in org: %s", email, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with email %s does not exist in org %s", email, orgID) } - // remove this in next PR - orgName, err := store.getOrgNameByID(ctx, orgID) - if err != nil { - return nil, err - } - - return &types.GettableUser{User: *user, Organization: orgName}, nil + return user, nil } -func (store *store) GetUsersByEmail(ctx context.Context, email string) ([]*types.GettableUser, error) { - users := new([]*types.User) - err := store.sqlstore.BunDB().NewSelect(). - Model(users). - Where("email = ?", email). 
- Scan(ctx) - if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with email: %s does not exist", email) - } +func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role, orgID valuer.UUID) ([]*types.User, error) { + var users []*types.User - // remove this in next PR - usersWithOrg := []*types.GettableUser{} - for _, user := range *users { - orgName, err := store.getOrgNameByID(ctx, user.OrgID) - if err != nil { - return nil, err - } - usersWithOrg = append(usersWithOrg, &types.GettableUser{User: *user, Organization: orgName}) - } - return usersWithOrg, nil -} - -func (store *store) GetUsersByRoleInOrg(ctx context.Context, orgID string, role types.Role) ([]*types.GettableUser, error) { - users := new([]*types.User) - err := store.sqlstore.BunDB().NewSelect(). - Model(users). + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(&users). Where("org_id = ?", orgID). Where("role = ?", role). Scan(ctx) - if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with role: %s does not exist in org: %s", role, orgID) - } - - // remove this in next PR - orgName, err := store.getOrgNameByID(ctx, orgID) if err != nil { return nil, err } - usersWithOrg := []*types.GettableUser{} - for _, user := range *users { - usersWithOrg = append(usersWithOrg, &types.GettableUser{User: *user, Organization: orgName}) - } - return usersWithOrg, nil + + return users, nil } -func (store *store) UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error) { +func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User) (*types.User, error) { user.UpdatedAt = time.Now() _, err := store.sqlstore.BunDB().NewUpdate(). Model(user). @@ -252,35 +220,29 @@ func (store *store) UpdateUser(ctx context.Context, orgID string, id string, use Where("org_id = ?", orgID). Exec(ctx) if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with id: %s does not exist in org: %s", id, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist in org: %s", id, orgID) } return user, nil } -func (store *store) ListUsers(ctx context.Context, orgID string) ([]*types.GettableUser, error) { +func (store *store) ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.GettableUser, error) { users := []*types.User{} - err := store.sqlstore.BunDB().NewSelect(). + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). Model(&users). Where("org_id = ?", orgID). 
Scan(ctx) - if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "users with org id: %s does not exist", orgID) - } - - // remove this in next PR - orgName, err := store.getOrgNameByID(ctx, orgID) if err != nil { return nil, err } - usersWithOrg := []*types.GettableUser{} - for _, user := range users { - usersWithOrg = append(usersWithOrg, &types.GettableUser{User: *user, Organization: orgName}) - } - return usersWithOrg, nil + + return users, nil } func (store *store) DeleteUser(ctx context.Context, orgID string, id string) error { - tx, err := store.sqlstore.BunDB().BeginTx(ctx, nil) if err != nil { return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to start transaction") @@ -338,6 +300,15 @@ func (store *store) DeleteUser(ctx context.Context, orgID string, id string) err return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete user") } + // delete tokens + _, err = tx.NewDelete(). + Model(new(authtypes.StorableToken)). + Where("user_id = ?", id). + Exec(ctx) + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete tokens") + } + err = tx.Commit() if err != nil { return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to commit transaction") @@ -463,20 +434,6 @@ func (store *store) UpdatePassword(ctx context.Context, factorPassword *types.Fa return nil } -func (store *store) GetDomainByName(ctx context.Context, name string) (*types.StorableOrgDomain, error) { - domain := new(types.StorableOrgDomain) - err := store.sqlstore.BunDB().NewSelect(). - Model(domain). - Where("name = ?", name). - Limit(1). - Scan(ctx) - - if err != nil { - return nil, errors.Wrapf(err, errors.TypeNotFound, errors.CodeNotFound, "failed to get domain from name") - } - return domain, nil -} - // --- API KEY --- func (store *store) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error { _, err := store.sqlstore.BunDB().NewInsert(). @@ -582,203 +539,6 @@ func (store *store) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*type return flattenedAPIKeys[0], nil } -// GetDomainFromSsoResponse uses relay state received from IdP to fetch -// user domain. The domain is further used to process validity of the response. -// when sending login request to IdP we send relay state as URL (site url) -// with domainId or domainName as query parameter. 
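The reworked store.DeleteUser above also clears the user's opaque tokens inside the same transaction as the user row, so a failed commit leaves neither half-deleted. A minimal sketch of that token delete in isolation; the helper and package names are placeholders, while authtypes.StorableToken and the bun query calls are the ones used in the hunk.

// Hedged sketch, not part of the patch: the token-cleanup delete on its own.
package storesketch

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/uptrace/bun"
)

// deleteTokensForUser removes every stored opaque token owned by userID using
// the caller's DB handle or transaction, so it can be committed or rolled back
// together with the user-row delete.
func deleteTokensForUser(ctx context.Context, db bun.IDB, userID string) error {
	_, err := db.NewDelete().
		Model(new(authtypes.StorableToken)).
		Where("user_id = ?", userID).
		Exec(ctx)
	return err
}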
-func (store *store) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error) { - // derive domain id from relay state now - var domainIdStr string - var domainNameStr string - var domain *types.GettableOrgDomain - - for k, v := range relayState.Query() { - if k == "domainId" && len(v) > 0 { - domainIdStr = strings.Replace(v[0], ":", "-", -1) - } - if k == "domainName" && len(v) > 0 { - domainNameStr = v[0] - } - } - - if domainIdStr != "" { - domainId, err := uuid.Parse(domainIdStr) - if err != nil { - return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse domainID from IdP response") - } - - domain, err = store.GetDomain(ctx, domainId) - if err != nil { - return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to find domain from domainID received in IDP response") - } - } - - if domainNameStr != "" { - domainFromDB, err := store.GetGettableDomainByName(ctx, domainNameStr) - domain = domainFromDB - if err != nil { - return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to find domain from domainName received in IDP response") - } - } - if domain != nil { - return domain, nil - } - - return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to find domain received in IDP response") -} - -// GetDomainByName returns org domain for a given domain name -func (store *store) GetGettableDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, error) { - - stored := types.StorableOrgDomain{} - err := store.sqlstore.BunDB().NewSelect(). - Model(&stored). - Where("name = ?", name). - Limit(1). - Scan(ctx) - if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "domain with name: %s doesn't exist", name) - } - - domain := &types.GettableOrgDomain{StorableOrgDomain: stored} - if err := domain.LoadConfig(stored.Data); err != nil { - return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to load domain config") - } - return domain, nil -} - -// GetDomain returns org domain for a given domain id -func (store *store) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, error) { - - stored := types.StorableOrgDomain{} - err := store.sqlstore.BunDB().NewSelect(). - Model(&stored). - Where("id = ?", id). - Limit(1). - Scan(ctx) - - if err != nil { - return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "domain with id: %s doesn't exist", id) - } - - domain := &types.GettableOrgDomain{StorableOrgDomain: stored} - if err := domain.LoadConfig(stored.Data); err != nil { - return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to load domain config") - } - return domain, nil -} - -// ListDomains gets the list of auth domains by org id -func (store *store) ListDomains(ctx context.Context, orgId valuer.UUID) ([]*types.GettableOrgDomain, error) { - domains := make([]*types.GettableOrgDomain, 0) - stored := []types.StorableOrgDomain{} - err := store.sqlstore.BunDB().NewSelect(). - Model(&stored). - Where("org_id = ?", orgId). 
- Scan(ctx) - - if err != nil { - if err == sql.ErrNoRows { - return domains, nil - } - return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to list domains") - } - - for _, s := range stored { - domain := types.GettableOrgDomain{StorableOrgDomain: s} - if err := domain.LoadConfig(s.Data); err != nil { - store.settings.Logger.ErrorContext(ctx, "ListDomains() failed", "error", err) - } - domains = append(domains, &domain) - } - - return domains, nil -} - -// CreateDomain creates a new auth domain -func (store *store) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) error { - - if domain.ID == uuid.Nil { - domain.ID = uuid.New() - } - - if domain.OrgID == "" || domain.Name == "" { - return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "domain creation failed, missing fields: OrgID, Name") - } - - configJson, err := json.Marshal(domain) - if err != nil { - return errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "domain creation failed") - } - - storableDomain := types.StorableOrgDomain{ - ID: domain.ID, - Name: domain.Name, - OrgID: domain.OrgID, - Data: string(configJson), - TimeAuditable: types.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()}, - } - - _, err = store.sqlstore.BunDB().NewInsert(). - Model(&storableDomain). - Exec(ctx) - if err != nil { - return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "domain creation failed") - } - return nil -} - -// UpdateDomain updates stored config params for a domain -func (store *store) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) error { - if domain.ID == uuid.Nil { - return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "missing domain id") - } - configJson, err := json.Marshal(domain) - if err != nil { - return errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to update domain") - } - - storableDomain := &types.StorableOrgDomain{ - ID: domain.ID, - Name: domain.Name, - OrgID: domain.OrgID, - Data: string(configJson), - TimeAuditable: types.TimeAuditable{UpdatedAt: time.Now()}, - } - - _, err = store.sqlstore.BunDB().NewUpdate(). - Model(storableDomain). - Column("data", "updated_at"). - WherePK(). - Exec(ctx) - - if err != nil { - return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to update domain") - } - - return nil -} - -// DeleteDomain deletes an org domain -func (store *store) DeleteDomain(ctx context.Context, id uuid.UUID) error { - - if id == uuid.Nil { - return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "missing domain id") - } - - storableDomain := &types.StorableOrgDomain{ID: id} - _, err := store.sqlstore.BunDB().NewDelete(). - Model(storableDomain). - WherePK(). - Exec(ctx) - - if err != nil { - return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete domain") - } - - return nil -} - func (store *store) CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) { user := new(types.User) @@ -819,3 +579,20 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er return cb(ctx) }) } + +func (store *store) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) { + users := []*types.User{} + err := store. + sqlstore. + BunDB(). + NewSelect(). + Model(&users). + Where("email = ?", email). + Where("org_id IN (?)", bun.In(orgIDs)). 
+ Scan(ctx) + if err != nil { + return nil, err + } + + return users, nil +} diff --git a/pkg/modules/user/option.go b/pkg/modules/user/option.go index ec26694177..3d17146fb9 100644 --- a/pkg/modules/user/option.go +++ b/pkg/modules/user/option.go @@ -1,6 +1,9 @@ package user -import "github.com/SigNoz/signoz/pkg/types" +import ( + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/valuer" +) type createUserOptions struct { FactorPassword *types.FactorPassword @@ -25,3 +28,27 @@ func NewCreateUserOptions(opts ...CreateUserOption) *createUserOptions { return o } + +type authenticateOptions struct { + OrgID valuer.UUID +} + +type AuthenticateOption func(*authenticateOptions) + +func WithOrgID(orgID valuer.UUID) AuthenticateOption { + return func(o *authenticateOptions) { + o.OrgID = orgID + } +} + +func NewAuthenticateOptions(opts ...AuthenticateOption) *authenticateOptions { + o := &authenticateOptions{ + OrgID: valuer.UUID{}, + } + + for _, opt := range opts { + opt(o) + } + + return o +} diff --git a/pkg/modules/user/user.go b/pkg/modules/user/user.go index cc7653068c..29bea96ed5 100644 --- a/pkg/modules/user/user.go +++ b/pkg/modules/user/user.go @@ -3,21 +3,22 @@ package user import ( "context" "net/http" - "net/url" "github.com/SigNoz/signoz/pkg/statsreporter" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/valuer" - "github.com/google/uuid" ) type Module interface { // Creates the organization and the first user of that organization. - CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email string, password string) (*types.User, error) + CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, password string) (*types.User, error) // Creates a user and sends an analytics event. CreateUser(ctx context.Context, user *types.User, opts ...CreateUserOption) error + // Get or create a user. If a user with the same email and orgID already exists, it returns the existing user. + GetOrCreateUser(ctx context.Context, user *types.User, opts ...CreateUserOption) (*types.User, error) + // Get or Create a reset password token for a user. If the password does not exist, a new one is randomly generated and inserted. The function // is idempotent and can be called multiple times. GetOrCreateResetPasswordToken(ctx context.Context, userID valuer.UUID) (*types.ResetPasswordToken, error) @@ -29,38 +30,15 @@ type Module interface { // Updates password of user to the new password. It also deletes all reset password tokens for the user. 
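The AuthenticateOption helpers introduced in option.go above follow the functional-options pattern: callers pass zero or more options and the callee folds them into one struct via NewAuthenticateOptions. A minimal sketch of a consumer; the function and package names are placeholders, and only WithOrgID and NewAuthenticateOptions come from the patch.

// Hedged sketch, not part of the patch: consuming the authenticate options.
package optionsketch

import (
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// scopeForAuthentication folds the variadic options into the requested org
// scope; with no options it returns the zero UUID, i.e. no explicit org.
func scopeForAuthentication(opts ...user.AuthenticateOption) valuer.UUID {
	return user.NewAuthenticateOptions(opts...).OrgID
}

// Example call site for an org-scoped login:
//
//	orgScope := scopeForAuthentication(user.WithOrgID(orgID))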
UpdatePassword(ctx context.Context, userID valuer.UUID, oldPassword string, password string) error + UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error) + DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error + // invite - CreateBulkInvite(ctx context.Context, orgID, userID string, bulkInvites *types.PostableBulkInviteRequest) ([]*types.Invite, error) + CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, bulkInvites *types.PostableBulkInviteRequest) ([]*types.Invite, error) ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error - GetInviteByToken(ctx context.Context, token string) (*types.GettableInvite, error) - GetInviteByEmailInOrg(ctx context.Context, orgID string, email string) (*types.Invite, error) - GetUserByID(ctx context.Context, orgID string, id string) (*types.GettableUser, error) - GetUsersByEmail(ctx context.Context, email string) ([]*types.GettableUser, error) // public function - GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*types.GettableUser, error) - GetUsersByRoleInOrg(ctx context.Context, orgID string, role types.Role) ([]*types.GettableUser, error) - ListUsers(ctx context.Context, orgID string) ([]*types.GettableUser, error) - UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error) - DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error - - // login - GetAuthenticatedUser(ctx context.Context, orgID, email, password, refreshToken string) (*types.User, error) - GetJWTForUser(ctx context.Context, user *types.User) (types.GettableUserJwt, error) - CreateUserForSAMLRequest(ctx context.Context, email string) (*types.User, error) - LoginPrecheck(ctx context.Context, orgID, email, sourceUrl string) (*types.GettableLoginPrecheck, error) - - // sso - PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (string, error) - CanUsePassword(ctx context.Context, email string) (bool, error) - - // Auth Domain - GetAuthDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, error) - GetDomainFromSsoResponse(ctx context.Context, url *url.URL) (*types.GettableOrgDomain, error) - - ListDomains(ctx context.Context, orgID valuer.UUID) ([]*types.GettableOrgDomain, error) - CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) error - UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) error - DeleteDomain(ctx context.Context, id uuid.UUID) error + AcceptInvite(ctx context.Context, token string, password string) (*types.User, error) + GetInviteByToken(ctx context.Context, token string) (*types.Invite, error) // API KEY CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error @@ -75,6 +53,24 @@ type Module interface { type Getter interface { // Get gets the users based on the given id ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error) + + // Get users by email. + GetUsersByEmail(context.Context, valuer.Email) ([]*types.User, error) + + // Get user by orgID and id. + GetByOrgIDAndID(context.Context, valuer.UUID, valuer.UUID) (*types.User, error) + + // Get user by id. + Get(context.Context, valuer.UUID) (*types.User, error) + + // List users by email and org ids. + ListUsersByEmailAndOrgIDs(context.Context, valuer.Email, []valuer.UUID) ([]*types.User, error) + + // Count users by org id. 
+ CountByOrgID(context.Context, valuer.UUID) (int64, error) + + // Get factor password by user id. + GetFactorPasswordByUserID(context.Context, valuer.UUID) (*types.FactorPassword, error) } type Handler interface { @@ -86,15 +82,11 @@ type Handler interface { DeleteInvite(http.ResponseWriter, *http.Request) CreateBulkInvite(http.ResponseWriter, *http.Request) - GetUser(http.ResponseWriter, *http.Request) - GetCurrentUserFromJWT(http.ResponseWriter, *http.Request) ListUsers(http.ResponseWriter, *http.Request) UpdateUser(http.ResponseWriter, *http.Request) DeleteUser(http.ResponseWriter, *http.Request) - - // Login - LoginPrecheck(http.ResponseWriter, *http.Request) - Login(http.ResponseWriter, *http.Request) + GetUser(http.ResponseWriter, *http.Request) + GetMyUser(http.ResponseWriter, *http.Request) // Reset Password GetResetPasswordToken(http.ResponseWriter, *http.Request) @@ -106,9 +98,4 @@ type Handler interface { ListAPIKeys(http.ResponseWriter, *http.Request) UpdateAPIKey(http.ResponseWriter, *http.Request) RevokeAPIKey(http.ResponseWriter, *http.Request) - - ListDomains(http.ResponseWriter, *http.Request) - CreateDomain(http.ResponseWriter, *http.Request) - UpdateDomain(http.ResponseWriter, *http.Request) - DeleteDomain(http.ResponseWriter, *http.Request) } diff --git a/pkg/query-service/app/cloudintegrations/controller_test.go b/pkg/query-service/app/cloudintegrations/controller_test.go deleted file mode 100644 index 345a2599b3..0000000000 --- a/pkg/query-service/app/cloudintegrations/controller_test.go +++ /dev/null @@ -1,372 +0,0 @@ -package cloudintegrations - -import ( - "context" - "testing" - "time" - - "github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest" - "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager" - "github.com/SigNoz/signoz/pkg/analytics/analyticstest" - "github.com/SigNoz/signoz/pkg/emailing/emailingtest" - "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/modules/organization" - "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/query-service/model" - "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/sharder/noopsharder" - "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/google/uuid" - "github.com/stretchr/testify/require" -) - -func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) { - require := require.New(t) - sqlStore := utils.NewQueryServiceDBForTests(t) - controller, err := NewController(sqlStore) - require.NoError(err) - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.TODO(), providerSettings, sharder.Config{}) - require.NoError(err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlStore), sharder) - notificationManager := nfmanagertest.NewMock() - require.NoError(err) - alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{Provider: "signoz", Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, sqlStore, orgGetter, notificationManager) - require.NoError(err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := 
emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - require.Nil(apiErr) - - // should be able to generate connection url for - // same account id again with updated config - testAccountConfig1 := types.AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}} - resp1, apiErr := controller.GenerateConnectionUrl( - context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{ - AccountConfig: testAccountConfig1, - AgentConfig: SigNozAgentConfig{Region: "us-east-2"}, - }, - ) - require.Nil(apiErr) - require.NotEmpty(resp1.ConnectionUrl) - require.NotEmpty(resp1.AccountId) - - testAccountId := resp1.AccountId - account, apiErr := controller.accountsRepo.get( - context.TODO(), user.OrgID, "aws", testAccountId, - ) - require.Nil(apiErr) - require.Equal(testAccountConfig1, *account.Config) - - testAccountConfig2 := types.AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}} - resp2, apiErr := controller.GenerateConnectionUrl( - context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{ - AccountId: &testAccountId, - AccountConfig: testAccountConfig2, - AgentConfig: SigNozAgentConfig{Region: "us-east-2"}, - }, - ) - require.Nil(apiErr) - require.Equal(testAccountId, resp2.AccountId) - - account, apiErr = controller.accountsRepo.get( - context.TODO(), user.OrgID, "aws", testAccountId, - ) - require.Nil(apiErr) - require.Equal(testAccountConfig2, *account.Config) -} - -func TestAgentCheckIns(t *testing.T) { - require := require.New(t) - sqlStore := utils.NewQueryServiceDBForTests(t) - controller, err := NewController(sqlStore) - require.NoError(err) - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.TODO(), providerSettings, sharder.Config{}) - require.NoError(err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlStore), sharder) - notificationManager := nfmanagertest.NewMock() - require.NoError(err) - alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{Provider: "signoz", Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, sqlStore, orgGetter, notificationManager) - require.NoError(err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - require.Nil(apiErr) - - // An agent should be able to check in from a cloud account even - // if no connection url was requested (no account with agent's account id exists) - testAccountId1 := uuid.NewString() - testCloudAccountId1 := "546311234" - resp1, err := controller.CheckInAsAgent( - context.TODO(), user.OrgID, "aws", AgentCheckInRequest{ - ID: testAccountId1, - AccountID: testCloudAccountId1, - }, - ) - require.Nil(err) - require.Equal(testAccountId1, resp1.AccountId) - require.Equal(testCloudAccountId1, resp1.CloudAccountId) - - // The agent should not be able to check in with a different - // cloud account id for the same account. 
- testCloudAccountId2 := "99999999" - _, err = controller.CheckInAsAgent( - context.TODO(), user.OrgID, "aws", AgentCheckInRequest{ - ID: testAccountId1, - AccountID: testCloudAccountId2, - }, - ) - require.NotNil(err) - - // The agent should not be able to check-in with a particular cloud account id - // if another connected AccountRecord exists for same cloud account - // i.e. there can't be 2 connected account records for the same cloud account id - // at any point in time. - existingConnected, apiErr := controller.accountsRepo.getConnectedCloudAccount( - context.TODO(), user.OrgID, "aws", testCloudAccountId1, - ) - require.Nil(apiErr) - require.NotNil(existingConnected) - require.Equal(testCloudAccountId1, *existingConnected.AccountID) - require.Nil(existingConnected.RemovedAt) - - testAccountId2 := uuid.NewString() - _, err = controller.CheckInAsAgent( - context.TODO(), user.OrgID, "aws", AgentCheckInRequest{ - ID: testAccountId2, - AccountID: testCloudAccountId1, - }, - ) - require.NotNil(err) - - // After disconnecting existing account record, the agent should be able to - // connected for a particular cloud account id - _, _ = controller.DisconnectAccount( - context.TODO(), user.OrgID, "aws", testAccountId1, - ) - - existingConnected, apiErr = controller.accountsRepo.getConnectedCloudAccount( - context.TODO(), user.OrgID, "aws", testCloudAccountId1, - ) - require.Nil(existingConnected) - require.NotNil(apiErr) - require.Equal(model.ErrorNotFound, apiErr.Type()) - - _, err = controller.CheckInAsAgent( - context.TODO(), user.OrgID, "aws", AgentCheckInRequest{ - ID: testAccountId2, - AccountID: testCloudAccountId1, - }, - ) - require.Nil(err) - - // should be able to keep checking in - _, err = controller.CheckInAsAgent( - context.TODO(), user.OrgID, "aws", AgentCheckInRequest{ - ID: testAccountId2, - AccountID: testCloudAccountId1, - }, - ) - require.Nil(err) -} - -func TestCantDisconnectNonExistentAccount(t *testing.T) { - require := require.New(t) - sqlStore := utils.NewQueryServiceDBForTests(t) - controller, err := NewController(sqlStore) - require.NoError(err) - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.TODO(), providerSettings, sharder.Config{}) - require.NoError(err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlStore), sharder) - notificationManager := nfmanagertest.NewMock() - require.NoError(err) - alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{Provider: "signoz", Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, sqlStore, orgGetter, notificationManager) - require.NoError(err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - require.Nil(apiErr) - - // Attempting to disconnect a non-existent account should return error - account, apiErr := controller.DisconnectAccount( - context.TODO(), user.OrgID, "aws", uuid.NewString(), - ) - require.NotNil(apiErr) - require.Equal(model.ErrorNotFound, apiErr.Type()) - require.Nil(account) -} - -func TestConfigureService(t *testing.T) { - require := require.New(t) - sqlStore := utils.NewQueryServiceDBForTests(t) - controller, err := NewController(sqlStore) - require.NoError(err) - - 
providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.TODO(), providerSettings, sharder.Config{}) - require.NoError(err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlStore), sharder) - notificationManager := nfmanagertest.NewMock() - require.NoError(err) - alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{Provider: "signoz", Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, sqlStore, orgGetter, notificationManager) - require.NoError(err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - require.Nil(apiErr) - - // create a connected account - testCloudAccountId := "546311234" - testConnectedAccount := makeTestConnectedAccount(t, user.OrgID, controller, testCloudAccountId) - require.Nil(testConnectedAccount.RemovedAt) - require.NotEmpty(testConnectedAccount.AccountID) - require.Equal(testCloudAccountId, *testConnectedAccount.AccountID) - - // should start out without any service config - svcListResp, apiErr := controller.ListServices( - context.TODO(), user.OrgID, "aws", &testCloudAccountId, - ) - require.Nil(apiErr) - - testSvcId := svcListResp.Services[0].Id - require.Nil(svcListResp.Services[0].Config) - - svcDetails, err := controller.GetServiceDetails( - context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId, - ) - require.Nil(err) - require.Equal(testSvcId, svcDetails.Id) - require.Nil(svcDetails.Config) - - // should be able to configure a service for a connected account - testSvcConfig := types.CloudServiceConfig{ - Metrics: &types.CloudServiceMetricsConfig{ - Enabled: true, - }, - } - updateSvcConfigResp, err := controller.UpdateServiceConfig( - context.TODO(), user.OrgID, "aws", testSvcId, &UpdateServiceConfigRequest{ - CloudAccountId: testCloudAccountId, - Config: testSvcConfig, - }, - ) - require.Nil(err) - require.Equal(testSvcId, updateSvcConfigResp.Id) - require.Equal(testSvcConfig, updateSvcConfigResp.Config) - - svcDetails, err = controller.GetServiceDetails( - context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId, - ) - require.Nil(err) - require.Equal(testSvcId, svcDetails.Id) - require.Equal(testSvcConfig, *svcDetails.Config) - - svcListResp, apiErr = controller.ListServices( - context.TODO(), user.OrgID, "aws", &testCloudAccountId, - ) - require.Nil(apiErr) - for _, svc := range svcListResp.Services { - if svc.Id == testSvcId { - require.Equal(testSvcConfig, *svc.Config) - } - } - - // should not be able to configure service after cloud account has been disconnected - _, apiErr = controller.DisconnectAccount( - context.TODO(), user.OrgID, "aws", testConnectedAccount.ID.StringValue(), - ) - require.Nil(apiErr) - - _, err = controller.UpdateServiceConfig( - context.TODO(), user.OrgID, "aws", testSvcId, - &UpdateServiceConfigRequest{ - CloudAccountId: testCloudAccountId, - Config: testSvcConfig, - }, - ) - require.NotNil(err) - - // should not be able to configure a service for a cloud account id that is not connected yet - _, err = controller.UpdateServiceConfig( - context.TODO(), user.OrgID, "aws", testSvcId, - &UpdateServiceConfigRequest{ - CloudAccountId: "9999999999", - Config: testSvcConfig, - }, - ) - 
require.NotNil(err) - - // should not be able to set config for an unsupported service - _, err = controller.UpdateServiceConfig( - context.TODO(), user.OrgID, "aws", "bad-service", &UpdateServiceConfigRequest{ - CloudAccountId: testCloudAccountId, - Config: testSvcConfig, - }, - ) - require.NotNil(err) - -} - -func makeTestConnectedAccount(t *testing.T, orgId string, controller *Controller, cloudAccountId string) *types.CloudIntegration { - require := require.New(t) - - // a check in from SigNoz agent creates or updates a connected account. - testAccountId := uuid.NewString() - resp, apiErr := controller.CheckInAsAgent( - context.TODO(), orgId, "aws", AgentCheckInRequest{ - ID: testAccountId, - AccountID: cloudAccountId, - }, - ) - require.Nil(apiErr) - require.Equal(testAccountId, resp.AccountId) - require.Equal(cloudAccountId, resp.CloudAccountId) - - acc, err := controller.accountsRepo.get(context.TODO(), orgId, "aws", resp.AccountId) - require.Nil(err) - return acc -} - -func createTestUser(organizationModule organization.Setter, userModule user.Module) (*types.User, *model.ApiError) { - // Create a test user for auth - ctx := context.Background() - organization := types.NewOrganization("test") - err := organizationModule.Create(ctx, organization) - if err != nil { - return nil, model.InternalError(err) - } - - random, err := utils.RandomHex(3) - if err != nil { - return nil, model.InternalError(err) - } - - user, err := types.NewUser("test", random+"test@test.com", types.RoleAdmin.String(), organization.ID.StringValue()) - if err != nil { - return nil, model.InternalError(err) - } - err = userModule.CreateUser(ctx, user) - if err != nil { - return nil, model.InternalError(err) - } - return user, nil -} diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 03785f7473..781b7685c7 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -4,16 +4,15 @@ import ( "bytes" "context" "database/sql" - "encoding/base64" "encoding/json" "errors" "fmt" + "github.com/SigNoz/signoz/pkg/modules/thirdpartyapi" "io" "math" "net/http" - "net/url" "regexp" "slices" "sort" @@ -258,11 +257,12 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) { } // if the first org with the first user is created then the setup is complete. 
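The hunk that follows replaces the full ListUsers call with a count from the user Getter when deciding whether initial setup has completed. A minimal sketch of that check in isolation; the setupCompleted helper and its package are placeholders, while CountByOrgID is the Getter method used in the hunk.

// Hedged sketch, not part of the patch: the setup-completed check on its own.
package handlersketch

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// setupCompleted reports whether the org already has at least one user, which
// is the signal the API handler uses to mark initial setup as done.
func setupCompleted(ctx context.Context, getter user.Getter, orgID valuer.UUID) (bool, error) {
	count, err := getter.CountByOrgID(ctx, orgID)
	if err != nil {
		return false, err
	}

	return count > 0, nil
}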
if len(orgs) == 1 { - users, err := opts.Signoz.Modules.User.ListUsers(context.Background(), orgs[0].ID.String()) + count, err := opts.Signoz.Modules.UserGetter.CountByOrgID(context.Background(), orgs[0].ID) if err != nil { zap.L().Warn("unexpected error while fetch user count while initializing base api handler", zap.Error(err)) } - if len(users) > 0 { + + if count > 0 { aH.SetupCompleted = true } } @@ -585,14 +585,17 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { router.HandleFunc("/api/v1/invite/accept", am.OpenAccess(aH.Signoz.Handlers.User.AcceptInvite)).Methods(http.MethodPost) router.HandleFunc("/api/v1/register", am.OpenAccess(aH.registerUser)).Methods(http.MethodPost) - router.HandleFunc("/api/v1/login", am.OpenAccess(aH.Signoz.Handlers.User.Login)).Methods(http.MethodPost) - router.HandleFunc("/api/v1/loginPrecheck", am.OpenAccess(aH.Signoz.Handlers.User.LoginPrecheck)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/complete/google", am.OpenAccess(aH.receiveGoogleAuth)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/login", am.OpenAccess(aH.Signoz.Handlers.Session.DeprecatedCreateSessionByEmailPassword)).Methods(http.MethodPost) + router.HandleFunc("/api/v2/sessions/email_password", am.OpenAccess(aH.Signoz.Handlers.Session.CreateSessionByEmailPassword)).Methods(http.MethodPost) + router.HandleFunc("/api/v2/sessions/context", am.OpenAccess(aH.Signoz.Handlers.Session.GetSessionContext)).Methods(http.MethodGet) + router.HandleFunc("/api/v2/sessions/rotate", am.OpenAccess(aH.Signoz.Handlers.Session.RotateSession)).Methods(http.MethodPost) + router.HandleFunc("/api/v2/sessions", am.OpenAccess(aH.Signoz.Handlers.Session.DeleteSession)).Methods(http.MethodDelete) + router.HandleFunc("/api/v1/complete/google", am.OpenAccess(aH.Signoz.Handlers.Session.CreateSessionByGoogleCallback)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/domains", am.AdminAccess(aH.Signoz.Handlers.User.ListDomains)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/domains", am.AdminAccess(aH.Signoz.Handlers.User.CreateDomain)).Methods(http.MethodPost) - router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(aH.Signoz.Handlers.User.UpdateDomain)).Methods(http.MethodPut) - router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(aH.Signoz.Handlers.User.DeleteDomain)).Methods(http.MethodDelete) + router.HandleFunc("/api/v1/domains", am.AdminAccess(aH.Signoz.Handlers.AuthDomain.List)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/domains", am.AdminAccess(aH.Signoz.Handlers.AuthDomain.Create)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(aH.Signoz.Handlers.AuthDomain.Update)).Methods(http.MethodPut) + router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(aH.Signoz.Handlers.AuthDomain.Delete)).Methods(http.MethodDelete) router.HandleFunc("/api/v1/pats", am.AdminAccess(aH.Signoz.Handlers.User.CreateAPIKey)).Methods(http.MethodPost) router.HandleFunc("/api/v1/pats", am.AdminAccess(aH.Signoz.Handlers.User.ListAPIKeys)).Methods(http.MethodGet) @@ -600,7 +603,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(aH.Signoz.Handlers.User.RevokeAPIKey)).Methods(http.MethodDelete) router.HandleFunc("/api/v1/user", am.AdminAccess(aH.Signoz.Handlers.User.ListUsers)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/user/me", am.OpenAccess(aH.Signoz.Handlers.User.GetCurrentUserFromJWT)).Methods(http.MethodGet) + 
router.HandleFunc("/api/v1/user/me", am.OpenAccess(aH.Signoz.Handlers.User.GetMyUser)).Methods(http.MethodGet) router.HandleFunc("/api/v1/user/{id}", am.SelfAccess(aH.Signoz.Handlers.User.GetUser)).Methods(http.MethodGet) router.HandleFunc("/api/v1/user/{id}", am.SelfAccess(aH.Signoz.Handlers.User.UpdateUser)).Methods(http.MethodPut) router.HandleFunc("/api/v1/user/{id}", am.AdminAccess(aH.Signoz.Handlers.User.DeleteUser)).Methods(http.MethodDelete) @@ -2084,74 +2087,6 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { aH.Respond(w, user) } -func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) { - ssoError := []byte("Login failed. Please contact your system administrator") - dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError))) - base64.StdEncoding.Encode(dst, ssoError) - - http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther) -} - -// receiveGoogleAuth completes google OAuth response and forwards a request -// to front-end to sign user in -func (aH *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) { - redirectUri := constants.GetDefaultSiteURL() - ctx := context.Background() - - q := r.URL.Query() - if errType := q.Get("error"); errType != "" { - zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description"))) - http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO"), http.StatusMovedPermanently) - return - } - - relayState := q.Get("state") - zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState)) - - parsedState, err := url.Parse(relayState) - if err != nil || relayState == "" { - zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) - handleSsoError(w, r, redirectUri) - return - } - - // upgrade redirect url from the relay state for better accuracy - redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login") - - // fetch domain by parsing relay state. - domain, err := aH.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState) - if err != nil { - handleSsoError(w, r, redirectUri) - return - } - - // now that we have domain, use domain to fetch sso settings. 
- // prepare google callback handler using parsedState - - // which contains redirect URL (front-end endpoint) - callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState) - if err != nil { - zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - identity, err := callbackHandler.HandleCallback(r) - if err != nil { - zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - nextPage, err := aH.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, identity.Email) - if err != nil { - zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - http.Redirect(w, r, nextPage, http.StatusSeeOther) -} - func (aH *APIHandler) HandleError(w http.ResponseWriter, err error, statusCode int) bool { if err == nil { return false diff --git a/pkg/query-service/app/integrations/manager_test.go b/pkg/query-service/app/integrations/manager_test.go deleted file mode 100644 index 43f6706061..0000000000 --- a/pkg/query-service/app/integrations/manager_test.go +++ /dev/null @@ -1,105 +0,0 @@ -package integrations - -import ( - "context" - "testing" - "time" - - "github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest" - "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager" - "github.com/SigNoz/signoz/pkg/analytics/analyticstest" - "github.com/SigNoz/signoz/pkg/emailing/emailingtest" - "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/sharder/noopsharder" - "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/types/authtypes" - _ "github.com/mattn/go-sqlite3" - "github.com/stretchr/testify/require" -) - -func TestIntegrationLifecycle(t *testing.T) { - require := require.New(t) - - mgr, store := NewTestIntegrationsManager(t) - ctx := context.Background() - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, _ := noopsharder.New(context.TODO(), providerSettings, sharder.Config{}) - orgGetter := implorganization.NewGetter(implorganization.NewStore(store), sharder) - notificationManager := nfmanagertest.NewMock() - alertmanager, _ := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{Provider: "signoz", Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, store, orgGetter, notificationManager) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(store, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - if apiErr != nil { - t.Fatalf("could not create test user: %v", apiErr) - } - - ii := true - installedIntegrationsFilter := &IntegrationsFilter{ - IsInstalled: &ii, - } - - installedIntegrations, apiErr := mgr.ListIntegrations( - ctx, user.OrgID, installedIntegrationsFilter, - ) - require.Nil(apiErr) - 
require.Equal([]IntegrationsListItem{}, installedIntegrations) - - availableIntegrations, apiErr := mgr.ListIntegrations(ctx, user.OrgID, nil) - require.Nil(apiErr) - require.Equal(2, len(availableIntegrations)) - require.False(availableIntegrations[0].IsInstalled) - require.False(availableIntegrations[1].IsInstalled) - - testIntegrationConfig := map[string]interface{}{} - installed, apiErr := mgr.InstallIntegration( - ctx, user.OrgID, availableIntegrations[1].Id, testIntegrationConfig, - ) - require.Nil(apiErr) - require.Equal(installed.Id, availableIntegrations[1].Id) - - integration, apiErr := mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id) - require.Nil(apiErr) - require.Equal(integration.Id, availableIntegrations[1].Id) - require.NotNil(integration.Installation) - - installedIntegrations, apiErr = mgr.ListIntegrations( - ctx, user.OrgID, installedIntegrationsFilter, - ) - require.Nil(apiErr) - require.Equal(1, len(installedIntegrations)) - require.Equal(availableIntegrations[1].Id, installedIntegrations[0].Id) - - availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil) - require.Nil(apiErr) - require.Equal(2, len(availableIntegrations)) - require.False(availableIntegrations[0].IsInstalled) - require.True(availableIntegrations[1].IsInstalled) - - apiErr = mgr.UninstallIntegration(ctx, user.OrgID, installed.Id) - require.Nil(apiErr) - - integration, apiErr = mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id) - require.Nil(apiErr) - require.Equal(integration.Id, availableIntegrations[1].Id) - require.Nil(integration.Installation) - - installedIntegrations, apiErr = mgr.ListIntegrations( - ctx, user.OrgID, installedIntegrationsFilter, - ) - require.Nil(apiErr) - require.Equal(0, len(installedIntegrations)) - - availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil) - require.Nil(apiErr) - require.Equal(2, len(availableIntegrations)) - require.False(availableIntegrations[0].IsInstalled) - require.False(availableIntegrations[1].IsInstalled) -} diff --git a/pkg/query-service/app/integrations/test_utils.go b/pkg/query-service/app/integrations/test_utils.go deleted file mode 100644 index 27ba204b96..0000000000 --- a/pkg/query-service/app/integrations/test_utils.go +++ /dev/null @@ -1,223 +0,0 @@ -package integrations - -import ( - "context" - "slices" - "testing" - - "github.com/SigNoz/signoz/pkg/modules/organization" - "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/query-service/model" - v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" - "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/dashboardtypes" - "github.com/SigNoz/signoz/pkg/types/pipelinetypes" - ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes" -) - -func NewTestIntegrationsManager(t *testing.T) (*Manager, sqlstore.SQLStore) { - testDB := utils.NewQueryServiceDBForTests(t) - - installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB) - if err != nil { - t.Fatalf("could not init sqlite DB for installed integrations: %v", err) - } - - return &Manager{ - availableIntegrationsRepo: &TestAvailableIntegrationsRepo{}, - installedIntegrationsRepo: installedIntegrationsRepo, - }, testDB -} - -func createTestUser(organizationModule organization.Setter, userModule user.Module) (*types.User, *model.ApiError) { - // Create a test user for auth - ctx := context.Background() - organization := 
types.NewOrganization("test") - err := organizationModule.Create(ctx, organization) - if err != nil { - return nil, model.InternalError(err) - } - - random, err := utils.RandomHex(3) - if err != nil { - return nil, model.InternalError(err) - } - - user, err := types.NewUser("test", random+"test@test.com", types.RoleAdmin.String(), organization.ID.StringValue()) - if err != nil { - return nil, model.InternalError(err) - } - - err = userModule.CreateUser(ctx, user) - if err != nil { - return nil, model.InternalError(err) - } - return user, nil -} - -type TestAvailableIntegrationsRepo struct{} - -func (t *TestAvailableIntegrationsRepo) list( - ctx context.Context, -) ([]IntegrationDetails, *model.ApiError) { - return []IntegrationDetails{ - { - IntegrationSummary: IntegrationSummary{ - Id: "test-integration-1", - Title: "Test Integration 1", - Description: "A test integration", - Author: IntegrationAuthor{ - Name: "signoz", - Email: "integrations@signoz.io", - HomePage: "https://signoz.io", - }, - Icon: `data:image/svg+xml;utf8, ... `, - }, - Categories: []string{"testcat1", "testcat2"}, - Overview: "test integration overview", - Configuration: []IntegrationConfigStep{ - { - Title: "Step 1", - Instructions: "Set source attrib on your signals", - }, - }, - DataCollected: DataCollectedForIntegration{ - Logs: []CollectedLogAttribute{}, - Metrics: []CollectedMetric{}, - }, - Assets: IntegrationAssets{ - Logs: LogsAssets{ - Pipelines: []pipelinetypes.PostablePipeline{ - { - Name: "pipeline1", - Alias: "pipeline1", - Enabled: true, - Filter: &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "source", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "nginx", - }, - }, - }, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - }, - }, - Dashboards: []dashboardtypes.StorableDashboardData{}, - Alerts: []ruletypes.PostableRule{}, - }, - ConnectionTests: &IntegrationConnectionTests{ - Logs: &LogsConnectionTest{ - AttributeKey: "source", - AttributeValue: "nginx", - }, - }, - }, { - IntegrationSummary: IntegrationSummary{ - Id: "test-integration-2", - Title: "Test Integration 2", - Description: "Another test integration", - Author: IntegrationAuthor{ - Name: "signoz", - Email: "integrations@signoz.io", - HomePage: "https://signoz.io", - }, - Icon: `data:image/svg+xml;utf8, ... 
`, - }, - Categories: []string{"testcat1", "testcat2"}, - Overview: "test integration overview", - Configuration: []IntegrationConfigStep{ - { - Title: "Step 1", - Instructions: "Set source attrib on your signals", - }, - }, - DataCollected: DataCollectedForIntegration{ - Logs: []CollectedLogAttribute{}, - Metrics: []CollectedMetric{}, - }, - Assets: IntegrationAssets{ - Logs: LogsAssets{ - Pipelines: []pipelinetypes.PostablePipeline{ - { - Name: "pipeline2", - Alias: "pipeline2", - Enabled: true, - Filter: &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "source", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "redis", - }, - }, - }, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - }, - }, - Dashboards: []dashboardtypes.StorableDashboardData{}, - Alerts: []ruletypes.PostableRule{}, - }, - ConnectionTests: &IntegrationConnectionTests{ - Logs: &LogsConnectionTest{ - AttributeKey: "source", - AttributeValue: "nginx", - }, - }, - }, - }, nil -} - -func (t *TestAvailableIntegrationsRepo) get( - ctx context.Context, ids []string, -) (map[string]IntegrationDetails, *model.ApiError) { - availableIntegrations, apiErr := t.list(ctx) - if apiErr != nil { - return nil, apiErr - } - - result := map[string]IntegrationDetails{} - - for _, ai := range availableIntegrations { - if slices.Contains(ids, ai.Id) { - result[ai.Id] = ai - } - } - - return result, nil -} diff --git a/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go b/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go index 913678d0cb..a6e6dcbac4 100644 --- a/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go +++ b/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go @@ -437,7 +437,7 @@ func fieldNotNilCheck(fieldPath string) (string, error) { // should come out to be (attributes.test != nil && attributes.test["a.b"]?.value != nil) collectionNotNilCheck, err := fieldNotNilCheck(parts[0]) if err != nil { - return "", errors.WithAdditional(err, "couldn't generate nil check for %s", parts[0]) + return "", errors.WithAdditionalf(err, "couldn't generate nil check for %s", parts[0]) } // generate nil check for entire path. 
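The one-line change above in `pipelineBuilder.go` swaps the non-formatting error helper for its formatting variant, so the field name is actually interpolated into the additional note instead of being left as a literal `%s` (or appended as a stray argument). The snippet below illustrates the distinction with two local stand-in helpers; it does not use the SigNoz `errors` package itself, whose exact signatures are not shown here.

```go
package main

import "fmt"

// Hypothetical stand-ins for illustration only:
// withAdditional attaches a literal note, withAdditionalf formats it first.
func withAdditional(err error, note string) error {
	return fmt.Errorf("%w (additional: %s)", err, note)
}

func withAdditionalf(err error, format string, args ...any) error {
	return fmt.Errorf("%w (additional: %s)", err, fmt.Sprintf(format, args...))
}

func main() {
	base := fmt.Errorf("unsupported field path")

	// The old call handed format arguments to the non-formatting variant,
	// so the note keeps a literal "%s" instead of the field name.
	fmt.Println(withAdditional(base, "couldn't generate nil check for %s"))

	// The formatting variant interpolates the field name as intended.
	fmt.Println(withAdditionalf(base, "couldn't generate nil check for %s", "attributes.test"))
}
```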
diff --git a/pkg/query-service/app/opamp/config_provider_multitenancy_test.go b/pkg/query-service/app/opamp/config_provider_multitenancy_test.go deleted file mode 100644 index ee8d612169..0000000000 --- a/pkg/query-service/app/opamp/config_provider_multitenancy_test.go +++ /dev/null @@ -1 +0,0 @@ -package opamp diff --git a/pkg/query-service/app/opamp/config_provider_test.go b/pkg/query-service/app/opamp/config_provider_test.go deleted file mode 100644 index 72a40ce84a..0000000000 --- a/pkg/query-service/app/opamp/config_provider_test.go +++ /dev/null @@ -1,332 +0,0 @@ -package opamp - -import ( - "context" - "fmt" - "log/slog" - "testing" - - "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model" - "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/sharder/noopsharder" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types/opamptypes" - "github.com/SigNoz/signoz/pkg/valuer" - "github.com/knadh/koanf" - "github.com/knadh/koanf/parsers/yaml" - "github.com/knadh/koanf/providers/rawbytes" - _ "github.com/mattn/go-sqlite3" - "github.com/open-telemetry/opamp-go/protobufs" - "github.com/pkg/errors" - "github.com/stretchr/testify/require" - "golang.org/x/exp/maps" -) - -func TestOpAMPServerToAgentCommunicationWithConfigProvider(t *testing.T) { - require := require.New(t) - - tb := newTestbed(t) - - orgID, err := utils.GetTestOrgId(tb.sqlStore) - require.Nil(err) - - require.Equal( - 0, len(tb.testConfigProvider.ConfigUpdateSubscribers), - "there should be no agent config subscribers at the start", - ) - tb.StartServer() - require.Equal( - 1, len(tb.testConfigProvider.ConfigUpdateSubscribers), - "Opamp server should have subscribed to updates from config provider after being started", - ) - - // Server should always respond with a RemoteConfig when an agent connects. 
- // Even if there are no recommended changes to the agent's initial config - require.False(tb.testConfigProvider.HasRecommendations()) - agent1Conn := &MockOpAmpConnection{} - agent1Id, err := valuer.GenerateUUID().MarshalBinary() - require.Nil(err) - // get orgId from the db - tb.opampServer.OnMessage( - context.Background(), - agent1Conn, - &protobufs.AgentToServer{ - InstanceUid: agent1Id, - EffectiveConfig: &protobufs.EffectiveConfig{ - ConfigMap: initialAgentConf(), - }, - }, - ) - lastAgent1Msg := agent1Conn.LatestMsgFromServer() - require.NotNil( - lastAgent1Msg, - "Server should always send a remote config to the agent when it connects", - ) - require.Equal( - RemoteConfigBody(lastAgent1Msg), - string(initialAgentConf().ConfigMap[model.CollectorConfigFilename].Body), - ) - - tb.testConfigProvider.ZPagesEndpoint = "localhost:55555" - require.True(tb.testConfigProvider.HasRecommendations()) - agent2IdUUID := valuer.GenerateUUID() - agent2Id, err := agent2IdUUID.MarshalBinary() - require.Nil(err) - agent2Conn := &MockOpAmpConnection{} - tb.opampServer.OnMessage( - context.Background(), - agent2Conn, - &protobufs.AgentToServer{ - InstanceUid: agent2Id, - EffectiveConfig: &protobufs.EffectiveConfig{ - ConfigMap: initialAgentConf(), - }, - }, - ) - lastAgent2Msg := agent2Conn.LatestMsgFromServer() - require.NotNil( - lastAgent2Msg, - "server should recommend a config to agent when it connects", - ) - - recommendedEndpoint, err := GetStringValueFromYaml( - []byte(RemoteConfigBody(lastAgent2Msg)), "extensions.zpages.endpoint", - ) - require.Nil(err) - require.Equal( - tb.testConfigProvider.ZPagesEndpoint, recommendedEndpoint, - "server should send recommended config to agent when it connects", - ) - - agent2Conn.ClearMsgsFromServer() - tb.opampServer.OnMessage(context.Background(), agent2Conn, &protobufs.AgentToServer{ - InstanceUid: agent2Id, - EffectiveConfig: &protobufs.EffectiveConfig{ - ConfigMap: NewAgentConfigMap( - []byte(RemoteConfigBody(lastAgent2Msg)), - ), - }, - RemoteConfigStatus: &protobufs.RemoteConfigStatus{ - Status: protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED, - LastRemoteConfigHash: lastAgent2Msg.RemoteConfig.ConfigHash, - }, - }) - expectedConfId := tb.testConfigProvider.ZPagesEndpoint - require.True(tb.testConfigProvider.HasReportedDeploymentStatus(orgID, expectedConfId, agent2IdUUID.String()), - "Server should report deployment success to config provider on receiving update from agent.", - ) - require.True(tb.testConfigProvider.ReportedDeploymentStatuses[orgID.String()+expectedConfId][agent2IdUUID.String()]) - require.Nil( - agent2Conn.LatestMsgFromServer(), - "Server should not recommend a RemoteConfig if agent is already running it.", - ) - - // Server should rollout latest config to all agents when notified of a change by config provider - agent1Conn.ClearMsgsFromServer() - agent2Conn.ClearMsgsFromServer() - tb.testConfigProvider.ZPagesEndpoint = "localhost:66666" - tb.testConfigProvider.NotifySubscribersOfChange() - for _, agentConn := range []*MockOpAmpConnection{agent1Conn, agent2Conn} { - lastMsg := agentConn.LatestMsgFromServer() - - recommendedEndpoint, err := GetStringValueFromYaml( - []byte(RemoteConfigBody(lastMsg)), "extensions.zpages.endpoint", - ) - require.Nil(err) - require.Equal(tb.testConfigProvider.ZPagesEndpoint, recommendedEndpoint) - } - - lastAgent2Msg = agent2Conn.LatestMsgFromServer() - tb.opampServer.OnMessage(context.Background(), agent2Conn, &protobufs.AgentToServer{ - InstanceUid: agent2Id, - RemoteConfigStatus: 
&protobufs.RemoteConfigStatus{ - Status: protobufs.RemoteConfigStatuses_RemoteConfigStatuses_FAILED, - LastRemoteConfigHash: lastAgent2Msg.RemoteConfig.ConfigHash, - }, - }) - expectedConfId = tb.testConfigProvider.ZPagesEndpoint - require.True(tb.testConfigProvider.HasReportedDeploymentStatus(orgID, expectedConfId, agent2IdUUID.String()), - "Server should report deployment failure to config provider on receiving update from agent.", - ) - require.False(tb.testConfigProvider.ReportedDeploymentStatuses[orgID.String()+expectedConfId][agent2IdUUID.String()]) - - lastAgent1Msg = agent1Conn.LatestMsgFromServer() - agent1Conn.ClearMsgsFromServer() - response := tb.opampServer.OnMessage(context.Background(), agent1Conn, &protobufs.AgentToServer{ - InstanceUid: agent1Id, - RemoteConfigStatus: &protobufs.RemoteConfigStatus{ - Status: protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED, - LastRemoteConfigHash: lastAgent1Msg.RemoteConfig.ConfigHash, - }, - }) - require.Nil(response.RemoteConfig) - require.Nil( - agent1Conn.LatestMsgFromServer(), - "server should not recommend a config if agent is reporting back with status on a broadcasted config", - ) - - require.Equal(1, len(tb.testConfigProvider.ConfigUpdateSubscribers)) - tb.opampServer.Stop() - require.Equal( - 0, len(tb.testConfigProvider.ConfigUpdateSubscribers), - "Opamp server should have unsubscribed to config provider updates after shutdown", - ) -} - -func TestOpAMPServerAgentLimit(t *testing.T) { - require := require.New(t) - - tb := newTestbed(t) - // Create 51 agents and check if the first one gets deleted - var agentConnections []*MockOpAmpConnection - var agentIds [][]byte - for i := 0; i < 51; i++ { - agentConn := &MockOpAmpConnection{} - agentIdUUID := valuer.GenerateUUID() - agentId, err := agentIdUUID.MarshalBinary() - require.Nil(err) - agentIds = append(agentIds, agentId) - tb.opampServer.OnMessage( - context.Background(), - agentConn, - &protobufs.AgentToServer{ - InstanceUid: agentId, - EffectiveConfig: &protobufs.EffectiveConfig{ - ConfigMap: initialAgentConf(), - }, - }, - ) - agentConnections = append(agentConnections, agentConn) - } - - // Perform a DB level check to ensure the first agent is removed - count, err := tb.sqlStore.BunDB().NewSelect(). - Model(new(opamptypes.StorableAgent)). - Where("agent_id = ?", agentIds[0]). - Count(context.Background()) - require.Nil(err, "Error querying the database for agent count") - require.Equal(0, count, "First agent should be removed from the database after exceeding the limit of 50 agents") - - // verify there are 50 agents in the db - count, err = tb.sqlStore.BunDB().NewSelect(). - Model(new(opamptypes.StorableAgent)). 
- Count(context.Background()) - require.Nil(err, "Error querying the database for agent count") - require.Equal(50, count, "There should be 50 agents in the database") - - // Check if the 51st agent received a config - lastAgentConn := agentConnections[50] - lastAgentMsg := lastAgentConn.LatestMsgFromServer() - require.NotNil( - lastAgentMsg, - "51st agent should receive a remote config from the server", - ) - - tb.opampServer.Stop() - require.Equal( - 0, len(tb.testConfigProvider.ConfigUpdateSubscribers), - "Opamp server should have unsubscribed to config provider updates after shutdown", - ) -} - -type testbed struct { - testConfigProvider *MockAgentConfigProvider - opampServer *Server - t *testing.T - sqlStore sqlstore.SQLStore -} - -func newTestbed(t *testing.T) *testbed { - testDB := utils.NewQueryServiceDBForTests(t) - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.Background(), providerSettings, sharder.Config{}) - require.Nil(t, err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(testDB), sharder) - model.Init(testDB, slog.Default(), orgGetter) - testConfigProvider := NewMockAgentConfigProvider() - opampServer := InitializeServer(nil, testConfigProvider, instrumentationtest.New()) - - // create a test org - err = utils.CreateTestOrg(t, testDB) - if err != nil { - t.Fatalf("could not create test org: %v", err) - } - - return &testbed{ - testConfigProvider: testConfigProvider, - opampServer: opampServer, - t: t, - sqlStore: testDB, - } -} - -func (tb *testbed) StartServer() { - testListenPath := GetAvailableLocalAddress() - err := tb.opampServer.Start(testListenPath) - require.Nil(tb.t, err, "should be able to start opamp server") -} - -// Test helper -func GetStringValueFromYaml( - serializedYaml []byte, path string, -) (string, error) { - if len(serializedYaml) < 1 { - return "", fmt.Errorf("yaml data is empty") - } - - k := koanf.New(".") - err := k.Load(rawbytes.Provider(serializedYaml), yaml.Parser()) - if err != nil { - return "", errors.Wrap(err, "could not unmarshal collector config") - } - - return k.String("extensions.zpages.endpoint"), nil -} - -// Returns body of a ServerToAgent.RemoteConfig or "" -func RemoteConfigBody(msg *protobufs.ServerToAgent) string { - if msg == nil { - return "" - } - - collectorConfFiles := msg.RemoteConfig.Config.ConfigMap - if len(collectorConfFiles) < 1 { - return "" - } - return string(maps.Values(collectorConfFiles)[0].Body) -} - -func NewAgentConfigMap(body []byte) *protobufs.AgentConfigMap { - return &protobufs.AgentConfigMap{ - ConfigMap: map[string]*protobufs.AgentConfigFile{ - model.CollectorConfigFilename: { - Body: body, - ContentType: "text/yaml", - }, - }, - } - -} - -func initialAgentConf() *protobufs.AgentConfigMap { - return NewAgentConfigMap( - []byte(` - receivers: - otlp: - processors: - batch: - exporters: - otlp: - service: - pipelines: - logs: - receivers: [otlp] - processors: [batch] - exporters: [otlp] - `), - ) -} diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index 2549810be5..5b8455be15 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -7,6 +7,7 @@ import ( "net" "net/http" _ "net/http/pprof" // http profiler + "slices" "github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore" @@ -30,7 +31,6 @@ import ( "github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/telemetrystore" - 
"github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/web" "github.com/rs/cors" "github.com/soheilhy/cmux" @@ -41,6 +41,8 @@ import ( "github.com/SigNoz/signoz/pkg/query-service/interfaces" "github.com/SigNoz/signoz/pkg/query-service/rules" "github.com/SigNoz/signoz/pkg/query-service/utils" + "go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux" + "go.opentelemetry.io/otel/propagation" "go.uber.org/zap" ) @@ -48,7 +50,6 @@ import ( type Server struct { config signoz.Config signoz *signoz.SigNoz - jwt *authtypes.JWT ruleManager *rules.Manager // public http router @@ -62,7 +63,7 @@ type Server struct { } // NewServer creates and initializes Server -func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) (*Server, error) { +func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) { integrationsController, err := integrations.NewController(signoz.SQLStore) if err != nil { return nil, err @@ -125,7 +126,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) s := &Server{ config: config, signoz: signoz, - jwt: jwt, ruleManager: rm, httpHostPort: constants.HTTPHostPort, unavailableChannel: make(chan healthcheck.Status), @@ -170,7 +170,17 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status { func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server, error) { r := NewRouter() - r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap) + r.Use(otelmux.Middleware( + "apiserver", + otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()), + otelmux.WithTracerProvider(s.signoz.Instrumentation.TracerProvider()), + otelmux.WithPropagators(propagation.NewCompositeTextMapPropagator(propagation.Baggage{}, propagation.TraceContext{})), + otelmux.WithFilter(func(r *http.Request) bool { + return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path) + }), + otelmux.WithPublicEndpoint(), + )) + r.Use(middleware.NewAuthN([]string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Tokenizer, s.signoz.Instrumentation.Logger()).Wrap) r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(), s.config.APIServer.Timeout.ExcludedRoutes, s.config.APIServer.Timeout.Default, diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index 2bbfb2e823..8475ec4b5b 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -730,9 +730,5 @@ var MaterializedDataTypeMap = map[string]string{ const InspectMetricsMaxTimeDiff = 1800000 -func GetDefaultSiteURL() string { - return GetOrDefaultEnv("SIGNOZ_SITE_URL", HTTPHostPort) -} - const DotMetricsEnabled = "DOT_METRICS_ENABLED" const maxJSONFlatteningDepth = "MAX_JSON_FLATTENING_DEPTH" diff --git a/pkg/query-service/rules/manager_test.go b/pkg/query-service/rules/manager_test.go deleted file mode 100644 index 90781e1547..0000000000 --- a/pkg/query-service/rules/manager_test.go +++ /dev/null @@ -1,818 +0,0 @@ -package rules - -import ( - "context" - "fmt" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfroutingstore/nfroutingstoretest" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/rulebasednotification" - "github.com/prometheus/common/model" - "strings" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "go.uber.org/zap" - - 
"github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver" - "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager" - "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/ruler/rulestore/rulestoretest" - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/sharder/noopsharder" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/alertmanagertypes" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/SigNoz/signoz/pkg/types/ruletypes" - "github.com/SigNoz/signoz/pkg/valuer" -) - -func TestManager_PatchRule_PayloadVariations(t *testing.T) { - // Set up test claims and manager once for all test cases - claims := &authtypes.Claims{ - UserID: "550e8400-e29b-41d4-a716-446655440000", - Email: "test@example.com", - Role: "admin", - } - manager, mockSQLRuleStore, mockRouteStore, nfmanager, orgId := setupTestManager(t) - claims.OrgID = orgId - - testCases := []struct { - name string - originalData string - patchData string - Route []*alertmanagertypes.RoutePolicy - Config *alertmanagertypes.NotificationConfig - expectedResult func(*ruletypes.GettableRule) bool - expectError bool - description string - }{ - { - name: "patch complete rule with task sync validation", - Route: []*alertmanagertypes.RoutePolicy{ - { - Expression: fmt.Sprintf("ruleId == \"{{.ruleId}}\" && threshold.name == \"warning\""), - ExpressionKind: alertmanagertypes.RuleBasedExpression, - Channels: []string{"test-alerts"}, - Name: "{{.ruleId}}", - Enabled: true, - }, - }, - Config: &alertmanagertypes.NotificationConfig{ - NotificationGroup: map[model.LabelName]struct{}{model.LabelName("ruleId"): {}}, - Renotify: alertmanagertypes.ReNotificationConfig{ - RenotifyInterval: 4 * time.Hour, - NoDataInterval: 4 * time.Hour, - }, - UsePolicy: false, - }, - originalData: `{ - "schemaVersion":"v1", - "alert": "test-original-alert", - "alertType": "METRIC_BASED_ALERT", - "ruleType": "threshold_rule", - "evalWindow": "5m0s", - "condition": { - "compositeQuery": { - "queryType": "builder", - "panelType": "graph", - "queries": [ - { - "type": "builder_query", - "spec": { - "name": "A", - "signal": "metrics", - "disabled": false, - "aggregations": [ - { - "metricName": "container.cpu.time", - "timeAggregation": "rate", - "spaceAggregation": "sum" - } - ] - } - } - ] - } - }, - "labels": { - "severity": "warning" - }, - "disabled": false, - "preferredChannels": ["test-alerts"] - }`, - patchData: `{ - "alert": "test-patched-alert", - "labels": { - "severity": "critical" - } - }`, - expectedResult: func(result *ruletypes.GettableRule) bool { - return result.AlertName == "test-patched-alert" && - result.Labels["severity"] == "critical" && - result.Disabled == false - }, - expectError: false, - }, - { - name: "patch rule to disabled state", - Route: []*alertmanagertypes.RoutePolicy{ - { - Expression: fmt.Sprintf("ruleId == \"{{.ruleId}}\" && threshold.name == \"warning\""), - ExpressionKind: alertmanagertypes.RuleBasedExpression, - Channels: []string{"test-alerts"}, - Name: "{{.ruleId}}", - Enabled: true, - }, - }, - Config: &alertmanagertypes.NotificationConfig{ - NotificationGroup: map[model.LabelName]struct{}{model.LabelName("ruleId"): {}}, - Renotify: alertmanagertypes.ReNotificationConfig{ - RenotifyInterval: 4 * time.Hour, - NoDataInterval: 4 * time.Hour, - }, - 
UsePolicy: false, - }, - originalData: `{ - "schemaVersion":"v2", - "alert": "test-disable-alert", - "alertType": "METRIC_BASED_ALERT", - "ruleType": "threshold_rule", - "evalWindow": "5m0s", - "condition": { - "thresholds": { - "kind": "basic", - "spec": [ - { - "name": "WARNING", - "target": 30, - "matchType": "1", - "op": "1", - "selectedQuery": "A", - "channels": ["test-alerts"] - } - ] - }, - "compositeQuery": { - "queryType": "builder", - "panelType": "graph", - "queries": [ - { - "type": "builder_query", - "spec": { - "name": "A", - "signal": "metrics", - "disabled": false, - "aggregations": [ - { - "metricName": "container.memory.usage", - "timeAggregation": "avg", - "spaceAggregation": "sum" - } - ] - } - } - ] - } - }, - "evaluation": { - "kind": "rolling", - "spec": { - "evalWindow": "5m", - "frequency": "1m" - } - }, - "labels": { - "severity": "warning" - }, - "disabled": false, - "preferredChannels": ["test-alerts"] - }`, - patchData: `{ - "disabled": true - }`, - expectedResult: func(result *ruletypes.GettableRule) bool { - return result.Disabled == true - }, - expectError: false, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - ruleID := valuer.GenerateUUID() - existingRule := &ruletypes.Rule{ - Identifiable: types.Identifiable{ - ID: ruleID, - }, - TimeAuditable: types.TimeAuditable{ - CreatedAt: time.Now(), - UpdatedAt: time.Now(), - }, - UserAuditable: types.UserAuditable{ - CreatedBy: "creator@example.com", - UpdatedBy: "creator@example.com", - }, - Data: tc.originalData, - OrgID: claims.OrgID, - } - - // Update route expectations with actual rule ID - routesWithRuleID := make([]*alertmanagertypes.RoutePolicy, len(tc.Route)) - for i, route := range tc.Route { - routesWithRuleID[i] = &alertmanagertypes.RoutePolicy{ - Expression: strings.Replace(route.Expression, "{{.ruleId}}", ruleID.String(), -1), - ExpressionKind: route.ExpressionKind, - Channels: route.Channels, - Name: strings.Replace(route.Name, "{{.ruleId}}", ruleID.String(), -1), - Enabled: route.Enabled, - } - } - - mockRouteStore.ExpectDeleteRouteByName(existingRule.OrgID, ruleID.String()) - mockRouteStore.ExpectCreateBatch(routesWithRuleID) - mockSQLRuleStore.ExpectGetStoredRule(ruleID, existingRule) - mockSQLRuleStore.ExpectEditRule(existingRule) - - ctx := authtypes.NewContextWithClaims(context.Background(), *claims) - result, err := manager.PatchRule(ctx, tc.patchData, ruleID) - - assert.NoError(t, err) - assert.NotNil(t, result) - assert.Equal(t, ruleID.StringValue(), result.Id) - - if tc.expectedResult != nil { - assert.True(t, tc.expectedResult(result), "Expected result validation failed") - } - taskName := prepareTaskName(result.Id) - - if result.Disabled { - syncCompleted := waitForTaskSync(manager, taskName, false, 2*time.Second) - assert.True(t, syncCompleted, "Task synchronization should complete within timeout") - assert.Nil(t, findTaskByName(manager.RuleTasks(), taskName), "Task should be removed for disabled rule") - } else { - syncCompleted := waitForTaskSync(manager, taskName, true, 2*time.Second) - - // Verify notification config - config, err := nfmanager.GetNotificationConfig(orgId, result.Id) - assert.NoError(t, err) - assert.Equal(t, tc.Config, config) - - assert.True(t, syncCompleted, "Task synchronization should complete within timeout") - assert.NotNil(t, findTaskByName(manager.RuleTasks(), taskName), "Task should be created/updated for enabled rule") - assert.Greater(t, len(manager.Rules()), 0, "Rules should be updated in manager") - } - - 
assert.NoError(t, mockSQLRuleStore.AssertExpectations()) - }) - } -} - -func waitForTaskSync(manager *Manager, taskName string, expectedExists bool, timeout time.Duration) bool { - deadline := time.Now().Add(timeout) - for time.Now().Before(deadline) { - task := findTaskByName(manager.RuleTasks(), taskName) - exists := task != nil - - if exists == expectedExists { - return true - } - time.Sleep(10 * time.Millisecond) - } - return false -} - -// findTaskByName finds a task by name in the slice of tasks -func findTaskByName(tasks []Task, taskName string) Task { - for i := 0; i < len(tasks); i++ { - if tasks[i].Name() == taskName { - return tasks[i] - } - } - return nil -} - -func setupTestManager(t *testing.T) (*Manager, *rulestoretest.MockSQLRuleStore, *nfroutingstoretest.MockSQLRouteStore, nfmanager.NotificationManager, string) { - settings := instrumentationtest.New().ToProviderSettings() - testDB := utils.NewQueryServiceDBForTests(t) - - err := utils.CreateTestOrg(t, testDB) - if err != nil { - t.Fatalf("Failed to create test org: %v", err) - } - testOrgID, err := utils.GetTestOrgId(testDB) - if err != nil { - t.Fatalf("Failed to get test org ID: %v", err) - } - - //will replace this with alertmanager mock - newConfig := alertmanagerserver.NewConfig() - defaultConfig, err := alertmanagertypes.NewDefaultConfig(newConfig.Global, newConfig.Route, testOrgID.StringValue()) - if err != nil { - t.Fatalf("Failed to create default alertmanager config: %v", err) - } - - _, err = testDB.BunDB().NewInsert(). - Model(defaultConfig.StoreableConfig()). - Exec(context.Background()) - if err != nil { - t.Fatalf("Failed to insert alertmanager config: %v", err) - } - - noopSharder, err := noopsharder.New(context.TODO(), settings, sharder.Config{}) - if err != nil { - t.Fatalf("Failed to create noop sharder: %v", err) - } - orgGetter := implorganization.NewGetter(implorganization.NewStore(testDB), noopSharder) - routeStore := nfroutingstoretest.NewMockSQLRouteStore() - notificationManager, err := rulebasednotification.New(t.Context(), settings, nfmanager.Config{}, routeStore) - if err != nil { - t.Fatalf("Failed to create alert manager: %v", err) - } - alertManager, err := signozalertmanager.New(context.TODO(), settings, alertmanager.Config{Provider: "signoz", Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, testDB, orgGetter, notificationManager) - if err != nil { - t.Fatalf("Failed to create alert manager: %v", err) - } - mockSQLRuleStore := rulestoretest.NewMockSQLRuleStore() - - options := ManagerOptions{ - Context: context.Background(), - Logger: zap.L(), - SLogger: instrumentationtest.New().Logger(), - EvalDelay: time.Minute, - PrepareTaskFunc: defaultPrepareTaskFunc, - Alertmanager: alertManager, - OrgGetter: orgGetter, - RuleStore: mockSQLRuleStore, - } - - manager, err := NewManager(&options) - if err != nil { - t.Fatalf("Failed to create manager: %v", err) - } - - close(manager.block) - return manager, mockSQLRuleStore, routeStore, notificationManager, testOrgID.StringValue() -} - -func TestCreateRule(t *testing.T) { - claims := &authtypes.Claims{ - Email: "test@example.com", - } - manager, mockSQLRuleStore, mockRouteStore, nfmanager, orgId := setupTestManager(t) - claims.OrgID = orgId - testCases := []struct { - name string - Route []*alertmanagertypes.RoutePolicy - Config *alertmanagertypes.NotificationConfig - ruleStr string - }{ - { - name: "validate stored rule data structure", - Route: []*alertmanagertypes.RoutePolicy{ - { - Expression: 
fmt.Sprintf("ruleId == \"{{.ruleId}}\" && threshold.name == \"warning\""), - ExpressionKind: alertmanagertypes.RuleBasedExpression, - Channels: []string{"test-alerts"}, - Name: "{{.ruleId}}", - Enabled: true, - }, - }, - Config: &alertmanagertypes.NotificationConfig{ - NotificationGroup: map[model.LabelName]struct{}{model.LabelName("ruleId"): {}}, - Renotify: alertmanagertypes.ReNotificationConfig{ - RenotifyInterval: 4 * time.Hour, - NoDataInterval: 4 * time.Hour, - }, - UsePolicy: false, - }, - ruleStr: `{ - "alert": "cpu usage", - "ruleType": "threshold_rule", - "evalWindow": "5m", - "frequency": "1m", - "condition": { - "compositeQuery": { - "queryType": "builder", - "builderQueries": { - "A": { - "expression": "A", - "disabled": false, - "dataSource": "metrics", - "aggregateOperator": "avg", - "aggregateAttribute": { - "key": "cpu_usage", - "type": "Gauge" - } - } - } - }, - "op": "1", - "target": 80, - "matchType": "1" - }, - "labels": { - "severity": "warning" - }, - "annotations": { - "summary": "High CPU usage detected" - }, - "preferredChannels": ["test-alerts"] - }`, - }, - { - name: "create complete v2 rule with thresholds", - Route: []*alertmanagertypes.RoutePolicy{ - { - Expression: fmt.Sprintf("ruleId == \"{{.ruleId}}\" && threshold.name == \"critical\""), - ExpressionKind: alertmanagertypes.RuleBasedExpression, - Channels: []string{"test-alerts"}, - Name: "{{.ruleId}}", - Enabled: true, - }, - { - Expression: fmt.Sprintf("ruleId == \"{{.ruleId}}\" && threshold.name == \"warning\""), - ExpressionKind: alertmanagertypes.RuleBasedExpression, - Channels: []string{"test-alerts"}, - Name: "{{.ruleId}}", - Enabled: true, - }, - }, - Config: &alertmanagertypes.NotificationConfig{ - NotificationGroup: map[model.LabelName]struct{}{model.LabelName("k8s.node.name"): {}, model.LabelName("ruleId"): {}}, - Renotify: alertmanagertypes.ReNotificationConfig{ - RenotifyInterval: 10 * time.Minute, - NoDataInterval: 4 * time.Hour, - }, - UsePolicy: false, - }, - ruleStr: `{ - "schemaVersion":"v2", - "state": "firing", - "alert": "test-multi-threshold-create", - "alertType": "METRIC_BASED_ALERT", - "ruleType": "threshold_rule", - "evalWindow": "5m0s", - "condition": { - "thresholds": { - "kind": "basic", - "spec": [ - { - "name": "CRITICAL", - "target": 0, - "matchType": "1", - "op": "1", - "selectedQuery": "A", - "channels": ["test-alerts"] - }, - { - "name": "WARNING", - "target": 0, - "matchType": "1", - "op": "1", - "selectedQuery": "A", - "channels": ["test-alerts"] - } - ] - }, - "compositeQuery": { - "queryType": "builder", - "panelType": "graph", - "queries": [ - { - "type": "builder_query", - "spec": { - "name": "A", - "signal": "metrics", - "disabled": false, - "aggregations": [ - { - "metricName": "container.cpu.time", - "timeAggregation": "rate", - "spaceAggregation": "sum" - } - ] - } - } - ] - } - }, - "evaluation": { - "kind": "rolling", - "spec": { - "evalWindow": "6m", - "frequency": "1m" - } - }, - "notificationSettings": { - "GroupBy": [ - "k8s.node.name" - ], - "renotify": { - "interval": "10m", - "enabled": true, - "alertStates": [ - "firing" - ] - } - }, - "labels": { - "severity": "warning" - }, - "annotations": { - "description": "This alert is fired when the defined metric crosses the threshold", - "summary": "The rule threshold is set and the observed metric value is evaluated" - }, - "disabled": false, - "preferredChannels": ["#test-alerts-v2"], - "version": "v5" - }`, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - rule := 
&ruletypes.Rule{ - Identifiable: types.Identifiable{ - ID: valuer.GenerateUUID(), - }, - TimeAuditable: types.TimeAuditable{ - CreatedAt: time.Now(), - UpdatedAt: time.Now(), - }, - UserAuditable: types.UserAuditable{ - CreatedBy: claims.Email, - UpdatedBy: claims.Email, - }, - OrgID: claims.OrgID, - } - - // Update route expectations with actual rule ID - routesWithRuleID := make([]*alertmanagertypes.RoutePolicy, len(tc.Route)) - for i, route := range tc.Route { - routesWithRuleID[i] = &alertmanagertypes.RoutePolicy{ - Expression: strings.Replace(route.Expression, "{{.ruleId}}", rule.ID.String(), -1), - ExpressionKind: route.ExpressionKind, - Channels: route.Channels, - Name: strings.Replace(route.Name, "{{.ruleId}}", rule.ID.String(), -1), - Enabled: route.Enabled, - } - } - - mockRouteStore.ExpectCreateBatch(routesWithRuleID) - mockSQLRuleStore.ExpectCreateRule(rule) - - ctx := authtypes.NewContextWithClaims(context.Background(), *claims) - result, err := manager.CreateRule(ctx, tc.ruleStr) - - assert.NoError(t, err) - assert.NotNil(t, result) - assert.NotEmpty(t, result.Id, "Result should have a valid ID") - - // Wait for task creation with proper synchronization - taskName := prepareTaskName(result.Id) - syncCompleted := waitForTaskSync(manager, taskName, true, 2*time.Second) - - // Verify notification config - config, err := nfmanager.GetNotificationConfig(orgId, result.Id) - assert.NoError(t, err) - assert.Equal(t, tc.Config, config) - - assert.True(t, syncCompleted, "Task creation should complete within timeout") - assert.NotNil(t, findTaskByName(manager.RuleTasks(), taskName), "Task should be created with correct name") - assert.Greater(t, len(manager.Rules()), 0, "Rules should be added to manager") - - assert.NoError(t, mockSQLRuleStore.AssertExpectations()) - }) - } -} - -func TestEditRule(t *testing.T) { - // Set up test claims and manager once for all test cases - claims := &authtypes.Claims{ - Email: "test@example.com", - } - manager, mockSQLRuleStore, mockRouteStore, nfmanager, orgId := setupTestManager(t) - claims.OrgID = orgId - testCases := []struct { - ruleID string - name string - Route []*alertmanagertypes.RoutePolicy - Config *alertmanagertypes.NotificationConfig - ruleStr string - }{ - { - ruleID: "12345678-1234-1234-1234-123456789012", - name: "validate edit rule functionality", - Route: []*alertmanagertypes.RoutePolicy{ - { - Expression: fmt.Sprintf("ruleId == \"rule1\" && threshold.name == \"critical\""), - ExpressionKind: alertmanagertypes.RuleBasedExpression, - Channels: []string{"critical-alerts"}, - Name: "12345678-1234-1234-1234-123456789012", - Enabled: true, - }, - }, - Config: &alertmanagertypes.NotificationConfig{ - NotificationGroup: map[model.LabelName]struct{}{model.LabelName("ruleId"): {}}, - Renotify: alertmanagertypes.ReNotificationConfig{ - RenotifyInterval: 4 * time.Hour, - NoDataInterval: 4 * time.Hour, - }, - UsePolicy: false, - }, - ruleStr: `{ - "alert": "updated cpu usage", - "ruleType": "threshold_rule", - "evalWindow": "10m", - "frequency": "2m", - "condition": { - "compositeQuery": { - "queryType": "builder", - "builderQueries": { - "A": { - "expression": "A", - "disabled": false, - "dataSource": "metrics", - "aggregateOperator": "avg", - "aggregateAttribute": { - "key": "cpu_usage", - "type": "Gauge" - } - } - } - }, - "op": "1", - "target": 90, - "matchType": "1" - }, - "labels": { - "severity": "critical" - }, - "annotations": { - "summary": "Very high CPU usage detected" - }, - "preferredChannels": ["critical-alerts"] - }`, - }, - { - 
ruleID: "12345678-1234-1234-1234-123456789013", - name: "edit complete v2 rule with thresholds", - Route: []*alertmanagertypes.RoutePolicy{ - { - Expression: fmt.Sprintf("ruleId == \"rule2\" && threshold.name == \"critical\""), - ExpressionKind: alertmanagertypes.RuleBasedExpression, - Channels: []string{"test-alerts"}, - Name: "12345678-1234-1234-1234-123456789013", - Enabled: true, - }, - { - Expression: fmt.Sprintf("ruleId == \"rule2\" && threshold.name == \"warning\""), - ExpressionKind: alertmanagertypes.RuleBasedExpression, - Channels: []string{"test-alerts"}, - Name: "12345678-1234-1234-1234-123456789013", - Enabled: true, - }, - }, - Config: &alertmanagertypes.NotificationConfig{ - NotificationGroup: map[model.LabelName]struct{}{model.LabelName("ruleId"): {}, model.LabelName("k8s.node.name"): {}}, - Renotify: alertmanagertypes.ReNotificationConfig{ - RenotifyInterval: 10 * time.Minute, - NoDataInterval: 4 * time.Hour, - }, - UsePolicy: false, - }, - ruleStr: `{ - "schemaVersion":"v2", - "state": "firing", - "alert": "test-multi-threshold-edit", - "alertType": "METRIC_BASED_ALERT", - "ruleType": "threshold_rule", - "evalWindow": "5m0s", - "condition": { - "thresholds": { - "kind": "basic", - "spec": [ - { - "name": "CRITICAL", - "target": 10, - "matchType": "1", - "op": "1", - "selectedQuery": "A", - "channels": ["test-alerts"] - }, - { - "name": "WARNING", - "target": 5, - "matchType": "1", - "op": "1", - "selectedQuery": "A", - "channels": ["test-alerts"] - } - ] - }, - "compositeQuery": { - "queryType": "builder", - "panelType": "graph", - "queries": [ - { - "type": "builder_query", - "spec": { - "name": "A", - "signal": "metrics", - "disabled": false, - "aggregations": [ - { - "metricName": "container.memory.usage", - "timeAggregation": "avg", - "spaceAggregation": "sum" - } - ] - } - } - ] - } - }, - "evaluation": { - "kind": "rolling", - "spec": { - "evalWindow": "8m", - "frequency": "2m" - } - }, - "labels": { - "severity": "critical" - }, - "notificationSettings": { - "GroupBy": [ - "k8s.node.name" - ], - "renotify": { - "interval": "10m", - "enabled": true, - "alertStates": [ - "firing" - ] - } - }, - "annotations": { - "description": "This alert is fired when memory usage crosses the threshold", - "summary": "Memory usage threshold exceeded" - }, - "disabled": false, - "preferredChannels": ["#critical-alerts-v2"], - "version": "v5" - }`, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - ruleId, err := valuer.NewUUID(tc.ruleID) - if err != nil { - t.Errorf("error creating ruleId: %s", err) - } - existingRule := &ruletypes.Rule{ - Identifiable: types.Identifiable{ - ID: ruleId, - }, - TimeAuditable: types.TimeAuditable{ - CreatedAt: time.Now(), - UpdatedAt: time.Now(), - }, - UserAuditable: types.UserAuditable{ - CreatedBy: "creator@example.com", - UpdatedBy: "creator@example.com", - }, - Data: `{"alert": "original cpu usage", "disabled": false}`, - OrgID: claims.OrgID, - } - mockRouteStore.ExpectDeleteRouteByName(existingRule.OrgID, ruleId.String()) - mockRouteStore.ExpectCreateBatch(tc.Route) - mockSQLRuleStore.ExpectGetStoredRule(ruleId, existingRule) - mockSQLRuleStore.ExpectEditRule(existingRule) - - ctx := authtypes.NewContextWithClaims(context.Background(), *claims) - err = manager.EditRule(ctx, tc.ruleStr, ruleId) - - assert.NoError(t, err) - - // Wait for task update with proper synchronization - - taskName := prepareTaskName(ruleId.String()) - syncCompleted := waitForTaskSync(manager, taskName, true, 2*time.Second) - - config, err 
:= nfmanager.GetNotificationConfig(orgId, ruleId.String()) - assert.NoError(t, err) - assert.Equal(t, tc.Config, config) - assert.True(t, syncCompleted, "Task update should complete within timeout") - assert.NotNil(t, findTaskByName(manager.RuleTasks(), taskName), "Task should be updated with correct name") - assert.Greater(t, len(manager.Rules()), 0, "Rules should be updated in manager") - - assert.NoError(t, mockSQLRuleStore.AssertExpectations()) - }) - } -} diff --git a/pkg/query-service/tests/integration/filter_suggestions_test.go b/pkg/query-service/tests/integration/filter_suggestions_test.go deleted file mode 100644 index 65764a9beb..0000000000 --- a/pkg/query-service/tests/integration/filter_suggestions_test.go +++ /dev/null @@ -1,389 +0,0 @@ -package tests - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "slices" - "strings" - "testing" - "time" - - "github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest" - "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager" - "github.com/SigNoz/signoz/pkg/analytics/analyticstest" - "github.com/SigNoz/signoz/pkg/emailing/emailingtest" - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/sharder/noopsharder" - "github.com/SigNoz/signoz/pkg/types/authtypes" - - "github.com/SigNoz/signoz/pkg/http/middleware" - "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/query-service/app" - "github.com/SigNoz/signoz/pkg/query-service/constants" - v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" - "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/types" - mockhouse "github.com/srikanthccv/ClickHouse-go-mock" - "github.com/stretchr/testify/require" - "go.uber.org/zap" -) - -// If no data has been received yet, filter suggestions should contain -// standard log fields and static example queries based on them -func TestDefaultLogsFilterSuggestions(t *testing.T) { - require := require.New(t) - tb := NewFilterSuggestionsTestBed(t) - - tb.mockAttribKeysQueryResponse([]v3.AttributeKey{}) - suggestionsQueryParams := map[string]string{} - suggestionsResp := tb.GetQBFilterSuggestionsForLogs(suggestionsQueryParams) - - require.Greater(len(suggestionsResp.AttributeKeys), 0) - require.True(slices.ContainsFunc( - suggestionsResp.AttributeKeys, func(a v3.AttributeKey) bool { - return a.Key == "body" - }, - )) - - require.Greater(len(suggestionsResp.ExampleQueries), 0) - require.False(slices.ContainsFunc( - suggestionsResp.AttributeKeys, func(a v3.AttributeKey) bool { - return a.Type == v3.AttributeKeyTypeTag || a.Type == v3.AttributeKeyTypeResource - }, - )) -} - -func TestLogsFilterSuggestionsWithoutExistingFilter(t *testing.T) { - require := require.New(t) - tb := NewFilterSuggestionsTestBed(t) - - testAttrib := v3.AttributeKey{ - Key: "container_id", - Type: v3.AttributeKeyTypeResource, - DataType: v3.AttributeKeyDataTypeString, - IsColumn: false, - } - testAttribValue := "test-container" - - tb.mockAttribKeysQueryResponse([]v3.AttributeKey{testAttrib}) - tb.mockAttribValuesQueryResponse( - []v3.AttributeKey{testAttrib}, [][]string{{testAttribValue}}, - ) - suggestionsQueryParams := map[string]string{} - suggestionsResp := 
tb.GetQBFilterSuggestionsForLogs(suggestionsQueryParams) - - require.Greater(len(suggestionsResp.AttributeKeys), 0) - require.True(slices.ContainsFunc( - suggestionsResp.AttributeKeys, func(a v3.AttributeKey) bool { - return a.Key == testAttrib.Key && a.Type == testAttrib.Type - }, - )) - - require.Greater(len(suggestionsResp.ExampleQueries), 0) - - require.True(slices.ContainsFunc( - suggestionsResp.ExampleQueries, func(q v3.FilterSet) bool { - return slices.ContainsFunc(q.Items, func(i v3.FilterItem) bool { - return i.Key.Key == testAttrib.Key && i.Value == testAttribValue - }) - }, - )) -} - -// If a filter already exists, suggested example queries should -// contain existing filter -func TestLogsFilterSuggestionsWithExistingFilter(t *testing.T) { - require := require.New(t) - tb := NewFilterSuggestionsTestBed(t) - - testAttrib := v3.AttributeKey{ - Key: "container_id", - Type: v3.AttributeKeyTypeResource, - DataType: v3.AttributeKeyDataTypeString, - IsColumn: false, - } - testAttribValue := "test-container" - - testFilterAttrib := v3.AttributeKey{ - Key: "tenant_id", - Type: v3.AttributeKeyTypeTag, - DataType: v3.AttributeKeyDataTypeString, - IsColumn: false, - } - testFilterAttribValue := "test-tenant" - testFilter := v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: testFilterAttrib, - Operator: "=", - Value: testFilterAttribValue, - }, - }, - } - - tb.mockAttribKeysQueryResponse([]v3.AttributeKey{testAttrib, testFilterAttrib}) - tb.mockAttribValuesQueryResponse( - []v3.AttributeKey{testAttrib, testFilterAttrib}, - [][]string{{testAttribValue}, {testFilterAttribValue}}, - ) - - testFilterJson, err := json.Marshal(testFilter) - require.Nil(err, "couldn't serialize existing filter to JSON") - suggestionsQueryParams := map[string]string{ - "existingFilter": base64.RawURLEncoding.EncodeToString(testFilterJson), - } - suggestionsResp := tb.GetQBFilterSuggestionsForLogs(suggestionsQueryParams) - - require.Greater(len(suggestionsResp.AttributeKeys), 0) - - // All example queries should contain the existing filter as a prefix - require.Greater(len(suggestionsResp.ExampleQueries), 0) - for _, q := range suggestionsResp.ExampleQueries { - require.Equal(q.Items[0], testFilter.Items[0]) - } -} - -func TestResourceAttribsRankedHigherInLogsFilterSuggestions(t *testing.T) { - require := require.New(t) - - tagKeys := []v3.AttributeKey{} - for _, k := range []string{"user_id", "user_email"} { - tagKeys = append(tagKeys, v3.AttributeKey{ - Key: k, - Type: v3.AttributeKeyTypeTag, - DataType: v3.AttributeKeyDataTypeString, - IsColumn: false, - }) - } - - specialResourceAttrKeys := []v3.AttributeKey{} - for _, k := range []string{"service", "env"} { - specialResourceAttrKeys = append(specialResourceAttrKeys, v3.AttributeKey{ - Key: k, - Type: v3.AttributeKeyTypeResource, - DataType: v3.AttributeKeyDataTypeString, - IsColumn: false, - }) - } - - otherResourceAttrKeys := []v3.AttributeKey{} - for _, k := range []string{"container_name", "container_id"} { - otherResourceAttrKeys = append(otherResourceAttrKeys, v3.AttributeKey{ - Key: k, - Type: v3.AttributeKeyTypeResource, - DataType: v3.AttributeKeyDataTypeString, - IsColumn: false, - }) - } - - tb := NewFilterSuggestionsTestBed(t) - - mockAttrKeysInDB := append(tagKeys, otherResourceAttrKeys...) - mockAttrKeysInDB = append(mockAttrKeysInDB, specialResourceAttrKeys...) - - tb.mockAttribKeysQueryResponse(mockAttrKeysInDB) - - expectedTopSuggestions := append(specialResourceAttrKeys, otherResourceAttrKeys...) 
- expectedTopSuggestions = append(expectedTopSuggestions, tagKeys...) - - tb.mockAttribValuesQueryResponse( - expectedTopSuggestions[:2], [][]string{{"test"}, {"test"}}, - ) - - suggestionsQueryParams := map[string]string{"examplesLimit": "2"} - suggestionsResp := tb.GetQBFilterSuggestionsForLogs(suggestionsQueryParams) - - require.Equal( - expectedTopSuggestions, - suggestionsResp.AttributeKeys[:len(expectedTopSuggestions)], - ) -} - -// Mocks response for CH queries made by reader.GetLogAttributeKeys -func (tb *FilterSuggestionsTestBed) mockAttribKeysQueryResponse( - attribsToReturn []v3.AttributeKey, -) { - cols := []mockhouse.ColumnType{} - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "tag_key"}) - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "tag_type"}) - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "tag_data_type"}) - - values := [][]any{} - for _, a := range attribsToReturn { - rowValues := []any{} - rowValues = append(rowValues, a.Key) - rowValues = append(rowValues, string(a.Type)) - rowValues = append(rowValues, string(a.DataType)) - values = append(values, rowValues) - } - - tb.mockClickhouse.ExpectQuery( - "select.*from.*signoz_logs.distributed_tag_attributes_v2.*", - ).WithArgs( - constants.DefaultFilterSuggestionsAttributesLimit, - ).WillReturnRows( - mockhouse.NewRows(cols, values), - ) - - // Add expectation for the create table query used to determine - // if an attribute is a column - cols = []mockhouse.ColumnType{{Type: "String", Name: "statement"}} - values = [][]any{{"CREATE TABLE signoz_logs.distributed_logs"}} - tb.mockClickhouse.ExpectSelect( - "SHOW CREATE TABLE.*", - ).WillReturnRows(mockhouse.NewRows(cols, values)) - -} - -// Mocks response for CH queries made by reader.GetLogAttributeValues -func (tb *FilterSuggestionsTestBed) mockAttribValuesQueryResponse( - expectedAttribs []v3.AttributeKey, - stringValuesToReturn [][]string, -) { - resultCols := []mockhouse.ColumnType{ - {Type: "String", Name: "tag_key"}, - {Type: "String", Name: "string_value"}, - {Type: "Nullable(Int64)", Name: "number_value"}, - } - - expectedAttribKeysInQuery := []any{} - mockResultRows := [][]any{} - for idx, attrib := range expectedAttribs { - expectedAttribKeysInQuery = append(expectedAttribKeysInQuery, attrib.Key) - for _, stringTagValue := range stringValuesToReturn[idx] { - mockResultRows = append(mockResultRows, []any{ - attrib.Key, stringTagValue, nil, - }) - } - } - - tb.mockClickhouse.ExpectQuery( - "select.*tag_key.*string_value.*number_value.*distributed_tag_attributes_v2.*tag_key", - ).WithArgs(expectedAttribKeysInQuery...).WillReturnRows(mockhouse.NewRows(resultCols, mockResultRows)) -} - -type FilterSuggestionsTestBed struct { - t *testing.T - testUser *types.User - qsHttpHandler http.Handler - mockClickhouse mockhouse.ClickConnMockCommon - userModule user.Module -} - -func (tb *FilterSuggestionsTestBed) GetQBFilterSuggestionsForLogs( - queryParams map[string]string, -) *v3.QBFilterSuggestionsResponse { - - _, dsExistsInQP := queryParams["dataSource"] - require.False(tb.t, dsExistsInQP) - queryParams["dataSource"] = "logs" - - result := tb.QSGetRequest("/api/v3/filter_suggestions", queryParams) - - dataJson, err := json.Marshal(result.Data) - if err != nil { - tb.t.Fatalf("could not marshal apiResponse.Data: %v", err) - } - - var resp v3.QBFilterSuggestionsResponse - err = json.Unmarshal(dataJson, &resp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into PipelinesResponse") - } - - return 
&resp -} - -func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed { - testDB := utils.NewQueryServiceDBForTests(t) - - reader, mockClickhouse := NewMockClickhouseReader(t, testDB) - mockClickhouse.MatchExpectationsInOrder(false) - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.TODO(), providerSettings, sharder.Config{}) - require.NoError(t, err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(testDB), sharder) - notificationManager := nfmanagertest.NewMock() - require.NoError(t, err) - alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, testDB, orgGetter, notificationManager) - require.NoError(t, err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - handlers := signoz.NewHandlers(modules, providerSettings) - - apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ - Reader: reader, - Signoz: &signoz.SigNoz{ - Modules: modules, - Handlers: handlers, - }, - }) - if err != nil { - t.Fatalf("could not create a new ApiHandler: %v", err) - } - - router := app.NewRouter() - //add the jwt middleware - router.Use(middleware.NewAuth(jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, sharder, instrumentationtest.New().Logger()).Wrap) - am := middleware.NewAuthZ(instrumentationtest.New().Logger()) - apiHandler.RegisterRoutes(router, am) - apiHandler.RegisterQueryRangeV3Routes(router, am) - - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - if apiErr != nil { - t.Fatalf("could not create a test user: %v", apiErr) - } - - logger := zap.NewExample() - originalLogger := zap.L() - zap.ReplaceGlobals(logger) - t.Cleanup(func() { - zap.ReplaceGlobals(originalLogger) - }) - - return &FilterSuggestionsTestBed{ - t: t, - testUser: user, - qsHttpHandler: router, - mockClickhouse: mockClickhouse, - userModule: modules.User, - } -} - -func (tb *FilterSuggestionsTestBed) QSGetRequest( - path string, - queryParams map[string]string, -) *app.ApiResponse { - if len(queryParams) > 0 { - qps := []string{} - for q, v := range queryParams { - qps = append(qps, fmt.Sprintf("%s=%s", q, v)) - } - path = fmt.Sprintf("%s?%s", path, strings.Join(qps, "&")) - } - - req, err := AuthenticatedRequestForTest( - tb.userModule, tb.testUser, path, nil, - ) - if err != nil { - tb.t.Fatalf("couldn't create authenticated test request: %v", err) - } - - result, err := HandleTestRequest(tb.qsHttpHandler, req, 200) - if err != nil { - tb.t.Fatalf("test request failed: %v", err) - } - return result -} diff --git a/pkg/query-service/tests/integration/logparsingpipeline_test.go b/pkg/query-service/tests/integration/logparsingpipeline_test.go deleted file mode 100644 index 25046bcd5e..0000000000 --- a/pkg/query-service/tests/integration/logparsingpipeline_test.go +++ /dev/null @@ -1,866 +0,0 @@ -package tests - -import ( - "context" - "encoding/json" - "fmt" - "io" - "log/slog" - "net/http/httptest" - "runtime/debug" - "strings" - "testing" - "time" - - "github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest" - 
"github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager" - "github.com/SigNoz/signoz/pkg/analytics/analyticstest" - "github.com/SigNoz/signoz/pkg/emailing/emailingtest" - "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/query-service/agentConf" - "github.com/SigNoz/signoz/pkg/query-service/app" - "github.com/SigNoz/signoz/pkg/query-service/app/integrations" - "github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline" - "github.com/SigNoz/signoz/pkg/query-service/app/opamp" - "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model" - "github.com/SigNoz/signoz/pkg/query-service/constants" - v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" - "github.com/SigNoz/signoz/pkg/query-service/queryBuilderToExpr" - "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/sharder/noopsharder" - "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/SigNoz/signoz/pkg/types/opamptypes" - "github.com/SigNoz/signoz/pkg/types/pipelinetypes" - "github.com/SigNoz/signoz/pkg/valuer" - "github.com/gorilla/mux" - "github.com/knadh/koanf/parsers/yaml" - "github.com/open-telemetry/opamp-go/protobufs" - "github.com/pkg/errors" - "github.com/stretchr/testify/require" - "golang.org/x/exp/maps" - "golang.org/x/exp/slices" -) - -func TestLogPipelinesLifecycle(t *testing.T) { - agentID := valuer.GenerateUUID().String() - testbed := NewLogPipelinesTestBed(t, nil, agentID) - require := require.New(t) - - getPipelinesResp := testbed.GetPipelinesFromQS() - require.Equal( - 0, len(getPipelinesResp.Pipelines), - "There should be no pipelines at the start", - ) - require.Equal( - 0, len(getPipelinesResp.History), - "There should be no pipelines config history at the start", - ) - - // Should be able to create pipelines config - pipelineFilterSet := &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "method", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "GET", - }, - }, - } - - postablePipelines := pipelinetypes.PostablePipelines{ - Pipelines: []pipelinetypes.PostablePipeline{ - { - OrderID: 1, - Name: "pipeline1", - Alias: "pipeline1", - Enabled: true, - Filter: pipelineFilterSet, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, { - OrderID: 2, - Name: "pipeline2", - Alias: "pipeline2", - Enabled: true, - Filter: pipelineFilterSet, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "remove", - Type: "remove", - Field: "attributes.test", - Enabled: true, - Name: "test remove", - }, - }, - }, - }, - } - - createPipelinesResp := testbed.PostPipelinesToQS(postablePipelines) - assertPipelinesResponseMatchesPostedPipelines( - t, postablePipelines, createPipelinesResp, - ) - testbed.assertPipelinesSentToOpampClient(createPipelinesResp.Pipelines) - testbed.assertNewAgentGetsPipelinesOnConnection(createPipelinesResp.Pipelines) - - // Should be able to get the configured pipelines. 
- getPipelinesResp = testbed.GetPipelinesFromQS() - assertPipelinesResponseMatchesPostedPipelines( - t, postablePipelines, getPipelinesResp, - ) - - // Deployment status should be pending. - require.Equal( - 1, len(getPipelinesResp.History), - "pipelines config history should not be empty after 1st configuration", - ) - require.Equal( - opamptypes.DeployInitiated, getPipelinesResp.History[0].DeployStatus, - "pipelines deployment should be in progress after 1st configuration", - ) - - // Deployment status should get updated after acknowledgement from opamp client - testbed.simulateOpampClientAcknowledgementForLatestConfig(agentID) - - getPipelinesResp = testbed.GetPipelinesFromQS() - assertPipelinesResponseMatchesPostedPipelines( - t, postablePipelines, getPipelinesResp, - ) - require.Equal( - opamptypes.Deployed, - getPipelinesResp.History[0].DeployStatus, - "pipeline deployment should be complete after acknowledgment from opamp client", - ) - - // Should be able to update pipelines config. - postablePipelines.Pipelines[1].Enabled = false - updatePipelinesResp := testbed.PostPipelinesToQS(postablePipelines) - assertPipelinesResponseMatchesPostedPipelines( - t, postablePipelines, updatePipelinesResp, - ) - testbed.assertPipelinesSentToOpampClient(updatePipelinesResp.Pipelines) - testbed.assertNewAgentGetsPipelinesOnConnection(updatePipelinesResp.Pipelines) - - getPipelinesResp = testbed.GetPipelinesFromQS() - require.Equal( - 2, len(getPipelinesResp.History), - "there should be 2 history entries after posting pipelines config for the 2nd time", - ) - require.Equal( - opamptypes.DeployInitiated, getPipelinesResp.History[0].DeployStatus, - "deployment should be in progress for latest pipeline config", - ) - - // Deployment status should get updated again on receiving msg from client. 
- testbed.simulateOpampClientAcknowledgementForLatestConfig(agentID) - - getPipelinesResp = testbed.GetPipelinesFromQS() - assertPipelinesResponseMatchesPostedPipelines( - t, postablePipelines, getPipelinesResp, - ) - require.Equal( - opamptypes.Deployed, - getPipelinesResp.History[0].DeployStatus, - "deployment for latest pipeline config should be complete after acknowledgment from opamp client", - ) -} - -func TestLogPipelinesHistory(t *testing.T) { - require := require.New(t) - agentID := valuer.GenerateUUID().String() - testbed := NewLogPipelinesTestBed(t, nil, agentID) - - // Only the latest config version can be "IN_PROGRESS", - // other incomplete deployments should have status "UNKNOWN" - getPipelinesResp := testbed.GetPipelinesFromQS() - require.Equal(0, len(getPipelinesResp.History)) - - postablePipelines := pipelinetypes.PostablePipelines{ - Pipelines: []pipelinetypes.PostablePipeline{ - { - OrderID: 1, - Name: "pipeline1", - Alias: "pipeline1", - Enabled: true, - Filter: &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "method", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "GET", - }, - }, - }, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - }, - } - - testbed.PostPipelinesToQS(postablePipelines) - getPipelinesResp = testbed.GetPipelinesFromQS() - require.Equal(1, len(getPipelinesResp.History)) - require.Equal(opamptypes.DeployInitiated, getPipelinesResp.History[0].DeployStatus) - - postablePipelines.Pipelines[0].Config = append( - postablePipelines.Pipelines[0].Config, - pipelinetypes.PipelineOperator{ - OrderId: 2, - ID: "remove", - Type: "remove", - Field: "attributes.test", - Enabled: true, - Name: "test remove", - }, - ) - postablePipelines.Pipelines[0].Config[0].Output = "remove" - - testbed.PostPipelinesToQS(postablePipelines) - getPipelinesResp = testbed.GetPipelinesFromQS() - - require.Equal(2, len(getPipelinesResp.History)) - require.Equal(opamptypes.DeployInitiated, getPipelinesResp.History[0].DeployStatus) - require.Equal(opamptypes.DeployStatusUnknown, getPipelinesResp.History[1].DeployStatus) -} - -func TestLogPipelinesValidation(t *testing.T) { - validPipelineFilterSet := &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "method", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "GET", - }, - }, - } - - testCases := []struct { - Name string - Pipeline pipelinetypes.PostablePipeline - ExpectedResponseStatusCode int - }{ - { - Name: "Valid Pipeline", - Pipeline: pipelinetypes.PostablePipeline{ - OrderID: 1, - Name: "pipeline 1", - Alias: "pipeline1", - Enabled: true, - Filter: validPipelineFilterSet, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - ExpectedResponseStatusCode: 200, - }, - { - Name: "Invalid orderId", - Pipeline: pipelinetypes.PostablePipeline{ - OrderID: 0, - Name: "pipeline 1", - Alias: "pipeline1", - Enabled: true, - Filter: validPipelineFilterSet, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - ExpectedResponseStatusCode: 400, - 
}, - { - Name: "Invalid filter", - Pipeline: pipelinetypes.PostablePipeline{ - OrderID: 1, - Name: "pipeline 1", - Alias: "pipeline1", - Enabled: true, - Filter: &v3.FilterSet{}, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - ExpectedResponseStatusCode: 400, - }, - { - Name: "Invalid operator field", - Pipeline: pipelinetypes.PostablePipeline{ - OrderID: 1, - Name: "pipeline 1", - Alias: "pipeline1", - Enabled: true, - Filter: validPipelineFilterSet, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "bad.field", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - ExpectedResponseStatusCode: 400, - }, { - Name: "Invalid from field path", - Pipeline: pipelinetypes.PostablePipeline{ - OrderID: 1, - Name: "pipeline 1", - Alias: "pipeline1", - Enabled: true, - Filter: validPipelineFilterSet, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "move", - Type: "move", - From: `attributes.temp_parsed_body."@l"`, - To: "attributes.test", - Enabled: true, - Name: "test move", - }, - }, - }, - ExpectedResponseStatusCode: 400, - }, - } - - for _, tc := range testCases { - t.Run(tc.Name, func(t *testing.T) { - agentID := valuer.GenerateUUID().String() - testbed := NewLogPipelinesTestBed(t, nil, agentID) - testbed.PostPipelinesToQSExpectingStatusCode( - pipelinetypes.PostablePipelines{ - Pipelines: []pipelinetypes.PostablePipeline{tc.Pipeline}, - }, - tc.ExpectedResponseStatusCode, - ) - }) - } -} - -func TestCanSavePipelinesWithoutConnectedAgents(t *testing.T) { - require := require.New(t) - testbed := NewTestbedWithoutOpamp(t, nil) - - getPipelinesResp := testbed.GetPipelinesFromQS() - require.Equal(0, len(getPipelinesResp.Pipelines)) - require.Equal(0, len(getPipelinesResp.History)) - - postablePipelines := pipelinetypes.PostablePipelines{ - Pipelines: []pipelinetypes.PostablePipeline{ - { - OrderID: 1, - Name: "pipeline1", - Alias: "pipeline1", - Enabled: true, - Filter: &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "method", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "GET", - }, - }, - }, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - }, - } - - testbed.PostPipelinesToQS(postablePipelines) - getPipelinesResp = testbed.GetPipelinesFromQS() - require.Equal(1, len(getPipelinesResp.Pipelines)) - require.Equal(1, len(getPipelinesResp.History)) - -} - -// LogPipelinesTestBed coordinates and mocks components involved in -// configuring log pipelines and provides test helpers. -type LogPipelinesTestBed struct { - t *testing.T - testUser *types.User - apiHandler *app.APIHandler - agentConfMgr *agentConf.Manager - opampServer *opamp.Server - opampClientConn *opamp.MockOpAmpConnection - store sqlstore.SQLStore - userModule user.Module - JWT *authtypes.JWT -} - -// testDB can be injected for sharing a DB across multiple integration testbeds. 
-func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipelinesTestBed { - if sqlStore == nil { - sqlStore = utils.NewQueryServiceDBForTests(t) - } - - ic, err := integrations.NewController(sqlStore) - if err != nil { - t.Fatalf("could not create integrations controller: %v", err) - } - - controller, err := logparsingpipeline.NewLogParsingPipelinesController( - sqlStore, ic.GetPipelinesForInstalledIntegrations, - ) - if err != nil { - t.Fatalf("could not create a logparsingpipelines controller: %v", err) - } - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.Background(), providerSettings, sharder.Config{}) - require.NoError(t, err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlStore), sharder) - notificationManager := nfmanagertest.NewMock() - require.NoError(t, err) - alertmanager, err := signozalertmanager.New(context.Background(), providerSettings, alertmanager.Config{Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, sqlStore, orgGetter, notificationManager) - require.NoError(t, err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - handlers := signoz.NewHandlers(modules, providerSettings) - - apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ - LogsParsingPipelineController: controller, - Signoz: &signoz.SigNoz{ - Modules: modules, - Handlers: handlers, - }, - }) - if err != nil { - t.Fatalf("could not create a new ApiHandler: %v", err) - } - - // organizationModule := implorganization.NewModule(implorganization.NewStore(store)) - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - if apiErr != nil { - t.Fatalf("could not create a test user: %v", apiErr) - } - - // Mock an available opamp agent - require.Nil(t, err, "failed to init opamp model") - - agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{ - Store: sqlStore, - AgentFeatures: []agentConf.AgentFeature{ - apiHandler.LogsParsingPipelineController, - }}) - require.Nil(t, err, "failed to init agentConf") - - return &LogPipelinesTestBed{ - t: t, - testUser: user, - apiHandler: apiHandler, - agentConfMgr: agentConfMgr, - store: sqlStore, - userModule: modules.User, - JWT: jwt, - } -} - -func NewLogPipelinesTestBed(t *testing.T, testDB sqlstore.SQLStore, agentID string) *LogPipelinesTestBed { - testbed := NewTestbedWithoutOpamp(t, testDB) - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.Background(), providerSettings, sharder.Config{}) - require.Nil(t, err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(testbed.store), sharder) - - model.Init(testbed.store, slog.Default(), orgGetter) - - opampServer := opamp.InitializeServer(nil, testbed.agentConfMgr, instrumentationtest.New()) - err = opampServer.Start(opamp.GetAvailableLocalAddress()) - require.Nil(t, err, "failed to start opamp server") - - t.Cleanup(func() { - opampServer.Stop() - }) - - opampClientConnection := &opamp.MockOpAmpConnection{} - opampServer.OnMessage( - context.Background(), - opampClientConnection, - &protobufs.AgentToServer{ - InstanceUid: []byte(agentID), - EffectiveConfig: &protobufs.EffectiveConfig{ - ConfigMap: newInitialAgentConfigMap(), - }, - }, - ) - - testbed.opampServer = opampServer - 
testbed.opampClientConn = opampClientConnection - - return testbed - -} - -func (tb *LogPipelinesTestBed) PostPipelinesToQSExpectingStatusCode( - postablePipelines pipelinetypes.PostablePipelines, - expectedStatusCode int, -) *logparsingpipeline.PipelinesResponse { - req, err := AuthenticatedRequestForTest( - tb.userModule, tb.testUser, "/api/v1/logs/pipelines", postablePipelines, - ) - if err != nil { - tb.t.Fatalf("couldn't create authenticated test request: %v", err) - } - - respWriter := httptest.NewRecorder() - - ctx, err := tb.JWT.ContextFromRequest(req.Context(), req.Header.Get("Authorization")) - if err != nil { - tb.t.Fatalf("couldn't get jwt from request: %v", err) - } - - req = req.WithContext(ctx) - tb.apiHandler.CreateLogsPipeline(respWriter, req) - - response := respWriter.Result() - responseBody, err := io.ReadAll(response.Body) - if err != nil { - tb.t.Fatalf("couldn't read response body received from posting pipelines to QS: %v", err) - } - - if response.StatusCode != expectedStatusCode { - tb.t.Fatalf( - "Received response status %d after posting log pipelines. Expected: %d\nResponse body:%s\n", - response.StatusCode, expectedStatusCode, string(responseBody), - ) - } - - var result app.ApiResponse - err = json.Unmarshal(responseBody, &result) - if err != nil { - tb.t.Fatalf( - "Could not unmarshal QS response into an ApiResponse.\nResponse body: %s", - responseBody, - ) - } - - pipelinesResp, err := unmarshalPipelinesResponse(&result) - if err != nil { - tb.t.Fatalf("could not extract PipelinesResponse from apiResponse: %v", err) - } - return pipelinesResp -} - -func (tb *LogPipelinesTestBed) PostPipelinesToQS( - postablePipelines pipelinetypes.PostablePipelines, -) *logparsingpipeline.PipelinesResponse { - return tb.PostPipelinesToQSExpectingStatusCode( - postablePipelines, 200, - ) -} - -func (tb *LogPipelinesTestBed) GetPipelinesFromQS() *logparsingpipeline.PipelinesResponse { - req, err := AuthenticatedRequestForTest( - tb.userModule, tb.testUser, "/api/v1/logs/pipelines/latest", nil, - ) - if err != nil { - tb.t.Fatalf("couldn't create authenticated test request: %v", err) - } - req = mux.SetURLVars(req, map[string]string{ - "version": "latest", - }) - - respWriter := httptest.NewRecorder() - tb.apiHandler.ListLogsPipelinesHandler(respWriter, req) - response := respWriter.Result() - responseBody, err := io.ReadAll(response.Body) - if err != nil { - tb.t.Fatalf("couldn't read response body received from QS: %v", err) - } - - if response.StatusCode != 200 { - tb.t.Fatalf( - "could not list log parsing pipelines. 
status: %d, body: %v\n%s", - response.StatusCode, string(responseBody), string(debug.Stack()), - ) - } - - var result app.ApiResponse - err = json.Unmarshal(responseBody, &result) - if err != nil { - tb.t.Fatalf( - "Could not unmarshal QS response into an ApiResponse.\nResponse body: %s", - string(responseBody), - ) - } - pipelinesResp, err := unmarshalPipelinesResponse(&result) - if err != nil { - tb.t.Fatalf("could not extract PipelinesResponse from apiResponse: %v", err) - } - return pipelinesResp -} - -func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient( - pipelines []pipelinetypes.GettablePipeline, -) { - lastMsg := tb.opampClientConn.LatestMsgFromServer() - assertPipelinesRecommendedInRemoteConfig( - tb.t, lastMsg, pipelines, - ) -} - -func assertPipelinesRecommendedInRemoteConfig( - t *testing.T, - msg *protobufs.ServerToAgent, - gettablePipelines []pipelinetypes.GettablePipeline, -) { - collectorConfigFiles := msg.RemoteConfig.Config.ConfigMap - require.Equal( - t, len(collectorConfigFiles), 1, - "otel config sent to client is expected to contain atleast 1 file", - ) - - collectorConfigYaml := maps.Values(collectorConfigFiles)[0].Body - collectorConfSentToClient, err := yaml.Parser().Unmarshal(collectorConfigYaml) - if err != nil { - t.Fatalf("could not unmarshal config file sent to opamp client: %v", err) - } - - // Each pipeline is expected to become its own processor - // in the logs service in otel collector config. - collectorConfSvcs := collectorConfSentToClient["service"].(map[string]interface{}) - collectorConfLogsSvc := collectorConfSvcs["pipelines"].(map[string]interface{})["logs"].(map[string]interface{}) - collectorConfLogsSvcProcessorNames := collectorConfLogsSvc["processors"].([]interface{}) - collectorConfLogsPipelineProcNames := []string{} - for _, procNameVal := range collectorConfLogsSvcProcessorNames { - procName := procNameVal.(string) - if strings.HasPrefix(procName, constants.LogsPPLPfx) { - collectorConfLogsPipelineProcNames = append( - collectorConfLogsPipelineProcNames, - procName, - ) - } - } - - _, expectedLogProcessorNames, err := logparsingpipeline.PreparePipelineProcessor(gettablePipelines) - require.NoError(t, err) - require.Equal( - t, expectedLogProcessorNames, collectorConfLogsPipelineProcNames, - "config sent to opamp client doesn't contain expected log pipelines", - ) - - collectorConfProcessors := collectorConfSentToClient["processors"].(map[string]interface{}) - for _, procName := range expectedLogProcessorNames { - pipelineProcessorInConf, procExists := collectorConfProcessors[procName] - require.True(t, procExists, fmt.Sprintf( - "%s processor not found in config sent to opamp client", procName, - )) - - // Validate that filter expr in collector conf is as expected. 
-
-		// extract expr present in collector conf processor
-		pipelineProcOps := pipelineProcessorInConf.(map[string]interface{})["operators"].([]interface{})
-
-		routerOpIdx := slices.IndexFunc(
-			pipelineProcOps,
-			func(op interface{}) bool { return op.(map[string]interface{})["id"] == "router_signoz" },
-		)
-		require.GreaterOrEqual(t, routerOpIdx, 0)
-		routerOproutes := pipelineProcOps[routerOpIdx].(map[string]interface{})["routes"].([]interface{})
-		pipelineFilterExpr := routerOproutes[0].(map[string]interface{})["expr"].(string)
-
-		// find logparsingpipeline.Pipeline whose processor is being validated here
-		pipelineIdx := slices.IndexFunc(
-			gettablePipelines, func(p pipelinetypes.GettablePipeline) bool {
-				return logparsingpipeline.CollectorConfProcessorName(p) == procName
-			},
-		)
-		require.GreaterOrEqual(t, pipelineIdx, 0)
-		expectedExpr, err := queryBuilderToExpr.Parse(gettablePipelines[pipelineIdx].Filter)
-		require.Nil(t, err)
-		require.Equal(t, expectedExpr, pipelineFilterExpr)
-	}
-}
-
-func (tb *LogPipelinesTestBed) simulateOpampClientAcknowledgementForLatestConfig(agentID string) {
-	lastMsg := tb.opampClientConn.LatestMsgFromServer()
-	tb.opampServer.OnMessage(context.Background(), tb.opampClientConn, &protobufs.AgentToServer{
-		InstanceUid: []byte(agentID),
-		EffectiveConfig: &protobufs.EffectiveConfig{
-			ConfigMap: lastMsg.RemoteConfig.Config,
-		},
-		RemoteConfigStatus: &protobufs.RemoteConfigStatus{
-			Status:               protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED,
-			LastRemoteConfigHash: lastMsg.RemoteConfig.ConfigHash,
-		},
-	})
-}
-
-func (tb *LogPipelinesTestBed) assertNewAgentGetsPipelinesOnConnection(
-	pipelines []pipelinetypes.GettablePipeline,
-) {
-	newAgentConn := &opamp.MockOpAmpConnection{}
-	agentIDUUID := valuer.GenerateUUID()
-	agentID, err := agentIDUUID.MarshalBinary()
-	require.Nil(tb.t, err)
-	tb.opampServer.OnMessage(
-		context.Background(),
-		newAgentConn,
-		&protobufs.AgentToServer{
-			InstanceUid: agentID,
-			EffectiveConfig: &protobufs.EffectiveConfig{
-				ConfigMap: newInitialAgentConfigMap(),
-			},
-		},
-	)
-	latestMsgFromServer := newAgentConn.LatestMsgFromServer()
-	require.NotNil(tb.t, latestMsgFromServer)
-	assertPipelinesRecommendedInRemoteConfig(
-		tb.t, latestMsgFromServer, pipelines,
-	)
-}
-
-func unmarshalPipelinesResponse(apiResponse *app.ApiResponse) (
-	*logparsingpipeline.PipelinesResponse,
-	error,
-) {
-	dataJson, err := json.Marshal(apiResponse.Data)
-	if err != nil {
-		return nil, errors.Wrap(err, "could not marshal apiResponse.Data")
-	}
-	var pipelinesResp logparsingpipeline.PipelinesResponse
-	err = json.Unmarshal(dataJson, &pipelinesResp)
-	if err != nil {
-		return nil, errors.Wrap(err, "could not unmarshal apiResponse.Data json into PipelinesResponse")
-	}
-
-	return &pipelinesResp, nil
-}
-
-func assertPipelinesResponseMatchesPostedPipelines(
-	t *testing.T,
-	postablePipelines pipelinetypes.PostablePipelines,
-	pipelinesResp *logparsingpipeline.PipelinesResponse,
-) {
-	require.Equal(
-		t, len(postablePipelines.Pipelines), len(pipelinesResp.Pipelines),
-		"length mismatch between posted pipelines and pipelines in response",
-	)
-	for i, pipeline := range pipelinesResp.Pipelines {
-		postable := postablePipelines.Pipelines[i]
-		require.Equal(t, postable.Name, pipeline.Name, "pipeline.Name mismatch")
-		require.Equal(t, postable.OrderID, pipeline.OrderID, "pipeline.OrderId mismatch")
-		require.Equal(t, postable.Enabled, pipeline.Enabled, "pipeline.Enabled mismatch")
-		require.Equal(t, postable.Config, pipeline.Config,
"pipeline.Config mismatch") - } -} - -func newInitialAgentConfigMap() *protobufs.AgentConfigMap { - return &protobufs.AgentConfigMap{ - ConfigMap: map[string]*protobufs.AgentConfigFile{ - "otel-collector.yaml": { - Body: []byte(` - receivers: - otlp: - protocols: - grpc: - endpoint: 0.0.0.0:4317 - http: - endpoint: 0.0.0.0:4318 - processors: - batch: - send_batch_size: 10000 - send_batch_max_size: 11000 - timeout: 10s - exporters: - otlp: - endpoint: otelcol2:4317 - service: - pipelines: - logs: - receivers: [otlp] - processors: [batch] - exporters: [otlp] - `), - ContentType: "text/yaml", - }, - }, - } -} diff --git a/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go b/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go deleted file mode 100644 index 7d78b5a0b8..0000000000 --- a/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go +++ /dev/null @@ -1,605 +0,0 @@ -package tests - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "strings" - "testing" - "time" - - "github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest" - "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager" - "github.com/SigNoz/signoz/pkg/analytics/analyticstest" - "github.com/SigNoz/signoz/pkg/emailing/emailingtest" - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/sharder/noopsharder" - "github.com/SigNoz/signoz/pkg/types/authtypes" - - "github.com/SigNoz/signoz/pkg/http/middleware" - "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/signoz" - - "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/query-service/app" - "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" - "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types" - "github.com/google/uuid" - mockhouse "github.com/srikanthccv/ClickHouse-go-mock" - "github.com/stretchr/testify/require" -) - -func TestAWSIntegrationAccountLifecycle(t *testing.T) { - // Test for happy path of connecting and managing AWS integration accounts - - require := require.New(t) - testbed := NewCloudIntegrationsTestBed(t, nil) - - accountsListResp := testbed.GetConnectedAccountsListFromQS("aws") - require.Equal(len(accountsListResp.Accounts), 0, - "No accounts should be connected at the beginning", - ) - - // Should be able to generate a connection url from UI - initializing an integration account - testAccountConfig := types.AccountConfig{ - EnabledRegions: []string{"us-east-1", "us-east-2"}, - } - connectionUrlResp := testbed.GenerateConnectionUrlFromQS( - "aws", cloudintegrations.GenerateConnectionUrlRequest{ - AgentConfig: cloudintegrations.SigNozAgentConfig{ - Region: "us-east-1", - }, - AccountConfig: testAccountConfig, - }) - testAccountId := connectionUrlResp.AccountId - require.NotEmpty(testAccountId) - connectionUrl := connectionUrlResp.ConnectionUrl - require.NotEmpty(connectionUrl) - - // Should be able to poll for account connection status from the UI - accountStatusResp := testbed.GetAccountStatusFromQS("aws", testAccountId) - require.Equal(testAccountId, accountStatusResp.Id) - require.Nil(accountStatusResp.Status.Integration.LastHeartbeatTsMillis) - require.Nil(accountStatusResp.CloudAccountId) - - // The unconnected account 
should not show up in connected accounts list yet - accountsListResp1 := testbed.GetConnectedAccountsListFromQS("aws") - require.Equal(0, len(accountsListResp1.Accounts)) - - // An agent installed in user's AWS account should be able to check in for the new integration account - tsMillisBeforeAgentCheckIn := time.Now().UnixMilli() - testAWSAccountId := "4563215233" - agentCheckInResp := testbed.CheckInAsAgentWithQS( - "aws", cloudintegrations.AgentCheckInRequest{ - ID: testAccountId, - AccountID: testAWSAccountId, - }, - ) - require.Equal(testAccountId, agentCheckInResp.AccountId) - require.Equal(testAWSAccountId, agentCheckInResp.CloudAccountId) - require.Nil(agentCheckInResp.RemovedAt) - - // Polling for connection status from UI should now return latest status - accountStatusResp1 := testbed.GetAccountStatusFromQS("aws", testAccountId) - require.Equal(testAccountId, accountStatusResp1.Id) - require.NotNil(accountStatusResp1.CloudAccountId) - require.Equal(testAWSAccountId, *accountStatusResp1.CloudAccountId) - require.NotNil(accountStatusResp1.Status.Integration.LastHeartbeatTsMillis) - require.LessOrEqual( - tsMillisBeforeAgentCheckIn, - *accountStatusResp1.Status.Integration.LastHeartbeatTsMillis, - ) - - // The account should now show up in list of connected accounts. - accountsListResp2 := testbed.GetConnectedAccountsListFromQS("aws") - require.Equal(len(accountsListResp2.Accounts), 1) - require.Equal(testAccountId, accountsListResp2.Accounts[0].Id) - require.Equal(testAWSAccountId, accountsListResp2.Accounts[0].CloudAccountId) - - // Should be able to update account config from UI - testAccountConfig2 := types.AccountConfig{ - EnabledRegions: []string{"us-east-2", "us-west-1"}, - } - latestAccount := testbed.UpdateAccountConfigWithQS( - "aws", testAccountId, testAccountConfig2, - ) - require.Equal(testAccountId, latestAccount.ID.StringValue()) - require.Equal(testAccountConfig2, *latestAccount.Config) - - // The agent should now receive latest account config. - agentCheckInResp1 := testbed.CheckInAsAgentWithQS( - "aws", cloudintegrations.AgentCheckInRequest{ - ID: testAccountId, - AccountID: testAWSAccountId, - }, - ) - require.Equal(testAccountId, agentCheckInResp1.AccountId) - require.Equal(testAWSAccountId, agentCheckInResp1.CloudAccountId) - require.Nil(agentCheckInResp1.RemovedAt) - - // Should be able to disconnect/remove account from UI. - tsBeforeDisconnect := time.Now() - latestAccount = testbed.DisconnectAccountWithQS("aws", testAccountId) - require.Equal(testAccountId, latestAccount.ID.StringValue()) - require.LessOrEqual(tsBeforeDisconnect, *latestAccount.RemovedAt) - - // The agent should receive the disconnected status in account config post disconnection - agentCheckInResp2 := testbed.CheckInAsAgentWithQS( - "aws", cloudintegrations.AgentCheckInRequest{ - ID: testAccountId, - AccountID: testAWSAccountId, - }, - ) - require.Equal(testAccountId, agentCheckInResp2.AccountId) - require.Equal(testAWSAccountId, agentCheckInResp2.CloudAccountId) - require.LessOrEqual(tsBeforeDisconnect, *agentCheckInResp2.RemovedAt) -} - -func TestAWSIntegrationServices(t *testing.T) { - require := require.New(t) - - testbed := NewCloudIntegrationsTestBed(t, nil) - - // should be able to list available cloud services. - svcListResp := testbed.GetServicesFromQS("aws", nil) - require.Greater(len(svcListResp.Services), 0) - for _, svc := range svcListResp.Services { - require.NotEmpty(svc.Id) - require.Nil(svc.Config) - } - - // should be able to get details of a particular service. 
- svcId := svcListResp.Services[0].Id - svcDetailResp := testbed.GetServiceDetailFromQS("aws", svcId, nil) - require.Equal(svcId, svcDetailResp.Id) - require.NotEmpty(svcDetailResp.Overview) - require.Nil(svcDetailResp.Config) - require.Nil(svcDetailResp.ConnectionStatus) - - // should be able to configure a service in the ctx of a connected account - - // create a connected account - testAccountId := uuid.NewString() - testAWSAccountId := "389389489489" - testbed.CheckInAsAgentWithQS( - "aws", cloudintegrations.AgentCheckInRequest{ - ID: testAccountId, - AccountID: testAWSAccountId, - }, - ) - - testSvcConfig := types.CloudServiceConfig{ - Metrics: &types.CloudServiceMetricsConfig{ - Enabled: true, - }, - } - updateSvcConfigResp := testbed.UpdateServiceConfigWithQS("aws", svcId, cloudintegrations.UpdateServiceConfigRequest{ - CloudAccountId: testAWSAccountId, - Config: testSvcConfig, - }) - require.Equal(svcId, updateSvcConfigResp.Id) - require.Equal(testSvcConfig, updateSvcConfigResp.Config) - - // service list should include config when queried in the ctx of an account - svcListResp = testbed.GetServicesFromQS("aws", &testAWSAccountId) - require.Greater(len(svcListResp.Services), 0) - for _, svc := range svcListResp.Services { - if svc.Id == svcId { - require.NotNil(svc.Config) - require.Equal(testSvcConfig, *svc.Config) - } - } - - // service detail should include config and status info when - // queried in the ctx of an account - svcDetailResp = testbed.GetServiceDetailFromQS("aws", svcId, &testAWSAccountId) - require.Equal(svcId, svcDetailResp.Id) - require.NotNil(svcDetailResp.Config) - require.Equal(testSvcConfig, *svcDetailResp.Config) - -} - -func TestConfigReturnedWhenAgentChecksIn(t *testing.T) { - require := require.New(t) - - testbed := NewCloudIntegrationsTestBed(t, nil) - - // configure a connected account - testAccountConfig := types.AccountConfig{ - EnabledRegions: []string{"us-east-1", "us-east-2"}, - } - connectionUrlResp := testbed.GenerateConnectionUrlFromQS( - "aws", cloudintegrations.GenerateConnectionUrlRequest{ - AgentConfig: cloudintegrations.SigNozAgentConfig{ - Region: "us-east-1", - SigNozAPIKey: "test-api-key", - }, - AccountConfig: testAccountConfig, - }, - ) - testAccountId := connectionUrlResp.AccountId - require.NotEmpty(testAccountId) - require.NotEmpty(connectionUrlResp.ConnectionUrl) - - testAWSAccountId := "389389489489" - checkinResp := testbed.CheckInAsAgentWithQS( - "aws", cloudintegrations.AgentCheckInRequest{ - ID: testAccountId, - AccountID: testAWSAccountId, - }, - ) - - require.Equal(testAccountId, checkinResp.AccountId) - require.Equal(testAWSAccountId, checkinResp.CloudAccountId) - require.Nil(checkinResp.RemovedAt) - require.Equal(testAccountConfig.EnabledRegions, checkinResp.IntegrationConfig.EnabledRegions) - - telemetryCollectionStrategy := checkinResp.IntegrationConfig.TelemetryCollectionStrategy - require.Equal("aws", telemetryCollectionStrategy.Provider) - require.NotNil(telemetryCollectionStrategy.AWSMetrics) - require.Empty(telemetryCollectionStrategy.AWSMetrics.StreamFilters) - require.NotNil(telemetryCollectionStrategy.AWSLogs) - require.Empty(telemetryCollectionStrategy.AWSLogs.Subscriptions) - - // helper - setServiceConfig := func(svcId string, metricsEnabled bool, logsEnabled bool) { - testSvcConfig := types.CloudServiceConfig{} - if metricsEnabled { - testSvcConfig.Metrics = &types.CloudServiceMetricsConfig{ - Enabled: metricsEnabled, - } - } - if logsEnabled { - testSvcConfig.Logs = &types.CloudServiceLogsConfig{ - Enabled: 
logsEnabled, - } - } - - updateSvcConfigResp := testbed.UpdateServiceConfigWithQS("aws", svcId, cloudintegrations.UpdateServiceConfigRequest{ - CloudAccountId: testAWSAccountId, - Config: testSvcConfig, - }) - require.Equal(svcId, updateSvcConfigResp.Id) - require.Equal(testSvcConfig, updateSvcConfigResp.Config) - } - - setServiceConfig("ec2", true, false) - setServiceConfig("rds", true, true) - - checkinResp = testbed.CheckInAsAgentWithQS( - "aws", cloudintegrations.AgentCheckInRequest{ - ID: testAccountId, - AccountID: testAWSAccountId, - }, - ) - - require.Equal(testAccountId, checkinResp.AccountId) - require.Equal(testAWSAccountId, checkinResp.CloudAccountId) - require.Nil(checkinResp.RemovedAt) - - integrationConf := checkinResp.IntegrationConfig - require.Equal(testAccountConfig.EnabledRegions, integrationConf.EnabledRegions) - - telemetryCollectionStrategy = integrationConf.TelemetryCollectionStrategy - require.Equal("aws", telemetryCollectionStrategy.Provider) - require.NotNil(telemetryCollectionStrategy.AWSMetrics) - metricStreamNamespaces := []string{} - for _, f := range telemetryCollectionStrategy.AWSMetrics.StreamFilters { - metricStreamNamespaces = append(metricStreamNamespaces, f.Namespace) - } - require.Equal([]string{"AWS/EC2", "CWAgent", "AWS/RDS"}, metricStreamNamespaces) - - require.NotNil(telemetryCollectionStrategy.AWSLogs) - logGroupPrefixes := []string{} - for _, f := range telemetryCollectionStrategy.AWSLogs.Subscriptions { - logGroupPrefixes = append(logGroupPrefixes, f.LogGroupNamePrefix) - } - require.Equal(1, len(logGroupPrefixes)) - require.True(strings.HasPrefix(logGroupPrefixes[0], "/aws/rds")) - - // change regions and update service configs and validate config changes for agent - testAccountConfig2 := types.AccountConfig{ - EnabledRegions: []string{"us-east-2", "us-west-1"}, - } - latestAccount := testbed.UpdateAccountConfigWithQS( - "aws", testAccountId, testAccountConfig2, - ) - require.Equal(testAccountId, latestAccount.ID.StringValue()) - require.Equal(testAccountConfig2, *latestAccount.Config) - - // disable metrics for one and logs for the other. - // config should be as expected. 
- setServiceConfig("ec2", false, false) - setServiceConfig("rds", true, false) - - checkinResp = testbed.CheckInAsAgentWithQS( - "aws", cloudintegrations.AgentCheckInRequest{ - ID: testAccountId, - AccountID: testAWSAccountId, - }, - ) - require.Equal(testAccountId, checkinResp.AccountId) - require.Equal(testAWSAccountId, checkinResp.CloudAccountId) - require.Nil(checkinResp.RemovedAt) - integrationConf = checkinResp.IntegrationConfig - require.Equal(testAccountConfig2.EnabledRegions, integrationConf.EnabledRegions) - - telemetryCollectionStrategy = integrationConf.TelemetryCollectionStrategy - require.Equal("aws", telemetryCollectionStrategy.Provider) - require.NotNil(telemetryCollectionStrategy.AWSMetrics) - metricStreamNamespaces = []string{} - for _, f := range telemetryCollectionStrategy.AWSMetrics.StreamFilters { - metricStreamNamespaces = append(metricStreamNamespaces, f.Namespace) - } - require.Equal([]string{"AWS/RDS"}, metricStreamNamespaces) - - require.NotNil(telemetryCollectionStrategy.AWSLogs) - logGroupPrefixes = []string{} - for _, f := range telemetryCollectionStrategy.AWSLogs.Subscriptions { - logGroupPrefixes = append(logGroupPrefixes, f.LogGroupNamePrefix) - } - require.Equal(0, len(logGroupPrefixes)) - -} - -type CloudIntegrationsTestBed struct { - t *testing.T - testUser *types.User - qsHttpHandler http.Handler - mockClickhouse mockhouse.ClickConnMockCommon - userModule user.Module -} - -// testDB can be injected for sharing a DB across multiple integration testbeds. -func NewCloudIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *CloudIntegrationsTestBed { - if testDB == nil { - testDB = utils.NewQueryServiceDBForTests(t) - } - - controller, err := cloudintegrations.NewController(testDB) - if err != nil { - t.Fatalf("could not create cloud integrations controller: %v", err) - } - - reader, mockClickhouse := NewMockClickhouseReader(t, testDB) - mockClickhouse.MatchExpectationsInOrder(false) - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.TODO(), providerSettings, sharder.Config{}) - require.NoError(t, err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(testDB), sharder) - nfmanager := nfmanagertest.NewMock() - require.NoError(t, err) - alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, testDB, orgGetter, nfmanager) - require.NoError(t, err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - handlers := signoz.NewHandlers(modules, providerSettings) - - apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ - Reader: reader, - CloudIntegrationsController: controller, - Signoz: &signoz.SigNoz{ - Modules: modules, - Handlers: handlers, - }, - }) - if err != nil { - t.Fatalf("could not create a new ApiHandler: %v", err) - } - - router := app.NewRouter() - router.Use(middleware.NewAuth(jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, sharder, instrumentationtest.New().Logger()).Wrap) - am := middleware.NewAuthZ(instrumentationtest.New().Logger()) - apiHandler.RegisterRoutes(router, am) - apiHandler.RegisterCloudIntegrationsRoutes(router, am) - - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - if apiErr != nil { - 
t.Fatalf("could not create a test user: %v", apiErr) - } - - return &CloudIntegrationsTestBed{ - t: t, - testUser: user, - qsHttpHandler: router, - mockClickhouse: mockClickhouse, - userModule: modules.User, - } -} - -func (tb *CloudIntegrationsTestBed) GetConnectedAccountsListFromQS( - cloudProvider string, -) *cloudintegrations.ConnectedAccountsListResponse { - respDataJson := tb.RequestQS(fmt.Sprintf("/api/v1/cloud-integrations/%s/accounts", cloudProvider), nil) - - var resp cloudintegrations.ConnectedAccountsListResponse - err := json.Unmarshal(respDataJson, &resp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into AccountsListResponse") - } - - return &resp -} - -func (tb *CloudIntegrationsTestBed) GenerateConnectionUrlFromQS( - cloudProvider string, req cloudintegrations.GenerateConnectionUrlRequest, -) *cloudintegrations.GenerateConnectionUrlResponse { - respDataJson := tb.RequestQS( - fmt.Sprintf("/api/v1/cloud-integrations/%s/accounts/generate-connection-url", cloudProvider), - req, - ) - - var resp cloudintegrations.GenerateConnectionUrlResponse - err := json.Unmarshal(respDataJson, &resp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into map[string]any") - } - - return &resp -} - -func (tb *CloudIntegrationsTestBed) GetAccountStatusFromQS( - cloudProvider string, accountId string, -) *cloudintegrations.AccountStatusResponse { - respDataJson := tb.RequestQS(fmt.Sprintf( - "/api/v1/cloud-integrations/%s/accounts/%s/status", - cloudProvider, accountId, - ), nil) - - var resp cloudintegrations.AccountStatusResponse - err := json.Unmarshal(respDataJson, &resp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into AccountStatusResponse") - } - - return &resp -} - -func (tb *CloudIntegrationsTestBed) CheckInAsAgentWithQS( - cloudProvider string, req cloudintegrations.AgentCheckInRequest, -) *cloudintegrations.AgentCheckInResponse { - respDataJson := tb.RequestQS( - fmt.Sprintf("/api/v1/cloud-integrations/%s/agent-check-in", cloudProvider), req, - ) - - var resp cloudintegrations.AgentCheckInResponse - err := json.Unmarshal(respDataJson, &resp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into AgentCheckInResponse") - } - - return &resp -} - -func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS( - cloudProvider string, accountId string, newConfig types.AccountConfig, -) *types.CloudIntegration { - respDataJson := tb.RequestQS( - fmt.Sprintf( - "/api/v1/cloud-integrations/%s/accounts/%s/config", - cloudProvider, accountId, - ), cloudintegrations.UpdateAccountConfigRequest{ - Config: newConfig, - }, - ) - - var resp types.CloudIntegration - err := json.Unmarshal(respDataJson, &resp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account") - } - - return &resp -} - -func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS( - cloudProvider string, accountId string, -) *types.CloudIntegration { - respDataJson := tb.RequestQS( - fmt.Sprintf( - "/api/v1/cloud-integrations/%s/accounts/%s/disconnect", - cloudProvider, accountId, - ), map[string]any{}, - ) - - var resp types.CloudIntegration - err := json.Unmarshal(respDataJson, &resp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account") - } - - return &resp -} - -func (tb *CloudIntegrationsTestBed) GetServicesFromQS( - cloudProvider string, cloudAccountId *string, -) *cloudintegrations.ListServicesResponse { - path := 
fmt.Sprintf("/api/v1/cloud-integrations/%s/services", cloudProvider) - if cloudAccountId != nil { - path = fmt.Sprintf("%s?cloud_account_id=%s", path, *cloudAccountId) - } - - return RequestQSAndParseResp[cloudintegrations.ListServicesResponse]( - tb, path, nil, - ) -} - -func (tb *CloudIntegrationsTestBed) GetServiceDetailFromQS( - cloudProvider string, serviceId string, cloudAccountId *string, -) *cloudintegrations.ServiceDetails { - path := fmt.Sprintf("/api/v1/cloud-integrations/%s/services/%s", cloudProvider, serviceId) - if cloudAccountId != nil { - path = fmt.Sprintf("%s?cloud_account_id=%s", path, *cloudAccountId) - } - - // add mock expectations for connection status queries - metricCols := []mockhouse.ColumnType{} - metricCols = append(metricCols, mockhouse.ColumnType{Type: "String", Name: "metric_name"}) - metricCols = append(metricCols, mockhouse.ColumnType{Type: "String", Name: "labels"}) - metricCols = append(metricCols, mockhouse.ColumnType{Type: "Int64", Name: "unix_milli"}) - tb.mockClickhouse.ExpectQuery( - `SELECT.*from.*signoz_metrics.*`, - ).WillReturnRows(mockhouse.NewRows(metricCols, [][]any{})) - - return RequestQSAndParseResp[cloudintegrations.ServiceDetails]( - tb, path, nil, - ) -} -func (tb *CloudIntegrationsTestBed) UpdateServiceConfigWithQS( - cloudProvider string, serviceId string, req any, -) *cloudintegrations.UpdateServiceConfigResponse { - path := fmt.Sprintf("/api/v1/cloud-integrations/%s/services/%s/config", cloudProvider, serviceId) - - return RequestQSAndParseResp[cloudintegrations.UpdateServiceConfigResponse]( - tb, path, req, - ) -} - -func (tb *CloudIntegrationsTestBed) RequestQS( - path string, - postData interface{}, -) (responseDataJson []byte) { - req, err := AuthenticatedRequestForTest( - tb.userModule, tb.testUser, path, postData, - ) - if err != nil { - tb.t.Fatalf("couldn't create authenticated test request: %v", err) - } - - result, err := HandleTestRequest(tb.qsHttpHandler, req, 200) - if err != nil { - tb.t.Fatalf("test request failed: %v", err) - } - - dataJson, err := json.Marshal(result.Data) - if err != nil { - tb.t.Fatalf("could not marshal apiResponse.Data: %v", err) - } - return dataJson -} - -func RequestQSAndParseResp[ResponseType any]( - tb *CloudIntegrationsTestBed, - path string, - postData interface{}, -) *ResponseType { - respDataJson := tb.RequestQS(path, postData) - - var resp ResponseType - - err := json.Unmarshal(respDataJson, &resp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into %T: %v", resp, err) - } - - return &resp -} diff --git a/pkg/query-service/tests/integration/signoz_integrations_test.go b/pkg/query-service/tests/integration/signoz_integrations_test.go deleted file mode 100644 index f42e42e198..0000000000 --- a/pkg/query-service/tests/integration/signoz_integrations_test.go +++ /dev/null @@ -1,663 +0,0 @@ -package tests - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "slices" - "testing" - "time" - - "github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver" - "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest" - "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager" - "github.com/SigNoz/signoz/pkg/analytics/analyticstest" - "github.com/SigNoz/signoz/pkg/emailing/emailingtest" - "github.com/SigNoz/signoz/pkg/http/middleware" - "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - 
"github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/query-service/app" - "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" - "github.com/SigNoz/signoz/pkg/query-service/app/integrations" - "github.com/SigNoz/signoz/pkg/query-service/model" - v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" - "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/sharder" - "github.com/SigNoz/signoz/pkg/sharder/noopsharder" - "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/SigNoz/signoz/pkg/types/dashboardtypes" - "github.com/SigNoz/signoz/pkg/types/pipelinetypes" - "github.com/SigNoz/signoz/pkg/valuer" - mockhouse "github.com/srikanthccv/ClickHouse-go-mock" - "github.com/stretchr/testify/require" -) - -// Higher level tests for UI facing APIs - -func TestSignozIntegrationLifeCycle(t *testing.T) { - require := require.New(t) - testbed := NewIntegrationsTestBed(t, nil) - - merr := utils.CreateTestOrg(t, testbed.store) - require.NoError(merr) - - installedResp := testbed.GetInstalledIntegrationsFromQS() - require.Equal( - len(installedResp.Integrations), 0, - "no integrations should be installed at the beginning", - ) - - availableResp := testbed.GetAvailableIntegrationsFromQS() - availableIntegrations := availableResp.Integrations - require.Greater( - len(availableIntegrations), 0, - "some integrations should come bundled with SigNoz", - ) - - // Should be able to install integration - require.False(availableIntegrations[0].IsInstalled) - testbed.RequestQSToInstallIntegration( - availableIntegrations[0].Id, map[string]interface{}{}, - ) - - ii := testbed.GetIntegrationDetailsFromQS(availableIntegrations[0].Id) - require.Equal(ii.Id, availableIntegrations[0].Id) - require.NotNil(ii.Installation) - - installedResp = testbed.GetInstalledIntegrationsFromQS() - installedIntegrations := installedResp.Integrations - require.Equal(len(installedIntegrations), 1) - require.Equal(installedIntegrations[0].Id, availableIntegrations[0].Id) - - availableResp = testbed.GetAvailableIntegrationsFromQS() - availableIntegrations = availableResp.Integrations - require.Greater(len(availableIntegrations), 0) - - // Integration connection status should get updated after signal data has been received. 
- testbed.mockLogQueryResponse([]model.SignozLog{}) - testbed.mockMetricStatusQueryResponse(nil) - connectionStatus := testbed.GetIntegrationConnectionStatus(ii.Id) - require.NotNil(connectionStatus) - require.Nil(connectionStatus.Logs) - require.Nil(connectionStatus.Metrics) - - testLog := makeTestSignozLog("test log body", map[string]interface{}{ - "source": "nginx", - }) - testbed.mockLogQueryResponse([]model.SignozLog{testLog}) - - testMetricName := ii.ConnectionTests.Metrics[0] - testMetricLastReceivedTs := time.Now().UnixMilli() - testbed.mockMetricStatusQueryResponse(&model.MetricStatus{ - MetricName: testMetricName, - LastReceivedTsMillis: testMetricLastReceivedTs, - }) - - connectionStatus = testbed.GetIntegrationConnectionStatus(ii.Id) - require.NotNil(connectionStatus) - require.NotNil(connectionStatus.Logs) - require.Equal(connectionStatus.Logs.LastReceivedTsMillis, int64(testLog.Timestamp/1000000)) - require.NotNil(connectionStatus.Metrics) - require.Equal(connectionStatus.Metrics.LastReceivedTsMillis, testMetricLastReceivedTs) - - // Should be able to uninstall integration - require.True(availableIntegrations[0].IsInstalled) - testbed.RequestQSToUninstallIntegration( - availableIntegrations[0].Id, - ) - - ii = testbed.GetIntegrationDetailsFromQS(availableIntegrations[0].Id) - require.Equal(ii.Id, availableIntegrations[0].Id) - require.Nil(ii.Installation) - - installedResp = testbed.GetInstalledIntegrationsFromQS() - installedIntegrations = installedResp.Integrations - require.Equal(len(installedIntegrations), 0) - - availableResp = testbed.GetAvailableIntegrationsFromQS() - availableIntegrations = availableResp.Integrations - require.Greater(len(availableIntegrations), 0) - require.False(availableIntegrations[0].IsInstalled) -} - -func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) { - require := require.New(t) - - testDB := utils.NewQueryServiceDBForTests(t) - utils.CreateTestOrg(t, testDB) - - agentID := valuer.GenerateUUID().String() - - integrationsTB := NewIntegrationsTestBed(t, testDB) - pipelinesTB := NewLogPipelinesTestBed(t, testDB, agentID) - - availableIntegrationsResp := integrationsTB.GetAvailableIntegrationsFromQS() - availableIntegrations := availableIntegrationsResp.Integrations - require.Greater( - len(availableIntegrations), 0, - "some integrations should come bundled with SigNoz", - ) - - getPipelinesResp := pipelinesTB.GetPipelinesFromQS() - require.Equal( - 0, len(getPipelinesResp.Pipelines), - "There should be no pipelines at the start", - ) - - // Find an available integration that contains a log pipeline - var testAvailableIntegration *integrations.IntegrationsListItem - for _, ai := range availableIntegrations { - details := integrationsTB.GetIntegrationDetailsFromQS(ai.Id) - require.NotNil(details) - if len(details.Assets.Logs.Pipelines) > 0 { - testAvailableIntegration = &ai - break - } - } - - if testAvailableIntegration == nil { - // None of the built in integrations include a pipeline right now. 
- return - } - - // Installing an integration should add its pipelines to pipelines list - require.NotNil(testAvailableIntegration) - require.False(testAvailableIntegration.IsInstalled) - integrationsTB.RequestQSToInstallIntegration( - testAvailableIntegration.Id, map[string]interface{}{}, - ) - - testIntegration := integrationsTB.GetIntegrationDetailsFromQS(testAvailableIntegration.Id) - require.NotNil(testIntegration.Installation) - testIntegrationPipelines := testIntegration.Assets.Logs.Pipelines - require.Greater( - len(testIntegrationPipelines), 0, - "test integration expected to have a pipeline", - ) - - getPipelinesResp = pipelinesTB.GetPipelinesFromQS() - require.Equal( - len(testIntegrationPipelines), len(getPipelinesResp.Pipelines), - "Pipelines for installed integrations should appear in pipelines list", - ) - lastPipeline := getPipelinesResp.Pipelines[len(getPipelinesResp.Pipelines)-1] - require.NotNil(integrations.IntegrationIdForPipeline(lastPipeline)) - require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(lastPipeline)) - - pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines) - pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines) - - // After saving a user created pipeline, pipelines response should include - // both user created pipelines and pipelines for installed integrations. - postablePipelines := pipelinetypes.PostablePipelines{ - Pipelines: []pipelinetypes.PostablePipeline{ - { - OrderID: 1, - Name: "pipeline1", - Alias: "pipeline1", - Enabled: true, - Filter: &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "method", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "GET", - }, - }, - }, - Config: []pipelinetypes.PipelineOperator{ - { - OrderId: 1, - ID: "add", - Type: "add", - Field: "attributes.test", - Value: "val", - Enabled: true, - Name: "test add", - }, - }, - }, - }, - } - - pipelinesTB.PostPipelinesToQS(postablePipelines) - - getPipelinesResp = pipelinesTB.GetPipelinesFromQS() - require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines)) - pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines) - pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines) - - // Reordering integration pipelines should be possible. - postable := postableFromPipelines(getPipelinesResp.Pipelines) - slices.Reverse(postable.Pipelines) - for i := range postable.Pipelines { - postable.Pipelines[i].OrderID = i + 1 - } - - pipelinesTB.PostPipelinesToQS(postable) - - getPipelinesResp = pipelinesTB.GetPipelinesFromQS() - firstPipeline := getPipelinesResp.Pipelines[0] - require.NotNil(integrations.IntegrationIdForPipeline(firstPipeline)) - require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(firstPipeline)) - - pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines) - pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines) - - // enabling/disabling integration pipelines should be possible. 
- require.True(firstPipeline.Enabled) - - postable.Pipelines[0].Enabled = false - pipelinesTB.PostPipelinesToQS(postable) - - getPipelinesResp = pipelinesTB.GetPipelinesFromQS() - require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines)) - - firstPipeline = getPipelinesResp.Pipelines[0] - require.NotNil(integrations.IntegrationIdForPipeline(firstPipeline)) - require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(firstPipeline)) - - require.False(firstPipeline.Enabled) - - pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines) - pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines) - - // should not be able to edit integrations pipeline. - require.Greater(len(postable.Pipelines[0].Config), 0) - postable.Pipelines[0].Config = []pipelinetypes.PipelineOperator{} - pipelinesTB.PostPipelinesToQS(postable) - - getPipelinesResp = pipelinesTB.GetPipelinesFromQS() - require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines)) - - firstPipeline = getPipelinesResp.Pipelines[0] - require.NotNil(integrations.IntegrationIdForPipeline(firstPipeline)) - require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(firstPipeline)) - - require.False(firstPipeline.Enabled) - require.Greater(len(firstPipeline.Config), 0) - - // should not be able to delete integrations pipeline - postable.Pipelines = []pipelinetypes.PostablePipeline{postable.Pipelines[1]} - pipelinesTB.PostPipelinesToQS(postable) - - getPipelinesResp = pipelinesTB.GetPipelinesFromQS() - require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines)) - - lastPipeline = getPipelinesResp.Pipelines[1] - require.NotNil(integrations.IntegrationIdForPipeline(lastPipeline)) - require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(lastPipeline)) - - // Uninstalling an integration should remove its pipelines - // from pipelines list in the UI - integrationsTB.RequestQSToUninstallIntegration( - testIntegration.Id, - ) - getPipelinesResp = pipelinesTB.GetPipelinesFromQS() - require.Equal( - 1, len(getPipelinesResp.Pipelines), - "Pipelines for uninstalled integrations should get removed from pipelines list", - ) - pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines) - pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines) -} - -func TestDashboardsForInstalledIntegrationDashboards(t *testing.T) { - require := require.New(t) - - testDB := utils.NewQueryServiceDBForTests(t) - integrationsTB := NewIntegrationsTestBed(t, testDB) - - availableIntegrationsResp := integrationsTB.GetAvailableIntegrationsFromQS() - availableIntegrations := availableIntegrationsResp.Integrations - require.Greater( - len(availableIntegrations), 0, - "some integrations should come bundled with SigNoz", - ) - - dashboards := integrationsTB.GetDashboardsFromQS() - require.Equal( - 0, len(dashboards), - "There should be no dashboards at the start", - ) - - // Find an available integration that contains dashboards - var testAvailableIntegration *integrations.IntegrationsListItem - for _, ai := range availableIntegrations { - details := integrationsTB.GetIntegrationDetailsFromQS(ai.Id) - require.NotNil(details) - if len(details.Assets.Dashboards) > 0 { - testAvailableIntegration = &ai - break - } - } - require.NotNil(testAvailableIntegration) - - // Installing an integration should make its dashboards appear in the dashboard list - require.False(testAvailableIntegration.IsInstalled) - 
tsBeforeInstallation := time.Now().Unix() - integrationsTB.RequestQSToInstallIntegration( - testAvailableIntegration.Id, map[string]interface{}{}, - ) - - testIntegration := integrationsTB.GetIntegrationDetailsFromQS(testAvailableIntegration.Id) - require.NotNil(testIntegration.Installation) - testIntegrationDashboards := testIntegration.Assets.Dashboards - require.Greater( - len(testIntegrationDashboards), 0, - "test integration is expected to have dashboards", - ) - - dashboards = integrationsTB.GetDashboardsFromQS() - require.Equal( - len(testIntegrationDashboards), len(dashboards), - "dashboards for installed integrations should appear in dashboards list", - ) - require.GreaterOrEqual(dashboards[0].CreatedAt.Unix(), tsBeforeInstallation) - require.GreaterOrEqual(dashboards[0].UpdatedAt.Unix(), tsBeforeInstallation) - - // Should be able to get installed integrations dashboard by id - dd := integrationsTB.GetDashboardByIdFromQS(dashboards[0].ID) - require.GreaterOrEqual(dd.CreatedAt.Unix(), tsBeforeInstallation) - require.GreaterOrEqual(dd.UpdatedAt.Unix(), tsBeforeInstallation) - require.Equal(*dd, dashboards[0]) - - // Integration dashboards should not longer appear in dashboard list after uninstallation - integrationsTB.RequestQSToUninstallIntegration( - testIntegration.Id, - ) - dashboards = integrationsTB.GetDashboardsFromQS() - require.Equal( - 0, len(dashboards), - "dashboards for uninstalled integrations should not appear in dashboards list", - ) -} - -type IntegrationsTestBed struct { - t *testing.T - testUser *types.User - qsHttpHandler http.Handler - mockClickhouse mockhouse.ClickConnMockCommon - store sqlstore.SQLStore - userModule user.Module -} - -func (tb *IntegrationsTestBed) GetAvailableIntegrationsFromQS() *integrations.IntegrationsListResponse { - result := tb.RequestQS("/api/v1/integrations", nil) - - dataJson, err := json.Marshal(result.Data) - if err != nil { - tb.t.Fatalf("could not marshal apiResponse.Data: %v", err) - } - var integrationsResp integrations.IntegrationsListResponse - err = json.Unmarshal(dataJson, &integrationsResp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json into PipelinesResponse") - } - - return &integrationsResp -} - -func (tb *IntegrationsTestBed) GetInstalledIntegrationsFromQS() *integrations.IntegrationsListResponse { - result := tb.RequestQS("/api/v1/integrations?is_installed=true", nil) - - dataJson, err := json.Marshal(result.Data) - if err != nil { - tb.t.Fatalf("could not marshal apiResponse.Data: %v", err) - } - var integrationsResp integrations.IntegrationsListResponse - err = json.Unmarshal(dataJson, &integrationsResp) - if err != nil { - tb.t.Fatalf(" could not unmarshal apiResponse.Data json into PipelinesResponse") - } - - return &integrationsResp -} - -func (tb *IntegrationsTestBed) GetIntegrationDetailsFromQS( - integrationId string, -) *integrations.Integration { - result := tb.RequestQS(fmt.Sprintf( - "/api/v1/integrations/%s", integrationId, - ), nil) - - dataJson, err := json.Marshal(result.Data) - if err != nil { - tb.t.Fatalf("could not marshal apiResponse.Data: %v", err) - } - var integrationResp integrations.Integration - err = json.Unmarshal(dataJson, &integrationResp) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json") - } - - return &integrationResp -} - -func (tb *IntegrationsTestBed) GetIntegrationConnectionStatus( - integrationId string, -) *integrations.IntegrationConnectionStatus { - result := tb.RequestQS(fmt.Sprintf( - 
"/api/v1/integrations/%s/connection_status", integrationId, - ), nil) - - dataJson, err := json.Marshal(result.Data) - if err != nil { - tb.t.Fatalf("could not marshal apiResponse.Data: %v", err) - } - var connectionStatus integrations.IntegrationConnectionStatus - err = json.Unmarshal(dataJson, &connectionStatus) - if err != nil { - tb.t.Fatalf("could not unmarshal apiResponse.Data json") - } - - return &connectionStatus -} - -func (tb *IntegrationsTestBed) RequestQSToInstallIntegration( - integrationId string, config map[string]interface{}, -) { - request := integrations.InstallIntegrationRequest{ - IntegrationId: integrationId, - Config: config, - } - tb.RequestQS("/api/v1/integrations/install", request) -} - -func (tb *IntegrationsTestBed) RequestQSToUninstallIntegration( - integrationId string, -) { - request := integrations.UninstallIntegrationRequest{ - IntegrationId: integrationId, - } - tb.RequestQS("/api/v1/integrations/uninstall", request) -} - -func (tb *IntegrationsTestBed) GetDashboardsFromQS() []dashboardtypes.Dashboard { - result := tb.RequestQS("/api/v1/dashboards", nil) - - dataJson, err := json.Marshal(result.Data) - if err != nil { - tb.t.Fatalf("could not marshal apiResponse.Data: %v", err) - } - - dashboards := []dashboardtypes.Dashboard{} - err = json.Unmarshal(dataJson, &dashboards) - if err != nil { - tb.t.Fatalf(" could not unmarshal apiResponse.Data json into dashboards") - } - - return dashboards -} - -func (tb *IntegrationsTestBed) GetDashboardByIdFromQS(dashboardUuid string) *dashboardtypes.Dashboard { - result := tb.RequestQS(fmt.Sprintf("/api/v1/dashboards/%s", dashboardUuid), nil) - - dataJson, err := json.Marshal(result.Data) - if err != nil { - tb.t.Fatalf("could not marshal apiResponse.Data: %v", err) - } - - dashboard := dashboardtypes.Dashboard{} - err = json.Unmarshal(dataJson, &dashboard) - if err != nil { - tb.t.Fatalf(" could not unmarshal apiResponse.Data json into dashboards") - } - - return &dashboard -} - -func (tb *IntegrationsTestBed) RequestQS( - path string, - postData interface{}, -) *app.ApiResponse { - req, err := AuthenticatedRequestForTest( - tb.userModule, tb.testUser, path, postData, - ) - if err != nil { - tb.t.Fatalf("couldn't create authenticated test request: %v", err) - } - - result, err := HandleTestRequest(tb.qsHttpHandler, req, 200) - if err != nil { - tb.t.Fatalf("test request failed: %v", err) - } - return result -} - -func (tb *IntegrationsTestBed) mockLogQueryResponse(logsInResponse []model.SignozLog) { - addLogsQueryExpectation(tb.mockClickhouse, logsInResponse) -} - -func (tb *IntegrationsTestBed) mockMetricStatusQueryResponse(expectation *model.MetricStatus) { - cols := []mockhouse.ColumnType{} - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "metric_name"}) - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "labels"}) - cols = append(cols, mockhouse.ColumnType{Type: "Int64", Name: "unix_milli"}) - - values := [][]any{} - if expectation != nil { - rowValues := []any{} - - rowValues = append(rowValues, expectation.MetricName) - - labelsJson, err := json.Marshal(expectation.LastReceivedLabels) - require.Nil(tb.t, err) - rowValues = append(rowValues, labelsJson) - - rowValues = append(rowValues, expectation.LastReceivedTsMillis) - - values = append(values, rowValues) - } - - tb.mockClickhouse.ExpectQuery( - `SELECT.*from.*signoz_metrics.*`, - ).WillReturnRows(mockhouse.NewRows(cols, values)) -} - -// testDB can be injected for sharing a DB across multiple integration testbeds. 
-func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *IntegrationsTestBed { - if testDB == nil { - testDB = utils.NewQueryServiceDBForTests(t) - } - - controller, err := integrations.NewController(testDB) - if err != nil { - t.Fatalf("could not create integrations controller: %v", err) - } - - reader, mockClickhouse := NewMockClickhouseReader(t, testDB) - mockClickhouse.MatchExpectationsInOrder(false) - - cloudIntegrationsController, err := cloudintegrations.NewController(testDB) - if err != nil { - t.Fatalf("could not create cloud integrations controller: %v", err) - } - - providerSettings := instrumentationtest.New().ToProviderSettings() - sharder, err := noopsharder.New(context.TODO(), providerSettings, sharder.Config{}) - require.NoError(t, err) - orgGetter := implorganization.NewGetter(implorganization.NewStore(testDB), sharder) - nfManager := nfmanagertest.NewMock() - if err != nil { - t.Fatal(err) - } - alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{Signoz: alertmanager.Signoz{PollInterval: 10 * time.Second, Config: alertmanagerserver.NewConfig()}}, testDB, orgGetter, nfManager) - require.NoError(t, err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - emailing := emailingtest.New() - analytics := analyticstest.New() - modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) - handlers := signoz.NewHandlers(modules, providerSettings) - - apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ - Reader: reader, - IntegrationsController: controller, - CloudIntegrationsController: cloudIntegrationsController, - Signoz: &signoz.SigNoz{ - Modules: modules, - Handlers: handlers, - }, - }) - if err != nil { - t.Fatalf("could not create a new ApiHandler: %v", err) - } - - router := app.NewRouter() - router.Use(middleware.NewAuth(jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, sharder, instrumentationtest.New().Logger()).Wrap) - am := middleware.NewAuthZ(instrumentationtest.New().Logger()) - apiHandler.RegisterRoutes(router, am) - apiHandler.RegisterIntegrationRoutes(router, am) - - user, apiErr := createTestUser(modules.OrgSetter, modules.User) - if apiErr != nil { - t.Fatalf("could not create a test user: %v", apiErr) - } - - return &IntegrationsTestBed{ - t: t, - testUser: user, - qsHttpHandler: router, - mockClickhouse: mockClickhouse, - store: testDB, - userModule: modules.User, - } -} - -func postableFromPipelines(gettablePipelines []pipelinetypes.GettablePipeline) pipelinetypes.PostablePipelines { - result := pipelinetypes.PostablePipelines{} - - for _, p := range gettablePipelines { - postable := pipelinetypes.PostablePipeline{ - ID: p.ID.StringValue(), - OrderID: p.OrderID, - Name: p.Name, - Alias: p.Alias, - Enabled: p.Enabled, - Config: p.Config, - } - - if p.Description != "" { - postable.Description = p.Description - } - - if p.Filter != nil { - postable.Filter = p.Filter - } - - result.Pipelines = append(result.Pipelines, postable) - } - - return result -} diff --git a/pkg/query-service/tests/integration/test_utils.go b/pkg/query-service/tests/integration/test_utils.go deleted file mode 100644 index 90fb293005..0000000000 --- a/pkg/query-service/tests/integration/test_utils.go +++ /dev/null @@ -1,235 +0,0 @@ -package tests - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "net/http/httptest" - "os" - "runtime/debug" - "testing" - "time" - - "github.com/DATA-DOG/go-sqlmock" - 
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" - "github.com/SigNoz/signoz/pkg/modules/organization" - "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/prometheus" - "github.com/SigNoz/signoz/pkg/prometheus/prometheustest" - "github.com/SigNoz/signoz/pkg/query-service/app" - "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader" - "github.com/SigNoz/signoz/pkg/query-service/model" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/telemetrystore" - "github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/SigNoz/signoz/pkg/valuer" - "github.com/google/uuid" - "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry" - mockhouse "github.com/srikanthccv/ClickHouse-go-mock" - "github.com/stretchr/testify/require" - "golang.org/x/exp/maps" -) - -var jwt = authtypes.NewJWT(os.Getenv("SIGNOZ_JWT_SECRET"), 1*time.Hour, 2*time.Hour) - -func NewMockClickhouseReader(t *testing.T, testDB sqlstore.SQLStore) (*clickhouseReader.ClickHouseReader, mockhouse.ClickConnMockCommon) { - require.NotNil(t, testDB) - - telemetryStore := telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherRegexp) - reader := clickhouseReader.NewReaderFromClickhouseConnection( - clickhouseReader.NewOptions("", ""), - testDB, - telemetryStore, - prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), - "", - time.Duration(time.Second), - nil, - ) - - return reader, telemetryStore.Mock() -} - -func addLogsQueryExpectation( - mockClickhouse mockhouse.ClickConnMockCommon, - logsToReturn []model.SignozLog, -) { - cols := []mockhouse.ColumnType{} - cols = append(cols, mockhouse.ColumnType{Type: "UInt64", Name: "timestamp"}) - cols = append(cols, mockhouse.ColumnType{Type: "UInt64", Name: "observed_timestamp"}) - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "id"}) - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "trace_id"}) - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "span_id"}) - cols = append(cols, mockhouse.ColumnType{Type: "UInt32", Name: "trace_flags"}) - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "severity_text"}) - cols = append(cols, mockhouse.ColumnType{Type: "UInt8", Name: "severity_number"}) - cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "body"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "resources_string_key"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "resources_string_value"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_string_key"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_string_value"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_int64_key"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(Int64)", Name: "attributes_int64_value"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_float64_key"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(Float64)", Name: "attributes_float64_value"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_bool_key"}) - cols = append(cols, mockhouse.ColumnType{Type: "Array(Bool)", Name: "attributes_bool_value"}) - - values := [][]any{} - for _, l := range logsToReturn { - 
rowValues := []any{} - rowValues = append(rowValues, l.Timestamp) - rowValues = append(rowValues, l.Timestamp) - rowValues = append(rowValues, l.ID) - rowValues = append(rowValues, l.TraceID) - rowValues = append(rowValues, l.SpanID) - rowValues = append(rowValues, l.TraceFlags) - rowValues = append(rowValues, l.SeverityText) - rowValues = append(rowValues, l.SeverityNumber) - rowValues = append(rowValues, l.Body) - rowValues = append(rowValues, maps.Keys(l.Resources_string)) - rowValues = append(rowValues, maps.Values(l.Resources_string)) - rowValues = append(rowValues, maps.Keys(l.Attributes_string)) - rowValues = append(rowValues, maps.Values(l.Attributes_string)) - rowValues = append(rowValues, maps.Keys(l.Attributes_int64)) - rowValues = append(rowValues, maps.Values(l.Attributes_int64)) - rowValues = append(rowValues, maps.Keys(l.Attributes_float64)) - rowValues = append(rowValues, maps.Values(l.Attributes_float64)) - rowValues = append(rowValues, maps.Keys(l.Attributes_bool)) - rowValues = append(rowValues, maps.Values(l.Attributes_bool)) - values = append(values, rowValues) - } - - rows := mockhouse.NewRows(cols, values) - mockClickhouse.ExpectQuery( - "SELECT .*? from signoz_logs.distributed_logs.*", - ).WillReturnRows(rows) -} - -func makeTestSignozLog( - body string, - attributes map[string]interface{}, -) model.SignozLog { - - testLog := model.SignozLog{ - Timestamp: uint64(time.Now().UnixNano()), - Body: body, - Attributes_bool: map[string]bool{}, - Attributes_string: map[string]string{}, - Attributes_int64: map[string]int64{}, - Attributes_float64: map[string]float64{}, - Resources_string: map[string]string{}, - SeverityText: entry.Info.String(), - SeverityNumber: uint8(entry.Info), - SpanID: uuid.New().String(), - TraceID: uuid.New().String(), - } - - for k, v := range attributes { - switch v := v.(type) { - case bool: - testLog.Attributes_bool[k] = v - case string: - testLog.Attributes_string[k] = v - case int: - testLog.Attributes_int64[k] = int64(v) - case float64: - testLog.Attributes_float64[k] = v - default: - panic(fmt.Sprintf("found attribute value of unsupported type %T in test log", v)) - } - } - - return testLog -} - -func createTestUser(orgSetter organization.Setter, userModule user.Module) (*types.User, *model.ApiError) { - // Create a test user for auth - ctx := context.Background() - organization := types.NewOrganization("test") - err := orgSetter.Create(ctx, organization) - if err != nil { - return nil, model.InternalError(err) - } - - userId := valuer.GenerateUUID() - - user, err := types.NewUser("test", userId.String()+"test@test.com", types.RoleAdmin.String(), organization.ID.StringValue()) - if err != nil { - return nil, model.InternalError(err) - } - - err = userModule.CreateUser(ctx, user) - if err != nil { - return nil, model.InternalError(err) - } - - return user, nil -} - -func AuthenticatedRequestForTest( - userModule user.Module, - user *types.User, - path string, - postData interface{}, -) (*http.Request, error) { - userJwt, err := userModule.GetJWTForUser(context.Background(), user) - if err != nil { - return nil, err - } - - var req *http.Request - - if postData != nil { - var body bytes.Buffer - err = json.NewEncoder(&body).Encode(postData) - if err != nil { - return nil, err - } - req = httptest.NewRequest(http.MethodPost, path, &body) - } else { - req = httptest.NewRequest(http.MethodGet, path, nil) - } - - req.Header.Add("Authorization", "Bearer "+userJwt.AccessJwt) - - ctx, err := jwt.ContextFromRequest(req.Context(), 
req.Header.Get("Authorization")) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - - return req, nil -} - -func HandleTestRequest(handler http.Handler, req *http.Request, expectedStatus int) (*app.ApiResponse, error) { - respWriter := httptest.NewRecorder() - handler.ServeHTTP(respWriter, req) - response := respWriter.Result() - responseBody, err := io.ReadAll(response.Body) - if err != nil { - return nil, fmt.Errorf("couldn't read response body received from QS: %w", err) - } - - if response.StatusCode != expectedStatus { - return nil, fmt.Errorf( - "unexpected response status from query service for path %s. status: %d, body: %v\n%v", - req.URL.Path, response.StatusCode, string(responseBody), string(debug.Stack()), - ) - } - - var result app.ApiResponse - err = json.Unmarshal(responseBody, &result) - if err != nil { - return nil, fmt.Errorf( - "Could not unmarshal QS response into an ApiResponse.\nResponse body: %s", - string(responseBody), - ) - } - - return &result, nil -} diff --git a/pkg/query-service/utils/pass.go b/pkg/query-service/utils/pass.go deleted file mode 100644 index 381d3fe1e7..0000000000 --- a/pkg/query-service/utils/pass.go +++ /dev/null @@ -1,10 +0,0 @@ -package utils - -import ( - "github.com/sethvargo/go-password/password" -) - -func GeneratePassowrd() string { - res, _ := password.Generate(64, 10, 10, false, false) - return res -} diff --git a/pkg/query-service/utils/testutils.go b/pkg/query-service/utils/testutils.go deleted file mode 100644 index 54bb34e813..0000000000 --- a/pkg/query-service/utils/testutils.go +++ /dev/null @@ -1,119 +0,0 @@ -package utils - -import ( - "context" - "os" - "testing" - - "github.com/SigNoz/signoz/pkg/factory" - "github.com/SigNoz/signoz/pkg/factory/factorytest" - "github.com/SigNoz/signoz/pkg/sqlmigration" - "github.com/SigNoz/signoz/pkg/sqlmigrator" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/sqlstore/sqlitesqlstore" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/valuer" - _ "github.com/mattn/go-sqlite3" -) - -func NewTestSqliteDB(t *testing.T) (sqlStore sqlstore.SQLStore, testDBFilePath string) { - testDBFile, err := os.CreateTemp("", "test-signoz-db-*") - if err != nil { - t.Fatalf("could not create temp file for test db: %v", err) - } - testDBFilePath = testDBFile.Name() - t.Cleanup(func() { os.Remove(testDBFilePath) }) - testDBFile.Close() - - sqlStore, err = sqlitesqlstore.New(context.Background(), factorytest.NewSettings(), sqlstore.Config{Provider: "sqlite", Sqlite: sqlstore.SqliteConfig{Path: testDBFilePath}}) - if err != nil { - t.Fatalf("could not create test db sqlite store: %v", err) - } - - sqlmigrations, err := sqlmigration.New( - context.Background(), - factorytest.NewSettings(), - sqlmigration.Config{}, - factory.MustNewNamedMap( - sqlmigration.NewAddDataMigrationsFactory(), - sqlmigration.NewAddOrganizationFactory(), - sqlmigration.NewAddPreferencesFactory(), - sqlmigration.NewAddDashboardsFactory(), - sqlmigration.NewAddSavedViewsFactory(), - sqlmigration.NewAddAgentsFactory(), - sqlmigration.NewAddPipelinesFactory(), - sqlmigration.NewAddIntegrationsFactory(), - sqlmigration.NewAddLicensesFactory(), - sqlmigration.NewAddPatsFactory(), - sqlmigration.NewModifyDatetimeFactory(), - sqlmigration.NewModifyOrgDomainFactory(), - sqlmigration.NewUpdateOrganizationFactory(sqlStore), - sqlmigration.NewAddAlertmanagerFactory(sqlStore), - sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlStore), - 
sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlStore), - sqlmigration.NewUpdatePipelines(sqlStore), - sqlmigration.NewDropLicensesSitesFactory(sqlStore), - sqlmigration.NewUpdateInvitesFactory(sqlStore), - sqlmigration.NewUpdatePatFactory(sqlStore), - sqlmigration.NewUpdateAlertmanagerFactory(sqlStore), - sqlmigration.NewUpdatePreferencesFactory(sqlStore), - sqlmigration.NewUpdateApdexTtlFactory(sqlStore), - sqlmigration.NewUpdateResetPasswordFactory(sqlStore), - sqlmigration.NewUpdateRulesFactory(sqlStore), - sqlmigration.NewAddVirtualFieldsFactory(), - sqlmigration.NewUpdateIntegrationsFactory(sqlStore), - sqlmigration.NewUpdateOrganizationsFactory(sqlStore), - sqlmigration.NewDropGroupsFactory(sqlStore), - sqlmigration.NewCreateQuickFiltersFactory(sqlStore), - sqlmigration.NewUpdateQuickFiltersFactory(sqlStore), - sqlmigration.NewAuthRefactorFactory(sqlStore), - sqlmigration.NewMigratePATToFactorAPIKey(sqlStore), - sqlmigration.NewUpdateApiMonitoringFiltersFactory(sqlStore), - sqlmigration.NewAddKeyOrganizationFactory(sqlStore), - sqlmigration.NewUpdateDashboardFactory(sqlStore), - sqlmigration.NewUpdateAgentsFactory(sqlStore), - ), - ) - if err != nil { - t.Fatalf("could not create test db sql migrations: %v", err) - } - - err = sqlmigrator.New(context.Background(), factorytest.NewSettings(), sqlStore, sqlmigrations, sqlmigrator.Config{}).Migrate(context.Background()) - if err != nil { - t.Fatalf("could not migrate test db sql migrations: %v", err) - } - - return sqlStore, testDBFilePath -} - -func NewQueryServiceDBForTests(t *testing.T) sqlstore.SQLStore { - sqlStore, _ := NewTestSqliteDB(t) - return sqlStore -} - -func CreateTestOrg(t *testing.T, store sqlstore.SQLStore) error { - org := &types.Organization{ - Identifiable: types.Identifiable{ - ID: valuer.GenerateUUID(), - }, - Name: "testOrg", - } - _, err := store.BunDB().NewInsert().Model(org).Exec(context.Background()) - if err != nil { - return err - } - return nil -} - -func GetTestOrgId(store sqlstore.SQLStore) (valuer.UUID, error) { - var orgID valuer.UUID - err := store.BunDB().NewSelect(). - Model(&types.Organization{}). - Column("id"). - Limit(1). 
- Scan(context.Background(), &orgID) - if err != nil { - return orgID, err - } - return orgID, nil -} diff --git a/pkg/signoz/authn.go b/pkg/signoz/authn.go new file mode 100644 index 0000000000..c7f1ad7329 --- /dev/null +++ b/pkg/signoz/authn.go @@ -0,0 +1,26 @@ +package signoz + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/authn" + "github.com/SigNoz/signoz/pkg/authn/callbackauthn/googlecallbackauthn" + "github.com/SigNoz/signoz/pkg/authn/passwordauthn/emailpasswordauthn" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/types/authtypes" +) + +func NewAuthNs(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) { + emailPasswordAuthN := emailpasswordauthn.New(store) + + googleCallbackAuthN, err := googlecallbackauthn.New(ctx, store) + if err != nil { + return nil, err + } + + return map[authtypes.AuthNProvider]authn.AuthN{ + authtypes.AuthNProviderEmailPassword: emailPasswordAuthN, + authtypes.AuthNProviderGoogleAuth: googleCallbackAuthN, + }, nil +} diff --git a/pkg/signoz/config.go b/pkg/signoz/config.go index 3e43772547..d3d43be1b3 100644 --- a/pkg/signoz/config.go +++ b/pkg/signoz/config.go @@ -29,6 +29,7 @@ import ( "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/statsreporter" "github.com/SigNoz/signoz/pkg/telemetrystore" + "github.com/SigNoz/signoz/pkg/tokenizer" "github.com/SigNoz/signoz/pkg/version" "github.com/SigNoz/signoz/pkg/web" "github.com/spf13/cobra" @@ -92,6 +93,9 @@ type Config struct { // Gateway config Gateway gateway.Config `mapstructure:"gateway"` + + // Tokenizer config + Tokenizer tokenizer.Config `mapstructure:"tokenizer"` } // DeprecatedFlags are the flags that are deprecated and scheduled for removal. @@ -150,6 +154,7 @@ func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.R sharder.NewConfigFactory(), statsreporter.NewConfigFactory(), gateway.NewConfigFactory(), + tokenizer.NewConfigFactory(), } conf, err := config.New(ctx, resolverConfig, configFactories) @@ -323,4 +328,9 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge config.Gateway.URL = u } } + + if os.Getenv("SIGNOZ_JWT_SECRET") != "" { + logger.WarnContext(ctx, "[Deprecated] env SIGNOZ_JWT_SECRET is deprecated and scheduled for removal. 
Please use SIGNOZ_TOKENIZER_JWT_SECRET instead.") + config.Tokenizer.JWT.Secret = os.Getenv("SIGNOZ_JWT_SECRET") + } } diff --git a/pkg/signoz/handler.go b/pkg/signoz/handler.go index 546977f31b..432cf31d42 100644 --- a/pkg/signoz/handler.go +++ b/pkg/signoz/handler.go @@ -4,6 +4,8 @@ import ( "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/modules/apdex" "github.com/SigNoz/signoz/pkg/modules/apdex/implapdex" + "github.com/SigNoz/signoz/pkg/modules/authdomain" + "github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain" "github.com/SigNoz/signoz/pkg/modules/dashboard" "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard" "github.com/SigNoz/signoz/pkg/modules/organization" @@ -16,6 +18,8 @@ import ( "github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport" "github.com/SigNoz/signoz/pkg/modules/savedview" "github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview" + "github.com/SigNoz/signoz/pkg/modules/session" + "github.com/SigNoz/signoz/pkg/modules/session/implsession" "github.com/SigNoz/signoz/pkg/modules/tracefunnel" "github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel" "github.com/SigNoz/signoz/pkg/modules/user" @@ -32,18 +36,22 @@ type Handlers struct { QuickFilter quickfilter.Handler TraceFunnel tracefunnel.Handler RawDataExport rawdataexport.Handler + AuthDomain authdomain.Handler + Session session.Handler } func NewHandlers(modules Modules, providerSettings factory.ProviderSettings) Handlers { return Handlers{ Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter), Preference: implpreference.NewHandler(modules.Preference), - User: impluser.NewHandler(modules.User), + User: impluser.NewHandler(modules.User, modules.UserGetter), SavedView: implsavedview.NewHandler(modules.SavedView), Apdex: implapdex.NewHandler(modules.Apdex), Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings), QuickFilter: implquickfilter.NewHandler(modules.QuickFilter), TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel), RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport), + AuthDomain: implauthdomain.NewHandler(modules.AuthDomain), + Session: implsession.NewHandler(modules.Session), } } diff --git a/pkg/signoz/handler_test.go b/pkg/signoz/handler_test.go index 93a6ca960e..f3d98f3af6 100644 --- a/pkg/signoz/handler_test.go +++ b/pkg/signoz/handler_test.go @@ -4,7 +4,6 @@ import ( "context" "reflect" "testing" - "time" "github.com/DATA-DOG/go-sqlmock" "github.com/SigNoz/signoz/pkg/alertmanager" @@ -17,7 +16,7 @@ import ( "github.com/SigNoz/signoz/pkg/sharder/noopsharder" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest" - "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/tokenizer/tokenizertest" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -34,9 +33,9 @@ func TestNewHandlers(t *testing.T) { require.NoError(t, err) alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{}, sqlstore, orgGetter, notificationManager) require.NoError(t, err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) + tokenizer := tokenizertest.New() emailing := emailingtest.New() - modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, nil, nil) + modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil) handlers := NewHandlers(modules, providerSettings) diff --git 
a/pkg/signoz/module.go b/pkg/signoz/module.go index e5daf80ac1..323582c733 100644 --- a/pkg/signoz/module.go +++ b/pkg/signoz/module.go @@ -3,10 +3,13 @@ package signoz import ( "github.com/SigNoz/signoz/pkg/alertmanager" "github.com/SigNoz/signoz/pkg/analytics" + "github.com/SigNoz/signoz/pkg/authn" "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/modules/apdex" "github.com/SigNoz/signoz/pkg/modules/apdex/implapdex" + "github.com/SigNoz/signoz/pkg/modules/authdomain" + "github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain" "github.com/SigNoz/signoz/pkg/modules/dashboard" "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard" "github.com/SigNoz/signoz/pkg/modules/organization" @@ -19,12 +22,15 @@ import ( "github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport" "github.com/SigNoz/signoz/pkg/modules/savedview" "github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview" + "github.com/SigNoz/signoz/pkg/modules/session" + "github.com/SigNoz/signoz/pkg/modules/session/implsession" "github.com/SigNoz/signoz/pkg/modules/tracefunnel" "github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel" "github.com/SigNoz/signoz/pkg/modules/user" "github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/querier" "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/tokenizer" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/preferencetypes" ) @@ -34,27 +40,32 @@ type Modules struct { OrgSetter organization.Setter Preference preference.Module User user.Module + UserGetter user.Getter SavedView savedview.Module Apdex apdex.Module Dashboard dashboard.Module QuickFilter quickfilter.Module TraceFunnel tracefunnel.Module RawDataExport rawdataexport.Module + AuthDomain authdomain.Module + Session session.Module } func NewModules( sqlstore sqlstore.SQLStore, - jwt *authtypes.JWT, + tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgGetter organization.Getter, alertmanager alertmanager.Alertmanager, analytics analytics.Analytics, querier querier.Querier, + authNs map[authtypes.AuthNProvider]authn.AuthN, ) Modules { quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore)) orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter) - user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), jwt, emailing, providerSettings, orgSetter, analytics) + user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, analytics) + userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings)) return Modules{ OrgGetter: orgGetter, OrgSetter: orgSetter, @@ -63,8 +74,11 @@ func NewModules( Apdex: implapdex.NewModule(sqlstore), Dashboard: impldashboard.NewModule(sqlstore, providerSettings, analytics), User: user, + UserGetter: userGetter, QuickFilter: quickfilter, TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)), RawDataExport: implrawdataexport.NewModule(querier), + AuthDomain: implauthdomain.NewModule(implauthdomain.NewStore(sqlstore)), + Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore)), tokenizer, orgGetter), } } diff --git a/pkg/signoz/module_test.go b/pkg/signoz/module_test.go index a833861f14..8112a047ef 100644 --- 
a/pkg/signoz/module_test.go +++ b/pkg/signoz/module_test.go @@ -4,7 +4,6 @@ import ( "context" "reflect" "testing" - "time" "github.com/DATA-DOG/go-sqlmock" "github.com/SigNoz/signoz/pkg/alertmanager" @@ -17,7 +16,7 @@ import ( "github.com/SigNoz/signoz/pkg/sharder/noopsharder" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest" - "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/tokenizer/tokenizertest" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -34,9 +33,9 @@ func TestNewModules(t *testing.T) { require.NoError(t, err) alertmanager, err := signozalertmanager.New(context.TODO(), providerSettings, alertmanager.Config{}, sqlstore, orgGetter, notificationManager) require.NoError(t, err) - jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) + tokenizer := tokenizertest.New() emailing := emailingtest.New() - modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, nil, nil) + modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil) reflectVal := reflect.ValueOf(modules) for i := 0; i < reflectVal.NumField(); i++ { diff --git a/pkg/signoz/provider.go b/pkg/signoz/provider.go index d66b2acd46..740eb83595 100644 --- a/pkg/signoz/provider.go +++ b/pkg/signoz/provider.go @@ -38,7 +38,10 @@ import ( "github.com/SigNoz/signoz/pkg/telemetrystore" "github.com/SigNoz/signoz/pkg/telemetrystore/clickhousetelemetrystore" "github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystorehook" - routeTypes "github.com/SigNoz/signoz/pkg/types/alertmanagertypes" + "github.com/SigNoz/signoz/pkg/tokenizer" + "github.com/SigNoz/signoz/pkg/tokenizer/jwttokenizer" + "github.com/SigNoz/signoz/pkg/tokenizer/opaquetokenizer" + "github.com/SigNoz/signoz/pkg/types/alertmanagertypes" "github.com/SigNoz/signoz/pkg/version" "github.com/SigNoz/signoz/pkg/web" "github.com/SigNoz/signoz/pkg/web/noopweb" @@ -67,9 +70,8 @@ func NewWebProviderFactories() factory.NamedMap[factory.ProviderFactory[web.Web, } func NewSQLStoreProviderFactories() factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]] { - hook := sqlstorehook.NewLoggingFactory() return factory.MustNewNamedMap( - sqlitesqlstore.NewFactory(hook), + sqlitesqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory()), ) } @@ -135,6 +137,7 @@ func NewSQLMigrationProviderFactories( sqlmigration.NewAddMeterQuickFiltersFactory(sqlstore, sqlschema), sqlmigration.NewUpdateTTLSettingForCustomRetentionFactory(sqlstore, sqlschema), sqlmigration.NewAddRoutePolicyFactory(sqlstore, sqlschema), + sqlmigration.NewAddAuthTokenFactory(sqlstore, sqlschema), ) } @@ -157,15 +160,15 @@ func NewPrometheusProviderFactories(telemetryStore telemetrystore.TelemetryStore ) } -func NewNotificationManagerProviderFactories(routeStore routeTypes.RouteStore) factory.NamedMap[factory.ProviderFactory[nfmanager.NotificationManager, nfmanager.Config]] { +func NewNotificationManagerProviderFactories(routeStore alertmanagertypes.RouteStore) factory.NamedMap[factory.ProviderFactory[nfmanager.NotificationManager, nfmanager.Config]] { return factory.MustNewNamedMap( rulebasednotification.NewFactory(routeStore), ) } -func NewAlertmanagerProviderFactories(sqlstore sqlstore.SQLStore, orgGetter organization.Getter, notificationManager nfmanager.NotificationManager) factory.NamedMap[factory.ProviderFactory[alertmanager.Alertmanager, alertmanager.Config]] { +func 
NewAlertmanagerProviderFactories(sqlstore sqlstore.SQLStore, orgGetter organization.Getter, nfManager nfmanager.NotificationManager) factory.NamedMap[factory.ProviderFactory[alertmanager.Alertmanager, alertmanager.Config]] { return factory.MustNewNamedMap( - signozalertmanager.NewFactory(sqlstore, orgGetter, notificationManager), + signozalertmanager.NewFactory(sqlstore, orgGetter, nfManager), ) } @@ -189,9 +192,9 @@ func NewSharderProviderFactories() factory.NamedMap[factory.ProviderFactory[shar ) } -func NewStatsReporterProviderFactories(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, build version.Build, analyticsConfig analytics.Config) factory.NamedMap[factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config]] { +func NewStatsReporterProviderFactories(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config) factory.NamedMap[factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config]] { return factory.MustNewNamedMap( - analyticsstatsreporter.NewFactory(telemetryStore, collectors, orgGetter, userGetter, build, analyticsConfig), + analyticsstatsreporter.NewFactory(telemetryStore, collectors, orgGetter, userGetter, tokenizer, build, analyticsConfig), noopstatsreporter.NewFactory(), ) } @@ -201,3 +204,11 @@ func NewQuerierProviderFactories(telemetryStore telemetrystore.TelemetryStore, p signozquerier.NewFactory(telemetryStore, prometheus, cache), ) } + +func NewTokenizerProviderFactories(cache cache.Cache, sqlstore sqlstore.SQLStore, orgGetter organization.Getter) factory.NamedMap[factory.ProviderFactory[tokenizer.Tokenizer, tokenizer.Config]] { + tokenStore := opaquetokenizer.NewStore(sqlstore) + return factory.MustNewNamedMap( + opaquetokenizer.NewFactory(cache, tokenStore, orgGetter), + jwttokenizer.NewFactory(), + ) +} diff --git a/pkg/signoz/provider_test.go b/pkg/signoz/provider_test.go index 921d47bf13..f2e683dde3 100644 --- a/pkg/signoz/provider_test.go +++ b/pkg/signoz/provider_test.go @@ -16,6 +16,7 @@ import ( "github.com/SigNoz/signoz/pkg/statsreporter" "github.com/SigNoz/signoz/pkg/telemetrystore" "github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest" + "github.com/SigNoz/signoz/pkg/tokenizer/tokenizertest" "github.com/SigNoz/signoz/pkg/version" "github.com/stretchr/testify/assert" ) @@ -75,6 +76,6 @@ func TestNewProviderFactories(t *testing.T) { userGetter := impluser.NewGetter(impluser.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), instrumentationtest.New().ToProviderSettings())) orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)), nil) telemetryStore := telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherEqual) - NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, version.Build{}, analytics.Config{Enabled: true}) + NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, tokenizertest.New(), version.Build{}, analytics.Config{Enabled: true}) }) } diff --git a/pkg/signoz/signoz.go b/pkg/signoz/signoz.go index 6d531e6fe3..46f411ca15 100644 --- a/pkg/signoz/signoz.go +++ b/pkg/signoz/signoz.go 
@@ -2,10 +2,13 @@ package signoz import ( "context" + "github.com/SigNoz/signoz/pkg/alertmanager" "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager" "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfroutingstore/sqlroutingstore" "github.com/SigNoz/signoz/pkg/analytics" + "github.com/SigNoz/signoz/pkg/authn" + "github.com/SigNoz/signoz/pkg/authn/authnstore/sqlauthnstore" "github.com/SigNoz/signoz/pkg/cache" "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/factory" @@ -16,7 +19,6 @@ import ( "github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/prometheus" "github.com/SigNoz/signoz/pkg/querier" - "github.com/SigNoz/signoz/pkg/ruler" "github.com/SigNoz/signoz/pkg/sharder" "github.com/SigNoz/signoz/pkg/sqlmigration" "github.com/SigNoz/signoz/pkg/sqlmigrator" @@ -24,6 +26,7 @@ import ( "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/statsreporter" "github.com/SigNoz/signoz/pkg/telemetrystore" + pkgtokenizer "github.com/SigNoz/signoz/pkg/tokenizer" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/version" "github.com/SigNoz/signoz/pkg/zeus" @@ -42,12 +45,12 @@ type SigNoz struct { Prometheus prometheus.Prometheus Alertmanager alertmanager.Alertmanager Querier querier.Querier - Rules ruler.Ruler Zeus zeus.Zeus Licensing licensing.Licensing Emailing emailing.Emailing Sharder sharder.Sharder StatsReporter statsreporter.StatsReporter + Tokenizer pkgtokenizer.Tokenizer Modules Modules Handlers Handlers } @@ -55,7 +58,6 @@ type SigNoz struct { func New( ctx context.Context, config Config, - jwt *authtypes.JWT, zeusConfig zeus.Config, zeusProviderFactory factory.ProviderFactory[zeus.Zeus, zeus.Config], licenseConfig licensing.Config, @@ -66,6 +68,7 @@ func New( sqlSchemaProviderFactories func(sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]], sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]], telemetrystoreProviderFactories factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]], + authNsCallback func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error), ) (*SigNoz, error) { // Initialize instrumentation instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz") @@ -228,17 +231,27 @@ func New( // Initialize organization getter orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlstore), sharder) + // Initialize tokenizer from the available tokenizer provider factories + tokenizer, err := factory.NewProviderFromNamedMap( + ctx, + providerSettings, + config.Tokenizer, + NewTokenizerProviderFactories(cache, sqlstore, orgGetter), + config.Tokenizer.Provider, + ) + if err != nil { + return nil, err + } + // Initialize user getter userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings)) - // will need to create factory for all stores - routeStore := sqlroutingstore.NewStore(sqlstore) - // shared NotificationManager instance for both alertmanager and rules - notificationManager, err := factory.NewProviderFromNamedMap( + // Initialize notification manager from the available notification manager provider factories + nfManager, err := factory.NewProviderFromNamedMap( ctx, providerSettings, nfmanager.Config{}, - NewNotificationManagerProviderFactories(routeStore), + 
NewNotificationManagerProviderFactories(sqlroutingstore.NewStore(sqlstore)), "rulebased", ) if err != nil { @@ -250,7 +263,7 @@ func New( ctx, providerSettings, config.Alertmanager, - NewAlertmanagerProviderFactories(sqlstore, orgGetter, notificationManager), + NewAlertmanagerProviderFactories(sqlstore, orgGetter, nfManager), config.Alertmanager.Provider, ) if err != nil { @@ -279,8 +292,15 @@ func New( return nil, err } + // Initialize authns + store := sqlauthnstore.NewStore(sqlstore) + authNs, err := authNsCallback(ctx, providerSettings, store, licensing) + if err != nil { + return nil, err + } + // Initialize all modules - modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, querier) + modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, authNs) // Initialize all handlers for the modules handlers := NewHandlers(modules, providerSettings) @@ -293,6 +313,7 @@ func New( modules.SavedView, modules.User, licensing, + tokenizer, } // Initialize stats reporter from the available stats reporter provider factories @@ -300,7 +321,7 @@ func New( ctx, providerSettings, config.StatsReporter, - NewStatsReporterProviderFactories(telemetrystore, statsCollectors, orgGetter, userGetter, version.Info, config.Analytics), + NewStatsReporterProviderFactories(telemetrystore, statsCollectors, orgGetter, userGetter, tokenizer, version.Info, config.Analytics), config.StatsReporter.Provider(), ) if err != nil { @@ -314,6 +335,7 @@ func New( factory.NewNamedService(factory.MustNewName("alertmanager"), alertmanager), factory.NewNamedService(factory.MustNewName("licensing"), licensing), factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter), + factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer), ) if err != nil { return nil, err @@ -330,11 +352,11 @@ func New( Prometheus: prometheus, Alertmanager: alertmanager, Querier: querier, - Rules: ruler, Zeus: zeus, Licensing: licensing, Emailing: emailing, Sharder: sharder, + Tokenizer: tokenizer, Modules: modules, Handlers: handlers, }, nil diff --git a/pkg/sqlmigration/050_add_auth_token.go b/pkg/sqlmigration/050_add_auth_token.go new file mode 100644 index 0000000000..54eccf24f6 --- /dev/null +++ b/pkg/sqlmigration/050_add_auth_token.go @@ -0,0 +1,95 @@ +package sqlmigration + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate" +) + +type addAuthToken struct { + sqlstore sqlstore.SQLStore + sqlschema sqlschema.SQLSchema +} + +func NewAddAuthTokenFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] { + return factory.NewProviderFactory(factory.MustNewName("add_auth_token"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) { + return newAddAuthToken(ctx, providerSettings, config, sqlstore, sqlschema) + }) +} + +func newAddAuthToken(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) (SQLMigration, error) { + return &addAuthToken{ + sqlstore: sqlstore, + sqlschema: sqlschema, + }, nil +} + +func (migration *addAuthToken) Register(migrations *migrate.Migrations) error { + if err := migrations.Register(migration.Up, migration.Down); err != nil { + return err + } + + return nil +} + +func 
(migration *addAuthToken) Up(ctx context.Context, db *bun.DB) error { + tx, err := db.BeginTx(ctx, nil) + if err != nil { + return err + } + + defer func() { + _ = tx.Rollback() + }() + + token := &sqlschema.Table{ + Name: "auth_token", + Columns: []*sqlschema.Column{ + {Name: sqlschema.ColumnName("id"), DataType: sqlschema.DataTypeText, Nullable: false, Default: ""}, + {Name: sqlschema.ColumnName("meta"), DataType: sqlschema.DataTypeText, Nullable: false, Default: ""}, + {Name: sqlschema.ColumnName("prev_access_token"), DataType: sqlschema.DataTypeText, Nullable: true, Default: ""}, + {Name: sqlschema.ColumnName("access_token"), DataType: sqlschema.DataTypeText, Nullable: false, Default: ""}, + {Name: sqlschema.ColumnName("prev_refresh_token"), DataType: sqlschema.DataTypeText, Nullable: true, Default: ""}, + {Name: sqlschema.ColumnName("refresh_token"), DataType: sqlschema.DataTypeText, Nullable: false, Default: ""}, + {Name: sqlschema.ColumnName("last_observed_at"), DataType: sqlschema.DataTypeTimestamp, Nullable: true, Default: ""}, + {Name: sqlschema.ColumnName("rotated_at"), DataType: sqlschema.DataTypeTimestamp, Nullable: true, Default: ""}, + {Name: sqlschema.ColumnName("created_at"), DataType: sqlschema.DataTypeTimestamp, Nullable: false, Default: ""}, + {Name: sqlschema.ColumnName("updated_at"), DataType: sqlschema.DataTypeTimestamp, Nullable: false, Default: ""}, + {Name: sqlschema.ColumnName("user_id"), DataType: sqlschema.DataTypeText, Nullable: false, Default: ""}, + }, + PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName("id")}}, + ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{ + {ReferencingColumnName: sqlschema.ColumnName("user_id"), ReferencedTableName: sqlschema.TableName("users"), ReferencedColumnName: sqlschema.ColumnName("id")}, + }, + } + + sqls := [][]byte{} + createSQLs := migration.sqlschema.Operator().CreateTable(token) + sqls = append(sqls, createSQLs...) + + indexSQLs := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "auth_token", ColumnNames: []sqlschema.ColumnName{"access_token"}}) + sqls = append(sqls, indexSQLs...) + + indexSQLs = migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "auth_token", ColumnNames: []sqlschema.ColumnName{"refresh_token"}}) + sqls = append(sqls, indexSQLs...) 
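+	// Note: sqls now holds, in order, the CREATE TABLE statement(s) for auth_token
+	// (primary key on id, foreign key from user_id to users.id) followed by the
+	// statements for the two unique indexes on access_token and refresh_token;
+	// they are executed below within the surrounding transaction.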
+ + for _, sql := range sqls { + if _, err := tx.ExecContext(ctx, string(sql)); err != nil { + return err + } + } + + if err := tx.Commit(); err != nil { + return err + } + + return nil +} + +func (migration *addAuthToken) Down(ctx context.Context, db *bun.DB) error { + return nil +} diff --git a/pkg/sqlstore/sqlitesqlstore/provider.go b/pkg/sqlstore/sqlitesqlstore/provider.go index 1dc62d9b17..ca61bf9f30 100644 --- a/pkg/sqlstore/sqlitesqlstore/provider.go +++ b/pkg/sqlstore/sqlitesqlstore/provider.go @@ -27,6 +27,7 @@ func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, if err != nil { return nil, err } + hooks[i] = hook } diff --git a/pkg/sqlstore/sqlstorehook/instrumentation.go b/pkg/sqlstore/sqlstorehook/instrumentation.go new file mode 100644 index 0000000000..8ef7e120fd --- /dev/null +++ b/pkg/sqlstore/sqlstorehook/instrumentation.go @@ -0,0 +1,40 @@ +package sqlstorehook + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/uptrace/bun" + "github.com/uptrace/bun/extra/bunotel" +) + +type instrumentation struct { + bunOtel *bunotel.QueryHook +} + +func NewInstrumentationFactory() factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config] { + return factory.NewProviderFactory(factory.MustNewName("instrumentation"), NewInstrumentation) +} + +func NewInstrumentation(ctx context.Context, providerSettings factory.ProviderSettings, config sqlstore.Config) (sqlstore.SQLStoreHook, error) { + return &instrumentation{ + bunOtel: bunotel.NewQueryHook( + bunotel.WithFormattedQueries(true), + bunotel.WithTracerProvider(providerSettings.TracerProvider), + bunotel.WithMeterProvider(providerSettings.MeterProvider), + ), + }, nil +} + +func (hook *instrumentation) Init(db *bun.DB) { + hook.bunOtel.Init(db) +} + +func (hook *instrumentation) BeforeQuery(ctx context.Context, event *bun.QueryEvent) context.Context { + return hook.bunOtel.BeforeQuery(ctx, event) +} + +func (hook *instrumentation) AfterQuery(ctx context.Context, event *bun.QueryEvent) { + hook.bunOtel.AfterQuery(ctx, event) +} diff --git a/pkg/sqlstore/sqlstorehook/logging.go b/pkg/sqlstore/sqlstorehook/logging.go index a5ab6766f2..fc4b0154a6 100644 --- a/pkg/sqlstore/sqlstorehook/logging.go +++ b/pkg/sqlstore/sqlstorehook/logging.go @@ -11,7 +11,6 @@ import ( ) type logging struct { - bun.QueryHook logger *slog.Logger level slog.Level } @@ -27,11 +26,11 @@ func NewLogging(ctx context.Context, providerSettings factory.ProviderSettings, }, nil } -func (logging) BeforeQuery(ctx context.Context, event *bun.QueryEvent) context.Context { +func (*logging) BeforeQuery(ctx context.Context, event *bun.QueryEvent) context.Context { return ctx } -func (hook logging) AfterQuery(ctx context.Context, event *bun.QueryEvent) { +func (hook *logging) AfterQuery(ctx context.Context, event *bun.QueryEvent) { hook.logger.Log( ctx, hook.level, diff --git a/pkg/statsreporter/analyticsstatsreporter/provider.go b/pkg/statsreporter/analyticsstatsreporter/provider.go index 33632eff3a..25cd4b9bee 100644 --- a/pkg/statsreporter/analyticsstatsreporter/provider.go +++ b/pkg/statsreporter/analyticsstatsreporter/provider.go @@ -12,9 +12,12 @@ import ( "github.com/SigNoz/signoz/pkg/modules/user" "github.com/SigNoz/signoz/pkg/statsreporter" "github.com/SigNoz/signoz/pkg/telemetrystore" + "github.com/SigNoz/signoz/pkg/tokenizer" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/valuer" "github.com/SigNoz/signoz/pkg/version" + 
"go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" ) type provider struct { @@ -36,6 +39,9 @@ type provider struct { // used to get users userGetter user.Getter + // used to get tokenizer + tokenizer tokenizer.Tokenizer + // used to send stats to an analytics backend analytics analytics.Analytics @@ -49,9 +55,9 @@ type provider struct { stopC chan struct{} } -func NewFactory(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, build version.Build, analyticsConfig analytics.Config) factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config] { +func NewFactory(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config) factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config] { return factory.NewProviderFactory(factory.MustNewName("analytics"), func(ctx context.Context, settings factory.ProviderSettings, config statsreporter.Config) (statsreporter.StatsReporter, error) { - return New(ctx, settings, config, telemetryStore, collectors, orgGetter, userGetter, build, analyticsConfig) + return New(ctx, settings, config, telemetryStore, collectors, orgGetter, userGetter, tokenizer, build, analyticsConfig) }) } @@ -63,6 +69,7 @@ func New( collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, + tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config, ) (statsreporter.StatsReporter, error) { @@ -81,6 +88,7 @@ func New( orgGetter: orgGetter, userGetter: userGetter, analytics: analytics, + tokenizer: tokenizer, build: build, deployment: deployment, stopC: make(chan struct{}), @@ -102,9 +110,14 @@ func (provider *provider) Start(ctx context.Context) error { case <-provider.stopC: return nil case <-ticker.C: + ctx, span := provider.settings.Tracer().Start(ctx, "statsreporter.Report", trace.WithAttributes(attribute.String("statsreporter.provider", "analytics"))) + if err := provider.Report(ctx); err != nil { + span.RecordError(err) provider.settings.Logger().WarnContext(ctx, "failed to report stats", "error", err) } + + span.End() } } } @@ -153,8 +166,20 @@ func (provider *provider) Report(ctx context.Context) error { continue } + maxLastObservedAtPerUserID, err := provider.tokenizer.ListMaxLastObservedAtByOrgID(ctx, org.ID) + if err != nil { + provider.settings.Logger().WarnContext(ctx, "failed to list max last observed at per user id", "error", err, "org_id", org.ID) + maxLastObservedAtPerUserID = make(map[valuer.UUID]time.Time) + } + for _, user := range users { - provider.analytics.IdentifyUser(ctx, org.ID.String(), user.ID.String(), types.NewTraitsFromUser(user)) + traits := types.NewTraitsFromUser(user) + if maxLastObservedAt, ok := maxLastObservedAtPerUserID[user.ID]; ok { + traits["auth_token.last_observed_at.max.time"] = maxLastObservedAt.UTC() + traits["auth_token.last_observed_at.max.time_unix"] = maxLastObservedAt.Unix() + } + + provider.analytics.IdentifyUser(ctx, org.ID.String(), user.ID.String(), traits) } } diff --git a/pkg/tokenizer/config.go b/pkg/tokenizer/config.go new file mode 100644 index 0000000000..109f0fcb47 --- /dev/null +++ b/pkg/tokenizer/config.go @@ -0,0 +1,112 @@ +package tokenizer + +import ( + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" +) + +type 
Config struct { + // The provider to use for tokenization. + Provider string `mapstructure:"provider"` + + // Config for the opaque tokenizer. + Opaque OpaqueConfig `mapstructure:"opaque"` + + // Config for the JWT tokenizer. + JWT JWTConfig `mapstructure:"jwt"` + + // Rotation config + Rotation RotationConfig `mapstructure:"rotation"` + + // Lifetime config + Lifetime LifetimeConfig `mapstructure:"lifetime"` +} + +type OpaqueConfig struct { + // GC config + GC GCConfig `mapstructure:"gc"` + + // Token config + Token TokenConfig `mapstructure:"token"` +} + +type JWTConfig struct { + // The secret to sign the JWT tokens. + Secret string `mapstructure:"secret"` +} + +type GCConfig struct { + // The interval to perform garbage collection. + Interval time.Duration `mapstructure:"interval"` +} + +type RotationConfig struct { + // The interval to rotate tokens in. + Interval time.Duration `mapstructure:"interval"` + + // The duration for which the previous token pair remains valid after a token pair is rotated. + Duration time.Duration `mapstructure:"duration"` +} + +type LifetimeConfig struct { + // The duration for which a user can be idle before being required to authenticate. + Idle time.Duration `mapstructure:"idle"` + + // The duration for which a user can remain logged in before being asked to login. + Max time.Duration `mapstructure:"max"` +} + +type TokenConfig struct { + // The maximum number of tokens a user can have. + MaxPerUser int `mapstructure:"max_per_user"` +} + +func NewConfigFactory() factory.ConfigFactory { + return factory.NewConfigFactory(factory.MustNewName("tokenizer"), newConfig) +} + +func newConfig() factory.Config { + return &Config{ + Provider: "jwt", + Opaque: OpaqueConfig{ + GC: GCConfig{ + Interval: 1 * time.Hour, // 1 hour + }, + Token: TokenConfig{ + MaxPerUser: 5, + }, + }, + JWT: JWTConfig{ + Secret: "", + }, + Rotation: RotationConfig{ + Interval: 30 * time.Minute, // 30 minutes + Duration: 60 * time.Second, // 60 seconds + }, + Lifetime: LifetimeConfig{ + Idle: 7 * 24 * time.Hour, // 7 days + Max: 30 * 24 * time.Hour, // 30 days + }, + } +} + +func (c Config) Validate() error { + // Ensure that rotation interval is smaller than lifetime idle + if c.Rotation.Interval >= c.Lifetime.Idle { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "rotation::interval must be smaller than lifetime::idle") + } + + // Ensure that lifetime idle interval is smaller than lifetime max + if c.Lifetime.Idle >= c.Lifetime.Max { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "lifetime::idle must be smaller than lifetime::max") + } + + // Ensure that rotation duration is smaller than rotation interval + if c.Rotation.Duration >= c.Rotation.Interval { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "rotation::duration must be smaller than rotation::interval") + } + + return nil +} diff --git a/pkg/tokenizer/jwttokenizer/claims.go b/pkg/tokenizer/jwttokenizer/claims.go new file mode 100644 index 0000000000..dfa39959cb --- /dev/null +++ b/pkg/tokenizer/jwttokenizer/claims.go @@ -0,0 +1,34 @@ +package jwttokenizer + +import ( + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types" + "github.com/golang-jwt/jwt/v5" +) + +var _ jwt.ClaimsValidator = (*Claims)(nil) + +type Claims struct { + jwt.RegisteredClaims + UserID string `json:"id"` + Email string `json:"email"` + Role types.Role `json:"role"` + OrgID string `json:"orgId"` +} + +func (c *Claims) Validate() error { + if c.UserID == "" { + return 
errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "id is required") + } + // The problem is that when the "role" field is missing entirely from the JSON (as opposed to being present but empty), the UnmarshalJSON method for Role isn't called at all. + // The JSON decoder just sets the Role field to its zero value (""). + if c.Role == "" { + return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "role is required") + } + + if c.OrgID == "" { + return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "orgId is required") + } + + return nil +} diff --git a/pkg/tokenizer/jwttokenizer/provider.go b/pkg/tokenizer/jwttokenizer/provider.go new file mode 100644 index 0000000000..f5b8bb9e24 --- /dev/null +++ b/pkg/tokenizer/jwttokenizer/provider.go @@ -0,0 +1,152 @@ +package jwttokenizer + +import ( + "context" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/tokenizer" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/golang-jwt/jwt/v5" +) + +type provider struct { + config tokenizer.Config + settings factory.ScopedProviderSettings + stopC chan struct{} +} + +func NewFactory() factory.ProviderFactory[tokenizer.Tokenizer, tokenizer.Config] { + return factory.NewProviderFactory(factory.MustNewName("jwt"), func(ctx context.Context, providerSettings factory.ProviderSettings, config tokenizer.Config) (tokenizer.Tokenizer, error) { + return New(ctx, providerSettings, config) + }) +} + +func New(ctx context.Context, providerSettings factory.ProviderSettings, config tokenizer.Config) (tokenizer.Tokenizer, error) { + settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/tokenizer/jwttokenizer") + + if config.JWT.Secret == "" { + settings.Logger().ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_TOKENIZER_JWT_SECRET environment variable immediately. 
For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.") + } + + return tokenizer.NewWrappedTokenizer(settings, &provider{ + config: config, + settings: settings, + stopC: make(chan struct{}), + }), nil +} + +func (provider *provider) Start(ctx context.Context) error { + <-provider.stopC + return nil +} +func (provider *provider) CreateToken(ctx context.Context, identity *authtypes.Identity, meta map[string]string) (*authtypes.Token, error) { + accessTokenClaims := Claims{ + UserID: identity.UserID.String(), + Role: identity.Role, + Email: identity.Email.String(), + OrgID: identity.OrgID.String(), + RegisteredClaims: jwt.RegisteredClaims{ + ExpiresAt: jwt.NewNumericDate(time.Now().Add(provider.config.Rotation.Interval)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + } + + accessToken, err := jwt.NewWithClaims(jwt.SigningMethodHS256, accessTokenClaims).SignedString([]byte(provider.config.JWT.Secret)) + if err != nil { + return nil, err + } + + refreshTokenClaims := Claims{ + UserID: identity.UserID.String(), + Role: identity.Role, + Email: identity.Email.String(), + OrgID: identity.OrgID.String(), + RegisteredClaims: jwt.RegisteredClaims{ + ExpiresAt: jwt.NewNumericDate(time.Now().Add(provider.config.Lifetime.Max)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + } + + refreshToken, err := jwt.NewWithClaims(jwt.SigningMethodHS256, refreshTokenClaims).SignedString([]byte(provider.config.JWT.Secret)) + if err != nil { + return nil, err + } + + return authtypes.NewTokenFromAccessTokenAndRefreshToken(accessToken, refreshToken, meta, identity.UserID) +} + +func (provider *provider) GetIdentity(ctx context.Context, accessToken string) (*authtypes.Identity, error) { + claims, err := provider.getClaimsFromToken(accessToken) + if err != nil { + return nil, err + } + + return authtypes.NewIdentity(valuer.MustNewUUID(claims.UserID), valuer.MustNewUUID(claims.OrgID), valuer.MustNewEmail(claims.Email), claims.Role), nil +} + +func (provider *provider) DeleteToken(ctx context.Context, accessToken string) error { + provider.settings.Logger().WarnContext(ctx, "Deleting token by access token is not supported for this tokenizer, this is a no-op", "tokenizer_provider", provider.config.Provider) + return nil +} + +func (provider *provider) RotateToken(ctx context.Context, _ string, refreshToken string) (*authtypes.Token, error) { + claims, err := provider.getClaimsFromToken(refreshToken) + if err != nil { + return nil, err + } + + return provider.CreateToken(ctx, authtypes.NewIdentity(valuer.MustNewUUID(claims.UserID), valuer.MustNewUUID(claims.OrgID), valuer.MustNewEmail(claims.Email), claims.Role), map[string]string{}) +} + +func (provider *provider) DeleteTokensByUserID(ctx context.Context, userID valuer.UUID) error { + provider.settings.Logger().WarnContext(ctx, "Deleting token by user id is not supported for this tokenizer, this is a no-op", "tokenizer_provider", provider.config.Provider) + return nil +} + +func (provider *provider) DeleteIdentity(ctx context.Context, userID valuer.UUID) error { + provider.settings.Logger().WarnContext(ctx, "Deleting identity is not supported for this tokenizer, this is a no-op", "tokenizer_provider", provider.config.Provider) + return nil +} + +func (provider *provider) SetLastObservedAt(ctx context.Context, accessToken string, lastObservedAt time.Time) error { + provider.settings.Logger().WarnContext(ctx, "Setting last observed at is not supported for this tokenizer, this is a no-op", "tokenizer_provider", provider.config.Provider) + 
return nil +} + +func (provider *provider) Config() tokenizer.Config { + return provider.config +} + +func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) { + return map[string]any{}, nil +} + +func (provider *provider) getClaimsFromToken(token string) (Claims, error) { + claims := Claims{} + + _, err := jwt.ParseWithClaims(token, &claims, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unrecognized signing algorithm: %s", token.Method.Alg()) + } + + return []byte(provider.config.JWT.Secret), nil + }) + + if err != nil { + return Claims{}, errors.Wrapf(err, errors.TypeUnauthenticated, errors.CodeUnauthenticated, "failed to parse jwt token") + } + + return claims, nil +} + +func (provider *provider) Stop(ctx context.Context) error { + close(provider.stopC) + return nil +} + +func (provider *provider) ListMaxLastObservedAtByOrgID(ctx context.Context, orgID valuer.UUID) (map[valuer.UUID]time.Time, error) { + return map[valuer.UUID]time.Time{}, nil +} diff --git a/pkg/tokenizer/opaquetokenizer/provider.go b/pkg/tokenizer/opaquetokenizer/provider.go new file mode 100644 index 0000000000..6ae90fda1d --- /dev/null +++ b/pkg/tokenizer/opaquetokenizer/provider.go @@ -0,0 +1,488 @@ +package opaquetokenizer + +import ( + "context" + "slices" + "strings" + "time" + + "github.com/SigNoz/signoz/pkg/cache" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/modules/organization" + "github.com/SigNoz/signoz/pkg/tokenizer" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/types/cachetypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/allegro/bigcache/v3" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" +) + +var ( + emptyOrgID valuer.UUID = valuer.UUID{} +) + +type provider struct { + config tokenizer.Config + settings factory.ScopedProviderSettings + cache cache.Cache + tokenStore authtypes.TokenStore + orgGetter organization.Getter + stopC chan struct{} + lastObservedAtCache *bigcache.BigCache +} + +func NewFactory(cache cache.Cache, tokenStore authtypes.TokenStore, orgGetter organization.Getter) factory.ProviderFactory[tokenizer.Tokenizer, tokenizer.Config] { + return factory.NewProviderFactory(factory.MustNewName("opaque"), func(ctx context.Context, providerSettings factory.ProviderSettings, config tokenizer.Config) (tokenizer.Tokenizer, error) { + return New(ctx, providerSettings, config, cache, tokenStore, orgGetter) + }) +} + +func New(ctx context.Context, providerSettings factory.ProviderSettings, config tokenizer.Config, cache cache.Cache, tokenStore authtypes.TokenStore, orgGetter organization.Getter) (tokenizer.Tokenizer, error) { + settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/tokenizer/opaquetokenizer") + + lastObservedAtCache, err := bigcache.New(ctx, bigcache.Config{ + Shards: 1024, + LifeWindow: config.Lifetime.Max, + CleanWindow: config.Opaque.GC.Interval, + StatsEnabled: false, + }) + if err != nil { + return nil, err + } + + return tokenizer.NewWrappedTokenizer(settings, &provider{ + config: config, + settings: settings, + cache: cache, + tokenStore: tokenStore, + orgGetter: orgGetter, + stopC: make(chan struct{}), + lastObservedAtCache: lastObservedAtCache, + }), nil +} + +func (provider *provider) Start(ctx context.Context) error { + 
ticker := time.NewTicker(provider.config.Opaque.GC.Interval) + defer ticker.Stop() + + for { + select { + case <-provider.stopC: + return nil + case <-ticker.C: + ctx, span := provider.settings.Tracer().Start(ctx, "tokenizer.GC", trace.WithAttributes(attribute.String("tokenizer.provider", provider.config.Provider))) + + if err := provider.gc(ctx); err != nil { + span.RecordError(err) + provider.settings.Logger().ErrorContext(ctx, "failed to garbage collect tokens", "error", err) + } + + if err := provider.flushLastObservedAt(ctx); err != nil { + span.RecordError(err) + provider.settings.Logger().ErrorContext(ctx, "failed to flush tokens", "error", err) + } + + span.End() + } + } +} + +func (provider *provider) CreateToken(ctx context.Context, identity *authtypes.Identity, meta map[string]string) (*authtypes.Token, error) { + existingTokens, err := provider.tokenStore.ListByUserID(ctx, identity.UserID) + if err != nil { + return nil, err + } + + if len(existingTokens) >= provider.config.Opaque.Token.MaxPerUser { + slices.SortFunc(existingTokens, func(a, b *authtypes.Token) int { + return a.CreatedAt.Compare(b.CreatedAt) + }) + + if err := provider.DeleteToken(ctx, existingTokens[0].AccessToken); err != nil { + return nil, err + } + } + + token, err := authtypes.NewToken(meta, identity.UserID) + if err != nil { + return nil, err + } + + if err := provider.setToken(ctx, token, true); err != nil { + return nil, err + } + + if err := provider.setIdentity(ctx, identity); err != nil { + return nil, err + } + + return token, nil +} + +func (provider *provider) GetIdentity(ctx context.Context, accessToken string) (*authtypes.Identity, error) { + token, err := provider.getOrGetSetToken(ctx, accessToken) + if err != nil { + return nil, err + } + + if err := token.IsValid(provider.config.Rotation.Interval, provider.config.Lifetime.Idle, provider.config.Lifetime.Max); err != nil { + return nil, err + } + + identity, err := provider.getOrGetSetIdentity(ctx, token.UserID) + if err != nil { + return nil, err + } + + return identity, nil +} + +func (provider *provider) RotateToken(ctx context.Context, accessToken string, refreshToken string) (*authtypes.Token, error) { + var rotatedToken *authtypes.Token + + if err := provider.tokenStore.GetOrUpdateByAccessTokenOrPrevAccessToken(ctx, accessToken, func(ctx context.Context, token *authtypes.StorableToken) error { + if err := token.Rotate(accessToken, refreshToken, provider.config.Rotation.Duration, provider.config.Lifetime.Idle, provider.config.Lifetime.Max); err != nil { + return err + } + + // If the token passed the Rotate method and is the same as the input token, return the same token. + if token.AccessToken == accessToken && token.RefreshToken == refreshToken { + rotatedToken = token + return nil + } + + if err := provider.setToken(ctx, token, false); err != nil { + return err + } + + // Delete the previous access token from the cache + provider.cache.Delete(ctx, emptyOrgID, accessTokenCacheKey(accessToken)) + + rotatedToken = token + return nil + }); err != nil { + // If the token is not found, return an unauthenticated error. 
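+ // Revoked tokens and tokens already removed by garbage collection also surface as
+ // "not found", so the caller simply sees an invalid session rather than a lookup error.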
+ if errors.Ast(err, errors.TypeNotFound) { + return nil, errors.Wrap(err, errors.TypeUnauthenticated, errors.CodeUnauthenticated, "invalid access token") + } + + return nil, err + } + + return rotatedToken, nil +} + +func (provider *provider) DeleteToken(ctx context.Context, accessToken string) error { + provider.cache.Delete(ctx, emptyOrgID, cachetypes.NewSha1CacheKey(accessToken)) + if err := provider.tokenStore.DeleteByAccessToken(ctx, accessToken); err != nil { + return err + } + + return nil +} + +func (provider *provider) DeleteTokensByUserID(ctx context.Context, userID valuer.UUID) error { + tokens, err := provider.tokenStore.ListByUserID(ctx, userID) + if err != nil { + return err + } + + for _, token := range tokens { + provider.cache.Delete(ctx, emptyOrgID, cachetypes.NewSha1CacheKey(token.AccessToken)) + } + + if err := provider.tokenStore.DeleteByUserID(ctx, userID); err != nil { + return err + } + + return nil +} + +func (provider *provider) DeleteIdentity(ctx context.Context, userID valuer.UUID) error { + provider.cache.Delete(ctx, emptyOrgID, "identity::"+userID.String()) + return nil +} + +func (provider *provider) Stop(ctx context.Context) error { + close(provider.stopC) + + // garbage collect tokens on stop + if err := provider.gc(ctx); err != nil { + provider.settings.Logger().ErrorContext(ctx, "failed to garbage collect tokens", "error", err) + } + + // flush tokens on stop + if err := provider.flushLastObservedAt(ctx); err != nil { + provider.settings.Logger().ErrorContext(ctx, "failed to flush tokens", "error", err) + } + return nil +} + +func (provider *provider) SetLastObservedAt(ctx context.Context, accessToken string, lastObservedAt time.Time) error { + token, err := provider.getOrGetSetToken(ctx, accessToken) + if err != nil { + return err + } + + // If we can't update the last observed at, we return nil. 
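+ // last_observed_at is best-effort bookkeeping consumed by the stats reporter; a
+ // rejected update is deliberately swallowed so it never fails the request that
+ // presented the token.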
+ if err := token.UpdateLastObservedAt(lastObservedAt); err != nil { + return nil + } + + if err := provider.lastObservedAtCache.Set(lastObservedAtCacheKey(accessToken, token.UserID), []byte(lastObservedAt.Format(time.RFC3339))); err != nil { + return err + } + + err = provider.cache.Set(ctx, emptyOrgID, accessTokenCacheKey(accessToken), token, provider.config.Lifetime.Max) + if err != nil { + return err + } + + return nil +} + +func (provider *provider) Config() tokenizer.Config { + return provider.config +} + +func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) { + tokens, err := provider.tokenStore.ListByOrgID(ctx, orgID) + if err != nil { + return nil, err + } + + stats := make(map[string]any) + stats["auth_token.count"] = len(tokens) + + accessTokenToLastObservedAt, err := provider.listLastObservedAtDesc() + if err != nil { + return nil, err + } + + if len(accessTokenToLastObservedAt) == 0 { + return stats, nil + } + + accessTokenToLastObservedAtMax := accessTokenToLastObservedAt[0] + + if lastObservedAt, ok := accessTokenToLastObservedAtMax["last_observed_at"].(time.Time); ok { + if !lastObservedAt.IsZero() { + stats["auth_token.last_observed_at.max.time"] = lastObservedAt.UTC() + stats["auth_token.last_observed_at.max.time_unix"] = lastObservedAt.Unix() + } + } + + return stats, nil +} + +func (provider *provider) ListMaxLastObservedAtByOrgID(ctx context.Context, orgID valuer.UUID) (map[valuer.UUID]time.Time, error) { + accessTokenToLastObservedAts, err := provider.listLastObservedAtDesc() + if err != nil { + return nil, err + } + + maxLastObservedAtPerUserID := make(map[valuer.UUID]time.Time) + + for _, accessTokenToLastObservedAt := range accessTokenToLastObservedAts { + userID, ok := accessTokenToLastObservedAt["user_id"].(valuer.UUID) + if !ok { + continue + } + + lastObservedAt, ok := accessTokenToLastObservedAt["last_observed_at"].(time.Time) + if !ok { + continue + } + + if lastObservedAt.IsZero() { + continue + } + + if _, ok := maxLastObservedAtPerUserID[userID]; !ok { + maxLastObservedAtPerUserID[userID] = lastObservedAt.UTC() + continue + } + + if lastObservedAt.UTC().After(maxLastObservedAtPerUserID[userID]) { + maxLastObservedAtPerUserID[userID] = lastObservedAt.UTC() + } + } + + return maxLastObservedAtPerUserID, nil + +} + +func (provider *provider) gc(ctx context.Context) error { + orgs, err := provider.orgGetter.ListByOwnedKeyRange(ctx) + if err != nil { + return err + } + + orgIDs := make([]valuer.UUID, 0, len(orgs)) + for _, org := range orgs { + orgIDs = append(orgIDs, org.ID) + } + + tokens, err := provider.tokenStore.ListByOrgIDs(ctx, orgIDs) + if err != nil { + return err + } + + var tokensToDelete []valuer.UUID + for _, token := range tokens { + if err := token.IsExpired(provider.config.Lifetime.Idle, provider.config.Lifetime.Max); err != nil { + tokensToDelete = append(tokensToDelete, token.ID) + } + } + + if len(tokensToDelete) > 0 { + err := provider.tokenStore.DeleteMany(ctx, tokensToDelete) + if err != nil { + return err + } + } + + return nil +} + +func (provider *provider) flushLastObservedAt(ctx context.Context) error { + accessTokenToLastObservedAt, err := provider.listLastObservedAtDesc() + if err != nil { + return err + } + + if err := provider.tokenStore.UpdateLastObservedAtByAccessToken(ctx, accessTokenToLastObservedAt); err != nil { + return err + } + + return nil +} + +func (provider *provider) getOrGetSetToken(ctx context.Context, accessToken string) (*authtypes.Token, error) { + token := 
new(authtypes.Token) + err := provider.cache.Get(ctx, emptyOrgID, accessTokenCacheKey(accessToken), token, false) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + return nil, err + } + + if err == nil { + return token, nil + } + + token, err = provider.tokenStore.GetByAccessToken(ctx, accessToken) + if err != nil { + return nil, err + } + + err = provider.cache.Set(ctx, emptyOrgID, accessTokenCacheKey(accessToken), token, provider.config.Lifetime.Max) + if err != nil { + return nil, err + } + + return token, nil +} + +func (provider *provider) setToken(ctx context.Context, token *authtypes.Token, create bool) error { + err := provider.cache.Set(ctx, emptyOrgID, accessTokenCacheKey(token.AccessToken), token, provider.config.Lifetime.Max) + if err != nil { + return err + } + + if create { + return provider.tokenStore.Create(ctx, token) + } + + return provider.tokenStore.Update(ctx, token) +} + +func (provider *provider) setIdentity(ctx context.Context, identity *authtypes.Identity) error { + err := provider.cache.Set(ctx, emptyOrgID, identityCacheKey(identity.UserID), identity, -1) + if err != nil { + return err + } + + return nil +} + +func (provider *provider) getOrGetSetIdentity(ctx context.Context, userID valuer.UUID) (*authtypes.Identity, error) { + identity := new(authtypes.Identity) + err := provider.cache.Get(ctx, emptyOrgID, identityCacheKey(userID), identity, false) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + return nil, err + } + + if err == nil { + return identity, nil + } + + identity, err = provider.tokenStore.GetIdentityByUserID(ctx, userID) + if err != nil { + return nil, err + } + + err = provider.cache.Set(ctx, emptyOrgID, identityCacheKey(userID), identity, -1) + if err != nil { + return nil, err + } + + return identity, nil +} + +func (provider *provider) listLastObservedAtDesc() ([]map[string]any, error) { + iterator := provider.lastObservedAtCache.Iterator() + + var accessTokenToLastObservedAt []map[string]any + + for iterator.SetNext() { + value, err := iterator.Value() + if err != nil { + return nil, err + } + + accessToken, userID, err := accessTokenAndUserIDFromLastObservedAtCacheKey(value.Key()) + if err != nil { + return nil, err + } + + lastObservedAt, err := time.Parse(time.RFC3339, string(value.Value())) + if err != nil { + return nil, err + } + + accessTokenToLastObservedAt = append(accessTokenToLastObservedAt, map[string]any{ + "user_id": userID, + "access_token": accessToken, + "last_observed_at": lastObservedAt, + }) + } + + // sort by descending order of last_observed_at + slices.SortFunc(accessTokenToLastObservedAt, func(a, b map[string]any) int { + return b["last_observed_at"].(time.Time).Compare(a["last_observed_at"].(time.Time)) + }) + + return accessTokenToLastObservedAt, nil +} + +func accessTokenCacheKey(accessToken string) string { + return "access_token::" + cachetypes.NewSha1CacheKey(accessToken) +} + +func identityCacheKey(userID valuer.UUID) string { + return "identity::" + userID.String() +} + +func lastObservedAtCacheKey(accessToken string, userID valuer.UUID) string { + return "access_token::" + accessToken + "::" + userID.String() +} + +func accessTokenAndUserIDFromLastObservedAtCacheKey(key string) (string, valuer.UUID, error) { + parts := strings.Split(key, "::") + if len(parts) != 3 { + return "", valuer.UUID{}, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid last observed at cache key") + } + + return parts[1], valuer.MustNewUUID(parts[2]), nil +} diff --git 
a/pkg/tokenizer/opaquetokenizer/store.go b/pkg/tokenizer/opaquetokenizer/store.go new file mode 100644 index 0000000000..a01ec05f20 --- /dev/null +++ b/pkg/tokenizer/opaquetokenizer/store.go @@ -0,0 +1,258 @@ +package opaquetokenizer + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect" +) + +type store struct { + sqlstore sqlstore.SQLStore +} + +func NewStore(sqlstore sqlstore.SQLStore) authtypes.TokenStore { + return &store{sqlstore: sqlstore} +} + +func (store *store) Create(ctx context.Context, token *authtypes.StorableToken) error { + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewInsert(). + Model(token). + Exec(ctx) + if err != nil { + return err + } + + return nil +} + +func (store *store) GetIdentityByUserID(ctx context.Context, userID valuer.UUID) (*authtypes.Identity, error) { + user := new(types.User) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(user). + Where("id = ?", userID). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist", userID) + } + + return authtypes.NewIdentity(userID, user.OrgID, user.Email, types.Role(user.Role)), nil +} + +func (store *store) GetByAccessToken(ctx context.Context, accessToken string) (*authtypes.StorableToken, error) { + token := new(authtypes.StorableToken) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(token). + Where("access_token = ?", accessToken). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeTokenNotFound, "token does not exist", accessToken) + } + + return token, nil +} + +func (store *store) GetOrUpdateByAccessTokenOrPrevAccessToken(ctx context.Context, accessToken string, updater func(ctx context.Context, token *authtypes.StorableToken) error) error { + return store.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error { + token := new(authtypes.StorableToken) + + selectQuery := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(token). + Where("access_token = ?", accessToken). + WhereOr("prev_access_token = ?", accessToken) + + if store.sqlstore.BunDBCtx(ctx).Dialect().Name() != dialect.SQLite { + selectQuery = selectQuery.For("UPDATE") + } + + err := selectQuery.Scan(ctx) + if err != nil { + return store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeTokenNotFound, "token does not exist", accessToken) + } + + if err := updater(ctx, token); err != nil { + return err + } + + return nil + }) +} + +func (store *store) GetByUserIDAndRefreshToken(ctx context.Context, userID valuer.UUID, refreshToken string) (*authtypes.StorableToken, error) { + token := new(authtypes.StorableToken) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(token). + Where("user_id = ?", userID). + Where("refresh_token = ?", refreshToken). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeTokenNotFound, "token with user id: %s and refresh token: %s does not exist", userID, refreshToken) + } + + return token, nil +} + +func (store *store) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*authtypes.StorableToken, error) { + tokens := make([]*authtypes.StorableToken, 0) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(&tokens). + Join("JOIN users"). 
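+ // auth_token rows do not carry org_id themselves, so the listing is scoped by
+ // joining through the owning user.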
+ JoinOn("users.id = auth_token.user_id"). + Where("org_id = ?", orgID). + Scan(ctx) + if err != nil { + return nil, err + } + + return tokens, nil +} + +func (store *store) ListByOrgIDs(ctx context.Context, orgIDs []valuer.UUID) ([]*authtypes.StorableToken, error) { + tokens := make([]*authtypes.StorableToken, 0) + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(&tokens). + Join("JOIN users"). + JoinOn("users.id = auth_token.user_id"). + Join("JOIN organizations"). + JoinOn("organizations.id = users.org_id"). + Where("organizations.id IN (?)", bun.In(orgIDs)). + Scan(ctx) + if err != nil { + return nil, err + } + + return tokens, nil +} + +func (store *store) Update(ctx context.Context, token *authtypes.StorableToken) error { + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewUpdate(). + Model(token). + Where("id = ?", token.ID). + Exec(ctx) + if err != nil { + return err + } + + return nil +} + +func (store *store) DeleteByAccessToken(ctx context.Context, accessToken string) error { + token := new(authtypes.StorableToken) + + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewDelete(). + Model(token). + Where("access_token = ?", accessToken). + Exec(ctx) + if err != nil { + return err + } + + return nil +} + +func (store *store) DeleteMany(ctx context.Context, tokenIDs []valuer.UUID) error { + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewDelete(). + Model(new(authtypes.StorableToken)). + Where("id IN (?)", tokenIDs). + Exec(ctx) + if err != nil { + return err + } + + return nil +} + +func (store *store) DeleteByUserID(ctx context.Context, userID valuer.UUID) error { + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewDelete(). + Model(new(authtypes.StorableToken)). + Where("user_id = ?", userID). + Exec(ctx) + if err != nil { + return err + } + + return nil +} + +func (store *store) ListByUserID(ctx context.Context, userID valuer.UUID) ([]*authtypes.StorableToken, error) { + var tokens []*authtypes.StorableToken + + err := store. + sqlstore. + BunDBCtx(ctx). + NewSelect(). + Model(&tokens). + Where("user_id = ?", userID). + Scan(ctx) + if err != nil { + return nil, err + } + + return tokens, nil +} + +func (store *store) UpdateLastObservedAtByAccessToken(ctx context.Context, accessTokenToLastObservedAt []map[string]any) error { + values := store. + sqlstore. + BunDBCtx(ctx). + NewValues(&accessTokenToLastObservedAt) + + _, err := store. + sqlstore. + BunDBCtx(ctx). + NewUpdate(). + With("update_cte", values). + Model((*authtypes.StorableToken)(nil)). + TableExpr("update_cte"). + Set("last_observed_at = update_cte.last_observed_at"). + Where("auth_token.access_token = update_cte.access_token"). + Where("auth_token.user_id = update_cte.user_id"). + Exec(ctx) + if err != nil { + return err + } + + return nil +} diff --git a/pkg/tokenizer/tokenizer.go b/pkg/tokenizer/tokenizer.go new file mode 100644 index 0000000000..36b1ee91d2 --- /dev/null +++ b/pkg/tokenizer/tokenizer.go @@ -0,0 +1,43 @@ +package tokenizer + +import ( + "context" + "time" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/statsreporter" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type Tokenizer interface { + factory.Service + // Create a new token. + CreateToken(context.Context, *authtypes.Identity, map[string]string) (*authtypes.Token, error) + + // Get identity from token. + GetIdentity(context.Context, string) (*authtypes.Identity, error) + + // Rotate the input token and return a new token. 
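+ // Both the current access token and the refresh token must be presented. Providers
+ // may keep the previous pair valid for a short grace window (rotation::duration) so
+ // that concurrent requests do not race the rotation.
+ //
+ // Illustrative call (identifier names are placeholders):
+ //   token, err := tz.RotateToken(ctx, oldAccessToken, oldRefreshToken)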
+ RotateToken(context.Context, string, string) (*authtypes.Token, error) + + // Delete the token by access token. + DeleteToken(context.Context, string) error + + // Delete all tokens by userID. + DeleteTokensByUserID(context.Context, valuer.UUID) error + + // Delete the identity by userID. + DeleteIdentity(context.Context, valuer.UUID) error + + // Set the last observed at for an access token. + SetLastObservedAt(context.Context, string, time.Time) error + + // Returns the config of the tokenizer. + Config() Config + + // Gets the last observed at for each user in an org. + ListMaxLastObservedAtByOrgID(context.Context, valuer.UUID) (map[valuer.UUID]time.Time, error) + + statsreporter.StatsCollector +} diff --git a/pkg/tokenizer/tokenizertest/provider.go b/pkg/tokenizer/tokenizertest/provider.go new file mode 100644 index 0000000000..4a4265a07d --- /dev/null +++ b/pkg/tokenizer/tokenizertest/provider.go @@ -0,0 +1,76 @@ +package tokenizertest + +import ( + "context" + "time" + + "github.com/SigNoz/signoz/pkg/tokenizer" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +var _ tokenizer.Tokenizer = (*Provider)(nil) + +type Provider struct { + stopC chan struct{} +} + +func New() *Provider { + return &Provider{stopC: make(chan struct{})} +} + +// Collect implements tokenizer.Tokenizer. +func (provider *Provider) Collect(context.Context, valuer.UUID) (map[string]any, error) { + panic("unimplemented") +} + +// CreateToken implements tokenizer.Tokenizer. +func (provider *Provider) CreateToken(context.Context, *authtypes.Identity, map[string]string) (*authtypes.Token, error) { + panic("unimplemented") +} + +// DeleteToken implements tokenizer.Tokenizer. +func (provider *Provider) DeleteToken(context.Context, string) error { + panic("unimplemented") +} + +// GetIdentity implements tokenizer.Tokenizer. +func (provider *Provider) GetIdentity(context.Context, string) (*authtypes.Identity, error) { + panic("unimplemented") +} + +// RotateToken implements tokenizer.Tokenizer. +func (provider *Provider) RotateToken(context.Context, string, string) (*authtypes.Token, error) { + panic("unimplemented") +} + +// Start implements tokenizer.Tokenizer. +func (provider *Provider) Start(context.Context) error { + panic("unimplemented") +} + +// Stop implements tokenizer.Tokenizer. 
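+// Stop closes the internal stop channel and returns nil; it is the only method of this
+// test double with real behaviour. Every other method panics so that unexpected
+// tokenizer calls fail loudly in tests.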
+func (provider *Provider) Stop(context.Context) error { + close(provider.stopC) + return nil +} + +func (provider *Provider) DeleteIdentity(context.Context, valuer.UUID) error { + panic("unimplemented") +} + +func (provider *Provider) DeleteTokensByUserID(context.Context, valuer.UUID) error { + panic("unimplemented") +} + +func (provider *Provider) Config() tokenizer.Config { + panic("unimplemented") +} + +func (provider *Provider) SetLastObservedAt(context.Context, string, time.Time) error { + panic("unimplemented") +} + +func (provider *Provider) ListMaxLastObservedAtByOrgID(context.Context, valuer.UUID) (map[valuer.UUID]time.Time, error) { + panic("unimplemented") +} diff --git a/pkg/tokenizer/wrapped.go b/pkg/tokenizer/wrapped.go new file mode 100644 index 0000000000..2a79d73459 --- /dev/null +++ b/pkg/tokenizer/wrapped.go @@ -0,0 +1,135 @@ +package tokenizer + +import ( + "context" + "time" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" +) + +type wrappedTokenizer struct { + settings factory.ScopedProviderSettings + tokenizer Tokenizer +} + +func NewWrappedTokenizer(settings factory.ScopedProviderSettings, tokenizer Tokenizer) Tokenizer { + return &wrappedTokenizer{ + settings: settings, + tokenizer: tokenizer, + } +} + +func (wrapped *wrappedTokenizer) Start(ctx context.Context) error { + return wrapped.tokenizer.Start(ctx) +} + +func (wrapped *wrappedTokenizer) Stop(ctx context.Context) error { + return wrapped.tokenizer.Stop(ctx) +} + +func (wrapped *wrappedTokenizer) CreateToken(ctx context.Context, identity *authtypes.Identity, meta map[string]string) (*authtypes.Token, error) { + ctx, span := wrapped.settings.Tracer().Start(ctx, "tokenizer.CreateToken", trace.WithAttributes(attribute.String("tokenizer.provider", wrapped.tokenizer.Config().Provider))) + defer span.End() + + token, err := wrapped.tokenizer.CreateToken(ctx, identity, meta) + if err != nil { + span.RecordError(err) + return nil, err + } + + return token, nil +} + +func (wrapped *wrappedTokenizer) GetIdentity(ctx context.Context, accessToken string) (*authtypes.Identity, error) { + ctx, span := wrapped.settings.Tracer().Start(ctx, "tokenizer.GetIdentity", trace.WithAttributes(attribute.String("tokenizer.provider", wrapped.tokenizer.Config().Provider))) + defer span.End() + + identity, err := wrapped.tokenizer.GetIdentity(ctx, accessToken) + if err != nil { + span.RecordError(err) + return nil, err + } + + return identity, nil +} + +func (wrapped *wrappedTokenizer) RotateToken(ctx context.Context, accessToken string, refreshToken string) (*authtypes.Token, error) { + ctx, span := wrapped.settings.Tracer().Start(ctx, "tokenizer.RotateToken", trace.WithAttributes(attribute.String("tokenizer.provider", wrapped.tokenizer.Config().Provider))) + defer span.End() + + token, err := wrapped.tokenizer.RotateToken(ctx, accessToken, refreshToken) + if err != nil { + span.RecordError(err) + return nil, err + } + + return token, nil +} + +func (wrapped *wrappedTokenizer) DeleteToken(ctx context.Context, accessToken string) error { + ctx, span := wrapped.settings.Tracer().Start(ctx, "tokenizer.DeleteToken", trace.WithAttributes(attribute.String("tokenizer.provider", wrapped.tokenizer.Config().Provider))) + defer span.End() + + err := wrapped.tokenizer.DeleteToken(ctx, accessToken) + if err != nil { + span.RecordError(err) + return err + } + + return nil +} + +func (wrapped 
*wrappedTokenizer) DeleteTokensByUserID(ctx context.Context, userID valuer.UUID) error { + ctx, span := wrapped.settings.Tracer().Start(ctx, "tokenizer.DeleteTokensByUserID", trace.WithAttributes(attribute.String("tokenizer.provider", wrapped.tokenizer.Config().Provider))) + defer span.End() + + err := wrapped.tokenizer.DeleteTokensByUserID(ctx, userID) + if err != nil { + span.RecordError(err) + return err + } + + return nil +} + +func (wrapped *wrappedTokenizer) DeleteIdentity(ctx context.Context, userID valuer.UUID) error { + ctx, span := wrapped.settings.Tracer().Start(ctx, "tokenizer.DeleteIdentity", trace.WithAttributes(attribute.String("tokenizer.provider", wrapped.tokenizer.Config().Provider))) + defer span.End() + + err := wrapped.tokenizer.DeleteIdentity(ctx, userID) + if err != nil { + span.RecordError(err) + return err + } + + return nil +} + +func (wrapped *wrappedTokenizer) SetLastObservedAt(ctx context.Context, accessToken string, lastObservedAt time.Time) error { + ctx, span := wrapped.settings.Tracer().Start(ctx, "tokenizer.SetLastObservedAt", trace.WithAttributes(attribute.String("tokenizer.provider", wrapped.tokenizer.Config().Provider))) + defer span.End() + + err := wrapped.tokenizer.SetLastObservedAt(ctx, accessToken, lastObservedAt) + if err != nil { + span.RecordError(err) + return err + } + + return nil +} + +func (wrapped *wrappedTokenizer) Config() Config { + return wrapped.tokenizer.Config() +} + +func (wrapped *wrappedTokenizer) ListMaxLastObservedAtByOrgID(ctx context.Context, orgID valuer.UUID) (map[valuer.UUID]time.Time, error) { + return wrapped.tokenizer.ListMaxLastObservedAtByOrgID(ctx, orgID) +} + +func (wrapped *wrappedTokenizer) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) { + return wrapped.tokenizer.Collect(ctx, orgID) +} diff --git a/pkg/types/authtypes/authn.go b/pkg/types/authtypes/authn.go new file mode 100644 index 0000000000..e94f723fe8 --- /dev/null +++ b/pkg/types/authtypes/authn.go @@ -0,0 +1,128 @@ +package authtypes + +import ( + "context" + "encoding/json" + "net/url" + "strings" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/valuer" +) + +var ( + ErrCodeInvalidState = errors.MustNewCode("invalid_state") +) + +var ( + AuthNProviderGoogleAuth = AuthNProvider{valuer.NewString("google_auth")} + AuthNProviderSAML = AuthNProvider{valuer.NewString("saml")} + AuthNProviderEmailPassword = AuthNProvider{valuer.NewString("email_password")} + AuthNProviderOIDC = AuthNProvider{valuer.NewString("oidc")} +) + +type AuthNProvider struct{ valuer.String } + +type Identity struct { + UserID valuer.UUID `json:"userId"` + OrgID valuer.UUID `json:"orgId"` + Email valuer.Email `json:"email"` + Role types.Role `json:"role"` +} + +type CallbackIdentity struct { + Name string `json:"name"` + Email valuer.Email `json:"email"` + OrgID valuer.UUID `json:"orgId"` + State State `json:"state"` +} + +type State struct { + DomainID valuer.UUID + URL *url.URL +} + +func NewState(siteURL *url.URL, domainID valuer.UUID) State { + u := &url.URL{ + Scheme: siteURL.Scheme, + Host: siteURL.Host, + Path: siteURL.Path, + RawQuery: url.Values{ + "domain_id": {newDomainIDForState(domainID)}, + }.Encode(), + } + + return State{ + DomainID: domainID, + URL: u, + } +} + +func NewStateFromString(state string) (State, error) { + u, err := url.Parse(state) + if err != nil { + return State{}, err + } + + domainID, err := newDomainIDFromState(u.Query().Get("domain_id")) + if err != nil { + return 
State{}, err + } + + return State{ + DomainID: domainID, + URL: u, + }, nil +} + +func NewIdentity(userID valuer.UUID, orgID valuer.UUID, email valuer.Email, role types.Role) *Identity { + return &Identity{ + UserID: userID, + OrgID: orgID, + Email: email, + Role: role, + } +} + +func NewCallbackIdentity(name string, email valuer.Email, orgID valuer.UUID, state State) *CallbackIdentity { + return &CallbackIdentity{ + Name: name, + Email: email, + OrgID: orgID, + State: state, + } +} + +func newDomainIDForState(domainID valuer.UUID) string { + return strings.Replace(domainID.String(), "-", ":", -1) +} + +func newDomainIDFromState(state string) (valuer.UUID, error) { + return valuer.NewUUID(strings.Replace(state, ":", "-", -1)) +} + +func (typ Identity) MarshalBinary() ([]byte, error) { + return json.Marshal(typ) +} + +func (typ *Identity) UnmarshalBinary(data []byte) error { + return json.Unmarshal(data, typ) +} + +func (typ *Identity) ToClaims() Claims { + return Claims{ + UserID: typ.UserID.String(), + Email: typ.Email.String(), + Role: typ.Role, + OrgID: typ.OrgID.String(), + } +} + +type AuthNStore interface { + // Get user and factor password by email and orgID. + GetUserAndFactorPasswordByEmailAndOrgID(ctx context.Context, email string, orgID valuer.UUID) (*types.User, *types.FactorPassword, error) + + // Get org domain from id. + GetAuthDomainFromID(ctx context.Context, domainID valuer.UUID) (*AuthDomain, error) +} diff --git a/pkg/types/authtypes/claims.go b/pkg/types/authtypes/claims.go index f46fd073d5..dff11c17ec 100644 --- a/pkg/types/authtypes/claims.go +++ b/pkg/types/authtypes/claims.go @@ -1,49 +1,58 @@ package authtypes import ( + "context" "log/slog" "slices" "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/types" - "github.com/golang-jwt/jwt/v5" ) -var _ jwt.ClaimsValidator = (*Claims)(nil) +type claimsKey struct{} +type accessTokenKey struct{} type Claims struct { - jwt.RegisteredClaims - UserID string `json:"id"` - Email string `json:"email"` - Role types.Role `json:"role"` - OrgID string `json:"orgId"` + UserID string + Email string + Role types.Role + OrgID string } -func (c *Claims) Validate() error { - if c.UserID == "" { - return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "id is required") +// NewContextWithClaims attaches individual claims to the context. +func NewContextWithClaims(ctx context.Context, claims Claims) context.Context { + ctx = context.WithValue(ctx, claimsKey{}, claims) + return ctx +} + +func ClaimsFromContext(ctx context.Context) (Claims, error) { + claims, ok := ctx.Value(claimsKey{}).(Claims) + if !ok { + return Claims{}, errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated") } - // The problem is that when the "role" field is missing entirely from the JSON (as opposed to being present but empty), the UnmarshalJSON method for Role isn't called at all. - // The JSON decoder just sets the Role field to its zero value (""). 
- if c.Role == "" { - return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "role is required") + return claims, nil +} + +func NewContextWithAccessToken(ctx context.Context, accessToken string) context.Context { + return context.WithValue(ctx, accessTokenKey{}, accessToken) +} + +func AccessTokenFromContext(ctx context.Context) (string, error) { + accessToken, ok := ctx.Value(accessTokenKey{}).(string) + if !ok { + return "", errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated") } - if c.OrgID == "" { - return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "orgId is required") - } - - return nil + return accessToken, nil } func (c *Claims) LogValue() slog.Value { return slog.GroupValue( - slog.String("id", c.UserID), + slog.String("user_id", c.UserID), slog.String("email", c.Email), slog.String("role", c.Role.String()), - slog.String("orgId", c.OrgID), - slog.Time("exp", c.ExpiresAt.Time), + slog.String("org_id", c.OrgID), ) } diff --git a/pkg/types/authtypes/domain.go b/pkg/types/authtypes/domain.go new file mode 100644 index 0000000000..2caad1e197 --- /dev/null +++ b/pkg/types/authtypes/domain.go @@ -0,0 +1,206 @@ +package authtypes + +import ( + "context" + "encoding/json" + "regexp" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/uptrace/bun" +) + +const ( + authDomainNameRegexString string = `^([a-zA-Z0-9]{1}[a-zA-Z0-9-]{0,62}){1}(\.[a-zA-Z0-9]{1}[a-zA-Z0-9-]{0,62})*?$` +) + +var ( + authDomainNameRegex = regexp.MustCompile(authDomainNameRegexString) +) + +var ( + ErrCodeAuthDomainInvalidConfig = errors.MustNewCode("auth_domain_invalid_config") + ErrCodeAuthDomainInvalidName = errors.MustNewCode("auth_domain_invalid_name") + ErrCodeAuthDomainMismatch = errors.MustNewCode("auth_domain_mismatch") + ErrCodeAuthDomainNotFound = errors.MustNewCode("auth_domain_not_found") + ErrCodeAuthDomainAlreadyExists = errors.MustNewCode("auth_domain_already_exists") +) + +type GettableAuthDomain struct { + *StorableAuthDomain + *AuthDomainConfig +} + +type PostableAuthDomain struct { + Config AuthDomainConfig `json:"config"` + Name string `json:"name"` +} + +type UpdateableAuthDomain struct { + Config AuthDomainConfig `json:"config"` +} + +type StorableAuthDomain struct { + bun.BaseModel `bun:"table:org_domains"` + + types.Identifiable + Name string `bun:"name" json:"name"` + Data string `bun:"data" json:"-"` + OrgID valuer.UUID `bun:"org_id" json:"orgId"` + types.TimeAuditable +} + +type AuthDomainConfig struct { + SSOEnabled bool `json:"ssoEnabled"` + AuthNProvider AuthNProvider `json:"ssoType"` + SAML *SamlConfig `json:"samlConfig"` + Google *GoogleConfig `json:"googleAuthConfig"` + OIDC *OIDCConfig `json:"oidcConfig"` +} + +type AuthDomain struct { + storableAuthDomain *StorableAuthDomain + authDomainConfig *AuthDomainConfig +} + +func NewAuthDomainFromConfig(name string, config *AuthDomainConfig, orgID valuer.UUID) (*AuthDomain, error) { + data, err := json.Marshal(config) + if err != nil { + return nil, err + } + + return NewAuthDomain(name, string(data), orgID) +} + +func NewAuthDomain(name string, data string, orgID valuer.UUID) (*AuthDomain, error) { + storableAuthDomain := &StorableAuthDomain{ + Identifiable: types.Identifiable{ + ID: valuer.GenerateUUID(), + }, + Name: name, + Data: data, + OrgID: orgID, + TimeAuditable: types.TimeAuditable{ + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + }, + } + + return 
NewAuthDomainFromStorableAuthDomain(storableAuthDomain) +} + +func NewAuthDomainFromStorableAuthDomain(storableAuthDomain *StorableAuthDomain) (*AuthDomain, error) { + authDomainConfig := new(AuthDomainConfig) + if err := json.Unmarshal([]byte(storableAuthDomain.Data), authDomainConfig); err != nil { + return nil, err + } + + return &AuthDomain{ + storableAuthDomain: storableAuthDomain, + authDomainConfig: authDomainConfig, + }, nil +} + +func NewGettableAuthDomainFromAuthDomain(authDomain *AuthDomain) *GettableAuthDomain { + return &GettableAuthDomain{ + StorableAuthDomain: authDomain.StorableAuthDomain(), + AuthDomainConfig: authDomain.AuthDomainConfig(), + } +} + +func (typ *AuthDomain) StorableAuthDomain() *StorableAuthDomain { + return typ.storableAuthDomain +} + +func (typ *AuthDomain) AuthDomainConfig() *AuthDomainConfig { + return typ.authDomainConfig +} + +func (typ *AuthDomain) Update(config *AuthDomainConfig) error { + data, err := json.Marshal(config) + if err != nil { + return err + } + + typ.authDomainConfig = config + typ.storableAuthDomain.Data = string(data) + typ.storableAuthDomain.UpdatedAt = time.Now() + return nil +} + +func (typ *PostableAuthDomain) UnmarshalJSON(data []byte) error { + type Alias PostableAuthDomain + + var temp Alias + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + if !authDomainNameRegex.MatchString(temp.Name) { + return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthDomainInvalidName, "invalid domain name %s", temp.Name) + } + + *typ = PostableAuthDomain(temp) + return nil +} + +func (typ *AuthDomainConfig) UnmarshalJSON(data []byte) error { + type Alias AuthDomainConfig + + var temp Alias + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + switch temp.AuthNProvider { + case AuthNProviderGoogleAuth: + if temp.Google == nil { + return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthDomainInvalidConfig, "google auth config is required") + } + + case AuthNProviderSAML: + if temp.SAML == nil { + return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthDomainInvalidConfig, "saml config is required") + } + + case AuthNProviderOIDC: + if temp.OIDC == nil { + return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthDomainInvalidConfig, "oidc config is required") + } + + default: + return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthDomainInvalidConfig, "invalid authn provider %q", temp.AuthNProvider.StringValue()) + } + + *typ = AuthDomainConfig(temp) + return nil + +} + +type AuthDomainStore interface { + // Get by id. + Get(context.Context, valuer.UUID) (*AuthDomain, error) + + // Get by orgID and id. + GetByOrgIDAndID(context.Context, valuer.UUID, valuer.UUID) (*AuthDomain, error) + + // Get by name. + GetByName(context.Context, string) (*AuthDomain, error) + + // Get by name and orgID. + GetByNameAndOrgID(context.Context, string, valuer.UUID) (*AuthDomain, error) + + // List org domains by orgID. + ListByOrgID(context.Context, valuer.UUID) ([]*AuthDomain, error) + + // Create auth domain. + Create(context.Context, *AuthDomain) error + + // Update by orgID and id. + Update(context.Context, *AuthDomain) error + + // Delete by orgID and id. 
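+ // Requiring the orgID alongside the domain id keeps deletions scoped to the caller's
+ // organization.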
+ Delete(context.Context, valuer.UUID, valuer.UUID) error +} diff --git a/pkg/types/authtypes/email_password.go b/pkg/types/authtypes/email_password.go new file mode 100644 index 0000000000..f5da78b759 --- /dev/null +++ b/pkg/types/authtypes/email_password.go @@ -0,0 +1,38 @@ +package authtypes + +import ( + "encoding/json" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type PostableEmailPasswordSession struct { + Email valuer.Email `json:"email"` + Password string `json:"password"` + OrgID valuer.UUID `json:"orgId"` +} + +func (typ *PostableEmailPasswordSession) UnmarshalJSON(data []byte) error { + type Alias PostableEmailPasswordSession + var temp Alias + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + if temp.Email.IsZero() { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required") + } + + if temp.Password == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "password is required") + } + + if temp.OrgID.IsZero() { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is required") + } + + *typ = PostableEmailPasswordSession(temp) + return nil +} diff --git a/pkg/types/authtypes/google.go b/pkg/types/authtypes/google.go new file mode 100644 index 0000000000..41fd1e89e8 --- /dev/null +++ b/pkg/types/authtypes/google.go @@ -0,0 +1,38 @@ +package authtypes + +import ( + "encoding/json" + + "github.com/SigNoz/signoz/pkg/errors" +) + +type GoogleConfig struct { + // ClientID is the application's ID. For example, 292085223830.apps.googleusercontent.com. + ClientID string `json:"clientId"` + + // It is the application's secret. + ClientSecret string `json:"clientSecret"` + + // What is the meaning of this? Should we remove this? 
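+ // In a standard OAuth2 flow this would be the callback URL registered with the
+ // identity provider; it is not validated in UnmarshalJSON below, so it is presumably
+ // kept for compatibility with previously stored domain configurations.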
+ RedirectURI string `json:"redirectURI"` +} + +func (config *GoogleConfig) UnmarshalJSON(data []byte) error { + type Alias GoogleConfig + + var temp Alias + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + if temp.ClientID == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientId is required") + } + + if temp.ClientSecret == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientSecret is required") + } + + *config = GoogleConfig(temp) + return nil +} diff --git a/pkg/types/authtypes/jwt.go b/pkg/types/authtypes/jwt.go deleted file mode 100644 index 6bf54a1782..0000000000 --- a/pkg/types/authtypes/jwt.go +++ /dev/null @@ -1,143 +0,0 @@ -package authtypes - -import ( - "context" - "strings" - "time" - - "github.com/SigNoz/signoz/pkg/errors" - "github.com/SigNoz/signoz/pkg/types" - "github.com/golang-jwt/jwt/v5" -) - -type jwtClaimsKey struct{} - -type JWT struct { - JwtSecret string - JwtExpiry time.Duration - JwtRefresh time.Duration -} - -func NewJWT(jwtSecret string, jwtExpiry time.Duration, jwtRefresh time.Duration) *JWT { - return &JWT{ - JwtSecret: jwtSecret, - JwtExpiry: jwtExpiry, - JwtRefresh: jwtRefresh, - } -} - -func (j *JWT) ContextFromRequest(ctx context.Context, values ...string) (context.Context, error) { - var value string - for _, v := range values { - if v != "" { - value = v - break - } - } - - if value == "" { - return ctx, errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "missing authorization header") - } - - // parse from - bearerToken, ok := parseBearerAuth(value) - if !ok { - // this will take care that if the value is not of type bearer token, directly use it - bearerToken = value - } - - claims, err := j.Claims(bearerToken) - if err != nil { - return ctx, err - } - - return NewContextWithClaims(ctx, claims), nil -} - -func (j *JWT) Claims(jwtStr string) (Claims, error) { - claims := Claims{} - _, err := jwt.ParseWithClaims(jwtStr, &claims, func(token *jwt.Token) (interface{}, error) { - if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { - return nil, errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unrecognized signing algorithm: %s", token.Method.Alg()) - } - return []byte(j.JwtSecret), nil - }) - if err != nil { - return Claims{}, errors.Wrapf(err, errors.TypeUnauthenticated, errors.CodeUnauthenticated, "failed to parse jwt token") - } - - return claims, nil -} - -// NewContextWithClaims attaches individual claims to the context. 
-func NewContextWithClaims(ctx context.Context, claims Claims) context.Context { - ctx = context.WithValue(ctx, jwtClaimsKey{}, claims) - return ctx -} - -// signToken creates and signs a JWT token with the given claims -func (j *JWT) signToken(claims Claims) (string, error) { - token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) - return token.SignedString([]byte(j.JwtSecret)) -} - -// AccessToken creates an access token with the provided claims -func (j *JWT) AccessToken(orgId, userId, email string, role types.Role) (string, Claims, error) { - claims := Claims{ - UserID: userId, - Role: role, - Email: email, - OrgID: orgId, - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(j.JwtExpiry)), - IssuedAt: jwt.NewNumericDate(time.Now()), - }, - } - - token, err := j.signToken(claims) - if err != nil { - return "", Claims{}, errors.Wrapf(err, errors.TypeUnauthenticated, errors.CodeUnauthenticated, "failed to sign token") - } - - return token, claims, nil -} - -// RefreshToken creates a refresh token with the provided claims -func (j *JWT) RefreshToken(orgId, userId, email string, role types.Role) (string, Claims, error) { - claims := Claims{ - UserID: userId, - Role: role, - Email: email, - OrgID: orgId, - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(j.JwtRefresh)), - IssuedAt: jwt.NewNumericDate(time.Now()), - }, - } - - token, err := j.signToken(claims) - if err != nil { - return "", Claims{}, errors.Wrapf(err, errors.TypeUnauthenticated, errors.CodeUnauthenticated, "failed to sign token") - } - - return token, claims, nil -} - -func ClaimsFromContext(ctx context.Context) (Claims, error) { - claims, ok := ctx.Value(jwtClaimsKey{}).(Claims) - if !ok { - return Claims{}, errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated") - } - - return claims, nil -} - -func parseBearerAuth(auth string) (string, bool) { - const prefix = "Bearer " - // Case insensitive prefix match - if len(auth) < len(prefix) || !strings.EqualFold(auth[:len(prefix)], prefix) { - return "", false - } - - return auth[len(prefix):], true -} diff --git a/pkg/types/authtypes/jwt_test.go b/pkg/types/authtypes/jwt_test.go deleted file mode 100644 index d0f33dd24c..0000000000 --- a/pkg/types/authtypes/jwt_test.go +++ /dev/null @@ -1,210 +0,0 @@ -package authtypes - -import ( - "testing" - "time" - - "github.com/SigNoz/signoz/pkg/errors" - "github.com/SigNoz/signoz/pkg/types" - "github.com/golang-jwt/jwt/v5" - "github.com/stretchr/testify/assert" -) - -func TestJwtAccessToken(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - token, _, err := jwtService.AccessToken("orgId", "userId", "email@example.com", types.RoleAdmin) - - assert.NoError(t, err) - assert.NotEmpty(t, token) -} - -func TestJwtRefreshToken(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - token, _, err := jwtService.RefreshToken("orgId", "userId", "email@example.com", types.RoleAdmin) - - assert.NoError(t, err) - assert.NotEmpty(t, token) -} - -func TestJwtClaims(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - - // Create a valid token - claims := Claims{ - UserID: "userId", - Role: types.RoleAdmin, - Email: "email@example.com", - OrgID: "orgId", - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Minute)), - IssuedAt: jwt.NewNumericDate(time.Now()), - }, - } - tokenString, err := jwtService.signToken(claims) - assert.NoError(t, err) - 
- // Test retrieving claims from the token - retrievedClaims, err := jwtService.Claims(tokenString) - assert.NoError(t, err) - assert.Equal(t, claims.UserID, retrievedClaims.UserID) - assert.Equal(t, claims.Role, retrievedClaims.Role) - assert.Equal(t, claims.Email, retrievedClaims.Email) - assert.Equal(t, claims.OrgID, retrievedClaims.OrgID) -} - -func TestJwtClaimsInvalidToken(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - - _, err := jwtService.Claims("invalid.token.string") - assert.Error(t, err) - assert.Contains(t, err.Error(), "token is malformed") -} - -func TestJwtClaimsExpiredToken(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - - // Create an expired token - claims := Claims{ - UserID: "userId", - Role: types.RoleAdmin, - Email: "email@example.com", - OrgID: "orgId", - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(-time.Minute)), - IssuedAt: jwt.NewNumericDate(time.Now()), - }, - } - tokenString, err := jwtService.signToken(claims) - assert.NoError(t, err) - - _, err = jwtService.Claims(tokenString) - assert.Error(t, err) - assert.Contains(t, err.Error(), "token is expired") -} - -func TestJwtClaimsInvalidSignature(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - - // Create a valid token - claims := Claims{ - UserID: "userId", - Role: types.RoleAdmin, - Email: "email@example.com", - OrgID: "orgId", - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Minute)), - }, - } - validToken, err := jwtService.signToken(claims) - assert.NoError(t, err) - - // Modify the token to create an invalid signature - invalidToken := validToken + "tampered" - - // Test retrieving claims from the invalid signature token - _, err = jwtService.Claims(invalidToken) - assert.Error(t, err) - assert.Contains(t, err.Error(), "signature is invalid") -} - -func TestJwtClaimsWithInvalidRole(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - - claims := Claims{ - UserID: "userId", - Role: "INVALID_ROLE", - Email: "email@example.com", - OrgID: "orgId", - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Minute)), - }, - } - validToken, err := jwtService.signToken(claims) - assert.NoError(t, err) - - _, err = jwtService.Claims(validToken) - assert.Error(t, err) - assert.True(t, errors.Ast(err, errors.TypeUnauthenticated)) -} - -func TestJwtClaimsMissingUserID(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - - claims := Claims{ - UserID: "", - Role: types.RoleAdmin, - Email: "email@example.com", - OrgID: "orgId", - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Minute)), - }, - } - validToken, err := jwtService.signToken(claims) - assert.NoError(t, err) - - _, err = jwtService.Claims(validToken) - assert.Error(t, err) - assert.True(t, errors.Ast(err, errors.TypeUnauthenticated)) -} - -func TestJwtClaimsMissingRole(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - - claims := Claims{ - UserID: "userId", - Role: "", - Email: "email@example.com", - OrgID: "orgId", - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Minute)), - }, - } - validToken, err := jwtService.signToken(claims) - assert.NoError(t, err) - - _, err = jwtService.Claims(validToken) - assert.Error(t, err) - assert.True(t, errors.Ast(err, errors.TypeUnauthenticated)) -} - -func 
TestJwtClaimsMissingOrgID(t *testing.T) { - jwtService := NewJWT("secret", time.Minute, time.Hour) - - claims := Claims{ - UserID: "userId", - Role: types.RoleAdmin, - Email: "email@example.com", - OrgID: "", - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Minute)), - }, - } - validToken, err := jwtService.signToken(claims) - assert.NoError(t, err) - - _, err = jwtService.Claims(validToken) - assert.Error(t, err) - assert.True(t, errors.Ast(err, errors.TypeUnauthenticated)) -} - -func TestParseBearerAuth(t *testing.T) { - tests := []struct { - auth string - expected string - expectOk bool - }{ - {"Bearer validToken", "validToken", true}, - {"bearer validToken", "validToken", true}, - {"InvalidToken", "", false}, - {"Bearer", "", false}, - {"", "", false}, - } - - for _, test := range tests { - t.Run(test.auth, func(t *testing.T) { - token, ok := parseBearerAuth(test.auth) - assert.Equal(t, test.expected, token) - assert.Equal(t, test.expectOk, ok) - }) - } -} diff --git a/pkg/types/authtypes/oidc.go b/pkg/types/authtypes/oidc.go new file mode 100644 index 0000000000..9737364a64 --- /dev/null +++ b/pkg/types/authtypes/oidc.go @@ -0,0 +1,65 @@ +package authtypes + +import ( + "encoding/json" + + "github.com/SigNoz/signoz/pkg/errors" +) + +type OIDCConfig struct { + // It is the URL identifier for the service. For example: "https://accounts.google.com" or "https://login.salesforce.com". + Issuer string `json:"issuer"` + + // Some offspec providers like Azure, Oracle IDCS have oidc discovery url different from issuer url which causes issuerValidation to fail + // This provides a way to override the Issuer url from the .well-known/openid-configuration issuer + // from the .well-known/openid-configuration issuer + IssuerAlias string `json:"issuerAlias"` + + // It is the application's ID. + ClientID string `json:"clientId"` + + // It is the application's secret. + ClientSecret string `json:"clientSecret"` + + // Mapping of claims to the corresponding fields in the token. + ClaimMapping ClaimMapping `json:"claimMapping"` + + // Whether to skip email verification. Defaults to "false" + InsecureSkipEmailVerified bool `json:"insecureSkipEmailVerified"` + + // Uses the userinfo endpoint to get additional claims for the token. This is especially useful where upstreams return "thin" id tokens + GetUserInfo bool `json:"getUserInfo"` +} + +type ClaimMapping struct { + // Configurable key which contains the email claims. 
Defaults to "email" + Email string `json:"email"` +} + +func (config *OIDCConfig) UnmarshalJSON(data []byte) error { + type Alias OIDCConfig + + var temp Alias + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + if temp.Issuer == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "issuer is required") + } + + if temp.ClientID == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientId is required") + } + + if temp.ClientSecret == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientSecret is required") + } + + if temp.ClaimMapping.Email == "" { + temp.ClaimMapping.Email = "email" + } + + *config = OIDCConfig(temp) + return nil +} diff --git a/pkg/types/authtypes/saml.go b/pkg/types/authtypes/saml.go new file mode 100644 index 0000000000..bccafd91a0 --- /dev/null +++ b/pkg/types/authtypes/saml.go @@ -0,0 +1,47 @@ +package authtypes + +import ( + "encoding/json" + + "github.com/SigNoz/signoz/pkg/errors" +) + +type SamlConfig struct { + // The entityID of the SAML identity provider. It can typically be found in the EntityID attribute of the EntityDescriptor element in the SAML metadata of the identity provider. Example: + SamlEntity string `json:"samlEntity"` + + // The SSO endpoint of the SAML identity provider. It can typically be found in the SingleSignOnService element in the SAML metadata of the identity provider. Example: + SamlIdp string `json:"samlIdp"` + + // The certificate of the SAML identity provider. It can typically be found in the X509Certificate element in the SAML metadata of the identity provider. Example: {samlCert} + SamlCert string `json:"samlCert"` + + // Whether to skip signing the SAML requests. It can typically be found in the WantAuthnRequestsSigned attribute of the IDPSSODescriptor element in the SAML metadata of the identity provider. Example: + // For providers like jumpcloud, this should be set to true. + // Note: This is the reverse of WantAuthnRequestsSigned. If WantAuthnRequestsSigned is false, then InsecureSkipAuthNRequestsSigned should be true. 
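The OIDC and SAML configs enforce their required fields inside custom UnmarshalJSON implementations, so decoding a stored config is also the validation step. A minimal sketch of that round-trip for OIDCConfig, with placeholder values rather than anything shipped in this change:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/authtypes"
)

func main() {
	// Hypothetical payload; only issuer, clientId and clientSecret are mandatory.
	raw := []byte(`{
		"issuer": "https://accounts.google.com",
		"clientId": "example-client-id",
		"clientSecret": "example-client-secret"
	}`)

	var cfg authtypes.OIDCConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		// A missing issuer, clientId or clientSecret surfaces here as an
		// invalid-input error from the custom UnmarshalJSON.
		panic(err)
	}

	// When claimMapping.email is omitted, it defaults to "email".
	fmt.Println(cfg.Issuer, cfg.ClaimMapping.Email)
}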
+ InsecureSkipAuthNRequestsSigned bool `json:"insecureSkipAuthNRequestsSigned"` +} + +func (config *SamlConfig) UnmarshalJSON(data []byte) error { + type Alias SamlConfig + + var temp Alias + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + if temp.SamlEntity == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "samlEntity is required") + } + + if temp.SamlIdp == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "samlIdp is required") + } + + if temp.SamlCert == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "samlCert is required") + } + + *config = SamlConfig(temp) + return nil +} diff --git a/pkg/types/authtypes/session.go b/pkg/types/authtypes/session.go new file mode 100644 index 0000000000..209381e08e --- /dev/null +++ b/pkg/types/authtypes/session.go @@ -0,0 +1,78 @@ +package authtypes + +import ( + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type DeprecatedPostableLogin struct { + Email valuer.Email `json:"email"` + Password string `json:"password"` +} + +type DeprecatedGettableLogin struct { + AccessJWT string `json:"accessJwt"` + UserID string `json:"userId"` +} + +type SessionContext struct { + Exists bool `json:"exists"` + Orgs []*OrgSessionContext `json:"orgs"` +} + +type OrgSessionContext struct { + ID valuer.UUID `json:"id"` + Name string `json:"name"` + AuthNSupport AuthNSupport `json:"authNSupport"` + Warning *errors.JSON `json:"warning,omitempty"` +} + +type AuthNSupport struct { + Callback []CallbackAuthNSupport `json:"callback"` + Password []PasswordAuthNSupport `json:"password"` +} + +type CallbackAuthNSupport struct { + Provider AuthNProvider `json:"provider"` + URL string `json:"url"` +} + +type PasswordAuthNSupport struct { + Provider AuthNProvider `json:"provider"` +} + +func NewSessionContext() *SessionContext { + return &SessionContext{Exists: false, Orgs: []*OrgSessionContext{}} +} + +func NewOrgSessionContext(orgID valuer.UUID, name string) *OrgSessionContext { + return &OrgSessionContext{ + ID: orgID, + Name: name, + AuthNSupport: AuthNSupport{ + Password: []PasswordAuthNSupport{}, + Callback: []CallbackAuthNSupport{}, + }, + Warning: nil, + } +} + +func (s *SessionContext) AddOrgContext(orgContext *OrgSessionContext) *SessionContext { + s.Orgs = append(s.Orgs, orgContext) + return s +} + +func (s *OrgSessionContext) AddPasswordAuthNSupport(provider AuthNProvider) *OrgSessionContext { + s.AuthNSupport.Password = append(s.AuthNSupport.Password, PasswordAuthNSupport{Provider: provider}) + return s +} + +func (s *OrgSessionContext) AddCallbackAuthNSupport(provider AuthNProvider, url string) *OrgSessionContext { + s.AuthNSupport.Callback = append(s.AuthNSupport.Callback, CallbackAuthNSupport{Provider: provider, URL: url}) + return s +} + +func (s *OrgSessionContext) AddWarning(warning error) *OrgSessionContext { + s.Warning = errors.AsJSON(warning) + return s +} diff --git a/pkg/types/authtypes/token.go b/pkg/types/authtypes/token.go new file mode 100644 index 0000000000..3d0e2dfa35 --- /dev/null +++ b/pkg/types/authtypes/token.go @@ -0,0 +1,263 @@ +package authtypes + +import ( + "context" + "encoding/json" + "net/url" + "strconv" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types/cachetypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/sethvargo/go-password/password" + "github.com/uptrace/bun" +) + +var ( + ErrCodeTokenRotationRequired = 
errors.MustNewCode("token_rotation_required") + ErrCodeTokenExpired = errors.MustNewCode("token_expired") + ErrCodeTokenNotFound = errors.MustNewCode("token_not_found") + ErrCodeTokenOlderLastObservedAt = errors.MustNewCode("token_older_last_observed_at") +) + +var _ cachetypes.Cacheable = (*Token)(nil) + +type PostableRotateToken struct { + RefreshToken string `json:"refreshToken"` +} + +type StorableToken = Token + +type GettableToken struct { + TokenType string `json:"tokenType"` + AccessToken string `json:"accessToken"` + RefreshToken string `json:"refreshToken"` + ExpiresIn int `json:"expiresIn"` +} + +type Token struct { + bun.BaseModel `bun:"table:auth_token,alias:auth_token"` + + ID valuer.UUID `bun:"id,pk,type:text"` + Meta map[string]string `bun:"meta,notnull"` + PrevAccessToken string `bun:"prev_access_token,nullzero"` + AccessToken string `bun:"access_token,notnull"` + PrevRefreshToken string `bun:"prev_refresh_token,nullzero"` + RefreshToken string `bun:"refresh_token,notnull"` + LastObservedAt time.Time `bun:"last_observed_at,nullzero"` + RotatedAt time.Time `bun:"rotated_at,nullzero"` + CreatedAt time.Time `bun:"created_at,notnull"` + UpdatedAt time.Time `bun:"updated_at,notnull"` + UserID valuer.UUID `bun:"user_id,notnull"` +} + +func NewToken(meta map[string]string, userID valuer.UUID) (*Token, error) { + accessToken := password.MustGenerate(32, 10, 0, true, true) + refreshToken := password.MustGenerate(32, 12, 0, true, true) + + return &Token{ + ID: valuer.GenerateUUID(), + Meta: meta, + PrevAccessToken: "", + AccessToken: accessToken, + PrevRefreshToken: "", + RefreshToken: refreshToken, + LastObservedAt: time.Time{}, + RotatedAt: time.Time{}, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + UserID: userID, + }, nil +} + +func NewTokenFromAccessTokenAndRefreshToken(accessToken string, refreshToken string, meta map[string]string, userID valuer.UUID) (*Token, error) { + return &Token{ + ID: valuer.UUID{}, + Meta: meta, + PrevAccessToken: "", + AccessToken: accessToken, + PrevRefreshToken: "", + RefreshToken: refreshToken, + LastObservedAt: time.Time{}, + RotatedAt: time.Time{}, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + UserID: userID, + }, nil +} + +func NewGettableTokenFromToken(token *Token, rotationInterval time.Duration) *GettableToken { + return &GettableToken{ + TokenType: "bearer", + AccessToken: token.AccessToken, + RefreshToken: token.RefreshToken, + ExpiresIn: int(time.Until(token.RotationAt(rotationInterval)).Seconds()), + } +} + +func NewURLValuesFromToken(token *Token, rotationInterval time.Duration) url.Values { + return url.Values{ + "tokenType": {"bearer"}, + "accessToken": {token.AccessToken}, + "refreshToken": {token.RefreshToken}, + "expiresIn": {strconv.Itoa(int(time.Until(token.RotationAt(rotationInterval)).Seconds()))}, + } +} + +func (typ *PostableRotateToken) UnmarshalJSON(data []byte) error { + type Alias PostableRotateToken + var temp Alias + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + if temp.RefreshToken == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "refresh token is required") + } + + *typ = PostableRotateToken(temp) + return nil +} + +func (typ *Token) IsValid(rotationInterval time.Duration, idleDuration time.Duration, maxDuration time.Duration) error { + // Check for expiration + if err := typ.IsExpired(idleDuration, maxDuration); err != nil { + return err + } + + // Check for rotation + if err := typ.IsRotationRequired(rotationInterval); err != nil { + return err + } 
+ + return nil +} + +func (typ *Token) IsExpired(idleDuration time.Duration, maxDuration time.Duration) error { + // If now - last_seen_at > idle_duration, the token will be considered as expired. + if !typ.LastObservedAt.IsZero() && typ.LastObservedAt.Before(time.Now().Add(-idleDuration)) { + return errors.New(errors.TypeUnauthenticated, ErrCodeTokenExpired, "token has not been used for too long") + } + + // If now - created_at > max_duration, the token will be considered as expired. + if typ.CreatedAt.Before(time.Now().Add(-maxDuration)) { + return errors.New(errors.TypeUnauthenticated, ErrCodeTokenExpired, "token was created a long time ago") + } + + return nil +} + +func (typ *Token) IsRotationRequired(rotationInterval time.Duration) error { + if !typ.RotatedAt.IsZero() && typ.RotatedAt.Before(time.Now().Add(-rotationInterval)) { + return errors.New(errors.TypeUnauthenticated, ErrCodeTokenRotationRequired, "token needs to be rotated") + } + + if typ.RotatedAt.IsZero() && typ.CreatedAt.Before(time.Now().Add(-rotationInterval)) { + return errors.New(errors.TypeUnauthenticated, ErrCodeTokenRotationRequired, "token needs to be rotated") + } + + return nil +} + +func (typ *Token) Rotate(accessTokenOrPrevAccessToken string, refreshTokenOrPrevRefreshToken string, rotationDuration time.Duration, idleDuration time.Duration, maxDuration time.Duration) error { + if typ.PrevAccessToken == accessTokenOrPrevAccessToken && typ.PrevRefreshToken == refreshTokenOrPrevRefreshToken { + // If the token has been rotated within the rotation duration, do nothing and return the same token. + if !typ.RotatedAt.IsZero() && typ.RotatedAt.Before(time.Now().Add(-rotationDuration)) { + return nil + } + + return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "invalid token pair") + } + + if typ.AccessToken != accessTokenOrPrevAccessToken || typ.RefreshToken != refreshTokenOrPrevRefreshToken { + return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "invalid token pair") + } + + if err := typ.IsExpired(idleDuration, maxDuration); err != nil { + return err + } + + // Generate new access and refresh tokens. + typ.PrevAccessToken = typ.AccessToken + typ.AccessToken = password.MustGenerate(32, 10, 0, true, true) + typ.PrevRefreshToken = typ.RefreshToken + typ.RefreshToken = password.MustGenerate(32, 12, 0, true, true) + + // Set the rotated at time. + typ.RotatedAt = time.Now() + + // Set the updated at time. + typ.UpdatedAt = time.Now() + + return nil +} + +func (typ *Token) RotationAt(rotationInterval time.Duration) time.Time { + if typ.RotatedAt.IsZero() { + return typ.CreatedAt.Add(rotationInterval) + } + + return typ.RotatedAt.Add(rotationInterval) +} + +func (typ *Token) UpdateLastObservedAt(lastObservedAt time.Time) error { + if lastObservedAt.Before(typ.LastObservedAt) { + return errors.New(errors.TypeInvalidInput, ErrCodeTokenOlderLastObservedAt, "last observed at is before the current last observed at") + } + + typ.LastObservedAt = lastObservedAt + typ.UpdatedAt = time.Now() + + return nil +} + +func (typ Token) MarshalBinary() ([]byte, error) { + return json.Marshal(typ) +} + +func (typ *Token) UnmarshalBinary(data []byte) error { + return json.Unmarshal(data, typ) +} + +type TokenStore interface { + // Create a new token. + Create(context.Context, *StorableToken) error + + // Get an identity by userID. + GetIdentityByUserID(context.Context, valuer.UUID) (*Identity, error) + + // Get a token by AccessToken. 
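A minimal sketch of the opaque-token lifecycle built from the helpers above; the durations and metadata below are placeholders, not the configured defaults:

package main

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	userID := valuer.GenerateUUID()

	// NewToken generates random opaque access/refresh strings; nothing is signed,
	// so expiry and revocation are enforced by the store and the checks below.
	token, err := authtypes.NewToken(map[string]string{"example": "meta"}, userID)
	if err != nil {
		panic(err)
	}

	// Placeholder durations for illustration only.
	rotation, idle, lifetime := 30*time.Minute, 7*24*time.Hour, 30*24*time.Hour

	// IsValid checks expiry first (idle time and absolute lifetime), then whether
	// the rotation interval has elapsed (ErrCodeTokenRotationRequired).
	if err := token.IsValid(rotation, idle, lifetime); err != nil {
		panic(err)
	}

	// Rotate swaps in freshly generated values; a previous pair is tolerated for
	// recently rotated tokens so a retried rotate call does not lock the client out.
	if err := token.Rotate(token.AccessToken, token.RefreshToken, rotation, idle, lifetime); err != nil {
		panic(err)
	}

	fmt.Println(token.PrevAccessToken != token.AccessToken) // true
}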
+ GetByAccessToken(context.Context, string) (*StorableToken, error) + + // Updates or doesn't update a token by access token or previous access token with update. The callback is run in a transaction. + GetOrUpdateByAccessTokenOrPrevAccessToken(context.Context, string, func(context.Context, *StorableToken) error) error + + // Get a token by userID and refresh token. + GetByUserIDAndRefreshToken(context.Context, valuer.UUID, string) (*StorableToken, error) + + // List all tokens by orgIDs. + ListByOrgIDs(context.Context, []valuer.UUID) ([]*StorableToken, error) + + // List all tokens by orgID. + ListByOrgID(context.Context, valuer.UUID) ([]*StorableToken, error) + + // List all tokens by userID. + ListByUserID(context.Context, valuer.UUID) ([]*StorableToken, error) + + // Update a token. + Update(context.Context, *StorableToken) error + + // Delete a token by access token. + DeleteByAccessToken(context.Context, string) error + + // Delete many tokens by IDs. + DeleteMany(context.Context, []valuer.UUID) error + + // Delete a token by userID. + DeleteByUserID(context.Context, valuer.UUID) error + + // Update last observed at by access token. + UpdateLastObservedAtByAccessToken(context.Context, []map[string]any) error +} diff --git a/pkg/types/cachetypes/cacheable.go b/pkg/types/cachetypes/cacheable.go index 0da002410e..db587552e3 100644 --- a/pkg/types/cachetypes/cacheable.go +++ b/pkg/types/cachetypes/cacheable.go @@ -1,7 +1,9 @@ package cachetypes import ( + "crypto/sha1" "encoding" + "encoding/hex" "reflect" "github.com/SigNoz/signoz/pkg/errors" @@ -18,6 +20,13 @@ type Cloneable interface { Clone() Cacheable } +func NewSha1CacheKey(val string) string { + hash := sha1.New() + hash.Write([]byte(val)) + + return hex.EncodeToString(hash.Sum(nil)) +} + func CheckCacheablePointer(dest any) error { rv := reflect.ValueOf(dest) if rv.Kind() != reflect.Pointer || rv.IsNil() { diff --git a/pkg/types/ctxtypes/auth.go b/pkg/types/ctxtypes/auth.go index 323f781ca8..ec4d926290 100644 --- a/pkg/types/ctxtypes/auth.go +++ b/pkg/types/ctxtypes/auth.go @@ -1,6 +1,20 @@ package ctxtypes -import "context" +import ( + "context" + + "github.com/SigNoz/signoz/pkg/valuer" +) + +type AuthType struct { + valuer.String +} + +var ( + AuthTypeTokenizer = AuthType{valuer.NewString("tokenizer")} + AuthTypeAPIKey = AuthType{valuer.NewString("api_key")} + AuthTypeInternal = AuthType{valuer.NewString("internal")} +) type authTypeKey struct{} diff --git a/pkg/types/ctxtypes/constants.go b/pkg/types/ctxtypes/constants.go deleted file mode 100644 index 3bcfb15c7c..0000000000 --- a/pkg/types/ctxtypes/constants.go +++ /dev/null @@ -1,13 +0,0 @@ -package ctxtypes - -import "github.com/SigNoz/signoz/pkg/valuer" - -type AuthType struct { - valuer.String -} - -var ( - AuthTypeJWT = AuthType{valuer.NewString("jwt")} - AuthTypeAPIKey = AuthType{valuer.NewString("api_key")} - AuthTypeInternal = AuthType{valuer.NewString("internal")} -) diff --git a/pkg/types/domain.go b/pkg/types/domain.go deleted file mode 100644 index 1ea6eb50de..0000000000 --- a/pkg/types/domain.go +++ /dev/null @@ -1,187 +0,0 @@ -package types - -import ( - "encoding/json" - "fmt" - "net/url" - "strings" - - "github.com/SigNoz/signoz/pkg/types/ssotypes" - "github.com/google/uuid" - "github.com/pkg/errors" - saml2 "github.com/russellhaering/gosaml2" - "github.com/uptrace/bun" -) - -type StorableOrgDomain struct { - bun.BaseModel `bun:"table:org_domains"` - - TimeAuditable - ID uuid.UUID `json:"id" bun:"id,pk,type:text"` - OrgID string `json:"orgId" 
bun:"org_id,type:text,notnull"` - Name string `json:"name" bun:"name,type:varchar(50),notnull,unique"` - Data string `json:"-" bun:"data,type:text,notnull"` -} - -type SSOType string - -const ( - SAML SSOType = "SAML" - GoogleAuth SSOType = "GOOGLE_AUTH" -) - -// GettableOrgDomain identify org owned web domains for auth and other purposes -type GettableOrgDomain struct { - StorableOrgDomain - - SsoEnabled bool `json:"ssoEnabled"` - SsoType SSOType `json:"ssoType"` - - SamlConfig *ssotypes.SamlConfig `json:"samlConfig"` - GoogleAuthConfig *ssotypes.GoogleOAuthConfig `json:"googleAuthConfig"` - - Org *Organization -} - -func (od *GettableOrgDomain) String() string { - return fmt.Sprintf("[%s]%s-%s ", od.Name, od.ID.String(), od.SsoType) -} - -// Valid is used a pipeline function to check if org domain -// loaded from db is valid -func (od *GettableOrgDomain) Valid(err error) error { - if err != nil { - return err - } - - if od.ID == uuid.Nil || od.OrgID == "" { - return fmt.Errorf("both id and orgId are required") - } - - return nil -} - -// ValidNew cheks if the org domain is valid for insertion in db -func (od *GettableOrgDomain) ValidNew() error { - - if od.OrgID == "" { - return fmt.Errorf("orgId is required") - } - - if od.Name == "" { - return fmt.Errorf("name is required") - } - - return nil -} - -// LoadConfig loads config params from json text -func (od *GettableOrgDomain) LoadConfig(jsondata string) error { - d := *od - err := json.Unmarshal([]byte(jsondata), &d) - if err != nil { - return errors.Wrap(err, "failed to marshal json to OrgDomain{}") - } - *od = d - return nil -} - -func (od *GettableOrgDomain) GetSAMLEntityID() string { - if od.SamlConfig != nil { - return od.SamlConfig.SamlEntity - } - return "" -} - -func (od *GettableOrgDomain) GetSAMLIdpURL() string { - if od.SamlConfig != nil { - return od.SamlConfig.SamlIdp - } - return "" -} - -func (od *GettableOrgDomain) GetSAMLCert() string { - if od.SamlConfig != nil { - return od.SamlConfig.SamlCert - } - return "" -} - -// PrepareGoogleOAuthProvider creates GoogleProvider that is used in -// requesting OAuth and also used in processing response from google -func (od *GettableOrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (ssotypes.OAuthCallbackProvider, error) { - if od.GoogleAuthConfig == nil { - return nil, fmt.Errorf("GOOGLE OAUTH is not setup correctly for this domain") - } - - return od.GoogleAuthConfig.GetProvider(od.Name, siteUrl) -} - -// PrepareSamlRequest creates a request accordingly gosaml2 -func (od *GettableOrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServiceProvider, error) { - - // this is the url Idp will call after login completion - acs := fmt.Sprintf("%s://%s/%s", - siteUrl.Scheme, - siteUrl.Host, - "api/v1/complete/saml") - - // this is the address of the calling url, useful to redirect user - sourceUrl := fmt.Sprintf("%s://%s%s", - siteUrl.Scheme, - siteUrl.Host, - siteUrl.Path) - - // ideally this should be some unique ID for each installation - // but since we dont have UI to support it, we default it to - // host. this issuer is an identifier of service provider (signoz) - // on id provider (e.g. azure, okta). Azure requires this id to be configured - // in their system, while others seem to not care about it. 
- // currently we default it to host from window.location (received from browser) - issuer := siteUrl.Host - - return ssotypes.PrepareRequest(issuer, acs, sourceUrl, od.GetSAMLEntityID(), od.GetSAMLIdpURL(), od.GetSAMLCert()) -} - -func (od *GettableOrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) { - - fmtDomainId := strings.Replace(od.ID.String(), "-", ":", -1) - - // build redirect url from window.location sent by frontend - redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path) - - // prepare state that gets relayed back when the auth provider - // calls back our url. here we pass the app url (where signoz runs) - // and the domain Id. The domain Id helps in identifying sso config - // when the call back occurs and the app url is useful in redirecting user - // back to the right path. - // why do we need to pass app url? the callback typically is handled by backend - // and sometimes backend might right at a different port or is unaware of frontend - // endpoint (unless SITE_URL param is set). hence, we receive this build sso request - // along with frontend window.location and use it to relay the information through - // auth provider to the backend (HandleCallback or HandleSSO method). - relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId) - - switch od.SsoType { - case SAML: - - sp, err := od.PrepareSamlRequest(siteUrl) - if err != nil { - return "", err - } - - return sp.BuildAuthURL(relayState) - - case GoogleAuth: - - googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl) - if err != nil { - return "", err - } - return googleProvider.BuildAuthURL(relayState) - - default: - return "", fmt.Errorf("unsupported SSO config for the domain") - } - -} diff --git a/pkg/types/invite.go b/pkg/types/invite.go index 8de9ede349..ebb2bd03ad 100644 --- a/pkg/types/invite.go +++ b/pkg/types/invite.go @@ -1,8 +1,7 @@ package types import ( - "fmt" - "strings" + "encoding/json" "time" "github.com/SigNoz/signoz/pkg/errors" @@ -15,59 +14,22 @@ var ( ErrInviteNotFound = errors.MustNewCode("invite_not_found") ) -type GettableEEInvite struct { - GettableInvite - PreCheck *GettableLoginPrecheck `bun:"-" json:"precheck"` -} - -type GettableInvite struct { - Invite - Organization string `bun:"organization,type:text,notnull" json:"organization"` -} +type GettableInvite = Invite type Invite struct { bun.BaseModel `bun:"table:user_invite"` Identifiable TimeAuditable - OrgID string `bun:"org_id,type:text,notnull" json:"orgID"` - Name string `bun:"name,type:text,notnull" json:"name"` - Email string `bun:"email,type:text,notnull,unique" json:"email"` - Token string `bun:"token,type:text,notnull" json:"token"` - Role string `bun:"role,type:text,notnull" json:"role"` + Name string `bun:"name,type:text" json:"name"` + Email valuer.Email `bun:"email,type:text" json:"email"` + Token string `bun:"token,type:text" json:"token"` + Role Role `bun:"role,type:text" json:"role"` + OrgID valuer.UUID `bun:"org_id,type:text" json:"orgId"` InviteLink string `bun:"-" json:"inviteLink"` } -func NewInvite(orgID, role, name, email string) (*Invite, error) { - if email == "" { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required") - } - _, err := NewRole(role) - if err != nil { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("invalid role for user: %s", email)) - } - - email = strings.TrimSpace(email) - - invite := &Invite{ - Identifiable: Identifiable{ - ID: valuer.GenerateUUID(), - 
}, - TimeAuditable: TimeAuditable{ - CreatedAt: time.Now(), - UpdatedAt: time.Now(), - }, - Name: name, - Email: email, - Token: valuer.GenerateUUID().String(), - Role: role, - OrgID: orgID, - } - - return invite, nil -} - type InviteEmailData struct { CustomerName string InviterName string @@ -75,11 +37,20 @@ type InviteEmailData struct { Link string } +type PostableAcceptInvite struct { + DisplayName string `json:"displayName"` + InviteToken string `json:"token"` + Password string `json:"password"` + + // reference URL to track where the register request is coming from + SourceURL string `json:"sourceUrl"` +} + type PostableInvite struct { - Name string `json:"name"` - Email string `json:"email"` - Role Role `json:"role"` - FrontendBaseUrl string `json:"frontendBaseUrl"` + Name string `json:"name"` + Email valuer.Email `json:"email"` + Role Role `json:"role"` + FrontendBaseUrl string `json:"frontendBaseUrl"` } type PostableBulkInviteRequest struct { @@ -89,3 +60,42 @@ type PostableBulkInviteRequest struct { type GettableCreateInviteResponse struct { InviteToken string `json:"token"` } + +func NewInvite(name string, role Role, orgID valuer.UUID, email valuer.Email) (*Invite, error) { + invite := &Invite{ + Identifiable: Identifiable{ + ID: valuer.GenerateUUID(), + }, + Name: name, + Email: email, + Token: valuer.GenerateUUID().String(), + Role: role, + OrgID: orgID, + TimeAuditable: TimeAuditable{ + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + }, + } + + return invite, nil +} + +func (request *PostableAcceptInvite) UnmarshalJSON(data []byte) error { + type Alias PostableAcceptInvite + + var temp Alias + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + if temp.InviteToken == "" { + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invite token is required") + } + + if !IsPasswordValid(temp.Password) { + return ErrInvalidPassword + } + + *request = PostableAcceptInvite(temp) + return nil +} diff --git a/pkg/types/licensetypes/license.go b/pkg/types/licensetypes/license.go index 0f624f1c0b..fea61f26d0 100644 --- a/pkg/types/licensetypes/license.go +++ b/pkg/types/licensetypes/license.go @@ -21,7 +21,7 @@ type StorableLicense struct { Key string `bun:"key,type:text,notnull,unique"` Data map[string]any `bun:"data,type:text"` LastValidatedAt time.Time `bun:"last_validated_at,notnull"` - OrgID valuer.UUID `bun:"org_id,type:text,notnull" json:"orgID"` + OrgID valuer.UUID `bun:"org_id,type:text,notnull" json:"orgId"` } // this data excludes ID and Key diff --git a/pkg/types/ssotypes/google.go b/pkg/types/ssotypes/google.go deleted file mode 100644 index fec3d1b7cb..0000000000 --- a/pkg/types/ssotypes/google.go +++ /dev/null @@ -1,90 +0,0 @@ -package ssotypes - -import ( - "context" - "net/http" - - "github.com/SigNoz/signoz/pkg/errors" - - "github.com/coreos/go-oidc/v3/oidc" - "golang.org/x/oauth2" -) - -type GoogleOAuthProvider struct { - RedirectURI string - OAuth2Config *oauth2.Config - Verifier *oidc.IDTokenVerifier - Cancel context.CancelFunc - HostedDomain string -} - -func (g *GoogleOAuthProvider) BuildAuthURL(state string) (string, error) { - var opts []oauth2.AuthCodeOption - - // set hosted domain. google supports multiple hosted domains but in our case - // we have one config per host domain. 
- opts = append(opts, oauth2.SetAuthURLParam("hd", g.HostedDomain)) - - return g.OAuth2Config.AuthCodeURL(state, opts...), nil -} - -type oauth2Error struct { - error string - errorDescription string -} - -func (e *oauth2Error) Error() string { - if e.errorDescription == "" { - return e.error - } - return e.error + ": " + e.errorDescription -} - -func (g *GoogleOAuthProvider) HandleCallback(r *http.Request) (identity *SSOIdentity, err error) { - q := r.URL.Query() - if errType := q.Get("error"); errType != "" { - return identity, &oauth2Error{errType, q.Get("error_description")} - } - - token, err := g.OAuth2Config.Exchange(r.Context(), q.Get("code")) - if err != nil { - return identity, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token: %v", err) - } - - return g.createIdentity(r.Context(), token) -} - -func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2.Token) (identity *SSOIdentity, err error) { - rawIDToken, ok := token.Extra("id_token").(string) - if !ok { - return identity, errors.New(errors.TypeInternal, errors.CodeInternal, "google: no id_token in token response") - } - idToken, err := g.Verifier.Verify(ctx, rawIDToken) - if err != nil { - return identity, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to verify ID Token: %v", err) - } - - var claims struct { - Username string `json:"name"` - Email string `json:"email"` - EmailVerified bool `json:"email_verified"` - HostedDomain string `json:"hd"` - } - if err := idToken.Claims(&claims); err != nil { - return identity, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to decode claims: %v", err) - } - - if claims.HostedDomain != g.HostedDomain { - return identity, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: unexpected hd claim %v", claims.HostedDomain) - } - - identity = &SSOIdentity{ - UserID: idToken.Subject, - Username: claims.Username, - Email: claims.Email, - EmailVerified: claims.EmailVerified, - ConnectorData: []byte(token.RefreshToken), - } - - return identity, nil -} diff --git a/pkg/types/ssotypes/saml.go b/pkg/types/ssotypes/saml.go deleted file mode 100644 index c097f5580f..0000000000 --- a/pkg/types/ssotypes/saml.go +++ /dev/null @@ -1,107 +0,0 @@ -package ssotypes - -import ( - "crypto/x509" - "encoding/base64" - "encoding/pem" - "fmt" - "strings" - - "github.com/SigNoz/signoz/pkg/errors" - "github.com/SigNoz/signoz/pkg/query-service/constants" - saml2 "github.com/russellhaering/gosaml2" - dsig "github.com/russellhaering/goxmldsig" -) - -func LoadCertificateStore(certString string) (dsig.X509CertificateStore, error) { - certStore := &dsig.MemoryX509CertificateStore{ - Roots: []*x509.Certificate{}, - } - - certData, err := base64.StdEncoding.DecodeString(certString) - if err != nil { - return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to read certificate: %v", err) - } - - idpCert, err := x509.ParseCertificate(certData) - if err != nil { - return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to prepare saml request, invalid cert: %s", err.Error()) - } - - certStore.Roots = append(certStore.Roots, idpCert) - - return certStore, nil -} - -func LoadCertFromPem(certString string) (dsig.X509CertificateStore, error) { - certStore := &dsig.MemoryX509CertificateStore{ - Roots: []*x509.Certificate{}, - } - - block, _ := pem.Decode([]byte(certString)) - if block == nil { - return certStore, errors.Newf(errors.TypeInvalidInput, 
errors.CodeInvalidInput, "no valid pem cert found") - } - - idpCert, err := x509.ParseCertificate(block.Bytes) - if err != nil { - return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse pem cert: %s", err.Error()) - } - - certStore.Roots = append(certStore.Roots, idpCert) - - return certStore, nil -} - -// PrepareRequest prepares authorization URL (Idp Provider URL) -func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (*saml2.SAMLServiceProvider, error) { - var certStore dsig.X509CertificateStore - if certString == "" { - return nil, fmt.Errorf("invalid certificate data") - } - - var err error - if strings.Contains(certString, "-----BEGIN CERTIFICATE-----") { - certStore, err = LoadCertFromPem(certString) - } else { - certStore, err = LoadCertificateStore(certString) - } - // certificate store can not be created, throw error - if err != nil { - return nil, err - } - - randomKeyStore := dsig.RandomKeyStoreForTest() - - // SIGNOZ_SAML_RETURN_URL env var would support overriding window.location - // as return destination after saml request is complete from IdP side. - // this var is also useful for development, as it is easy to override with backend endpoint - // e.g. http://localhost:8080/api/v1/complete/saml - acsUrl = constants.GetOrDefaultEnv("SIGNOZ_SAML_RETURN_URL", acsUrl) - - sp := &saml2.SAMLServiceProvider{ - IdentityProviderSSOURL: idp, - IdentityProviderIssuer: entity, - ServiceProviderIssuer: issuer, - AssertionConsumerServiceURL: acsUrl, - SignAuthnRequests: true, - AllowMissingAttributes: true, - - // about cert stores -sender(signoz app) and receiver (idp) - // The random key (random key store) is sender cert. The public cert store(IDPCertificateStore) that you see on org domain is receiver cert (idp provided). - // At the moment, the library we use doesn't bother about sender cert and IdP too. It just adds additional layer of security, which we can explore in future versions - // The receiver (Idp) cert will be different for each org domain. Imagine cloud setup where each company setups their domain that integrates with their Idp. - // @signoz.io - // @next.io - // Each of above will have their own Idp setup and hence separate public cert to decrypt the response. - // The way SAML request travels is - - // SigNoz Backend -> IdP Login Screen -> SigNoz Backend -> SigNoz Frontend - // ---------------- | -------------------| ------------------------------------- - // The dotted lines indicate request boundries. So if you notice, the response from Idp starts a new request. hence we need relay state to pass the context around. - - IDPCertificateStore: certStore, - SPKeyStore: randomKeyStore, - } - - return sp, nil -} diff --git a/pkg/types/ssotypes/sso.go b/pkg/types/ssotypes/sso.go deleted file mode 100644 index 009ef22ca9..0000000000 --- a/pkg/types/ssotypes/sso.go +++ /dev/null @@ -1,91 +0,0 @@ -package ssotypes - -import ( - "context" - "fmt" - "net/http" - "net/url" - - "github.com/coreos/go-oidc/v3/oidc" - "golang.org/x/oauth2" -) - -// SSOIdentity contains details of user received from SSO provider -type SSOIdentity struct { - UserID string - Username string - PreferredUsername string - Email string - EmailVerified bool - ConnectorData []byte -} - -// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth -// style redirect flow to determine user information. -type OAuthCallbackProvider interface { - // The initial URL user would be redirect to. 
- // OAuth2 implementations support various scopes but we only need profile and user as - // the roles are still being managed in SigNoz. - BuildAuthURL(state string) (string, error) - - // Handle the callback to the server (after login at oauth provider site) - // and return a email identity. - // At the moment we dont support auto signup flow (based on domain), so - // the full identity (including name, group etc) is not required outside of the - // connector - HandleCallback(r *http.Request) (identity *SSOIdentity, err error) -} - -type SamlConfig struct { - SamlEntity string `json:"samlEntity"` - SamlIdp string `json:"samlIdp"` - SamlCert string `json:"samlCert"` -} - -// GoogleOauthConfig contains a generic config to support oauth -type GoogleOAuthConfig struct { - ClientID string `json:"clientId"` - ClientSecret string `json:"clientSecret"` - RedirectURI string `json:"redirectURI"` -} - -const ( - googleIssuerURL = "https://accounts.google.com" -) - -func (g *GoogleOAuthConfig) GetProvider(domain string, siteUrl *url.URL) (OAuthCallbackProvider, error) { - - ctx, cancel := context.WithCancel(context.Background()) - - provider, err := oidc.NewProvider(ctx, googleIssuerURL) - if err != nil { - cancel() - return nil, fmt.Errorf("failed to get provider: %v", err) - } - - // default to email and profile scope as we just use google auth - // to verify identity and start a session. - scopes := []string{"email"} - - // this is the url google will call after login completion - redirectURL := fmt.Sprintf("%s://%s/%s", - siteUrl.Scheme, - siteUrl.Host, - "api/v1/complete/google") - - return &GoogleOAuthProvider{ - RedirectURI: g.RedirectURI, - OAuth2Config: &oauth2.Config{ - ClientID: g.ClientID, - ClientSecret: g.ClientSecret, - Endpoint: provider.Endpoint(), - Scopes: scopes, - RedirectURL: redirectURL, - }, - Verifier: provider.Verifier( - &oidc.Config{ClientID: g.ClientID}, - ), - Cancel: cancel, - HostedDomain: domain, - }, nil -} diff --git a/pkg/types/tracefunneltypes/utils.go b/pkg/types/tracefunneltypes/utils.go index 60ca4eaf36..8fb3e4bac2 100644 --- a/pkg/types/tracefunneltypes/utils.go +++ b/pkg/types/tracefunneltypes/utils.go @@ -109,7 +109,7 @@ func ConstructFunnelResponse(funnel *StorableFunnel, claims *authtypes.Claims) G } if funnel.CreatedByUser != nil { - resp.UserEmail = funnel.CreatedByUser.Email + resp.UserEmail = funnel.CreatedByUser.Email.String() } else if claims != nil { resp.UserEmail = claims.Email } diff --git a/pkg/types/tracefunneltypes/utils_test.go b/pkg/types/tracefunneltypes/utils_test.go index cf345d92b5..44cac5b259 100644 --- a/pkg/types/tracefunneltypes/utils_test.go +++ b/pkg/types/tracefunneltypes/utils_test.go @@ -1,8 +1,6 @@ package tracefunneltypes import ( - "net/http" - "net/http/httptest" "testing" "time" @@ -378,53 +376,6 @@ func TestNormalizeFunnelSteps(t *testing.T) { } } -func TestGetClaims(t *testing.T) { - tests := []struct { - name string - setup func(*http.Request) - expectError bool - }{ - { - name: "valid claims", - setup: func(r *http.Request) { - claims := authtypes.Claims{ - UserID: "user-123", - OrgID: "org-123", - Email: "test@example.com", - } - *r = *r.WithContext(authtypes.NewContextWithClaims(r.Context(), claims)) - }, - expectError: false, - }, - { - name: "no claims in context", - setup: func(r *http.Request) { - claims := authtypes.Claims{} - *r = *r.WithContext(authtypes.NewContextWithClaims(r.Context(), claims)) - }, - expectError: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - req := 
httptest.NewRequest("GET", "/", nil) - tt.setup(req) - - claims, err := authtypes.ClaimsFromContext(req.Context()) - if tt.expectError { - assert.Equal(t, authtypes.Claims{}, claims) - } else { - assert.NoError(t, err) - assert.NotNil(t, claims) - assert.Equal(t, "user-123", claims.UserID) - assert.Equal(t, "org-123", claims.OrgID) - assert.Equal(t, "test@example.com", claims.Email) - } - }) - } -} - func TestValidateAndConvertTimestamp(t *testing.T) { tests := []struct { name string @@ -496,7 +447,7 @@ func TestConstructFunnelResponse(t *testing.T) { Identifiable: types.Identifiable{ ID: userID, }, - Email: "funnel@example.com", + Email: valuer.MustNewEmail("funnel@example.com"), }, Steps: []*FunnelStep{ { diff --git a/pkg/types/user.go b/pkg/types/user.go index d42941f121..a1e29896de 100644 --- a/pkg/types/user.go +++ b/pkg/types/user.go @@ -3,19 +3,18 @@ package types import ( "context" "encoding/json" - "net/url" "time" "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/valuer" - "github.com/google/uuid" "github.com/uptrace/bun" ) var ( + ErrCodeUserNotFound = errors.MustNewCode("user_not_found") + ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user") ErrUserAlreadyExists = errors.MustNewCode("user_already_exists") ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists") - ErrUserNotFound = errors.MustNewCode("user_not_found") ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists") ErrPasswordNotFound = errors.MustNewCode("password_not_found") ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found") @@ -23,24 +22,29 @@ var ( ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found") ) -type GettableUser struct { - User - Organization string `json:"organization"` -} +type GettableUser = User type User struct { bun.BaseModel `bun:"table:users"` Identifiable + DisplayName string `bun:"display_name" json:"displayName"` + Email valuer.Email `bun:"email,type:text" json:"email"` + Role Role `bun:"role,type:text" json:"role"` + OrgID valuer.UUID `bun:"org_id,type:text" json:"orgId"` TimeAuditable - DisplayName string `bun:"display_name,type:text,notnull" json:"displayName"` - Email string `bun:"email,type:text,notnull,unique:org_email" json:"email"` - Role string `bun:"role,type:text,notnull" json:"role"` - OrgID string `bun:"org_id,type:text,notnull,unique:org_email" json:"orgId"` } -func NewUser(displayName string, email string, role string, orgID string) (*User, error) { - if email == "" { +type PostableRegisterOrgAndAdmin struct { + Name string `json:"name"` + Email valuer.Email `json:"email"` + Password string `json:"password"` + OrgDisplayName string `json:"orgDisplayName"` + OrgName string `json:"orgName"` +} + +func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUID) (*User, error) { + if email.IsZero() { return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required") } @@ -48,7 +52,7 @@ func NewUser(displayName string, email string, role string, orgID string) (*User return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "role is required") } - if orgID == "" { + if orgID.IsZero() { return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is required") } @@ -56,93 +60,27 @@ func NewUser(displayName string, email string, role string, orgID string) (*User Identifiable: Identifiable{ ID: valuer.GenerateUUID(), }, - TimeAuditable: TimeAuditable{ - CreatedAt: time.Now(), - }, DisplayName: 
displayName, Email: email, Role: role, OrgID: orgID, + TimeAuditable: TimeAuditable{ + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + }, }, nil } -type PostableRegisterOrgAndAdmin struct { - Name string `json:"name"` - OrgID string `json:"orgId"` - OrgDisplayName string `json:"orgDisplayName"` - OrgName string `json:"orgName"` - Email string `json:"email"` - Password string `json:"password"` -} - -type PostableAcceptInvite struct { - DisplayName string `json:"displayName"` - InviteToken string `json:"token"` - Password string `json:"password"` - - // reference URL to track where the register request is coming from - SourceURL string `json:"sourceUrl"` -} - -type PostableLoginRequest struct { - OrgID string `json:"orgId"` - Email string `json:"email"` - Password string `json:"password"` - RefreshToken string `json:"refreshToken"` -} - -type GettableUserJwt struct { - AccessJwt string `json:"accessJwt"` - AccessJwtExpiry int64 `json:"accessJwtExpiry"` - RefreshJwt string `json:"refreshJwt"` - RefreshJwtExpiry int64 `json:"refreshJwtExpiry"` -} - -type GettableLoginResponse struct { - GettableUserJwt - UserID string `json:"userId"` -} - -type GettableLoginPrecheck struct { - SSO bool `json:"sso"` - SSOUrl string `json:"ssoUrl"` - CanSelfRegister bool `json:"canSelfRegister"` - IsUser bool `json:"isUser"` - SSOError string `json:"ssoError"` - SelectOrg bool `json:"selectOrg"` - Orgs []string `json:"orgs"` -} - func NewTraitsFromUser(user *User) map[string]any { return map[string]any{ "name": user.DisplayName, "role": user.Role, - "email": user.Email, + "email": user.Email.String(), "display_name": user.DisplayName, "created_at": user.CreatedAt, } } -func (request *PostableAcceptInvite) UnmarshalJSON(data []byte) error { - type Alias PostableAcceptInvite - - var temp Alias - if err := json.Unmarshal(data, &temp); err != nil { - return err - } - - if temp.InviteToken == "" { - return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invite token is required") - } - - if !IsPasswordValid(temp.Password) { - return ErrInvalidPassword - } - - *request = PostableAcceptInvite(temp) - return nil -} - func (request *PostableRegisterOrgAndAdmin) UnmarshalJSON(data []byte) error { type Alias PostableRegisterOrgAndAdmin @@ -151,10 +89,6 @@ func (request *PostableRegisterOrgAndAdmin) UnmarshalJSON(data []byte) error { return err } - if temp.Email == "" { - return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required") - } - if !IsPasswordValid(temp.Password) { return ErrInvalidPassword } @@ -168,17 +102,38 @@ type UserStore interface { CreateBulkInvite(ctx context.Context, invites []*Invite) error ListInvite(ctx context.Context, orgID string) ([]*Invite, error) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error - GetInviteByToken(ctx context.Context, token string) (*GettableInvite, error) - GetInviteByEmailInOrg(ctx context.Context, orgID string, email string) (*Invite, error) + + // Get invite by token. + GetInviteByToken(ctx context.Context, token string) (*Invite, error) + + // Get invite by email and org. + GetInviteByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*Invite, error) // Creates a user. 
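A short sketch of the stricter constructor signature: email, role and org id are validated value types rather than raw strings. The values below are placeholders:

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	// MustNewEmail panics on anything the email regex rejects.
	email := valuer.MustNewEmail("admin@example.com")
	orgID := valuer.GenerateUUID()

	// NewUser rejects a zero-value email or org id with an invalid-input error.
	user, err := types.NewUser("Admin", email, types.RoleAdmin, orgID)
	if err != nil {
		panic(err)
	}

	fmt.Println(user.Email.String(), user.Role, user.OrgID)
}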
CreateUser(ctx context.Context, user *User) error - GetUserByID(ctx context.Context, orgID string, id string) (*GettableUser, error) - GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*GettableUser, error) - GetUsersByEmail(ctx context.Context, email string) ([]*GettableUser, error) - GetUsersByRoleInOrg(ctx context.Context, orgID string, role Role) ([]*GettableUser, error) - ListUsers(ctx context.Context, orgID string) ([]*GettableUser, error) - UpdateUser(ctx context.Context, orgID string, id string, user *User) (*User, error) + + // Get user by id. + GetUser(context.Context, valuer.UUID) (*User, error) + + // Get user by orgID and id. + GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*User, error) + + // Get user by email and orgID. + GetUserByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*User, error) + + // Get users by email. + GetUsersByEmail(ctx context.Context, email valuer.Email) ([]*User, error) + + // Get users by role and org. + GetUsersByRoleAndOrgID(ctx context.Context, role Role, orgID valuer.UUID) ([]*User, error) + + // List users by org. + ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*User, error) + + // List users by email and org ids. + ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*User, error) + + UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *User) (*User, error) DeleteUser(ctx context.Context, orgID string, id string) error // Creates a password. @@ -190,19 +145,6 @@ type UserStore interface { GetResetPasswordTokenByPasswordID(ctx context.Context, passwordID valuer.UUID) (*ResetPasswordToken, error) UpdatePassword(ctx context.Context, password *FactorPassword) error - // Auth Domain - GetDomainByName(ctx context.Context, name string) (*StorableOrgDomain, error) - // org domain (auth domains) CRUD ops - GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*GettableOrgDomain, error) - ListDomains(ctx context.Context, orgId valuer.UUID) ([]*GettableOrgDomain, error) - GetDomain(ctx context.Context, id uuid.UUID) (*GettableOrgDomain, error) - CreateDomain(ctx context.Context, d *GettableOrgDomain) error - UpdateDomain(ctx context.Context, domain *GettableOrgDomain) error - DeleteDomain(ctx context.Context, id uuid.UUID) error - - // Temporary func for SSO - GetDefaultOrgID(ctx context.Context) (string, error) - // API KEY CreateAPIKey(ctx context.Context, apiKey *StorableAPIKey) error UpdateAPIKey(ctx context.Context, id valuer.UUID, apiKey *StorableAPIKey, updaterID valuer.UUID) error diff --git a/pkg/valuer/email.go b/pkg/valuer/email.go new file mode 100644 index 0000000000..020a76e5ee --- /dev/null +++ b/pkg/valuer/email.go @@ -0,0 +1,105 @@ +package valuer + +import ( + "database/sql/driver" + "encoding/json" + "reflect" + "regexp" + "strings" + + "github.com/SigNoz/signoz/pkg/errors" +) + +const ( + emailRegexString string = 
"^(?:(?:(?:(?:[a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(?:\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|(?:(?:\\x22)(?:(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(?:\\x20|\\x09)+)?(?:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(\\x20|\\x09)+)?(?:\\x22))))@(?:(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$" +) + +var ( + emailRegex = regexp.MustCompile(emailRegexString) + _ Valuer = (*Email)(nil) +) + +type Email struct { + val string +} + +func NewEmail(val string) (Email, error) { + if !emailRegex.MatchString(strings.ToLower(strings.TrimSpace(val))) { + return Email{}, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidValuer, "invalid email %s", val) + } + + return Email{val: strings.ToLower(strings.TrimSpace(val))}, nil +} + +func MustNewEmail(val string) Email { + email, err := NewEmail(val) + if err != nil { + panic(err) + } + + return email +} + +func (enum Email) IsZero() bool { + return enum.val == "" +} + +func (enum Email) StringValue() string { + return enum.val +} + +func (enum Email) String() string { + return enum.val +} + +func (enum Email) MarshalJSON() ([]byte, error) { + return json.Marshal(enum.StringValue()) +} + +func (enum *Email) UnmarshalJSON(data []byte) error { + var str string + if err := json.Unmarshal(data, &str); err != nil { + return err + } + + var err error + *enum, err = NewEmail(str) + if err != nil { + return err + } + + return nil +} + +func (enum Email) Value() (driver.Value, error) { + return enum.StringValue(), nil +} + +func (enum *Email) Scan(val interface{}) error { + if enum == nil { + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "email: (nil \"%s\")", reflect.TypeOf(enum).String()) + } + + str, ok := val.(string) + if !ok { + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "email: (non-string \"%s\")", reflect.TypeOf(val).String()) + } + + var err error + *enum, err = NewEmail(str) + if err != nil { + return err + } + + return nil +} + +func (enum *Email) UnmarshalText(text []byte) error { + var err error + *enum, err = NewEmail(string(text)) + if err != nil { + return err + } + + return nil +} diff --git a/pkg/valuer/email_test.go b/pkg/valuer/email_test.go new file mode 100644 index 0000000000..01aa65954f --- /dev/null +++ b/pkg/valuer/email_test.go @@ -0,0 +1,39 @@ +package valuer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewEmail(t *testing.T) { + tests := []struct { + name string + val string + want Email + pass bool + }{ + {name: "Valid", val: "test@test.com", want: Email{val: "test@test.com"}, pass: true}, + 
{name: "ValidWithPlus", val: "test+test@test.com", want: Email{val: "test+test@test.com"}, pass: true}, + {name: "ValidWithMultipleDotsInDomain", val: "test.test@test.com.ok", want: Email{val: "test.test@test.com.ok"}, pass: true}, + {name: "InvalidMissingAt", val: "testtest.com", want: Email{val: ""}, pass: false}, + {name: "InvalidMissingDot", val: "test@testcom", want: Email{val: ""}, pass: false}, + {name: "InvalidMissingLocalPart", val: "@test.com", want: Email{val: ""}, pass: false}, + {name: "InvalidMissingDomain", val: "test@.com", want: Email{val: ""}, pass: false}, + {name: "InvalidMissingLocalPartAndDomain", val: "@.com", want: Email{val: ""}, pass: false}, + {name: "InvalidMissingTldAndDomain", val: "test.c", want: Email{val: ""}, pass: false}, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + got, err := NewEmail(test.val) + if !test.pass { + assert.Error(t, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, test.want, got) + }) + } +} diff --git a/pkg/valuer/uuid.go b/pkg/valuer/uuid.go index d892bae66c..da38eda9b5 100644 --- a/pkg/valuer/uuid.go +++ b/pkg/valuer/uuid.go @@ -18,7 +18,7 @@ type UUID struct { func NewUUID(value string) (UUID, error) { val, err := uuid.Parse(value) if err != nil { - return UUID{}, err + return UUID{}, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidValuer, "invalid uuid %s", value).WithAdditional(err.Error()) } return UUID{ @@ -29,7 +29,7 @@ func NewUUID(value string) (UUID, error) { func NewUUIDFromBytes(value []byte) (UUID, error) { val, err := uuid.FromBytes(value) if err != nil { - return UUID{}, err + return UUID{}, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidValuer, "invalid uuid %s", value).WithAdditional(err.Error()) } return UUID{ diff --git a/pkg/valuer/valuer.go b/pkg/valuer/valuer.go index 138bfb4263..846c9c9f79 100644 --- a/pkg/valuer/valuer.go +++ b/pkg/valuer/valuer.go @@ -12,6 +12,7 @@ import ( var ( ErrCodeUnknownValuerScan = errors.MustNewCode("unknown_valuer_scan") + ErrCodeInvalidValuer = errors.MustNewCode("invalid_valuer") ) type Valuer interface { diff --git a/templates/email/update_role.gotmpl b/templates/email/update_role.gotmpl index 12f96586fb..0bfd5fecb8 100644 --- a/templates/email/update_role.gotmpl +++ b/templates/email/update_role.gotmpl @@ -3,16 +3,10 @@ Hi {{.CustomerName}},
Your role in SigNoz has been updated by {{.UpdatedByEmail}}. -

Previous Role: {{.OldRole}}
New Role: {{.NewRole}}

- -

- Please note that you will need to log out and log back in for the changes to take effect. -

- {{if eq .OldRole "Admin"}}

If you were not expecting this change or have any questions, please contact us at support@signoz.io. @@ -22,7 +16,6 @@ If you were not expecting this change or have any questions, please reach out to your administrator.

{{end}} -

Best regards,
The SigNoz Team

\ No newline at end of file diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index c4c04fbd59..433c053c76 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -14,6 +14,9 @@ pytest_plugins = [ "fixtures.signoz", "fixtures.logs", "fixtures.traces", + "fixtures.driver", + "fixtures.idp", + "fixtures.idputils", ] diff --git a/tests/integration/fixtures/auth.py b/tests/integration/fixtures/auth.py index ff0e913423..519bfe3fe7 100644 --- a/tests/integration/fixtures/auth.py +++ b/tests/integration/fixtures/auth.py @@ -1,10 +1,20 @@ from http import HTTPStatus -from typing import Callable +from typing import Callable, List import pytest import requests +from wiremock.resources.mappings import ( + HttpMethods, + Mapping, + MappingRequest, + MappingResponse, + WireMockMatchers, +) from fixtures import dev, types +from fixtures.logger import setup_logger + +logger = setup_logger(__name__) USER_ADMIN_NAME = "admin" USER_ADMIN_EMAIL = "admin@integration.test" @@ -20,7 +30,6 @@ def create_user_admin( signoz.self.host_configs["8080"].get("/api/v1/register"), json={ "name": USER_ADMIN_NAME, - "orgId": "", "orgName": "", "email": USER_ADMIN_EMAIL, "password": USER_ADMIN_PASSWORD, @@ -49,19 +58,118 @@ def create_user_admin( ) -@pytest.fixture(name="get_jwt_token", scope="module") -def get_jwt_token(signoz: types.SigNoz) -> Callable[[str, str], str]: - def _get_jwt_token(email: str, password: str) -> str: - response = requests.post( - signoz.self.host_configs["8080"].get("/api/v1/login"), - json={ +@pytest.fixture(name="get_session_context", scope="function") +def get_session_context(signoz: types.SigNoz) -> Callable[[str, str], str]: + def _get_session_context(email: str) -> str: + response = requests.get( + signoz.self.host_configs["8080"].get("/api/v2/sessions/context"), + params={ "email": email, - "password": password, + "ref": f"{signoz.self.host_configs['8080'].base()}", }, timeout=5, ) + assert response.status_code == HTTPStatus.OK + return response.json()["data"] - return response.json()["data"]["accessJwt"] + return _get_session_context - return _get_jwt_token + +@pytest.fixture(name="get_token", scope="function") +def get_token(signoz: types.SigNoz) -> Callable[[str, str], str]: + def _get_token(email: str, password: str) -> str: + response = requests.get( + signoz.self.host_configs["8080"].get("/api/v2/sessions/context"), + params={ + "email": email, + "ref": f"{signoz.self.host_configs['8080'].base()}", + }, + timeout=5, + ) + + assert response.status_code == HTTPStatus.OK + org_id = response.json()["data"]["orgs"][0]["id"] + + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v2/sessions/email_password"), + json={ + "email": email, + "password": password, + "orgId": org_id, + }, + timeout=5, + ) + + assert response.status_code == HTTPStatus.OK + return response.json()["data"]["accessToken"] + + return _get_token + + +# This is not a fixture purposefully, we just want to add a license to the signoz instance. +# This is also idempotent in nature. 
+def add_license( + signoz: types.SigNoz, + make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None], + get_token: Callable[[str, str], str], # pylint: disable=redefined-outer-name +) -> None: + make_http_mocks( + signoz.zeus, + [ + Mapping( + request=MappingRequest( + method=HttpMethods.GET, + url="/v2/licenses/me", + headers={ + "X-Signoz-Cloud-Api-Key": { + WireMockMatchers.EQUAL_TO: "secret-key" + } + }, + ), + response=MappingResponse( + status=200, + json_body={ + "status": "success", + "data": { + "id": "0196360e-90cd-7a74-8313-1aa815ce2a67", + "key": "secret-key", + "valid_from": 1732146923, + "valid_until": -1, + "status": "VALID", + "state": "EVALUATING", + "plan": { + "name": "ENTERPRISE", + }, + "platform": "CLOUD", + "features": [], + "event_queue": {}, + }, + }, + ), + persistent=False, + ) + ], + ) + + access_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD) + + response = requests.post( + url=signoz.self.host_configs["8080"].get("/api/v3/licenses"), + json={"key": "secret-key"}, + headers={"Authorization": "Bearer " + access_token}, + timeout=5, + ) + + if response.status_code == HTTPStatus.CONFLICT: + return + + assert response.status_code == HTTPStatus.ACCEPTED + + response = requests.post( + url=signoz.zeus.host_configs["8080"].get("/__admin/requests/count"), + json={"method": "GET", "url": "/v2/licenses/me"}, + timeout=5, + ) + + assert response.json()["count"] == 1 diff --git a/tests/integration/fixtures/driver.py b/tests/integration/fixtures/driver.py new file mode 100644 index 0000000000..1802728758 --- /dev/null +++ b/tests/integration/fixtures/driver.py @@ -0,0 +1,21 @@ +import pytest +from selenium import webdriver + + +@pytest.fixture(name="driver", scope="function") +def driver() -> webdriver.Chrome: + """ + Get a driver for the browser. This is a function-scoped fixture that yields a headless Chrome driver and quits it on teardown. 
+ """ + + options = webdriver.ChromeOptions() + options.add_argument("--headless") + options.add_argument("--incognito") + options.add_argument("--disable-extensions") + options.add_argument("--remote-debugging-port=9222") + options.add_argument("--disable-dev-shm-usage") + + _driver = webdriver.Chrome(options=options) + yield _driver + + _driver.quit() diff --git a/tests/integration/fixtures/http.py b/tests/integration/fixtures/http.py index 369a408a5e..82f4b1857e 100644 --- a/tests/integration/fixtures/http.py +++ b/tests/integration/fixtures/http.py @@ -1,4 +1,4 @@ -from typing import List +from typing import Callable, List import docker import docker.errors @@ -75,8 +75,10 @@ def zeus( @pytest.fixture(name="make_http_mocks", scope="function") -def make_http_mocks(): - def _make_http_mocks(container: types.TestContainerDocker, mappings: List[Mapping]): +def make_http_mocks() -> Callable[[types.TestContainerDocker, List[Mapping]], None]: + def _make_http_mocks( + container: types.TestContainerDocker, mappings: List[Mapping] + ) -> None: Config.base_url = container.host_configs["8080"].get("/__admin") for mapping in mappings: diff --git a/tests/integration/fixtures/idp.py b/tests/integration/fixtures/idp.py new file mode 100644 index 0000000000..e0c23779d3 --- /dev/null +++ b/tests/integration/fixtures/idp.py @@ -0,0 +1,95 @@ +import docker +import docker.errors +import pytest +from testcontainers.core.container import Network +from testcontainers.keycloak import KeycloakContainer + +from fixtures import dev, types +from fixtures.logger import setup_logger + +logger = setup_logger(__name__) + +IDP_ROOT_USERNAME = "admin" +IDP_ROOT_PASSWORD = "password" + + +@pytest.fixture(name="idp", scope="package") +def idp( + network: Network, + request: pytest.FixtureRequest, + pytestconfig: pytest.Config, +) -> types.TestContainerIDP: + """ + Package-scoped fixture for running an idp for SSO/SAML + """ + + def create() -> types.TestContainerIDP: + container = KeycloakContainer( + image="quay.io/keycloak/keycloak:26.3.0", + username=IDP_ROOT_USERNAME, + password=IDP_ROOT_PASSWORD, + port=6060, + management_port=6061, + ) + container.with_env("KC_HTTP_PORT", "6060") + container.with_env("KC_HTTP_MANAGEMENT_PORT", "6061") + container.with_network(network) + container.start() + + return types.TestContainerIDP( + container=types.TestContainerDocker( + id=container.get_wrapped_container().id, + host_configs={ + "6060": types.TestContainerUrlConfig( + "http", + container.get_container_host_ip(), + container.get_exposed_port(6060), + ), + "6061": types.TestContainerUrlConfig( + "http", + container.get_container_host_ip(), + container.get_exposed_port(6061), + ), + }, + container_configs={ + "6060": types.TestContainerUrlConfig( + "http", container.get_wrapped_container().name, 6060 + ), + "6061": types.TestContainerUrlConfig( + "http", container.get_wrapped_container().name, 6061 + ), + }, + ), + ) + + def delete(container: types.TestContainerIDP): + client = docker.from_env() + + try: + client.containers.get(container_id=container.container.id).stop() + client.containers.get(container_id=container.container.id).remove(v=True) + except docker.errors.NotFound: + logger.info( + "Skipping removal of IDP, IDP(%s) not found. 
Maybe it was manually removed?", + {"id": container.container.id}, + ) + + def restore(cache: dict) -> types.TestContainerIDP: + container = types.TestContainerDocker.from_cache(cache["container"]) + return types.TestContainerIDP( + container=container, + ) + + return dev.wrap( + request, + pytestconfig, + "idp", + lambda: types.TestContainerIDP( + container=types.TestContainerDocker( + id="", host_configs={}, container_configs={} + ) + ), + create, + delete, + restore, + ) diff --git a/tests/integration/fixtures/idputils.py b/tests/integration/fixtures/idputils.py new file mode 100644 index 0000000000..45ad84428c --- /dev/null +++ b/tests/integration/fixtures/idputils.py @@ -0,0 +1,305 @@ +from typing import Callable +from urllib.parse import urljoin +from xml.etree import ElementTree + +import pytest +import requests +from keycloak import KeycloakAdmin +from selenium import webdriver +from selenium.webdriver.common.by import By +from selenium.webdriver.support import expected_conditions as EC +from selenium.webdriver.support.wait import WebDriverWait + +from fixtures import types +from fixtures.idp import IDP_ROOT_PASSWORD, IDP_ROOT_USERNAME + + +@pytest.fixture(name="create_saml_client", scope="function") +def create_saml_client( + idp: types.TestContainerIDP, signoz: types.SigNoz +) -> Callable[[str, str], None]: + def _create_saml_client(client_id: str, callback_path: str) -> None: + client = KeycloakAdmin( + server_url=idp.container.host_configs["6060"].base(), + username=IDP_ROOT_USERNAME, + password=IDP_ROOT_PASSWORD, + realm_name="master", + ) + + client.create_client( + skip_exists=True, + payload={ + "clientId": f"{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}", + "name": f"{client_id}", + "description": f"client for {client_id}", + "rootUrl": "", + "adminUrl": "", + "baseUrl": urljoin( + f"{signoz.self.host_configs['8080'].base()}", callback_path + ), + "surrogateAuthRequired": False, + "enabled": True, + "alwaysDisplayInConsole": False, + "clientAuthenticatorType": "client-secret", + "redirectUris": [f"{signoz.self.host_configs['8080'].base()}/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": False, + "consentRequired": False, + "standardFlowEnabled": True, + "implicitFlowEnabled": False, + "directAccessGrantsEnabled": False, + "serviceAccountsEnabled": False, + "publicClient": True, + "frontchannelLogout": True, + "protocol": "saml", + "attributes": { + "saml.assertion.signature": "false", + "saml.force.post.binding": "true", + "saml.encrypt": "false", + "saml.server.signature": "true", + "saml.server.signature.keyinfo.ext": "false", + "realm_client": "false", + "saml.artifact.binding": "false", + "saml.signature.algorithm": "RSA_SHA256", + "saml_force_name_id_format": "false", + "saml.client.signature": "false", + "saml.authnstatement": "true", + "display.on.consent.screen": "false", + "saml_name_id_format": "email", + "saml.allow.ecp.flow": "false", + "saml_signature_canonicalization_method": "http://www.w3.org/2001/10/xml-exc-c14n#", + "saml.onetimeuse.condition": "false", + "saml.server.signature.keyinfo.xmlSigKeyInfoKeyNameTransformer": "NONE", + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": True, + "nodeReRegistrationTimeout": -1, + "protocolMappers": [ + { + "name": "X500 givenName", + "protocol": "saml", + "protocolMapper": "saml-user-property-mapper", + "consentRequired": False, + "config": { + "attribute.nameformat": "urn:oasis:names:tc:SAML:2.0:attrname-format:uri", + "user.attribute": "firstName", 
+ "friendly.name": "givenName", + "attribute.name": "urn:oid:2.5.4.42", + }, + }, + { + "name": "X500 email", + "protocol": "saml", + "protocolMapper": "saml-user-property-mapper", + "consentRequired": False, + "config": { + "attribute.nameformat": "urn:oasis:names:tc:SAML:2.0:attrname-format:uri", + "user.attribute": "email", + "friendly.name": "email", + "attribute.name": "urn:oid:1.2.840.113549.1.9.1", + }, + }, + { + "name": "role list", + "protocol": "saml", + "protocolMapper": "saml-role-list-mapper", + "consentRequired": False, + "config": { + "single": "false", + "attribute.nameformat": "Basic", + "attribute.name": "Role", + }, + }, + ], + "defaultClientScopes": ["saml_organization", "role_list"], + "optionalClientScopes": [], + "access": {"view": True, "configure": True, "manage": True}, + }, + ) + + return _create_saml_client + + +@pytest.fixture(name="create_oidc_client", scope="function") +def create_oidc_client( + idp: types.TestContainerIDP, signoz: types.SigNoz +) -> Callable[[str, str], None]: + def _create_oidc_client(client_id: str, callback_path: str) -> None: + client = KeycloakAdmin( + server_url=idp.container.host_configs["6060"].base(), + username=IDP_ROOT_USERNAME, + password=IDP_ROOT_PASSWORD, + realm_name="master", + ) + + client.create_client( + skip_exists=True, + payload={ + "clientId": f"{client_id}", + "name": f"{client_id}", + "description": f"client for {client_id}", + "rootUrl": "", + "adminUrl": "", + "baseUrl": "", + "surrogateAuthRequired": False, + "enabled": True, + "alwaysDisplayInConsole": False, + "clientAuthenticatorType": "client-secret", + "redirectUris": [ + f"{urljoin(signoz.self.host_configs['8080'].base(), callback_path)}" + ], + "webOrigins": ["/*"], + "notBefore": 0, + "bearerOnly": False, + "consentRequired": False, + "standardFlowEnabled": True, + "implicitFlowEnabled": False, + "directAccessGrantsEnabled": False, + "serviceAccountsEnabled": False, + "publicClient": False, + "frontchannelLogout": True, + "protocol": "openid-connect", + "attributes": { + "realm_client": "false", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "standard.token.exchange.enabled": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": True, + "nodeReRegistrationTimeout": -1, + "defaultClientScopes": [ + "web-origins", + "acr", + "roles", + "profile", + "basic", + "email", + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "organization", + "microprofile-jwt", + ], + "access": {"view": True, "configure": True, "manage": True}, + }, + ) + + return _create_oidc_client + + +@pytest.fixture(name="get_saml_settings", scope="function") +def get_saml_settings(idp: types.TestContainerIDP) -> dict: + def _get_saml_settings() -> dict: + response = requests.get( + f"{idp.container.host_configs['6060'].base()}/realms/master/protocol/saml/descriptor", + timeout=5, + ) + + root = ElementTree.fromstring(response.content) + ns = { + "md": "urn:oasis:names:tc:SAML:2.0:metadata", + "ds": "http://www.w3.org/2000/09/xmldsig#", + } + + entity_id = root.attrib.get("entityID") + certificate_el = root.find(".//ds:X509Certificate", ns) + sso_post_el = root.find( + ".//md:SingleSignOnService[@Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST']", + ns, + ) + + return { + "entityID": entity_id, + "certificate": certificate_el.text if certificate_el is not None else None, + 
"singleSignOnServiceLocation": ( + sso_post_el.get("Location") if sso_post_el is not None else None + ), + } + + return _get_saml_settings + + +@pytest.fixture(name="get_oidc_settings", scope="function") +def get_oidc_settings(idp: types.TestContainerIDP) -> dict: + def _get_oidc_settings(client_id: str) -> dict: + client = KeycloakAdmin( + server_url=idp.container.host_configs["6060"].base(), + username=IDP_ROOT_USERNAME, + password=IDP_ROOT_PASSWORD, + realm_name="master", + ) + + client_secrets = client.get_client_secrets(client.get_client_id(client_id)) + + response = requests.get( + f"{idp.container.host_configs['6060'].base()}/realms/master/.well-known/openid-configuration", + timeout=5, + ) + + return { + "client_id": client_id, + "client_secret": client_secrets["value"], + "issuer": response.json()["issuer"], + } + + return _get_oidc_settings + + +@pytest.fixture(name="create_user_idp", scope="function") +def create_user_idp(idp: types.TestContainerIDP) -> Callable[[str, str, bool], None]: + client = KeycloakAdmin( + server_url=idp.container.host_configs["6060"].base(), + username=IDP_ROOT_USERNAME, + password=IDP_ROOT_PASSWORD, + realm_name="master", + ) + + created_users = [] + + def _create_user_idp(email: str, password: str, verified: bool = True) -> None: + user_id = client.create_user( + exist_ok=False, + payload={ + "username": email, + "email": email, + "enabled": True, + "emailVerified": verified, + }, + ) + + client.set_user_password(user_id, password, temporary=False) + created_users.append(user_id) + + yield _create_user_idp + + for user_id in created_users: + client.delete_user(user_id) + + +@pytest.fixture(name="idp_login", scope="function") +def idp_login(driver: webdriver.Chrome) -> Callable[[str, str], None]: + def _idp_login(email: str, password: str) -> None: + # Input email. The following element is present in the idp login page. + # + driver.find_element(By.ID, "username").send_keys(email) + + # Input password. The following element is present in the idp login page. + # + driver.find_element(By.ID, "password").send_keys(password) + + # Click login button. The following element is present in the idp login page. + # + driver.find_element(By.ID, "kc-login").click() + + wait = WebDriverWait(driver, 10) + + # Wait till kc-login element has vanished from the page, which means that a redirection is taking place. 
+ wait.until(EC.invisibility_of_element((By.ID, "kc-login"))) + + return _idp_login diff --git a/tests/integration/fixtures/network.py b/tests/integration/fixtures/network.py index ea08eb3ac4..37ed575a1e 100644 --- a/tests/integration/fixtures/network.py +++ b/tests/integration/fixtures/network.py @@ -1,5 +1,7 @@ import docker +import docker.errors import pytest +from testcontainers.core.network import Network from fixtures import dev, types from fixtures.logger import setup_logger @@ -16,9 +18,9 @@ def network( """ def create() -> types.Network: - nw = types.Network() + nw = Network() nw.create() - return nw + return types.Network(id=nw.id, name=nw.name) def delete(nw: types.Network): client = docker.from_env() @@ -31,16 +33,15 @@ def network( ) def restore(existing: dict) -> types.Network: - nw = types.Network() - nw.id = existing.get("id") - nw.name = existing.get("name") - return nw + client = docker.from_env() + nw = client.networks.get(network_id=existing.get("id")) + return types.Network(id=nw.id, name=nw.name) return dev.wrap( request, pytestconfig, "network", - lambda: types.Network(), # pylint: disable=unnecessary-lambda + lambda: types.Network("", ""), # pylint: disable=unnecessary-lambda create, delete, restore, diff --git a/tests/integration/fixtures/postgres.py b/tests/integration/fixtures/postgres.py index 75f89a3fea..2154a6e486 100644 --- a/tests/integration/fixtures/postgres.py +++ b/tests/integration/fixtures/postgres.py @@ -29,8 +29,8 @@ def postgres( password="password", dbname="signoz", driver="psycopg2", - network=network.id, ) + container.with_network(network) container.start() engine = create_engine( diff --git a/tests/integration/fixtures/signoz.py b/tests/integration/fixtures/signoz.py index 313a7f576d..d0f350c8ff 100644 --- a/tests/integration/fixtures/signoz.py +++ b/tests/integration/fixtures/signoz.py @@ -32,24 +32,23 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments # Run the migrations for clickhouse request.getfixturevalue("migrator") + arch = platform.machine() + if arch == "x86_64": + arch = "amd64" + # Build the image self = DockerImage( path="../../", dockerfile_path="cmd/enterprise/Dockerfile.integration", tag="signoz:integration", - ) - - arch = platform.machine() - if arch == "x86_64": - arch = "amd64" - - self.build( buildargs={ "TARGETARCH": arch, "ZEUSURL": zeus.container_configs["8080"].base(), - } + }, ) + self.build() + env = ( { "SIGNOZ_WEB_ENABLED": True, diff --git a/tests/integration/fixtures/types.py b/tests/integration/fixtures/types.py index 90dbb2c45c..73ae710f09 100644 --- a/tests/integration/fixtures/types.py +++ b/tests/integration/fixtures/types.py @@ -7,7 +7,6 @@ import clickhouse_connect.driver import clickhouse_connect.driver.client import py from sqlalchemy import Engine -from testcontainers.core.container import Network LegacyPath = py.path.local @@ -107,6 +106,20 @@ class TestContainerClickhouse: return f"TestContainerClickhouse(container={self.container.__log__()}, env={self.env})" +@dataclass +class TestContainerIDP: + __test__ = False + container: TestContainerDocker + + def __cache__(self) -> dict: + return { + "container": self.container.__cache__(), + } + + def __log__(self) -> str: + return f"TestContainerIDP(container={self.container.__log__()})" + + @dataclass class SigNoz: __test__ = False @@ -134,7 +147,12 @@ class Operation: return f"Operation(name={self.name})" -class Network(Network): # pylint: disable=function-redefined +@dataclass +class Network: + __test__ = False + id: str + 
name: str + def __cache__(self) -> dict: return { "id": self.id, diff --git a/tests/integration/poetry.lock b/tests/integration/poetry.lock index 1c1f5c13fa..8322a6a959 100644 --- a/tests/integration/poetry.lock +++ b/tests/integration/poetry.lock @@ -1,5 +1,36 @@ # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +[[package]] +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + +[[package]] +name = "anyio" +version = "4.11.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"}, + {file = "anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +trio = ["trio (>=0.31.0)"] + [[package]] name = "astroid" version = "3.3.9" @@ -12,6 +43,38 @@ files = [ {file = "astroid-3.3.9.tar.gz", hash = "sha256:622cc8e3048684aa42c820d9d218978021c3c3d174fb03a9f0d615921744f550"}, ] +[[package]] +name = "async-property" +version = "0.2.2" +description = "Python decorator for async properties." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "async_property-0.2.2-py2.py3-none-any.whl", hash = "sha256:8924d792b5843994537f8ed411165700b27b2bd966cefc4daeefc1253442a9d7"}, + {file = "async_property-0.2.2.tar.gz", hash = "sha256:17d9bd6ca67e27915a75d92549df64b5c7174e9dc806b30a3934dc4ff0506380"}, +] + +[[package]] +name = "attrs" +version = "25.3.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[package.extras] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", 
"myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + [[package]] name = "autoflake" version = "2.3.1" @@ -74,14 +137,14 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2025.1.31" +version = "2025.8.3" description = "Python package for providing Mozilla's CA Bundle." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, - {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, + {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, + {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, ] [[package]] @@ -91,7 +154,6 @@ description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -379,6 +441,129 @@ pandas = ["pandas"] sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] tzlocal = ["tzlocal (>=4.0)"] +[[package]] +name = "clickhouse-driver" +version = "0.2.9" +description = "Python driver with native interface for ClickHouse" +optional = false +python-versions = "<4,>=3.7" +groups = ["main"] +files = [ + {file = "clickhouse-driver-0.2.9.tar.gz", hash = "sha256:050ea4870ead993910b39e7fae965dc1c347b2e8191dcd977cd4b385f9e19f87"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ce04e9d0d0f39561f312d1ac1a8147bc9206e4267e1a23e20e0423ebac95534"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7ae5c8931bf290b9d85582e7955b9aad7f19ff9954e48caa4f9a180ea4d01078"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e51792f3bd12c32cb15a907f12de3c9d264843f0bb33dce400e3966c9f09a3f"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42fc546c31e4a04c97b749769335a679c9044dc693fa7a93e38c97fd6727173d"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a383a403d185185c64e49edd6a19b2ec973c5adcb8ebff7ed2fc539a2cc65a5"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f05321a97e816afc75b3e4f9eda989848fecf14ecf1a91d0f22c04258123d1f7"}, + {file = 
"clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be47e793846aac28442b6b1c6554e0731b848a5a7759a54aa2489997354efe4a"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:780e42a215d1ae2f6d695d74dd6f087781fb2fa51c508b58f79e68c24c5364e0"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9e28f1fe850675e173db586e9f1ac790e8f7edd507a4227cd54cd7445f8e75b6"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:125aae7f1308d3083dadbb3c78f828ae492e060f13e4007a0cf53a8169ed7b39"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2f3c4fbb61e75c62a1ab93a1070d362de4cb5682f82833b2c12deccb3bae888d"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dc03196a84e32d23b88b665be69afae98f57426f5fdf203e16715b756757961"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-win32.whl", hash = "sha256:25695d78a1d7ad6e221e800612eac08559f6182bf6dee0a220d08de7b612d993"}, + {file = "clickhouse_driver-0.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:367acac95398d721a0a2a6cf87e93638c5588b79498a9848676ce7f182540a6c"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a7353a7a08eee3aa0001d8a5d771cb1f37e2acae1b48178002431f23892121a"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6af1c6cbc3481205503ab72a34aa76d6519249c904aa3f7a84b31e7b435555be"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48033803abd1100bfff6b9a1769d831b672cd3cda5147e0323b956fd1416d38d"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f202a58a540c85e47c31dabc8f84b6fe79dca5315c866450a538d58d6fa0571"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4df50fd84bfa4aa1eb7b52d48136066bfb64fabb7ceb62d4c318b45a296200b"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433a650571a0d7766eb6f402e8f5930222997686c2ee01ded22f1d8fd46af9d4"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232ee260475611cbf7adb554b81db6b5790b36e634fe2164f4ffcd2ca3e63a71"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:09049f7e71f15c9c9a03f597f77fc1f7b61ababd155c06c0d9e64d1453d945d7"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:424153d1d5f5a807f596a48cc88119f9fb3213ca7e38f57b8d15dcc964dd91f7"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4f078fd1cf19c4ca63b8d1e0803df665310c8d5b644c5b02bf2465e8d6ef8f55"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f138d939e26e767537f891170b69a55a88038919f5c10d8865b67b8777fe4848"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9aafabc7e32942f85dcb46f007f447ab69024831575df97cae28c6ed127654d1"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-win32.whl", hash = "sha256:935e16ebf1a1998d8493979d858821a755503c9b8af572d9c450173d4b88868c"}, + {file = "clickhouse_driver-0.2.9-cp311-cp311-win_amd64.whl", hash = 
"sha256:306b3102cba278b5dfec6f5f7dc8b78416c403901510475c74913345b56c9e42"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fcb2fd00e58650ae206a6d5dbc83117240e622471aa5124733fbf2805eb8bda0"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7a3e6b0a1eb218e3d870a94c76daaf65da46dca8f6888ea6542f94905c24d88"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d8e2888a857d8db3d98765a5ad23ab561241feaef68bbffc5a0bd9c142342"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85d50c011467f5ff6772c4059345968b854b72e07a0219030b7c3f68419eb7f7"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93b395c1370629ccce8fb3e14cd5be2646d227bd32018c21f753c543e9a7e96b"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dbcee870c60d9835e5dce1456ab6b9d807e6669246357f4b321ef747b90fa43"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fffa5a5f317b1ec92e406a30a008929054cf3164d2324a3c465d0a0330273bf8"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:476702740a279744badbd177ae1c4a2d089ec128bd676861219d1f92078e4530"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5cd6d95fab5ff80e9dc9baedc9a926f62f74072d42d5804388d63b63bec0bb63"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:05027d32d7cf3e46cb8d04f8c984745ae01bd1bc7b3579f9dadf9b3cca735697"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:3d11831842250b4c1b26503a6e9c511fc03db096608b7c6af743818c421a3032"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:81b4b671b785ebb0b8aeabf2432e47072413d81db959eb8cfd8b6ab58c5799c6"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-win32.whl", hash = "sha256:e893bd4e014877174a59e032b0e99809c95ec61328a0e6bd9352c74a2f6111a8"}, + {file = "clickhouse_driver-0.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:de6624e28eeffd01668803d28ae89e3d4e359b1bff8b60e4933e1cb3c6f86f18"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:909205324089a9ee59bee7ecbfa94595435118cca310fd62efdf13f225aa2965"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f31d6e47dc2b0f367f598f5629147ed056d7216c1788e25190fcfbfa02e749"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed84179914b2b7bb434c2322a6e7fd83daa681c97a050450511b66d917a129bb"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67d1bf63efb4ba14ae6c6da99622e4a549e68fc3ee14d859bf611d8e6a61b3fa"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eed23ea41dd582d76f7a2ec7e09cbe5e9fec008f11a4799fa35ce44a3ebd283"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a654291132766efa2703058317749d7c69b69f02d89bac75703eaf7f775e20da"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:1c26c5ef16d0ef3cabc5bc03e827e01b0a4afb5b4eaf8850b7cf740cee04a1d4"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b57e83d7986d3cbda6096974a9510eb53cb33ad9072288c87c820ba5eee3370e"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:153cc03b36f22cbde55aa6a5bbe99072a025567a54c48b262eb0da15d8cd7c83"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:83a857d99192936091f495826ae97497cd1873af213b1e069d56369fb182ab8e"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb05a9bb22cbe9ad187ad268f86adf7e60df6083331fe59c01571b7b725212dd"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-win32.whl", hash = "sha256:3e282c5c25e32d96ed151e5460d2bf4ecb805ea64449197dd918e84e768016df"}, + {file = "clickhouse_driver-0.2.9-cp37-cp37m-win_amd64.whl", hash = "sha256:c46dccfb04a9afd61a1b0e60bfefceff917f76da2c863f9b36b39248496d5c77"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:612ca9028c718f362c97f552e63d313cf1a70a616ef8532ddb0effdaf12ebef9"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471b884d318e012f68d858476052742048918854f7dfe87d78e819f87a848ffb"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ee63c35e99da887eb035c8d6d9e64fd298a0efc1460395297dd5cc281a6912"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0819bb63d2c5025a1fb9589f57ef82602687cef11081d6dfa6f2ce44606a1772"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6680ee18870bca1fbab1736c8203a965efaec119ab4c37821ad99add248ee08"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:713c498741b54debd3a10a5529e70b6ed85ca33c3e8629e24ae5cd8160b5a5f2"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:730837b8f63941065c9c955c44286aef0987fb084ffb3f55bf1e4fe07df62269"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9f4e38b2ea09214c8e7848a19391009a18c56a3640e1ba1a606b9e57aeb63404"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:457f1d6639e0345b717ae603c79bd087a35361ce68c1c308d154b80b841e5e7d"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:49a55aeb8ea625a87965a96e361bbb1ad67d0931bfb2a575f899c1064e70c2da"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9230058d8c9b1a04079afae4650fb67745f0f1c39db335728f64d48bd2c19246"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8798258bd556542dd9c6b8ebe62f9c5110c9dcdf97c57fb077e7b8b6d6da0826"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-win32.whl", hash = "sha256:ce8e3f4be46bcc63555863f70ab0035202b082b37e6f16876ef50e7bc4b47056"}, + {file = "clickhouse_driver-0.2.9-cp38-cp38-win_amd64.whl", hash = "sha256:2d982959ff628255808d895a67493f2dab0c3a9bfc65eeda0f00c8ae9962a1b3"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a46b227fab4420566ed24ee70d90076226d16fcf09c6ad4d428717efcf536446"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:7eaa2ce5ea08cf5fddebb8c274c450e102f329f9e6966b6cd85aa671c48e5552"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97f0083194d6e23b5ef6156ed0d5388c37847b298118199d7937ba26412a9e2"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6cab5cdbb0f8ee51d879d977b78f07068b585225ac656f3c081896c362e8f83"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdb1b011a53ee71539e9dc655f268b111bac484db300da92829ed59e910a8fd0"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf51bb761b281d20910b4b689c699ef98027845467daa5bb5dfdb53bd6ee404"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8ea462e3cebb121ff55002e9c8a9a0a3fd9b5bbbf688b4960f0a83c0172fb31"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:70bee21c245226ad0d637bf470472e2d487b86911b6d673a862127b934336ff4"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:253a3c223b944d691bf0abbd599f592ea3b36f0a71d2526833b1718f37eca5c2"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a6549b53fc5c403dc556cb39b2ae94d73f9b113daa00438a660bb1dd5380ae4d"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1c685cd4abe61af1c26279ff04b9f567eb4d6c1ec7fb265af7481b1f153043aa"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7e25144219577491929d032a6c3ddd63c6cd7fa764af829a5637f798190d9b26"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-win32.whl", hash = "sha256:0b9925610d25405a8e6d83ff4f54fc2456a121adb0155999972f5edd6ba3efc8"}, + {file = "clickhouse_driver-0.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:b243de483cfa02716053b0148d73558f4694f3c27b97fc1eaa97d7079563a14d"}, + {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45a3d5b1d06750fd6a18c29b871494a2635670099ec7693e756a5885a4a70dbf"}, + {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8415ffebd6ca9eef3024763abc450f8659f1716d015bd563c537d01c7fbc3569"}, + {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace48db993aa4bd31c42de0fa8d38c94ad47405916d6b61f7a7168a48fb52ac1"}, + {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b07123334fe143bfe6fa4e3d4b732d647d5fd2cfb9ec7f2f76104b46fe9d20c6"}, + {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2af3efa73d296420ce6362789f5b1febf75d4aa159a479393f01549115509d5"}, + {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:baf57eede88d07a1eb04352d26fc58a4d97991ca3d8840f7c5d48691dec9f251"}, + {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:275d0ccdab9c3571bdb3e9acfab4497930aa584ff2766b035bb2f854deaf8b82"}, + {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:293da77bfcac3168fb35b27c242f97c1a05502435c0686ecbb8e2e4abcb3de26"}, + {file = 
"clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d6c2e5830705e4eeef33070ca4d5a24dfa221f28f2f540e5e6842c26e70b10b"}, + {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:11934bd78d97dd7e1a23a6222b5edd1e1b4d34e1ead5c846dc2b5c56fdc35ff5"}, + {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b802b6f0fbdcc3ab81b87f09b694dde91ab049f44d1d2c08c3dc8ea9a5950cfa"}, + {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7af871c5315eb829ecf4533c790461ea8f73b3bfd5f533b0467e479fdf6ddcfd"}, + {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d577dd4867b9e26cf60590e1f500990c8701a6e3cfbb9e644f4d0c0fb607028"}, + {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ed3dea2d1eca85fef5b8564ddd76dedb15a610c77d55d555b49d9f7c896b64b"}, + {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:91ec96f2c48e5bdeac9eea43a9bc9cc19acb2d2c59df0a13d5520dfc32457605"}, + {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7667ab423452754f36ba8fb41e006a46baace9c94e2aca2a745689b9f2753dfb"}, + {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:653583b1f3b088d106f180d6f02c90917ecd669ec956b62903a05df4a7f44863"}, + {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef3dd0cbdf2f0171caab90389af0ede068ec802bf46c6a77f14e6edc86671bc"}, + {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11b1833ee8ff8d5df39a34a895e060b57bd81e05ea68822bc60476daff4ce1c8"}, + {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8a3195639e6393b9d4aafe736036881ff86b6be5855d4bf7d9f5c31637181ec3"}, +] + +[package.dependencies] +pytz = "*" +tzlocal = "*" + +[package.extras] +lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4 (<=3.0.1) ; implementation_name == \"pypy\"", "lz4 ; implementation_name != \"pypy\""] +numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"] +zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"] + [[package]] name = "colorama" version = "0.4.6" @@ -392,6 +577,160 @@ files = [ ] markers = {main = "sys_platform == \"win32\"", dev = "sys_platform == \"win32\" or platform_system == \"Windows\""} +[[package]] +name = "cryptography" +version = "45.0.7" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] +markers = "python_full_version >= \"3.14.0\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3"}, + {file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6"}, + {file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd"}, + {file = "cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8"}, + {file = "cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443"}, + {file = "cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17"}, + {file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b"}, + {file = 
"cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c"}, + {file = "cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5"}, + {file = "cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:de58755d723e86175756f463f2f0bddd45cc36fbd62601228a3f8761c9f58252"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a20e442e917889d1a6b3c570c9e3fa2fdc398c20868abcea268ea33c024c4083"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:258e0dff86d1d891169b5af222d362468a9570e2532923088658aa866eb11130"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d97cf502abe2ab9eff8bd5e4aca274da8d06dd3ef08b759a8d6143f4ad65d4b4"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:c987dad82e8c65ebc985f5dae5e74a3beda9d0a2a4daf8a1115f3772b59e5141"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c13b1e3afd29a5b3b2656257f14669ca8fa8d7956d509926f0b130b600b50ab7"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63"}, + {file = "cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971"}, +] + +[package.dependencies] +cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""] +pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==45.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cryptography" +version = "46.0.0" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.8" +groups = ["main"] +markers = "python_version == \"3.13\" or platform_python_implementation == \"PyPy\"" +files = [ + {file = "cryptography-46.0.0-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:c9c4121f9a41cc3d02164541d986f59be31548ad355a5c96ac50703003c50fb7"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4f70cbade61a16f5e238c4b0eb4e258d177a2fcb59aa0aae1236594f7b0ae338"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d1eccae15d5c28c74b2bea228775c63ac5b6c36eedb574e002440c0bc28750d3"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1b4fba84166d906a22027f0d958e42f3a4dbbb19c28ea71f0fb7812380b04e3c"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:523153480d7575a169933f083eb47b1edd5fef45d87b026737de74ffeb300f69"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:f09a3a108223e319168b7557810596631a8cb864657b0c16ed7a6017f0be9433"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c1f6ccd6f2eef3b2eb52837f0463e853501e45a916b3fc42e5d93cf244a4b97b"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:80a548a5862d6912a45557a101092cd6c64ae1475b82cef50ee305d14a75f598"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:6c39fd5cd9b7526afa69d64b5e5645a06e1b904f342584b3885254400b63f1b3"}, + {file = "cryptography-46.0.0-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d5c0cbb2fb522f7e39b59a5482a1c9c5923b7c506cfe96a1b8e7368c31617ac0"}, + {file = "cryptography-46.0.0-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6d8945bc120dcd90ae39aa841afddaeafc5f2e832809dc54fb906e3db829dfdc"}, + {file = "cryptography-46.0.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:88c09da8a94ac27798f6b62de6968ac78bb94805b5d272dbcfd5fdc8c566999f"}, + {file = "cryptography-46.0.0-cp311-abi3-win32.whl", hash = "sha256:3738f50215211cee1974193a1809348d33893696ce119968932ea117bcbc9b1d"}, + {file = "cryptography-46.0.0-cp311-abi3-win_amd64.whl", hash = "sha256:bbaa5eef3c19c66613317dc61e211b48d5f550db009c45e1c28b59d5a9b7812a"}, + {file = "cryptography-46.0.0-cp311-abi3-win_arm64.whl", hash = "sha256:16b5ac72a965ec9d1e34d9417dbce235d45fa04dac28634384e3ce40dfc66495"}, + {file = "cryptography-46.0.0-cp314-abi3-macosx_10_9_universal2.whl", hash = "sha256:91585fc9e696abd7b3e48a463a20dda1a5c0eeeca4ba60fa4205a79527694390"}, + {file = "cryptography-46.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:65e9117ebed5b16b28154ed36b164c20021f3a480e9cbb4b4a2a59b95e74c25d"}, + {file = "cryptography-46.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:da7f93551d39d462263b6b5c9056c49f780b9200bf9fc2656d7c88c7bdb9b363"}, + {file = "cryptography-46.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:be7479f9504bfb46628544ec7cb4637fe6af8b70445d4455fbb9c395ad9b7290"}, + {file = "cryptography-46.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f85e6a7d42ad60024fa1347b1d4ef82c4df517a4deb7f829d301f1a92ded038c"}, + {file = "cryptography-46.0.0-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:d349af4d76a93562f1dce4d983a4a34d01cb22b48635b0d2a0b8372cdb4a8136"}, + {file = 
"cryptography-46.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:35aa1a44bd3e0efc3ef09cf924b3a0e2a57eda84074556f4506af2d294076685"}, + {file = "cryptography-46.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c457ad3f151d5fb380be99425b286167b358f76d97ad18b188b68097193ed95a"}, + {file = "cryptography-46.0.0-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:399ef4c9be67f3902e5ca1d80e64b04498f8b56c19e1bc8d0825050ea5290410"}, + {file = "cryptography-46.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:378eff89b040cbce6169528f130ee75dceeb97eef396a801daec03b696434f06"}, + {file = "cryptography-46.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c3648d6a5878fd1c9a22b1d43fa75efc069d5f54de12df95c638ae7ba88701d0"}, + {file = "cryptography-46.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fc30be952dd4334801d345d134c9ef0e9ccbaa8c3e1bc18925cbc4247b3e29c"}, + {file = "cryptography-46.0.0-cp314-cp314t-win32.whl", hash = "sha256:b8e7db4ce0b7297e88f3d02e6ee9a39382e0efaf1e8974ad353120a2b5a57ef7"}, + {file = "cryptography-46.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:40ee4ce3c34acaa5bc347615ec452c74ae8ff7db973a98c97c62293120f668c6"}, + {file = "cryptography-46.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:07a1be54f995ce14740bf8bbe1cc35f7a37760f992f73cf9f98a2a60b9b97419"}, + {file = "cryptography-46.0.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:1d2073313324226fd846e6b5fc340ed02d43fd7478f584741bd6b791c33c9fee"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83af84ebe7b6e9b6de05050c79f8cc0173c864ce747b53abce6a11e940efdc0d"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c3cd09b1490c1509bf3892bde9cef729795fae4a2fee0621f19be3321beca7e4"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d14eaf1569d6252280516bedaffdd65267428cdbc3a8c2d6de63753cf0863d5e"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ab3a14cecc741c8c03ad0ad46dfbf18de25218551931a23bca2731d46c706d83"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:8e8b222eb54e3e7d3743a7c2b1f7fa7df7a9add790307bb34327c88ec85fe087"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7f3f88df0c9b248dcc2e76124f9140621aca187ccc396b87bc363f890acf3a30"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9aa85222f03fdb30defabc7a9e1e3d4ec76eb74ea9fe1504b2800844f9c98440"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:f9aaf2a91302e1490c068d2f3af7df4137ac2b36600f5bd26e53d9ec320412d3"}, + {file = "cryptography-46.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:32670ca085150ff36b438c17f2dfc54146fe4a074ebf0a76d72fb1b419a974bc"}, + {file = "cryptography-46.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0f58183453032727a65e6605240e7a3824fd1d6a7e75d2b537e280286ab79a52"}, + {file = "cryptography-46.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4bc257c2d5d865ed37d0bd7c500baa71f939a7952c424f28632298d80ccd5ec1"}, + {file = "cryptography-46.0.0-cp38-abi3-win32.whl", hash = "sha256:df932ac70388be034b2e046e34d636245d5eeb8140db24a6b4c2268cd2073270"}, + {file = "cryptography-46.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:274f8b2eb3616709f437326185eb563eb4e5813d01ebe2029b61bfe7d9995fbb"}, + {file = 
"cryptography-46.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:249c41f2bbfa026615e7bdca47e4a66135baa81b08509ab240a2e666f6af5966"}, + {file = "cryptography-46.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fe9ff1139b2b1f59a5a0b538bbd950f8660a39624bbe10cf3640d17574f973bb"}, + {file = "cryptography-46.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:77e3bd53c9c189cea361bc18ceb173959f8b2dd8f8d984ae118e9ac641410252"}, + {file = "cryptography-46.0.0-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:75d2ddde8f1766ab2db48ed7f2aa3797aeb491ea8dfe9b4c074201aec00f5c16"}, + {file = "cryptography-46.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f9f85d9cf88e3ba2b2b6da3c2310d1cf75bdf04a5bc1a2e972603054f82c4dd5"}, + {file = "cryptography-46.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:834af45296083d892e23430e3b11df77e2ac5c042caede1da29c9bf59016f4d2"}, + {file = "cryptography-46.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:c39f0947d50f74b1b3523cec3931315072646286fb462995eb998f8136779319"}, + {file = "cryptography-46.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6460866a92143a24e3ed68eaeb6e98d0cedd85d7d9a8ab1fc293ec91850b1b38"}, + {file = "cryptography-46.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bf1961037309ee0bdf874ccba9820b1c2f720c2016895c44d8eb2316226c1ad5"}, + {file = "cryptography-46.0.0.tar.gz", hash = "sha256:99f64a6d15f19f3afd78720ad2978f6d8d4c68cd4eb600fab82ab1a7c2071dca"}, +] + +[package.dependencies] +cffi = {version = ">=1.14", markers = "python_full_version < \"3.14.0\" and platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox[uv] (>=2024.4.15)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==46.0.0)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + [[package]] name = "dill" version = "0.3.9" @@ -500,6 +839,65 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil", "setuptools"] +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "idna" version = "3.10" @@ -559,6 +957,22 @@ files = [ colors = ["colorama"] plugins = ["setuptools"] +[[package]] +name = "jwcrypto" +version = "1.5.6" +description = "Implementation of JOSE Web standards" +optional = false +python-versions = ">= 3.8" +groups = ["main"] +files = [ + {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, + {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, +] + +[package.dependencies] +cryptography = ">=3.4" +typing-extensions = ">=4.5.0" + [[package]] name = "lz4" version = "4.4.4" @@ -723,6 +1137,21 @@ files = [ {file = "numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48"}, ] +[[package]] +name = "outcome" +version = "1.3.0.post0" +description = "Capture the outcome of Python function calls." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, + {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, +] + +[package.dependencies] +attrs = ">=19.2.0" + [[package]] name = "packaging" version = "24.2" @@ -807,7 +1236,6 @@ description = "C parser in Python" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -850,6 +1278,19 @@ tomlkit = ">=0.10.1" spelling = ["pyenchant (>=3.2,<4.0)"] testutils = ["gitpython (>3)"] +[[package]] +name = "pysocks" +version = "1.7.1" +description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, + {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, + {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, +] + [[package]] name = "pytest" version = "8.3.5" @@ -897,6 +1338,27 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-keycloak" +version = "5.8.1" +description = "python-keycloak is a Python package providing access to the Keycloak API." +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "python_keycloak-5.8.1-py3-none-any.whl", hash = "sha256:f80accf3e63b6c907f0f873ffac7a07705bd89d935520ba235259ba81b9ed864"}, + {file = "python_keycloak-5.8.1.tar.gz", hash = "sha256:b99a1efc7eb8715c3a7d915005728f8ba2ee03c81cdf12210c65ce794cd148ad"}, +] + +[package.dependencies] +aiofiles = ">=24.1.0" +async-property = ">=0.2.2" +deprecation = ">=2.1.0" +httpx = ">=0.23.2" +jwcrypto = ">=1.5.4" +requests = ">=2.20.0" +requests-toolbelt = ">=0.6.0" + [[package]] name = "pytz" version = "2025.2" @@ -958,6 +1420,65 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "selenium" +version = "4.35.0" +description = "Official Python bindings for Selenium WebDriver" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "selenium-4.35.0-py3-none-any.whl", hash = "sha256:90bb6c6091fa55805785cf1660fa1e2176220475ccdb466190f654ef8eef6114"}, + {file = "selenium-4.35.0.tar.gz", hash = "sha256:83937a538afb40ef01e384c1405c0863fa184c26c759d34a1ebbe7b925d3481c"}, +] + +[package.dependencies] +certifi = ">=2025.6.15" +trio = ">=0.30.0,<0.31.0" +trio-websocket = ">=0.12.2,<0.13.0" +typing_extensions = ">=4.14.0,<4.15.0" +urllib3 = {version = ">=2.5.0,<3.0", extras = ["socks"]} +websocket-client = ">=1.8.0,<1.9.0" + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = 
"sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + [[package]] name = "sqlalchemy" version = "2.0.43" @@ -1070,19 +1591,21 @@ python-baseconv = "*" [[package]] name = "testcontainers" -version = "4.10.0" +version = "4.13.1" description = "Python library for throwaway instances of anything that can run in a Docker container" optional = false -python-versions = "<4.0,>=3.9" +python-versions = "<4.0,>=3.9.2" groups = ["main"] files = [ - {file = "testcontainers-4.10.0-py3-none-any.whl", hash = "sha256:31ed1a81238c7e131a2a29df6db8f23717d892b592fa5a1977fd0dcd0c23fc23"}, - {file = "testcontainers-4.10.0.tar.gz", hash = "sha256:03f85c3e505d8b4edeb192c72a961cebbcba0dd94344ae778b4a159cb6dcf8d3"}, + {file = "testcontainers-4.13.1-py3-none-any.whl", hash = "sha256:10e6013a215eba673a0bcc153c8809d6f1c53c245e0a236e3877807652af4952"}, + {file = "testcontainers-4.13.1.tar.gz", hash = "sha256:4a6c5b2faa3e8afb91dff18b389a14b485f3e430157727b58e65d30c8dcde3f3"}, ] [package.dependencies] +clickhouse-driver = {version = "*", optional = true, markers = "extra == \"clickhouse\""} docker = "*" python-dotenv = "*" +python-keycloak = {version = "*", optional = true, markers = "extra == \"keycloak\""} typing-extensions = "*" urllib3 = "*" wrapt = "*" @@ -1091,10 +1614,10 @@ wrapt = "*" arangodb = ["python-arango (>=7.8,<8.0)"] aws = ["boto3", "httpx"] azurite = ["azure-storage-blob (>=12.19,<13.0)"] -chroma = ["chromadb-client"] +chroma = ["chromadb-client (>=1.0.0,<2.0.0)"] clickhouse = ["clickhouse-driver"] cosmosdb = ["azure-cosmos"] -db2 = ["ibm_db_sa", "sqlalchemy"] +db2 = ["ibm_db_sa ; platform_machine != \"aarch64\" and platform_machine != \"arm64\"", "sqlalchemy"] generic = ["httpx", "redis"] google = ["google-cloud-datastore (>=2)", "google-cloud-pubsub (>=2)"] influxdb = ["influxdb", "influxdb-client"] @@ -1104,10 +1627,11 @@ localstack = ["boto3"] mailpit = ["cryptography"] minio = ["minio"] mongodb = ["pymongo"] -mssql = ["pymssql", "sqlalchemy"] +mssql = ["pymssql ; platform_machine != \"arm64\" or python_version >= \"3.10\"", "sqlalchemy"] mysql = ["pymysql[rsa]", "sqlalchemy"] nats = ["nats-py"] neo4j = ["neo4j"] +openfga = ["openfga-sdk ; python_version >= \"3.10\""] opensearch = ["opensearch-py"] oracle = ["oracledb", "sqlalchemy"] oracle-free = ["oracledb", "sqlalchemy"] @@ -1135,17 +1659,85 @@ files = [ ] [[package]] -name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +name = "trio" +version = "0.30.0" +description = "A friendly Python library for async concurrency and I/O" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "trio-0.30.0-py3-none-any.whl", hash = "sha256:3bf4f06b8decf8d3cf00af85f40a89824669e2d033bb32469d34840edcfc22a5"}, + {file = "trio-0.30.0.tar.gz", hash = "sha256:0781c857c0c81f8f51e0089929a26b5bb63d57f927728a5586f7e36171f064df"}, +] + +[package.dependencies] +attrs = ">=23.2.0" +cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""} +idna = "*" +outcome = "*" +sniffio = ">=1.3.0" +sortedcontainers = "*" + +[[package]] +name = "trio-websocket" +version = "0.12.2" +description = "WebSocket library for Trio" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = 
"sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, + {file = "trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6"}, + {file = "trio_websocket-0.12.2.tar.gz", hash = "sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae"}, ] +[package.dependencies] +outcome = ">=1.2.0" +trio = ">=0.11" +wsproto = ">=0.14" + +[[package]] +name = "typing-extensions" +version = "4.14.1" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, + {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, +] + +[[package]] +name = "tzdata" +version = "2025.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, +] + +[[package]] +name = "tzlocal" +version = "5.3.1" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d"}, + {file = "tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd"}, +] + +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "urllib3" version = "2.5.0" @@ -1158,12 +1750,32 @@ files = [ {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] +[package.dependencies] +pysocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""} + [package.extras] brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [[package]] name = "wiremock" version = "2.6.1" @@ -1272,6 +1884,21 @@ files = [ {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, ] +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol 
implementation" +optional = false +python-versions = ">=3.7.0" +groups = ["main"] +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + [[package]] name = "zstandard" version = "0.23.0" @@ -1388,4 +2015,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = "^3.13" -content-hash = "fc17158ab90e70dbd94668e3346d6126384cb17cf28c3b3ec82e5ed067058380" +content-hash = "0c627ad77350031fd9760005434de7ece8b43bc00c7a4a0dc5ff88df8ecf9649" diff --git a/tests/integration/pyproject.toml b/tests/integration/pyproject.toml index 3068aafc86..2978eab995 100644 --- a/tests/integration/pyproject.toml +++ b/tests/integration/pyproject.toml @@ -9,13 +9,14 @@ readme = "README.md" python = "^3.13" pytest = "^8.3.5" psycopg2 = "^2.9.10" -testcontainers = "^4.10.0" +testcontainers = {extras = ["clickhouse", "keycloak", "postgres"], version = "^4.13.1"} wiremock = "^2.6.1" numpy = "^2.3.2" clickhouse-connect = "^0.8.18" svix-ksuid = "^0.6.2" requests = "^2.32.4" sqlalchemy = "^2.0.43" +selenium = "^4.35.0" [tool.poetry.group.dev.dependencies] @@ -33,7 +34,7 @@ python_files = "src/**/**.py" log_cli = true log_format = "%(asctime)s [%(levelname)s] (%(filename)s:%(lineno)s) %(message)s" log_date_format = "%Y-%m-%d %H:%M:%S" -addopts = "-ra" +addopts = "-ra -p no:warnings" [tool.pylint.main] ignore = [".venv"] @@ -42,7 +43,7 @@ ignore = [".venv"] max-line-length = "400" [tool.pylint."messages control"] -disable = ["missing-module-docstring", "missing-function-docstring", "missing-class-docstring", "duplicate-code", "dangerous-default-value", "too-many-positional-arguments", "too-many-arguments", "too-few-public-methods", "too-many-instance-attributes", "too-many-locals", "too-many-statements"] +disable = ["missing-module-docstring", "missing-function-docstring", "missing-class-docstring", "duplicate-code", "dangerous-default-value", "too-many-positional-arguments", "too-many-arguments", "too-few-public-methods", "too-many-instance-attributes", "too-many-locals", "too-many-statements", "too-many-lines"] [tool.isort] profile = "black" diff --git a/tests/integration/src/bootstrap/setup.py b/tests/integration/src/bootstrap/setup.py index 8a781fdfbb..3cf533d460 100644 --- a/tests/integration/src/bootstrap/setup.py +++ b/tests/integration/src/bootstrap/setup.py @@ -23,6 +23,7 @@ def test_telemetry_databases_exist(signoz: types.SigNoz) -> None: "signoz_traces", "signoz_metadata", "signoz_analytics", + "signoz_meter", ] for db_name in required_databases: @@ -31,5 +32,10 @@ def test_telemetry_databases_exist(signoz: types.SigNoz) -> None: ), f"Database {db_name} not found" -def test_teardown(signoz: types.SigNoz) -> None: # pylint: disable=unused-argument +def test_teardown( + signoz: types.SigNoz, # pylint: disable=unused-argument + idp: types.TestContainerIDP, # pylint: disable=unused-argument + create_user_admin: types.Operation, # pylint: disable=unused-argument + migrator: types.Operation, # pylint: disable=unused-argument +) -> None: pass diff --git a/tests/integration/src/auth/__init__.py b/tests/integration/src/callbackauthn/__init__.py similarity index 100% rename from tests/integration/src/auth/__init__.py rename to tests/integration/src/callbackauthn/__init__.py diff --git a/tests/integration/src/callbackauthn/a_domain.py 
b/tests/integration/src/callbackauthn/a_domain.py new file mode 100644 index 0000000000..faa7fc77da --- /dev/null +++ b/tests/integration/src/callbackauthn/a_domain.py @@ -0,0 +1,167 @@ +from http import HTTPStatus +from typing import Callable + +import requests + +from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD +from fixtures.types import Operation, SigNoz + + +def test_create_and_get_domain( + signoz: SigNoz, + create_user_admin: Operation, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], +): + admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD) + + # Get domains which should be an empty list + response = requests.get( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.OK + assert response.json()["status"] == "success" + data = response.json()["data"] + assert len(data) == 0 + + # Create a domain with google auth config + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + json={ + "name": "domain-google.integration.test", + "config": { + "ssoEnabled": True, + "ssoType": "google_auth", + "googleAuthConfig": { + "clientId": "client-id", + "clientSecret": "client-secret", + "redirectURI": "redirect-uri", + }, + }, + }, + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.CREATED + + # Create a domain with saml config + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + json={ + "name": "domain-saml.integration.test", + "config": { + "ssoEnabled": True, + "ssoType": "saml", + "samlConfig": { + "samlEntity": "saml-entity", + "samlIdp": "saml-idp", + "samlCert": "saml-cert", + }, + }, + }, + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.CREATED + + # List the domains + response = requests.get( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.OK + assert response.json()["status"] == "success" + data = response.json()["data"] + assert len(data) == 2 + assert data[0]["name"] == "domain-google.integration.test" + assert data[0]["ssoType"] == "google_auth" + assert data[1]["name"] == "domain-saml.integration.test" + assert data[1]["ssoType"] == "saml" + + +def test_create_invalid( + signoz: SigNoz, + create_user_admin: Operation, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], +): + admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD) + + # Create a domain with type saml and body for oidc, this should fail because oidcConfig is not allowed for saml + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + json={ + "name": "domain.integration.test", + "config": { + "ssoEnabled": True, + "ssoType": "saml", + "oidcConfig": { + "clientId": "client-id", + "clientSecret": "client-secret", + "issuer": "issuer", + }, + }, + }, + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.BAD_REQUEST + + # Create a domain with invalid name + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + json={ + "name": "$%^invalid", + "config": { + "ssoEnabled": True, + "ssoType": "saml", + "samlConfig": { + "samlEntity": "saml-entity", + "samlIdp": "saml-idp", + 
"samlCert": "saml-cert", + }, + }, + }, + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.BAD_REQUEST + + # Create a domain with no name + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + json={ + "config": { + "ssoEnabled": True, + "ssoType": "saml", + "samlConfig": { + "samlEntity": "saml-entity", + "samlIdp": "saml-idp", + "samlCert": "saml-cert", + }, + } + }, + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.BAD_REQUEST + + # Create a domain with no config + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + json={ + "name": "domain.integration.test", + }, + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.BAD_REQUEST diff --git a/tests/integration/src/callbackauthn/b_saml.py b/tests/integration/src/callbackauthn/b_saml.py new file mode 100644 index 0000000000..b3c056a796 --- /dev/null +++ b/tests/integration/src/callbackauthn/b_saml.py @@ -0,0 +1,108 @@ +from http import HTTPStatus +from typing import Callable, List + +import requests +from selenium import webdriver +from wiremock.resources.mappings import Mapping + +from fixtures.auth import ( + USER_ADMIN_EMAIL, + USER_ADMIN_PASSWORD, + add_license, +) +from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP + + +def test_apply_license( + signoz: SigNoz, + create_user_admin: Operation, # pylint: disable=unused-argument + make_http_mocks: Callable[[TestContainerDocker, List[Mapping]], None], + get_token: Callable[[str, str], str], +) -> None: + add_license(signoz, make_http_mocks, get_token) + + +def test_create_auth_domain( + signoz: SigNoz, + idp: TestContainerIDP, # pylint: disable=unused-argument + create_saml_client: Callable[[str, str], None], + get_saml_settings: Callable[[], dict], + create_user_admin: Callable[[], None], # pylint: disable=unused-argument + get_token: Callable[[str, str], str], +) -> None: + # Create a saml client in the idp. + create_saml_client("saml.integration.test", "/api/v1/complete/saml") + + # Get the saml settings from keycloak. + settings = get_saml_settings() + + # Create a auth domain in signoz. + admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD) + + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + json={ + "name": "saml.integration.test", + "config": { + "ssoEnabled": True, + "ssoType": "saml", + "samlConfig": { + "samlEntity": settings["entityID"], + "samlIdp": settings["singleSignOnServiceLocation"], + "samlCert": settings["certificate"], + }, + }, + }, + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.CREATED + + +def test_saml_authn( + signoz: SigNoz, + idp: TestContainerIDP, # pylint: disable=unused-argument + driver: webdriver.Chrome, + create_user_idp: Callable[[str, str], None], + idp_login: Callable[[str, str], None], + get_token: Callable[[str, str], str], + get_session_context: Callable[[str], str], +) -> None: + # Create a user in the idp. + create_user_idp("viewer@saml.integration.test", "password", True) + + # Get the session context from signoz which will give the SAML login URL. 
+ session_context = get_session_context("viewer@saml.integration.test") + + assert len(session_context["orgs"]) == 1 + assert len(session_context["orgs"][0]["authNSupport"]["callback"]) == 1 + + url = session_context["orgs"][0]["authNSupport"]["callback"][0]["url"] + + driver.get(url) + idp_login("viewer@saml.integration.test", "password") + + admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD) + + # Assert that the user was created in signoz. + response = requests.get( + signoz.self.host_configs["8080"].get("/api/v1/user"), + timeout=2, + headers={"Authorization": f"Bearer {admin_token}"}, + ) + + assert response.status_code == HTTPStatus.OK + + user_response = response.json()["data"] + found_user = next( + ( + user + for user in user_response + if user["email"] == "viewer@saml.integration.test" + ), + None, + ) + + assert found_user is not None + assert found_user["role"] == "VIEWER" diff --git a/tests/integration/src/callbackauthn/c_oidc.py b/tests/integration/src/callbackauthn/c_oidc.py new file mode 100644 index 0000000000..556f594ac8 --- /dev/null +++ b/tests/integration/src/callbackauthn/c_oidc.py @@ -0,0 +1,129 @@ +from http import HTTPStatus +from typing import Callable, List +from urllib.parse import urlparse + +import requests +from selenium import webdriver +from wiremock.resources.mappings import Mapping + +from fixtures.auth import ( + USER_ADMIN_EMAIL, + USER_ADMIN_PASSWORD, + add_license, +) +from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP + + +def test_apply_license( + signoz: SigNoz, + create_user_admin: Operation, # pylint: disable=unused-argument + make_http_mocks: Callable[[TestContainerDocker, List[Mapping]], None], + get_token: Callable[[str, str], str], +) -> None: + """ + This applies a license to the signoz instance. + """ + add_license(signoz, make_http_mocks, get_token) + + +def test_create_auth_domain( + signoz: SigNoz, + idp: TestContainerIDP, # pylint: disable=unused-argument + create_oidc_client: Callable[[str, str], None], + get_oidc_settings: Callable[[], dict], + create_user_admin: Callable[[], None], # pylint: disable=unused-argument + get_token: Callable[[str, str], str], +) -> None: + """ + This creates an OIDC auth domain in signoz. + """ + client_id = f"oidc.integration.test.{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}" + # Create an OIDC client in the idp. + create_oidc_client(client_id, "/api/v1/complete/oidc") + + # Get the OIDC settings from keycloak. + settings = get_oidc_settings(client_id) + + # Create an auth domain in signoz.
+ admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD) + + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/domains"), + json={ + "name": "oidc.integration.test", + "config": { + "ssoEnabled": True, + "ssoType": "oidc", + "oidcConfig": { + "clientId": settings["client_id"], + "clientSecret": settings["client_secret"], + # Change the hostname of the issuer to the internal resolvable hostname of the idp + "issuer": f"{idp.container.container_configs['6060'].get(urlparse(settings["issuer"]).path)}", + "issuerAlias": settings["issuer"], + "getUserInfo": True, + }, + }, + }, + headers={"Authorization": f"Bearer {admin_token}"}, + timeout=2, + ) + + assert response.status_code == HTTPStatus.CREATED + + +def test_oidc_authn( + signoz: SigNoz, + idp: TestContainerIDP, # pylint: disable=unused-argument + driver: webdriver.Chrome, + create_user_idp: Callable[[str, str, bool], None], + idp_login: Callable[[str, str], None], + get_token: Callable[[str, str], str], + get_session_context: Callable[[str], str], +) -> None: + """ + This tests the OIDC authn flow. + It uses a web browser to login to the idp and then asserts that the user was created in signoz. + """ + # Create a user in the idp. + create_user_idp("viewer@oidc.integration.test", "password123", True) + + # Get the session context from signoz which will give the OIDC login URL. + session_context = get_session_context("viewer@oidc.integration.test") + + assert len(session_context["orgs"]) == 1 + assert len(session_context["orgs"][0]["authNSupport"]["callback"]) == 1 + + url = session_context["orgs"][0]["authNSupport"]["callback"][0]["url"] + + # change the url to the external resolvable hostname of the idp + parsed_url = urlparse(url) + actual_url = ( + f"{idp.container.host_configs['6060'].get(parsed_url.path)}?{parsed_url.query}" + ) + + driver.get(actual_url) + idp_login("viewer@oidc.integration.test", "password123") + + admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD) + + # Assert that the user was created in signoz. + response = requests.get( + signoz.self.host_configs["8080"].get("/api/v1/user"), + timeout=2, + headers={"Authorization": f"Bearer {admin_token}"}, + ) + + assert response.status_code == HTTPStatus.OK + + user_response = response.json()["data"] + found_user = next( + ( + user + for user in user_response + if user["email"] == "viewer@oidc.integration.test" + ), + None, + ) + + assert found_user is not None + assert found_user["role"] == "VIEWER" diff --git a/tests/integration/src/passwordauthn/__init__.py b/tests/integration/src/passwordauthn/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/src/auth/a_register.py b/tests/integration/src/passwordauthn/a_register.py similarity index 78% rename from tests/integration/src/auth/a_register.py rename to tests/integration/src/passwordauthn/a_register.py index 0402964d36..93f444e19d 100644 --- a/tests/integration/src/auth/a_register.py +++ b/tests/integration/src/passwordauthn/a_register.py @@ -1,4 +1,5 @@ from http import HTTPStatus +from typing import Callable import requests @@ -8,7 +9,12 @@ from fixtures.logger import setup_logger logger = setup_logger(__name__) -def test_register_with_invalid_password(signoz: types.SigNoz) -> None: +def test_register_with_invalid_input(signoz: types.SigNoz) -> None: + """ + Test the register endpoint with invalid input. + 1. Invalid Password + 2. 
Invalid Email + """ response = requests.post( signoz.self.host_configs["8080"].get("/api/v1/register"), json={ @@ -16,7 +22,21 @@ def test_register_with_invalid_password(signoz: types.SigNoz) -> None: "orgId": "", "orgName": "integration.test", "email": "admin@integration.test", - "password": "password", + "password": "password", # invalid password + }, + timeout=2, + ) + + assert response.status_code == HTTPStatus.BAD_REQUEST + + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/register"), + json={ + "name": "admin", + "orgId": "", + "orgName": "integration.test", + "email": "admin", # invalid email + "password": "password123Z$", }, timeout=2, ) @@ -24,7 +44,7 @@ def test_register_with_invalid_password(signoz: types.SigNoz) -> None: assert response.status_code == HTTPStatus.BAD_REQUEST -def test_register(signoz: types.SigNoz, get_jwt_token) -> None: +def test_register(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None: response = requests.get( signoz.self.host_configs["8080"].get("/api/v1/version"), timeout=2 ) @@ -52,7 +72,7 @@ def test_register(signoz: types.SigNoz, get_jwt_token) -> None: assert response.status_code == HTTPStatus.OK assert response.json()["setupCompleted"] is True - admin_token = get_jwt_token("admin@integration.test", "password123Z$") + admin_token = get_token("admin@integration.test", "password123Z$") response = requests.get( signoz.self.host_configs["8080"].get("/api/v1/user"), @@ -81,14 +101,16 @@ def test_register(signoz: types.SigNoz, get_jwt_token) -> None: assert response.json()["data"]["role"] == "ADMIN" -def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None: +def test_invite_and_register( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +) -> None: # Generate an invite token for the editor user response = requests.post( signoz.self.host_configs["8080"].get("/api/v1/invite"), - json={"email": "editor@integration.test", "role": "EDITOR"}, + json={"email": "editor@integration.test", "role": "EDITOR", "name": "editor"}, timeout=2, headers={ - "Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password123Z$")}" + "Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}" }, ) @@ -98,7 +120,7 @@ def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None: signoz.self.host_configs["8080"].get("/api/v1/invite"), timeout=2, headers={ - "Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password123Z$")}" + "Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}" }, ) @@ -122,7 +144,7 @@ def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None: }, timeout=2, ) - assert response.status_code == HTTPStatus.OK + assert response.status_code == HTTPStatus.CREATED # Verify that the invite token has been deleted response = requests.get( @@ -137,7 +159,7 @@ def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None: signoz.self.host_configs["8080"].get("/api/v1/user"), timeout=2, headers={ - "Authorization": f"Bearer {get_jwt_token("editor@integration.test", "password123Z$")}" + "Authorization": f"Bearer {get_token("editor@integration.test", "password123Z$")}" }, ) @@ -148,7 +170,7 @@ def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None: signoz.self.host_configs["8080"].get("/api/v1/user"), timeout=2, headers={ - "Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password123Z$")}" + "Authorization": f"Bearer 
{get_token("admin@integration.test", "password123Z$")}" }, ) @@ -166,8 +188,10 @@ def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None: assert found_user["email"] == "editor@integration.test" -def test_revoke_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None: - admin_token = get_jwt_token("admin@integration.test", "password123Z$") +def test_revoke_invite_and_register( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +) -> None: + admin_token = get_token("admin@integration.test", "password123Z$") # Generate an invite token for the viewer user response = requests.post( signoz.self.host_configs["8080"].get("/api/v1/invite"), @@ -182,7 +206,7 @@ def test_revoke_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None signoz.self.host_configs["8080"].get("/api/v1/invite"), timeout=2, headers={ - "Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password123Z$")}" + "Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}" }, ) @@ -218,8 +242,10 @@ def test_revoke_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None assert response.status_code in (HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND) -def test_self_access(signoz: types.SigNoz, get_jwt_token) -> None: - admin_token = get_jwt_token("admin@integration.test", "password123Z$") +def test_self_access( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +) -> None: + admin_token = get_token("admin@integration.test", "password123Z$") response = requests.get( signoz.self.host_configs["8080"].get("/api/v1/user"), diff --git a/tests/integration/src/auth/b_license.py b/tests/integration/src/passwordauthn/b_license.py similarity index 86% rename from tests/integration/src/auth/b_license.py rename to tests/integration/src/passwordauthn/b_license.py index ac148c0064..61b475c999 100644 --- a/tests/integration/src/auth/b_license.py +++ b/tests/integration/src/passwordauthn/b_license.py @@ -1,5 +1,6 @@ import http import json +from typing import Callable, List import requests from sqlalchemy import sql @@ -11,10 +12,14 @@ from wiremock.client import ( WireMockMatchers, ) -from fixtures.types import SigNoz +from fixtures import types -def test_apply_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: +def test_apply_license( + signoz: types.SigNoz, + make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None], + get_token: Callable[[str, str], str], +) -> None: make_http_mocks( signoz.zeus, [ @@ -53,7 +58,7 @@ def test_apply_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: ], ) - access_token = get_jwt_token("admin@integration.test", "password123Z$") + access_token = get_token("admin@integration.test", "password123Z$") response = requests.post( url=signoz.self.host_configs["8080"].get("/api/v3/licenses"), @@ -73,7 +78,11 @@ def test_apply_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: assert response.json()["count"] == 1 -def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: +def test_refresh_license( + signoz: types.SigNoz, + make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None], + get_token: Callable[[str, str], str], +) -> None: make_http_mocks( signoz.zeus, [ @@ -112,7 +121,7 @@ def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None ], ) - access_token = get_jwt_token("admin@integration.test", "password123Z$") + access_token = get_token("admin@integration.test", "password123Z$") response = 
requests.put( url=signoz.self.host_configs["8080"].get("/api/v3/licenses"), @@ -139,7 +148,11 @@ def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None assert response.json()["count"] == 1 -def test_license_checkout(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: +def test_license_checkout( + signoz: types.SigNoz, + make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None], + get_token: Callable[[str, str], str], +) -> None: make_http_mocks( signoz.zeus, [ @@ -165,7 +178,7 @@ def test_license_checkout(signoz: SigNoz, make_http_mocks, get_jwt_token) -> Non ], ) - access_token = get_jwt_token("admin@integration.test", "password123Z$") + access_token = get_token("admin@integration.test", "password123Z$") response = requests.post( url=signoz.self.host_configs["8080"].get("/api/v1/checkout"), @@ -186,7 +199,11 @@ def test_license_checkout(signoz: SigNoz, make_http_mocks, get_jwt_token) -> Non assert response.json()["count"] == 1 -def test_license_portal(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: +def test_license_portal( + signoz: types.SigNoz, + make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None], + get_token: Callable[[str, str], str], +) -> None: make_http_mocks( signoz.zeus, [ @@ -212,7 +229,7 @@ def test_license_portal(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: ], ) - access_token = get_jwt_token("admin@integration.test", "password123Z$") + access_token = get_token("admin@integration.test", "password123Z$") response = requests.post( url=signoz.self.host_configs["8080"].get("/api/v1/portal"), diff --git a/tests/integration/src/auth/c_apikey.py b/tests/integration/src/passwordauthn/c_apikey.py similarity index 89% rename from tests/integration/src/auth/c_apikey.py rename to tests/integration/src/passwordauthn/c_apikey.py index 35fecaa9ce..6656079fca 100644 --- a/tests/integration/src/auth/c_apikey.py +++ b/tests/integration/src/passwordauthn/c_apikey.py @@ -1,12 +1,13 @@ from http import HTTPStatus +from typing import Callable import requests from fixtures import types -def test_api_key(signoz: types.SigNoz, get_jwt_token) -> None: - admin_token = get_jwt_token("admin@integration.test", "password123Z$") +def test_api_key(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None: + admin_token = get_token("admin@integration.test", "password123Z$") response = requests.post( signoz.self.host_configs["8080"].get("/api/v1/pats"), diff --git a/tests/integration/src/auth/d_password.py b/tests/integration/src/passwordauthn/d_password.py similarity index 87% rename from tests/integration/src/auth/d_password.py rename to tests/integration/src/passwordauthn/d_password.py index 1c6c2e57af..1b87b235ad 100644 --- a/tests/integration/src/auth/d_password.py +++ b/tests/integration/src/passwordauthn/d_password.py @@ -1,4 +1,5 @@ from http import HTTPStatus +from typing import Callable import requests from sqlalchemy import sql @@ -9,8 +10,10 @@ from fixtures.logger import setup_logger logger = setup_logger(__name__) -def test_change_password(signoz: types.SigNoz, get_jwt_token) -> None: - admin_token = get_jwt_token("admin@integration.test", "password123Z$") +def test_change_password( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +) -> None: + admin_token = get_token("admin@integration.test", "password123Z$") # Create another admin user response = requests.post( @@ -62,7 +65,7 @@ def test_change_password(signoz: types.SigNoz, get_jwt_token) -> None: timeout=2, ) - assert 
response.status_code == HTTPStatus.OK + assert response.status_code == HTTPStatus.CREATED # Get the user id response = requests.get( @@ -84,7 +87,7 @@ def test_change_password(signoz: types.SigNoz, get_jwt_token) -> None: ) # Try logging in with the password - token = get_jwt_token("admin+password@integration.test", "password123Z$") + token = get_token("admin+password@integration.test", "password123Z$") assert token is not None # Try changing the password with a bad old password which should fail @@ -120,12 +123,14 @@ def test_change_password(signoz: types.SigNoz, get_jwt_token) -> None: assert response.status_code == HTTPStatus.NO_CONTENT # Try logging in with the new password - token = get_jwt_token("admin+password@integration.test", "password123Znew$") + token = get_token("admin+password@integration.test", "password123Znew$") assert token is not None -def test_reset_password(signoz: types.SigNoz, get_jwt_token) -> None: - admin_token = get_jwt_token("admin@integration.test", "password123Z$") +def test_reset_password( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +) -> None: + admin_token = get_token("admin@integration.test", "password123Z$") # Get the user id for admin+password@integration.test response = requests.get( @@ -176,12 +181,14 @@ def test_reset_password(signoz: types.SigNoz, get_jwt_token) -> None: assert response.status_code == HTTPStatus.NO_CONTENT - token = get_jwt_token("admin+password@integration.test", "password123Z$NEWNEW#!") + token = get_token("admin+password@integration.test", "password123Z$NEWNEW#!") assert token is not None -def test_reset_password_with_no_password(signoz: types.SigNoz, get_jwt_token) -> None: - admin_token = get_jwt_token("admin@integration.test", "password123Z$") +def test_reset_password_with_no_password( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +) -> None: + admin_token = get_token("admin@integration.test", "password123Z$") # Get the user id for admin+password@integration.test response = requests.get( @@ -231,5 +238,5 @@ def test_reset_password_with_no_password(signoz: types.SigNoz, get_jwt_token) -> assert response.status_code == HTTPStatus.NO_CONTENT - token = get_jwt_token("admin+password@integration.test", "FINALPASSword123!#[") + token = get_token("admin+password@integration.test", "FINALPASSword123!#[") assert token is not None diff --git a/tests/integration/src/querier/a_logs.py b/tests/integration/src/querier/a_logs.py index 4f19e69095..b661c11fe1 100644 --- a/tests/integration/src/querier/a_logs.py +++ b/tests/integration/src/querier/a_logs.py @@ -12,7 +12,7 @@ from fixtures.logs import Logs def test_logs_list( signoz: types.SigNoz, create_user_admin: None, # pylint: disable=unused-argument - get_jwt_token: Callable[[], str], + get_token: Callable[[str, str], str], insert_logs: Callable[[List[Logs]], None], ) -> None: """ @@ -75,7 +75,7 @@ def test_logs_list( ] ) - token = get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) # Query Logs for the last 10 seconds and check if the logs are returned in the correct order response = requests.post( @@ -312,7 +312,7 @@ def test_logs_list( def test_logs_time_series_count( signoz: types.SigNoz, create_user_admin: None, # pylint: disable=unused-argument - get_jwt_token: Callable[[str, str], str], + get_token: Callable[[str, str], str], insert_logs: Callable[[List[Logs]], None], ) -> None: """ @@ -414,7 +414,7 @@ def test_logs_time_series_count( ) insert_logs(logs) - token = 
get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) # count() of all logs for the last 5 minutes response = requests.post( @@ -821,7 +821,7 @@ def test_logs_time_series_count( def test_datatype_collision( signoz: types.SigNoz, create_user_admin: None, # pylint: disable=unused-argument - get_jwt_token: Callable[[str, str], str], + get_token: Callable[[str, str], str], insert_logs: Callable[[List[Logs]], None], ) -> None: """ @@ -887,11 +887,13 @@ def test_datatype_collision( "code.function": "com.example.Integration.process", "code.line": i + 1, "telemetry.sdk.language": "go", - "http.status_code": 404, # Numeric value - "response.time": 456.78, # Numeric value + "http.status_code": 404, # Numeric value + "response.time": 456.78, # Numeric value }, body=f"Test log {i+4} with numeric values", - severity_text=severity_levels_2[i], # ERROR(17-20), FATAL(21-24), TRACE(1-4), DEBUG(5-8) + severity_text=severity_levels_2[ + i + ], # ERROR(17-20), FATAL(21-24), TRACE(1-4), DEBUG(5-8) ) ) @@ -914,8 +916,8 @@ def test_datatype_collision( "code.function": "com.example.Integration.process", "code.line": 1, "telemetry.sdk.language": "python", - "http.status_code": "", # Empty string - "response.time": 0, # Zero value + "http.status_code": "", # Empty string + "response.time": 0, # Zero value }, body="Edge case test log", severity_text="ERROR", @@ -924,7 +926,7 @@ def test_datatype_collision( insert_logs(logs) - token = get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) # count() of all logs for the where severity_number > '7' response = requests.post( @@ -1062,7 +1064,9 @@ def test_datatype_collision( "signal": "logs", "stepInterval": 60, "disabled": False, - "filter": {"expression": "severity_number = '13'"}, # String comparison with numeric field + "filter": { + "expression": "severity_number = '13'" + }, # String comparison with numeric field "having": {"expression": ""}, "aggregations": [{"expression": "count()"}], }, @@ -1115,7 +1119,9 @@ def test_datatype_collision( "signal": "logs", "stepInterval": 60, "disabled": False, - "filter": {"expression": "http.status_code = 200"}, # Numeric comparison with string field + "filter": { + "expression": "http.status_code = 200" + }, # Numeric comparison with string field "having": {"expression": ""}, "aggregations": [{"expression": "count()"}], }, @@ -1168,7 +1174,9 @@ def test_datatype_collision( "signal": "logs", "stepInterval": 60, "disabled": False, - "filter": {"expression": "http.status_code = '404'"}, # String comparison with numeric field + "filter": { + "expression": "http.status_code = '404'" + }, # String comparison with numeric field "having": {"expression": ""}, "aggregations": [{"expression": "count()"}], }, @@ -1189,7 +1197,6 @@ def test_datatype_collision( # Should return 4 logs with http.status_code = 404 (next 4 logs have numeric value 404) assert count == 4 - # Test 5: Edge case - empty string comparison response = requests.post( signoz.self.host_configs["8080"].get("/api/v5/query_range"), @@ -1222,7 +1229,9 @@ def test_datatype_collision( "signal": "logs", "stepInterval": 60, "disabled": False, - "filter": {"expression": "http.status_code = ''"}, # Empty string comparison + "filter": { + "expression": "http.status_code = ''" + }, # Empty string comparison "having": {"expression": ""}, "aggregations": [{"expression": "count()"}], }, @@ -1241,4 +1250,4 @@ def 
test_datatype_collision( count = results[0]["data"][0][0] # Should return 1 log with empty http.status_code (edge case log) - assert count == 1 \ No newline at end of file + assert count == 1 diff --git a/tests/integration/src/querier/b_traces.py b/tests/integration/src/querier/b_traces.py index bb586fda06..23d921ed65 100644 --- a/tests/integration/src/querier/b_traces.py +++ b/tests/integration/src/querier/b_traces.py @@ -12,7 +12,7 @@ from fixtures.traces import TraceIdGenerator, Traces, TracesKind, TracesStatusCo def test_traces_list( signoz: types.SigNoz, create_user_admin: None, # pylint: disable=unused-argument - get_jwt_token: Callable[[str, str], str], + get_token: Callable[[str, str], str], insert_traces: Callable[[List[Traces]], None], ) -> None: """ @@ -138,7 +138,7 @@ def test_traces_list( ] ) - token = get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) # Query all traces for the past 5 minutes response = requests.post( diff --git a/tests/integration/src/ttl/a_ttl.py b/tests/integration/src/ttl/a_ttl.py index 543d68d780..8f754da131 100644 --- a/tests/integration/src/ttl/a_ttl.py +++ b/tests/integration/src/ttl/a_ttl.py @@ -6,6 +6,7 @@ It verifies the correct behavior of TTL settings for traces, metrics, and logs, import time from http import HTTPStatus +from typing import Callable import pytest import requests @@ -26,7 +27,9 @@ def ttl_test_suite_setup(create_user_admin): # pylint: disable=unused-argument yield -def test_set_ttl_traces_success(signoz: types.SigNoz, get_jwt_token): +def test_set_ttl_traces_success( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +): """Test setting TTL for traces with new ttlConfig structure.""" payload = { "type": "traces", @@ -34,7 +37,7 @@ def test_set_ttl_traces_success(signoz: types.SigNoz, get_jwt_token): } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -80,7 +83,7 @@ def test_set_ttl_traces_success(signoz: types.SigNoz, get_jwt_token): assert all("toIntervalSecond(12960000)" in ttl_part for ttl_part in ttl_parts) -def test_set_ttl_traces_with_cold_storage(signoz: types.SigNoz, get_jwt_token): +def test_set_ttl_traces_with_cold_storage(signoz: types.SigNoz, get_token: Callable[[str, str], str]): """Test setting TTL for traces with cold storage configuration.""" payload = { "type": "traces", @@ -90,7 +93,7 @@ def test_set_ttl_traces_with_cold_storage(signoz: types.SigNoz, get_jwt_token): } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -106,7 +109,9 @@ def test_set_ttl_traces_with_cold_storage(signoz: types.SigNoz, get_jwt_token): assert "successfully set up" in response_data["message"].lower() -def test_set_ttl_metrics_success(signoz: types.SigNoz, get_jwt_token): +def test_set_ttl_metrics_success( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +): """Test setting TTL for metrics using the new setTTLMetrics method.""" payload = { "type": "metrics", @@ -116,7 +121,7 @@ def test_set_ttl_metrics_success(signoz: types.SigNoz, get_jwt_token): } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + 
"Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -163,7 +168,9 @@ def test_set_ttl_metrics_success(signoz: types.SigNoz, get_jwt_token): assert all("toIntervalSecond(7776000)" in ttl_part for ttl_part in ttl_parts) -def test_set_ttl_metrics_with_cold_storage(signoz: types.SigNoz, get_jwt_token): +def test_set_ttl_metrics_with_cold_storage( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +): """Test setting TTL for metrics with cold storage configuration.""" payload = { "type": "metrics", @@ -173,7 +180,7 @@ def test_set_ttl_metrics_with_cold_storage(signoz: types.SigNoz, get_jwt_token): } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -189,7 +196,9 @@ def test_set_ttl_metrics_with_cold_storage(signoz: types.SigNoz, get_jwt_token): assert "successfully set up" in response_data["message"].lower() -def test_set_ttl_invalid_type(signoz: types.SigNoz, get_jwt_token): +def test_set_ttl_invalid_type( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +): """Test setting TTL with invalid type returns error.""" payload = { "type": "invalid_type", @@ -199,7 +208,7 @@ def test_set_ttl_invalid_type(signoz: types.SigNoz, get_jwt_token): } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -212,7 +221,9 @@ def test_set_ttl_invalid_type(signoz: types.SigNoz, get_jwt_token): assert response.status_code == HTTPStatus.BAD_REQUEST -def test_set_custom_retention_ttl_basic(signoz: types.SigNoz, get_jwt_token): +def test_set_custom_retention_ttl_basic( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +): """Test setting custom retention TTL with basic configuration.""" payload = { "type": "logs", @@ -223,7 +234,7 @@ def test_set_custom_retention_ttl_basic(signoz: types.SigNoz, get_jwt_token): } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -283,7 +294,7 @@ def test_set_custom_retention_ttl_basic(signoz: types.SigNoz, get_jwt_token): def test_set_custom_retention_ttl_with_conditions( - signoz: types.SigNoz, get_jwt_token, insert_logs + signoz: types.SigNoz, get_token: Callable[[str, str], str], insert_logs ): """Test setting custom retention TTL with filter conditions.""" @@ -303,7 +314,7 @@ def test_set_custom_retention_ttl_with_conditions( } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -334,7 +345,7 @@ def test_set_custom_retention_ttl_with_conditions( def test_set_custom_retention_ttl_with_cold_storage( - signoz: types.SigNoz, get_jwt_token, insert_logs + signoz: types.SigNoz, get_token: Callable[[str, str], str], insert_logs ): """Test setting custom retention TTL with cold storage configuration.""" payload = { @@ -357,7 +368,7 @@ def test_set_custom_retention_ttl_with_cold_storage( insert_logs(logs) headers = { - "Authorization": f"Bearer 
{get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -375,7 +386,7 @@ def test_set_custom_retention_ttl_with_cold_storage( def test_set_custom_retention_ttl_duplicate_conditions( - signoz: types.SigNoz, get_jwt_token + signoz: types.SigNoz, get_token: Callable[[str, str], str] ): """Test that duplicate TTL conditions are rejected.""" payload = { @@ -401,7 +412,7 @@ def test_set_custom_retention_ttl_duplicate_conditions( } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -416,7 +427,7 @@ def test_set_custom_retention_ttl_duplicate_conditions( def test_set_custom_retention_ttl_invalid_condition( - signoz: types.SigNoz, get_jwt_token + signoz: types.SigNoz, get_token: Callable[[str, str], str] ): """Test that conditions with empty values are rejected.""" payload = { @@ -438,7 +449,7 @@ def test_set_custom_retention_ttl_invalid_condition( } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post( @@ -452,7 +463,9 @@ def test_set_custom_retention_ttl_invalid_condition( assert response.status_code == HTTPStatus.BAD_REQUEST -def test_get_custom_retention_ttl(signoz: types.SigNoz, get_jwt_token, insert_logs): +def test_get_custom_retention_ttl( + signoz: types.SigNoz, get_token: Callable[[str, str], str], insert_logs +): """Test getting custom retention TTL configuration.""" # First set a custom retention TTL set_payload = { @@ -475,7 +488,7 @@ def test_get_custom_retention_ttl(signoz: types.SigNoz, get_jwt_token, insert_lo insert_logs(logs) headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } set_response = requests.post( signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"), @@ -490,7 +503,7 @@ def test_get_custom_retention_ttl(signoz: types.SigNoz, get_jwt_token, insert_lo # Now get the TTL configuration headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } get_response = requests.get( @@ -513,7 +526,9 @@ def test_get_custom_retention_ttl(signoz: types.SigNoz, get_jwt_token, insert_lo ] -def test_get_ttl_traces_success(signoz: types.SigNoz, get_jwt_token): +def test_get_ttl_traces_success( + signoz: types.SigNoz, get_token: Callable[[str, str], str] +): """Test getting TTL for traces.""" # First set a TTL configuration for traces set_payload = { @@ -522,7 +537,7 @@ def test_get_ttl_traces_success(signoz: types.SigNoz, get_jwt_token): } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } set_response = requests.post( @@ -561,7 +576,9 @@ def test_get_ttl_traces_success(signoz: types.SigNoz, get_jwt_token): ) # -1 indicates no cold storage configured -def test_large_ttl_conditions_list(signoz: types.SigNoz, get_jwt_token, insert_logs): 
+def test_large_ttl_conditions_list( + signoz: types.SigNoz, get_token: Callable[[str, str], str], insert_logs +): """Test custom retention TTL with many conditions.""" # Create a list of many TTL conditions to test performance and limits conditions = [] @@ -588,7 +605,7 @@ def test_large_ttl_conditions_list(signoz: types.SigNoz, get_jwt_token, insert_l } headers = { - "Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" + "Authorization": f"Bearer {get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}" } response = requests.post(