Compare commits


11 Commits

1622 changed files with 42362 additions and 175633 deletions

View File

@@ -1,4 +1,5 @@
 services:
 clickhouse:
 image: clickhouse/clickhouse-server:24.1.2-alpine
 container_name: clickhouse
@@ -23,6 +24,7 @@ services:
 retries: 3
 depends_on:
 - zookeeper
 zookeeper:
 image: bitnami/zookeeper:3.7.1
 container_name: zookeeper
@@ -39,8 +41,9 @@ services:
 interval: 30s
 timeout: 5s
 retries: 3
 schema-migrator-sync:
-image: signoz/signoz-schema-migrator:v0.111.42
+image: signoz/signoz-schema-migrator:0.111.29
 container_name: schema-migrator-sync
 command:
 - sync
@@ -52,8 +55,9 @@ services:
 clickhouse:
 condition: service_healthy
 restart: on-failure
 schema-migrator-async:
-image: signoz/signoz-schema-migrator:v0.111.42
+image: signoz/signoz-schema-migrator:0.111.29
 container_name: schema-migrator-async
 command:
 - async

View File

@@ -1,27 +0,0 @@
services:
postgres:
image: postgres:15
container_name: postgres
environment:
POSTGRES_DB: signoz
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
healthcheck:
test:
[
"CMD",
"pg_isready",
"-d",
"signoz",
"-U",
"postgres"
]
interval: 30s
timeout: 30s
retries: 3
restart: on-failure
ports:
- "127.0.0.1:5432:5432/tcp"
volumes:
- ${PWD}/fs/tmp/var/lib/postgresql/data/:/var/lib/postgresql/data/

4
.github/CODEOWNERS vendored
View File

@@ -2,7 +2,7 @@
 # Owners are automatically requested for review for PRs that changes code
 # that they own.
-/frontend/ @SigNoz/frontend @YounixM
+/frontend/ @YounixM
 /frontend/src/container/MetricsApplication @srikanthccv
 /frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
 /deploy/ @SigNoz/devops
@@ -11,5 +11,3 @@
 /pkg/errors/ @grandwizard28
 /pkg/factory/ @grandwizard28
 /pkg/types/ @grandwizard28
-.golangci.yml @grandwizard28
-**/(zeus|licensing|sqlmigration)/ @vikrantgupta25

View File

@@ -1,72 +1,17 @@
-## 📄 Summary
-<!-- Describe the purpose of the PR in a few sentences. What does it fix/add/update? -->
----
-## ✅ Changes
-- [ ] Feature: Brief description
-- [ ] Bug fix: Brief description
----
-## 🏷️ Required: Add Relevant Labels
-> ⚠️ **Manually add appropriate labels in the PR sidebar**
-Please select one or more labels (as applicable):
-ex:
-- `frontend`
-- `backend`
-- `devops`
-- `bug`
-- `enhancement`
-- `ui`
-- `test`
----
-## 👥 Reviewers
-> Tag the relevant teams for review:
-- frontend / backend / devops
----
-## 🧪 How to Test
-<!-- Describe how reviewers can test this PR -->
-1. ...
-2. ...
-3. ...
----
-## 🔍 Related Issues
-<!-- Reference any related issues (e.g. Fixes #123, Closes #456) -->
-Closes #
----
-## 📸 Screenshots / Screen Recording (if applicable / mandatory for UI related changes)
-<!-- Add screenshots or GIFs to help visualize changes -->
----
-## 📋 Checklist
-- [ ] Dev Review
-- [ ] Test cases added (Unit/ Integration / E2E)
-- [ ] Manually tested the changes
----
-## 👀 Notes for Reviewers
-<!-- Anything reviewers should keep in mind while reviewing -->
+### Summary
+<!-- ✍️ A clear and concise description...-->
+#### Related Issues / PR's
+<!-- ✍️ Add the issues being resolved here and related PR's where applicable -->
+#### Screenshots
+NA
+<!-- ✍️ Add screenshots of before and after changes where applicable-->
+#### Affected Areas and Manually Tested Areas
+<!-- ✍️ Add details of blast radius and dev testing areas where applicable-->

42
.github/workflows/README.md vendored Normal file
View File

@@ -0,0 +1,42 @@
# Github actions
## Testing the UI manually on each PR
First, we need to make sure the UI is ready:
* Check the `Start tunnel` step in the `e2e-k8s/deploy-on-k3s-cluster` job and make sure you see `your url is: https://pull-<number>-signoz.loca.lt`
* This job keeps running until the PR is merged or closed, so that the local tunnel stays alive
- GitHub will cancel this job if the PR hasn't been merged after 6h
- if the job was cancelled, go to the action and press `Re-run all jobs`
Now you can open your browser at https://pull-<number>-signoz.loca.lt and check the UI.
## Environment Variables
To run the GitHub workflows, a few environment variables need to be added to GitHub secrets
<table>
<tr>
<th> Variables </th>
<th> Description </th>
<th> Example </th>
</tr>
<tr>
<td> REPONAME </td>
<td> Provide the DockerHub user/organisation name of the image. </td>
<td> signoz</td>
</tr>
<tr>
<td> DOCKERHUB_USERNAME </td>
<td> Docker hub username </td>
<td> signoz</td>
</tr>
<tr>
<td> DOCKERHUB_TOKEN </td>
<td> Docker hub password/token with push permission </td>
<td> **** </td>
</tr>
<tr>
<td> SONAR_TOKEN </td>
<td> <a href="https://sonarcloud.io">SonarCloud</a> token </td>
<td> **** </td>
</tr>

View File

@@ -62,7 +62,6 @@ jobs:
 secrets: inherit
 with:
 PRIMUS_REF: main
-GO_VERSION: 1.23
 GO_NAME: signoz-community
 GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
 GO_INPUT_ARTIFACT_PATH: frontend/build
@@ -74,8 +73,7 @@
 -X github.com/SigNoz/signoz/pkg/version.variant=community
 -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
 -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
--X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
--X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
+-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
 GO_CGO_ENABLED: 1
 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
 DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch

View File

@@ -67,8 +67,8 @@ jobs:
 echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
 echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
 echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
-echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
-echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
+echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
+echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
 - name: cache-dotenv
 uses: actions/cache@v4
 with:
@@ -93,7 +93,6 @@ jobs:
 secrets: inherit
 with:
 PRIMUS_REF: main
-GO_VERSION: 1.23
 GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
 GO_INPUT_ARTIFACT_PATH: frontend/build
 GO_BUILD_CONTEXT: ./ee/query-service
@@ -105,11 +104,8 @@
 -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
 -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
 -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
--X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
--X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
 -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
--X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
--X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
+-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
 GO_CGO_ENABLED: 1
 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
 DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch

View File

@@ -64,10 +64,8 @@ jobs:
 run: |
 mkdir -p frontend
 echo 'CI=1' > frontend/.env
-echo 'TUNNEL_URL="${{ secrets.NP_TUNNEL_URL }}"' >> frontend/.env
-echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
-echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
-echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
+echo 'TUNNEL_URL=https://telemetry.staging.signoz.cloud/tunnel' >> frontend/.env
+echo 'TUNNEL_DOMAIN=https://telemetry.staging.signoz.cloud' >> frontend/.env
 - name: cache-dotenv
 uses: actions/cache@v4
 with:
@@ -92,7 +90,6 @@ jobs:
 secrets: inherit
 with:
 PRIMUS_REF: main
-GO_VERSION: 1.23
 GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
 GO_INPUT_ARTIFACT_PATH: frontend/build
 GO_BUILD_CONTEXT: ./ee/query-service
@@ -104,11 +101,8 @@
 -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
 -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
 -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
--X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
--X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
 -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
--X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
--X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
+-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
 GO_CGO_ENABLED: 1
 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
 DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch

View File

@@ -18,7 +18,6 @@ jobs:
 with:
 PRIMUS_REF: main
 GO_TEST_CONTEXT: ./...
-GO_VERSION: 1.23
 fmt:
 if: |
 (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -27,7 +26,6 @@ jobs:
 secrets: inherit
 with:
 PRIMUS_REF: main
-GO_VERSION: 1.23
 lint:
 if: |
 (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -36,16 +34,6 @@ jobs:
 secrets: inherit
 with:
 PRIMUS_REF: main
-GO_VERSION: 1.23
-deps:
-if: |
-(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
-(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
-uses: signoz/primus.workflows/.github/workflows/go-deps.yaml@main
-secrets: inherit
-with:
-PRIMUS_REF: main
-GO_VERSION: 1.23
 build:
 if: |
 (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -57,7 +45,7 @@ jobs:
 - name: go-install
 uses: actions/setup-go@v5
 with:
-go-version: "1.23"
+go-version: "1.22"
 - name: qemu-install
 uses: docker/setup-qemu-action@v3
 - name: aarch64-install

View File

@@ -58,7 +58,7 @@ jobs:
 - name: setup-go
 uses: actions/setup-go@v5
 with:
-go-version: "1.23"
+go-version: "1.22"
 - name: cross-compilation-tools
 if: matrix.os == 'ubuntu-latest'
 run: |
@@ -122,7 +122,7 @@
 - name: setup-go
 uses: actions/setup-go@v5
 with:
-go-version: "1.23"
+go-version: "1.22"
 # copy the caches from build
 - name: get-sha

View File

@@ -33,8 +33,8 @@ jobs:
 echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> .env
 echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> .env
 echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
-echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
-echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
+echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env
+echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env
 - name: build-frontend
 run: make js-build
 - name: upload-frontend-artifact
@@ -72,7 +72,7 @@ jobs:
 - name: setup-go
 uses: actions/setup-go@v5
 with:
-go-version: "1.23"
+go-version: "1.22"
 - name: cross-compilation-tools
 if: matrix.os == 'ubuntu-latest'
 run: |
@@ -135,7 +135,7 @@
 - name: setup-go
 uses: actions/setup-go@v5
 with:
-go-version: "1.23"
+go-version: "1.22"
 # copy the caches from build
 - name: get-sha

View File

@@ -44,8 +44,10 @@ jobs:
 - name: run
 run: |
 cd tests/integration && \
-poetry run pytest \
+poetry run pytest -ra \
 --basetemp=./tmp/ \
+-vv \
+--capture=no \
 src/${{matrix.src}} \
 --sqlstore-provider ${{matrix.sqlstore-provider}} \
 --postgres-version ${{matrix.postgres-version}} \

16
.github/workflows/remove-label.yaml vendored Normal file
View File

@@ -0,0 +1,16 @@
name: remove-label
on:
pull_request_target:
types: [synchronize]
jobs:
remove:
runs-on: ubuntu-latest
steps:
- name: Remove label testing-deploy from PR
uses: buildsville/add-remove-label@v2.0.0
with:
label: testing-deploy
type: remove
token: ${{ secrets.GITHUB_TOKEN }}

3
.gitignore vendored
View File

@@ -60,13 +60,14 @@ ee/query-service/db
 e2e/node_modules/
 e2e/test-results/
-e2e/playwright-report/
 e2e/blob-report/
-e2e/playwright/.cache/
 e2e/.auth
 # go
 vendor/
 **/main/**
-__debug_bin**
 # git-town
 .git-branches.toml

View File

@@ -1,33 +0,0 @@
linters:
default: standard
enable:
- bodyclose
- misspell
- nilnil
- sloglint
- depguard
- iface
linters-settings:
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
msg-style: lowercased
key-naming-case: snake
depguard:
rules:
nozap:
deny:
- pkg: "go.uber.org/zap"
desc: "Do not use zap logger. Use slog instead."
iface:
enable:
- identical
issues:
exclude-dirs:
- "pkg/query-service"
- "ee/query-service"
- "scripts/"

View File

@@ -14,9 +14,9 @@ ARCHS ?= amd64 arm64
 TARGET_DIR ?= $(shell pwd)/target
 ZEUS_URL ?= https://api.signoz.cloud
-GO_BUILD_LDFLAG_ZEUS_URL = -X github.com/SigNoz/signoz/ee/zeus.url=$(ZEUS_URL)
-LICENSE_URL ?= https://license.signoz.io
-GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=$(LICENSE_URL)
+GO_BUILD_LDFLAG_ZEUS_URL = -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=$(ZEUS_URL)
+LICENSE_URL ?= https://license.signoz.io/api/v1
+GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=$(LICENSE_URL)
 GO_BUILD_VERSION_LDFLAGS = -X github.com/SigNoz/signoz/pkg/version.version=$(VERSION) -X github.com/SigNoz/signoz/pkg/version.hash=$(COMMIT_SHORT_SHA) -X github.com/SigNoz/signoz/pkg/version.time=$(TIMESTAMP) -X github.com/SigNoz/signoz/pkg/version.branch=$(BRANCH_NAME)
 GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
@@ -56,11 +56,6 @@ devenv-clickhouse: ## Run clickhouse in devenv
 @cd .devenv/docker/clickhouse; \
 docker compose -f compose.yaml up -d
-.PHONY: devenv-postgres
-devenv-postgres: ## Run postgres in devenv
-@cd .devenv/docker/postgres; \
-docker compose -f compose.yaml up -d
 ##############################################################
 # go commands
 ##############################################################
@@ -76,7 +71,9 @@ go-run-enterprise: ## Runs the enterprise go backend server
 go run -race \
 $(GO_BUILD_CONTEXT_ENTERPRISE)/main.go \
 --config ./conf/prometheus.yml \
---cluster cluster
+--cluster cluster \
+--use-logs-new-schema true \
+--use-trace-new-schema true
 .PHONY: go-test
 go-test: ## Runs go unit tests
@@ -94,7 +91,9 @@ go-run-community: ## Runs the community go backend server
 go run -race \
 $(GO_BUILD_CONTEXT_COMMUNITY)/main.go \
 --config ./conf/prometheus.yml \
---cluster cluster
+--cluster cluster \
+--use-logs-new-schema true \
+--use-trace-new-schema true
 .PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
 go-build-community: ## Builds the go backend server for community

View File

@@ -8,7 +8,7 @@
 <p align="center">All your logs, metrics, and traces in one place. Monitor your application, spot issues before they occur and troubleshoot downtime quickly with rich context. SigNoz is a cost-effective open-source alternative to Datadog and New Relic. Visit <a href="https://signoz.io" target="_blank">signoz.io</a> for the full documentation, tutorials, and guide.</p>
 <p align="center">
-<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/signoz.svg?label=Docker%20Downloads"> </a>
+<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/query-service?label=Docker Downloads"> </a>
 <img alt="GitHub issues" src="https://img.shields.io/github/issues/signoz/signoz"> </a>
 <a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNozHQ&hashtags=opensource,signoz,observability">
 <img alt="tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"> </a>

View File

@@ -50,7 +50,7 @@ cache:
 # Time-to-live for cache entries in memory. Specify the duration in ns
 ttl: 60000000000
 # The interval at which the cache will be cleaned up
-cleanup_interval: 1m
+cleanupInterval: 1m
 # redis: Uses Redis as the caching backend.
 redis:
 # The hostname or IP address of the Redis server.
@@ -103,13 +103,6 @@ telemetrystore:
 clickhouse:
 # The DSN to use for clickhouse.
 dsn: tcp://localhost:9000
-# The query settings for clickhouse.
-settings:
-max_execution_time: 0
-max_execution_time_leaf: 0
-timeout_before_checking_execution_speed: 0
-max_bytes_to_read: 0
-max_result_rows_for_ch_query: 0
 ##################### Prometheus #####################
 prometheus:
@@ -164,63 +157,3 @@ alertmanager:
 maintenance_interval: 15m
 # Retention of the notification logs.
 retention: 120h
-##################### Emailing #####################
-emailing:
-# Whether to enable emailing.
-enabled: false
-templates:
-# The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
-directory: /opt/signoz/conf/templates/email
-smtp:
-# The SMTP server address.
-address: localhost:25
-# The email address to use for the SMTP server.
-from:
-# The hello message to use for the SMTP server.
-hello:
-# The static headers to send with the email.
-headers: {}
-auth:
-# The username to use for the SMTP server.
-username:
-# The password to use for the SMTP server.
-password:
-# The secret to use for the SMTP server.
-secret:
-# The identity to use for the SMTP server.
-identity:
-tls:
-# Whether to enable TLS. It should be false in most cases since the authentication mechanism should use the STARTTLS extension instead.
-enabled: false
-# Whether to skip TLS verification.
-insecure_skip_verify: false
-# The path to the CA file.
-ca_file_path:
-# The path to the key file.
-key_file_path:
-# The path to the certificate file.
-cert_file_path:
-##################### Sharder (experimental) #####################
-sharder:
-# Specifies the sharder provider to use.
-provider: noop
-single:
-# The org id to which this instance belongs to.
-org_id: org_id
-##################### Analytics #####################
-analytics:
-# Whether to enable analytics.
-enabled: false
-segment:
-# The key to use for segment.
-key: ""
-##################### StatsReporter #####################
-statsreporter:
-# Whether to enable stats reporter. This is used to provide valuable insights to the SigNoz team. It does not collect any sensitive/PII data.
-enabled: true
-# The interval at which the stats are collected.
-interval: 6h

View File

@@ -174,9 +174,11 @@ services:
 # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
 signoz:
 !!merge <<: *db-depend
-image: signoz/signoz:v0.87.0
+image: signoz/signoz:v0.79.1
 command:
 - --config=/root/config/prometheus.yml
+- --use-logs-new-schema=true
+- --use-trace-new-schema=true
 ports:
 - "8080:8080" # signoz port
 # - "6060:6060" # pprof port
@@ -206,7 +208,7 @@ services:
 retries: 3
 otel-collector:
 !!merge <<: *db-depend
-image: signoz/signoz-otel-collector:v0.111.42
+image: signoz/signoz-otel-collector:v0.111.39
 command:
 - --config=/etc/otel-collector-config.yaml
 - --manager-config=/etc/manager-config.yaml
@@ -230,7 +232,7 @@ services:
 - signoz
 schema-migrator:
 !!merge <<: *common
-image: signoz/signoz-schema-migrator:v0.111.42
+image: signoz/signoz-schema-migrator:v0.111.39
 deploy:
 restart_policy:
 condition: on-failure

View File

@@ -110,9 +110,11 @@ services:
 # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
 signoz:
 !!merge <<: *db-depend
-image: signoz/signoz:v0.87.0
+image: signoz/signoz:v0.79.1
 command:
 - --config=/root/config/prometheus.yml
+- --use-logs-new-schema=true
+- --use-trace-new-schema=true
 ports:
 - "8080:8080" # signoz port
 # - "6060:6060" # pprof port
@@ -141,7 +143,7 @@ services:
 retries: 3
 otel-collector:
 !!merge <<: *db-depend
-image: signoz/signoz-otel-collector:v0.111.42
+image: signoz/signoz-otel-collector:v0.111.39
 command:
 - --config=/etc/otel-collector-config.yaml
 - --manager-config=/etc/manager-config.yaml
@@ -165,7 +167,7 @@ services:
 - signoz
 schema-migrator:
 !!merge <<: *common
-image: signoz/signoz-schema-migrator:v0.111.42
+image: signoz/signoz-schema-migrator:v0.111.39
 deploy:
 restart_policy:
 condition: on-failure

View File

@@ -26,7 +26,7 @@ processors:
 detectors: [env, system]
 timeout: 2s
 signozspanmetrics/delta:
-metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
+metrics_exporter: clickhousemetricswrite
 metrics_flush_interval: 60s
 latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
 dimensions_cache_size: 100000
@@ -64,10 +64,8 @@ exporters:
 endpoint: tcp://clickhouse:9000/signoz_metrics
 resource_to_telemetry_conversion:
 enabled: true
-disable_v2: true
 clickhousemetricswrite/prometheus:
 endpoint: tcp://clickhouse:9000/signoz_metrics
-disable_v2: true
 signozclickhousemetrics:
 dsn: tcp://clickhouse:9000/signoz_metrics
 clickhouselogsexporter:

View File

@@ -177,10 +177,12 @@ services:
 # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
 signoz:
 !!merge <<: *db-depend
-image: signoz/signoz:${VERSION:-v0.87.0}
+image: signoz/signoz:${VERSION:-v0.79.1}
 container_name: signoz
 command:
 - --config=/root/config/prometheus.yml
+- --use-logs-new-schema=true
+- --use-trace-new-schema=true
 ports:
 - "8080:8080" # signoz port
 # - "6060:6060" # pprof port
@@ -210,7 +212,7 @@ services:
 # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
 otel-collector:
 !!merge <<: *db-depend
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
 container_name: signoz-otel-collector
 command:
 - --config=/etc/otel-collector-config.yaml
@@ -236,7 +238,7 @@ services:
 condition: service_healthy
 schema-migrator-sync:
 !!merge <<: *common
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
 container_name: schema-migrator-sync
 command:
 - sync
@@ -247,7 +249,7 @@ services:
 condition: service_healthy
 schema-migrator-async:
 !!merge <<: *db-depend
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
 container_name: schema-migrator-async
 command:
 - async

View File

@@ -110,10 +110,12 @@ services:
 # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
 signoz:
 !!merge <<: *db-depend
-image: signoz/signoz:${VERSION:-v0.87.0}
+image: signoz/signoz:${VERSION:-v0.79.1}
 container_name: signoz
 command:
 - --config=/root/config/prometheus.yml
+- --use-logs-new-schema=true
+- --use-trace-new-schema=true
 ports:
 - "8080:8080" # signoz port
 # - "6060:6060" # pprof port
@@ -142,7 +144,7 @@ services:
 retries: 3
 otel-collector:
 !!merge <<: *db-depend
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
 container_name: signoz-otel-collector
 command:
 - --config=/etc/otel-collector-config.yaml
@@ -164,7 +166,7 @@ services:
 condition: service_healthy
 schema-migrator-sync:
 !!merge <<: *common
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
 container_name: schema-migrator-sync
 command:
 - sync
@@ -176,7 +178,7 @@ services:
 restart: on-failure
 schema-migrator-async:
 !!merge <<: *db-depend
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
 container_name: schema-migrator-async
 command:
 - async

View File

@@ -26,7 +26,7 @@ processors:
 detectors: [env, system]
 timeout: 2s
 signozspanmetrics/delta:
-metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
+metrics_exporter: clickhousemetricswrite
 metrics_flush_interval: 60s
 latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
 dimensions_cache_size: 100000
@@ -62,12 +62,10 @@ exporters:
 use_new_schema: true
 clickhousemetricswrite:
 endpoint: tcp://clickhouse:9000/signoz_metrics
-disable_v2: true
 resource_to_telemetry_conversion:
 enabled: true
 clickhousemetricswrite/prometheus:
 endpoint: tcp://clickhouse:9000/signoz_metrics
-disable_v2: true
 signozclickhousemetrics:
 dsn: tcp://clickhouse:9000/signoz_metrics
 clickhouselogsexporter:

View File

@@ -93,7 +93,7 @@ check_os() {
 ;;
 Red\ Hat*)
 desired_os=1
-os="rhel"
+os="red hat"
 package_manager="yum"
 ;;
 CentOS*)

View File

@@ -1,51 +0,0 @@
# Endpoint
This guide outlines the recommended approach for designing endpoints, with a focus on entity relationships, RESTful structure, and examples from the codebase.
## How do we design an endpoint?
### Understand the core entities and their relationships
Start with understanding the core entities and their relationships. For example:
- **Organization**: an organization can have multiple users
### Structure Endpoints RESTfully
Endpoints should reflect the resource hierarchy and follow RESTful conventions. Use clear, **pluralized resource names** and versioning. For example:
- `POST /v1/organizations` — Create an organization
- `GET /v1/organizations/:id` — Get an organization by id
- `DELETE /v1/organizations/:id` — Delete an organization by id
- `PUT /v1/organizations/:id` — Update an organization by id
- `GET /v1/organizations/:id/users` — Get all users in an organization
- `GET /v1/organizations/me/users` — Get all users in my organization
Think in terms of resource navigation in a file system. For example, to find your organization, you would navigate to the root of the file system and then to the `organizations` directory. To find a user in an organization, you would navigate to the `organizations` directory and then to the `id` directory.
```bash
v1/
├── organizations/
│ └── 123/
│ └── users/
```
`me` endpoints are special. They are used to determine the actual id via some auth/external mechanism. For `me` endpoints, think of the `me` directory being symlinked to your organization directory. For example, if you are a part of the organization `123`, the `me` directory will be symlinked to `/v1/organizations/123`:
```bash
v1/
├── organizations/
│ └── me/ -> symlink to /v1/organizations/123
│ └── users/
│ └── 123/
│ └── users/
```
> 💡 **Note**: There are various ways to structure endpoints. Some prefer to use singular resource names instead of `me`. Others prefer to use singular resource names for all endpoints. We have, however, chosen to standardize our endpoints in the manner described above.
## What should I remember?
- Use clear, **plural resource names**
- Use `me` endpoints for determining the actual id via some auth mechanism
> 💡 **Note**: When in doubt, diagram the relationships and walk through the user flows as if navigating a file system. This will help you design endpoints that are both logical and user-friendly.
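To make the structure above concrete, here is a minimal sketch of registering these organization routes. It assumes a gorilla/mux router and an invented `OrganizationHandler` interface; neither is taken from this change.
```go
package api

import (
	"net/http"

	"github.com/gorilla/mux"
)

// OrganizationHandler is a hypothetical handler set for the routes described above.
type OrganizationHandler interface {
	Create(http.ResponseWriter, *http.Request)
	Get(http.ResponseWriter, *http.Request)
	Update(http.ResponseWriter, *http.Request)
	Delete(http.ResponseWriter, *http.Request)
	ListUsers(http.ResponseWriter, *http.Request)
	ListMyUsers(http.ResponseWriter, *http.Request)
}

// RegisterOrganizationRoutes wires the versioned, pluralized routes from the guide.
func RegisterOrganizationRoutes(router *mux.Router, h OrganizationHandler) {
	router.HandleFunc("/v1/organizations", h.Create).Methods(http.MethodPost)
	router.HandleFunc("/v1/organizations/{id}", h.Get).Methods(http.MethodGet)
	router.HandleFunc("/v1/organizations/{id}", h.Update).Methods(http.MethodPut)
	router.HandleFunc("/v1/organizations/{id}", h.Delete).Methods(http.MethodDelete)
	router.HandleFunc("/v1/organizations/{id}/users", h.ListUsers).Methods(http.MethodGet)
	// "me" is resolved to the caller's organization id via auth claims before delegating.
	router.HandleFunc("/v1/organizations/me/users", h.ListMyUsers).Methods(http.MethodGet)
}
```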

View File

@@ -1,103 +0,0 @@
# Errors
SigNoz includes its own structured [errors](/pkg/errors/errors.go) package. It's built on top of Go's `error` interface, extending it to add additional context that helps provide more meaningful error messages throughout the application.
## How to use it?
To use the SigNoz structured errors package, use these functions instead of the standard library alternatives:
```go
// Instead of errors.New()
errors.New(typ, code, message)
// Instead of fmt.Errorf()
errors.Newf(typ, code, message, args...)
```
### Typ
The Typ (read as Type, defined as `typ`) is used to categorize errors across the codebase and is loosely coupled with HTTP/GRPC status codes. All predefined types can be found in [pkg/errors/type.go](/pkg/errors/type.go). For example:
- `TypeInvalidInput` - Indicates invalid input was provided
- `TypeNotFound` - Indicates a resource was not found
By design, `typ` is unexported and cannot be declared outside of [errors](/pkg/errors/errors.go) package. This ensures that it is consistent across the codebase and is used in a way that is meaningful.
### Code
Codes are used to provide more granular categorization within types. For instance, a type of `TypeInvalidInput` might have codes like `CodeInvalidEmail` or `CodeInvalidPassword`.
To create new error codes, use the `errors.MustNewCode` function:
```go
var (
CodeThingAlreadyExists = errors.MustNewCode("thing_already_exists")
CodeThingNotFound = errors.MustNewCode("thing_not_found")
)
```
> 💡 **Note**: Error codes must match the regex `^[a-z_]+$` otherwise the code will panic.
## Show me some examples
### Using the error
A basic example of using the error:
```go
var (
CodeThingAlreadyExists = errors.MustNewCode("thing_already_exists")
)
func CreateThing(id string) error {
t, err := thing.GetFromStore(id)
if err != nil {
if errors.As(err, errors.TypeNotFound) {
// thing was not found, create it
return thing.Create(id)
}
// something else went wrong, wrap the error with more context
return errors.Wrapf(err, errors.TypeInternal, errors.CodeUnknown, "failed to get thing from store")
}
return errors.Newf(errors.TypeAlreadyExists, CodeThingAlreadyExists, "thing with id %s already exists", id)
}
```
### Changing the error
Sometimes you may want to change the error while preserving the message:
```go
func GetUserSecurely(id string) (*User, error) {
user, err := repository.GetUser(id)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
// Convert NotFound to Forbidden for security reasons
return nil, errors.New(errors.TypeForbidden, errors.CodeAccessDenied, "access denied to requested resource")
}
return nil, err
}
return user, nil
}
```
## Why do we need this?
In a large codebase like SigNoz, error handling is critical for maintaining reliability, debuggability, and a good user experience. We believe that it is the **responsibility of a function** to return **well-defined** errors that **accurately describe what went wrong**. With our structured error system:
- Functions can create precise errors with appropriate additional context
- Callers can make informed decisions based on the additional context
- Error context is preserved and enhanced as it moves up the call stack
The caller (which can be another function or a HTTP/gRPC handler or something else entirely), can then choose to use this error to take appropriate actions such as:
- A function can branch into different paths based on the context
- An HTTP/gRPC handler can derive the correct status code and message from the error and send it to the client
- Logging systems can capture structured error information for better diagnostics
Although there might be cases where this might seem too verbose, it makes the code more maintainable and consistent. A little verbose code is better than clever code that doesn't provide enough context.
## What should I remember?
- Think about error handling as you write your code, not as an afterthought.
- Always use the [errors](/pkg/errors/errors.go) package instead of the standard library's `errors.New()` or `fmt.Errorf()`.
- Always assign appropriate codes to errors when creating them instead of using the "catch all" error codes defined in [pkg/errors/code.go](/pkg/errors/code.go).
- Use `errors.Wrapf()` to add context to errors while preserving the original when appropriate.
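As a rough sketch of the point about handlers deriving status codes, the example below maps error types to HTTP statuses using only the `errors.Newf` and `errors.Ast` helpers shown above. The handler and `getThing` function are invented for illustration; handlers elsewhere in this diff respond through the render package rather than `http.Error`.
```go
package example

import (
	"encoding/json"
	"net/http"

	"github.com/SigNoz/signoz/pkg/errors"
)

// getThing is an invented function that returns structured errors.
func getThing(id string) (map[string]string, error) {
	if id == "" {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id must not be empty")
	}
	return map[string]string{"id": id}, nil
}

// GetThingHandler derives the HTTP status from the error type before responding.
func GetThingHandler(rw http.ResponseWriter, r *http.Request) {
	thing, err := getThing(r.URL.Query().Get("id"))
	if err != nil {
		status := http.StatusInternalServerError
		switch {
		case errors.Ast(err, errors.TypeInvalidInput):
			status = http.StatusBadRequest
		case errors.Ast(err, errors.TypeNotFound):
			status = http.StatusNotFound
		}
		http.Error(rw, err.Error(), status)
		return
	}
	_ = json.NewEncoder(rw).Encode(thing)
}
```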

View File

@@ -1,106 +0,0 @@
# Provider
SigNoz is built on the provider pattern, a design approach where code is organized into providers that handle specific application responsibilities. Providers act as adapter components that integrate with external services and deliver required functionality to the application.
> 💡 **Note**: Coming from a DDD background? Providers are similar (not exactly the same) to adapter/infrastructure services.
## How to create a new provider?
To create a new provider, create a directory in the `pkg/` directory named after your provider. The provider package consists of four key components:
- **Interface** (`pkg/<name>/<name>.go`): Defines the provider's interface. Other packages should import this interface to use the provider.
- **Config** (`pkg/<name>/config.go`): Contains provider configuration, implementing the `factory.Config` interface from [factory/config.go](/pkg/factory/config.go).
- **Implementation** (`pkg/<name>/<implname><name>/provider.go`): Contains the provider implementation, including a `NewProvider` function that returns a `factory.Provider` interface from [factory/provider.go](/pkg/factory/provider.go).
- **Mock** (`pkg/<name>/<name>test.go`): Provides mocks for the provider, typically used by dependent packages for unit testing.
For example, the [prometheus](/pkg/prometheus) provider delivers a prometheus engine to the application:
- `pkg/prometheus/prometheus.go` - Interface definition
- `pkg/prometheus/config.go` - Configuration
- `pkg/prometheus/clickhouseprometheus/provider.go` - Clickhouse-powered implementation
- `pkg/prometheus/prometheustest/provider.go` - Mock implementation
## How to wire it up?
The `pkg/signoz` package contains the inversion of control container responsible for wiring providers. It handles instantiation, configuration, and assembly of providers based on configuration metadata.
> 💡 **Note**: Coming from a Java background? Providers are similar to Spring beans.
Wiring up a provider involves three steps:
1. Wiring up the configuration
Add your config from `pkg/<name>/config.go` to the `pkg/signoz/config.Config` struct and in new factories:
```go
type Config struct {
...
MyProvider myprovider.Config `mapstructure:"myprovider"`
...
}
func NewConfig(ctx context.Context, resolverConfig config.ResolverConfig, ....) (Config, error) {
...
configFactories := []factory.ConfigFactory{
myprovider.NewConfigFactory(),
}
...
}
```
2. Wiring up the provider
Add available provider implementations in `pkg/signoz/provider.go`:
```go
func NewMyProviderFactories() factory.NamedMap[factory.ProviderFactory[myprovider.MyProvider, myprovider.Config]] {
return factory.MustNewNamedMap(
myproviderone.NewFactory(),
myprovidertwo.NewFactory(),
)
}
```
3. Instantiate the provider by adding it to the `SigNoz` struct in `pkg/signoz/signoz.go`:
```go
type SigNoz struct {
...
MyProvider myprovider.MyProvider
...
}
func New(...) (*SigNoz, error) {
...
myprovider, err := myproviderone.New(ctx, settings, config.MyProvider, "one/two")
if err != nil {
return nil, err
}
...
}
```
## How to use it?
To use a provider, import its interface. For example, to use the prometheus provider, import `pkg/prometheus/prometheus.go`:
```go
import "github.com/SigNoz/signoz/pkg/prometheus/prometheus"
func CreateSomething(ctx context.Context, prometheus prometheus.Prometheus) {
...
prometheus.DoSomething()
...
}
```
## Why do we need this?
Like any dependency injection framework, providers decouple the codebase from implementation details. This is especially valuable in SigNoz's large codebase, where we need to swap implementations without changing dependent code. The provider pattern offers several benefits apart from the obvious one of decoupling:
- Configuration is **defined with each provider and centralized in one place**, making it easier to understand and manage through various methods (environment variables, config files, etc.)
- Provider mocking is **straightforward for unit testing**, with a consistent pattern for locating mocks
- **Multiple implementations** of the same provider are **supported**, as demonstrated by our sqlstore provider
## What should I remember?
- Use the provider pattern wherever applicable.
- Always create a provider **irrespective of the number of implementations**. This makes it easier to add new implementations in the future.
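For completeness, here is a sketch of the implementation component (`pkg/<name>/<implname><name>/provider.go`) described above. It reuses the hypothetical `myprovider` names from this guide and mirrors the factory signature visible in the httplicensing provider later in this diff; it is an illustration, not code from the repository.
```go
package myproviderone

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/myprovider"
)

// provider is the concrete type behind the hypothetical myprovider.MyProvider interface;
// methods satisfying that interface would follow below.
type provider struct {
	settings factory.ScopedProviderSettings
	config   myprovider.Config
}

// NewFactory registers this implementation under the name "one".
func NewFactory() factory.ProviderFactory[myprovider.MyProvider, myprovider.Config] {
	return factory.NewProviderFactory(factory.MustNewName("one"), New)
}

func New(ctx context.Context, ps factory.ProviderSettings, config myprovider.Config) (myprovider.MyProvider, error) {
	return &provider{
		settings: factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/pkg/myprovider/myproviderone"),
		config:   config,
	}, nil
}
```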

View File

@@ -1,11 +0,0 @@
# Go
This document provides an overview of contributing to the SigNoz backend written in Go. The SigNoz backend is built with Go, focusing on performance, maintainability, and developer experience. We strive for clean, idiomatic code that follows established Go practices while addressing the unique needs of an observability platform.
We adhere to three primary style guides as our foundation:
- [Effective Go](https://go.dev/doc/effective_go) - For writing idiomatic Go code
- [Code Review Comments](https://go.dev/wiki/CodeReviewComments) - For understanding common comments in code reviews
- [Google Style Guide](https://google.github.io/styleguide/go/) - Additional practices from Google
We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development. In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package.

View File

@@ -1,94 +0,0 @@
# SQL
SigNoz utilizes a relational database to store metadata including organization information, user data and other settings.
## How to use it?
The database interface is defined in [SQLStore](/pkg/sqlstore/sqlstore.go). SigNoz leverages the Bun ORM to interact with the underlying database. To access the database instance, use the `BunDBCtx` function. For operations that require transactions across multiple database operations, use the `RunInTxCtx` function. This function embeds a transaction in the context, which propagates through various functions in the callback.
```go
type Thing struct {
bun.BaseModel
ID types.Identifiable `bun:",embed"`
SomeColumn string `bun:"some_column"`
TimeAuditable types.TimeAuditable `bun:",embed"`
OrgID string `bun:"org_id"`
}
func GetThing(ctx context.Context, id string) (*Thing, error) {
thing := new(Thing)
err := sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(thing).
Where("id = ?", id).
Scan(ctx)
return thing, err
}
func CreateThing(ctx context.Context, thing *Thing) error {
return sqlstore.
BunDBCtx(ctx).
NewInsert().
Model(thing).
Exec(ctx)
}
```
> 💡 **Note**: Always use line breaks while working with SQL queries to enhance code readability.
> 💡 **Note**: Always use the `new` function to create new instances of structs.
## What are hooks?
Hooks are user-defined functions that execute before and/or after specific database operations. These hooks are particularly useful for generating telemetry data such as logs, traces, and metrics, providing visibility into database interactions. Hooks are defined in the [SQLStoreHook](/pkg/sqlstore/sqlstore.go) interface.
## How is the schema designed?
SigNoz implements a star schema design with the organizations table as the central entity. All other tables link to the organizations table via foreign key constraints on the `org_id` column. This design ensures that every entity within the system is either directly or indirectly associated with an organization.
```mermaid
erDiagram
ORGANIZATIONS {
string id PK
timestamp created_at
timestamp updated_at
}
ENTITY_A {
string id PK
timestamp created_at
timestamp updated_at
string org_id FK
}
ENTITY_B {
string id PK
timestamp created_at
timestamp updated_at
string org_id FK
}
ORGANIZATIONS ||--o{ ENTITY_A : contains
ORGANIZATIONS ||--o{ ENTITY_B : contains
```
> 💡 **Note**: There are rare exceptions to the above star schema design. Consult with the maintainers before deviating from the above design.
All tables follow a consistent primary key pattern using a `id` column (referenced by the `types.Identifiable` struct) and include `created_at` and `updated_at` columns (referenced by the `types.TimeAuditable` struct) for audit purposes.
## How to write migrations?
For schema migrations, use the [SQLMigration](/pkg/sqlmigration/sqlmigration.go) interface and write the migration in the same package. When creating migrations, adhere to these guidelines:
- Do not implement **`ON CASCADE` foreign key constraints**. Deletion operations should be handled explicitly in application logic rather than delegated to the database.
- Do not **import types from the types package** in the `sqlmigration` package. Instead, define the required types within the migration package itself. This practice ensures migration stability as the core types evolve over time.
- Do not implement **`Down` migrations**. As the codebase matures, we may introduce this capability, but for now, the `Down` function should remain empty.
- Always write **idempotent** migrations. This means that if the migration is run multiple times, it should not cause an error.
- A migration which is **dependent on the underlying dialect** (sqlite, postgres, etc) should be written as part of the [SQLDialect](/pkg/sqlstore/sqlstore.go) interface. The implementation needs to go in the dialect specific package of the respective database.
## What should I remember?
- Use `BunDBCtx` and `RunInTxCtx` to access the database instance and execute transactions respectively.
- While designing new tables, ensure the consistency of `id`, `created_at`, `updated_at` and an `org_id` column with a foreign key constraint to the `organizations` table (unless the table serves as a transitive entity not directly associated with an organization but indirectly associated with one).
- Implement deletion logic in the application rather than relying on cascading deletes in the database.
- While writing migrations, adhere to the guidelines mentioned above.
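As an illustration of the idempotency guideline, here is a minimal sketch of a migration whose `Up` can safely run more than once. The table, the locally defined model, and the `Up`/`Down` signatures are assumptions for the example (the real interface lives in /pkg/sqlmigration/sqlmigration.go), not code from this diff.
```go
package sqlmigration

import (
	"context"

	"github.com/uptrace/bun"
)

// thing mirrors only the columns this migration needs; defining it locally keeps
// the migration stable even if the core types evolve later.
type thing struct {
	bun.BaseModel `bun:"table:thing"`

	ID        string `bun:"id,pk"`
	OrgID     string `bun:"org_id,notnull"`
	CreatedAt int64  `bun:"created_at"`
	UpdatedAt int64  `bun:"updated_at"`
}

type addThing struct{}

// Up is idempotent: IF NOT EXISTS makes re-running the migration a no-op.
func (*addThing) Up(ctx context.Context, db *bun.DB) error {
	_, err := db.NewCreateTable().
		Model((*thing)(nil)).
		IfNotExists().
		ForeignKey(`("org_id") REFERENCES "organizations" ("id")`).
		Exec(ctx)
	return err
}

// Down stays empty, per the guideline above.
func (*addThing) Down(context.Context, *bun.DB) error { return nil }
```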

85
ee/http/middleware/pat.go Normal file
View File

@@ -0,0 +1,85 @@
package middleware
import (
"net/http"
"time"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"go.uber.org/zap"
)
type Pat struct {
store sqlstore.SQLStore
uuid *authtypes.UUID
headers []string
}
func NewPat(store sqlstore.SQLStore, headers []string) *Pat {
return &Pat{store: store, uuid: authtypes.NewUUID(), headers: headers}
}
func (p *Pat) Wrap(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
var values []string
var patToken string
var pat eeTypes.StorablePersonalAccessToken
for _, header := range p.headers {
values = append(values, r.Header.Get(header))
}
ctx, err := p.uuid.ContextFromRequest(r.Context(), values...)
if err != nil {
next.ServeHTTP(w, r)
return
}
patToken, ok := authtypes.UUIDFromContext(ctx)
if !ok {
next.ServeHTTP(w, r)
return
}
err = p.store.BunDB().NewSelect().Model(&pat).Where("token = ?", patToken).Scan(r.Context())
if err != nil {
next.ServeHTTP(w, r)
return
}
if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
next.ServeHTTP(w, r)
return
}
// get user from db
user := types.User{}
err = p.store.BunDB().NewSelect().Model(&user).Where("id = ?", pat.UserID).Scan(r.Context())
if err != nil {
next.ServeHTTP(w, r)
return
}
jwt := authtypes.Claims{
UserID: user.ID,
GroupID: user.GroupID,
Email: user.Email,
OrgID: user.OrgID,
}
ctx = authtypes.NewContextWithClaims(ctx, jwt)
r = r.WithContext(ctx)
next.ServeHTTP(w, r)
pat.LastUsed = time.Now().Unix()
_, err = p.store.BunDB().NewUpdate().Model(&pat).Column("last_used").Where("token = ?", patToken).Where("revoked = false").Exec(r.Context())
if err != nil {
zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
}
})
}
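For context, a hedged sketch of how a middleware with this `Wrap(next http.Handler) http.Handler` shape might be mounted; the router setup, store wiring, and the `SIGNOZ-API-KEY` header name are illustrative assumptions, not part of this change.
```go
package main

import (
	"net/http"

	"github.com/gorilla/mux"

	eemiddleware "github.com/SigNoz/signoz/ee/http/middleware"
	"github.com/SigNoz/signoz/pkg/sqlstore"
)

// newRouter shows that Pat.Wrap matches the func(http.Handler) http.Handler shape
// expected by mux middleware, so it can be registered with Use.
func newRouter(store sqlstore.SQLStore) *mux.Router {
	router := mux.NewRouter()

	pat := eemiddleware.NewPat(store, []string{"SIGNOZ-API-KEY"}) // header name is an assumption
	router.Use(pat.Wrap)

	router.HandleFunc("/api/v1/health", func(rw http.ResponseWriter, r *http.Request) {
		rw.WriteHeader(http.StatusOK)
	})
	return router
}
```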

View File

@@ -1,26 +0,0 @@
package licensing
import (
"fmt"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/licensing"
)
var (
config licensing.Config
once sync.Once
)
// initializes the licensing configuration
func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
once.Do(func() {
config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
if err := config.Validate(); err != nil {
panic(fmt.Errorf("invalid licensing config: %w", err))
}
})
return config
}

View File

@@ -1,168 +0,0 @@
package httplicensing
import (
"context"
"encoding/json"
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type licensingAPI struct {
licensing licensing.Licensing
}
func NewLicensingAPI(licensing licensing.Licensing) licensing.API {
return &licensingAPI{licensing: licensing}
}
func (api *licensingAPI) Activate(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
req := new(licensetypes.PostableLicense)
err = json.NewDecoder(r.Body).Decode(&req)
if err != nil {
render.Error(rw, err)
return
}
err = api.licensing.Activate(r.Context(), orgID, req.Key)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusAccepted, nil)
}
func (api *licensingAPI) GetActive(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
license, err := api.licensing.GetActive(r.Context(), orgID)
if err != nil {
render.Error(rw, err)
return
}
gettableLicense := licensetypes.NewGettableLicense(license.Data, license.Key)
render.Success(rw, http.StatusOK, gettableLicense)
}
func (api *licensingAPI) Refresh(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
err = api.licensing.Refresh(r.Context(), orgID)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusNoContent, nil)
}
func (api *licensingAPI) Checkout(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
req := new(licensetypes.PostableSubscription)
if err := json.NewDecoder(r.Body).Decode(req); err != nil {
render.Error(rw, err)
return
}
gettableSubscription, err := api.licensing.Checkout(ctx, orgID, req)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusCreated, gettableSubscription)
}
func (api *licensingAPI) Portal(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
req := new(licensetypes.PostableSubscription)
if err := json.NewDecoder(r.Body).Decode(req); err != nil {
render.Error(rw, err)
return
}
gettableSubscription, err := api.licensing.Portal(ctx, orgID, req)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusCreated, gettableSubscription)
}
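As a hedged illustration of how these handlers are exercised, a client-side sketch of calling the Activate endpoint, assuming it is mounted at POST /api/v3/licenses (as the route registration later in this diff suggests), that the JSON field for PostableLicense.Key is "key", and that the caller holds an admin token; host and credentials are placeholders.

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	body := bytes.NewBufferString(`{"key": "<license-key>"}`) // JSON field name assumed

	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v3/licenses", body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer <admin-jwt>") // route requires admin access

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Println(resp.StatusCode) // Activate responds with 202 Accepted on success
}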

View File

@@ -1,226 +0,0 @@
package httplicensing
import (
"context"
"encoding/json"
"time"
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
type provider struct {
store licensetypes.Store
zeus zeus.Zeus
config licensing.Config
settings factory.ScopedProviderSettings
orgGetter organization.Getter
stopChan chan struct{}
}
func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
return New(ctx, providerSettings, config, store, zeus, orgGetter)
})
}
func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) (licensing.Licensing, error) {
settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing")
licensestore := sqllicensingstore.New(sqlstore)
return &provider{
store: licensestore,
zeus: zeus,
config: config,
settings: settings,
orgGetter: orgGetter,
stopChan: make(chan struct{}),
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
tick := time.NewTicker(provider.config.PollInterval)
defer tick.Stop()
err := provider.Validate(ctx)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
}
for {
select {
case <-provider.stopChan:
return nil
case <-tick.C:
err := provider.Validate(ctx)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
}
}
}
}
func (provider *provider) Stop(ctx context.Context) error {
provider.settings.Logger().DebugContext(ctx, "license validation stopped")
close(provider.stopChan)
return nil
}
func (provider *provider) Validate(ctx context.Context) error {
organizations, err := provider.orgGetter.ListByOwnedKeyRange(ctx)
if err != nil {
return err
}
for _, organization := range organizations {
err := provider.Refresh(ctx, organization.ID)
if err != nil {
return err
}
}
return nil
}
func (provider *provider) Activate(ctx context.Context, organizationID valuer.UUID, key string) error {
data, err := provider.zeus.GetLicense(ctx, key)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to fetch license data from upstream server")
}
license, err := licensetypes.NewLicense(data, organizationID)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity")
}
storableLicense := licensetypes.NewStorableLicenseFromLicense(license)
err = provider.store.Create(ctx, storableLicense)
if err != nil {
return err
}
return nil
}
func (provider *provider) GetActive(ctx context.Context, organizationID valuer.UUID) (*licensetypes.License, error) {
storableLicenses, err := provider.store.GetAll(ctx, organizationID)
if err != nil {
return nil, err
}
activeLicense, err := licensetypes.GetActiveLicenseFromStorableLicenses(storableLicenses, organizationID)
if err != nil {
return nil, err
}
return activeLicense, nil
}
func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUID) error {
activeLicense, err := provider.GetActive(ctx, organizationID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil
}
provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue())
return err
}
data, err := provider.zeus.GetLicense(ctx, activeLicense.Key)
if err != nil {
if time.Since(activeLicense.LastValidatedAt) > time.Duration(provider.config.FailureThreshold)*provider.config.PollInterval {
activeLicense.UpdateFeatures(licensetypes.BasicPlan)
updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
if err != nil {
return err
}
return nil
}
return err
}
err = activeLicense.Update(data)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity from license data")
}
updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
if err != nil {
return err
}
return nil
}
func (provider *provider) Checkout(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) {
activeLicense, err := provider.GetActive(ctx, organizationID)
if err != nil {
return nil, err
}
body, err := json.Marshal(postableSubscription)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal checkout payload")
}
response, err := provider.zeus.GetCheckoutURL(ctx, activeLicense.Key, body)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate checkout session")
}
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}
func (provider *provider) Portal(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) {
activeLicense, err := provider.GetActive(ctx, organizationID)
if err != nil {
return nil, err
}
body, err := json.Marshal(postableSubscription)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal portal payload")
}
response, err := provider.zeus.GetPortalURL(ctx, activeLicense.Key, body)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate portal session")
}
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}
func (provider *provider) GetFeatureFlags(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.Feature, error) {
license, err := provider.GetActive(ctx, organizationID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return licensetypes.BasicPlan, nil
}
return nil, err
}
return license.Features, nil
}
func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
activeLicense, err := provider.GetActive(ctx, orgID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return map[string]any{}, nil
}
return nil, err
}
return licensetypes.NewStatsFromLicense(activeLicense), nil
}
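The downgrade condition in Refresh reads as "more than FailureThreshold poll intervals have passed since the last successful validation". A standalone sketch of the same arithmetic, with made-up numbers:

package main

import (
	"fmt"
	"time"
)

// shouldFallBackToBasicPlan mirrors the check in (*provider).Refresh: once the
// license has not been validated for more than failureThreshold poll intervals,
// features are downgraded to the basic plan.
func shouldFallBackToBasicPlan(lastValidatedAt time.Time, pollInterval time.Duration, failureThreshold int) bool {
	return time.Since(lastValidatedAt) > time.Duration(failureThreshold)*pollInterval
}

func main() {
	lastOK := time.Now().Add(-26 * time.Hour)
	fmt.Println(shouldFallBackToBasicPlan(lastOK, 8*time.Hour, 3)) // true: 26h > 3*8h
}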

View File

@@ -1,81 +0,0 @@
package sqllicensingstore
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type store struct {
sqlstore sqlstore.SQLStore
}
func New(sqlstore sqlstore.SQLStore) licensetypes.Store {
return &store{sqlstore}
}
func (store *store) Create(ctx context.Context, storableLicense *licensetypes.StorableLicense) error {
_, err := store.
sqlstore.
BunDB().
NewInsert().
Model(storableLicense).
Exec(ctx)
if err != nil {
return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "license with ID: %s already exists", storableLicense.ID)
}
return nil
}
func (store *store) Get(ctx context.Context, organizationID valuer.UUID, licenseID valuer.UUID) (*licensetypes.StorableLicense, error) {
storableLicense := new(licensetypes.StorableLicense)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(storableLicense).
Where("org_id = ?", organizationID).
Where("id = ?", licenseID).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "license with ID: %s does not exist", licenseID)
}
return storableLicense, nil
}
func (store *store) GetAll(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.StorableLicense, error) {
storableLicenses := make([]*licensetypes.StorableLicense, 0)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(&storableLicenses).
Where("org_id = ?", organizationID).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "licenses for organizationID: %s does not exists", organizationID)
}
return storableLicenses, nil
}
func (store *store) Update(ctx context.Context, organizationID valuer.UUID, storableLicense *licensetypes.StorableLicense) error {
_, err := store.
sqlstore.
BunDB().
NewUpdate().
Model(storableLicense).
WherePK().
Where("org_id = ?", organizationID).
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to update license with ID: %s", storableLicense.ID)
}
return nil
}

View File

@@ -35,11 +35,8 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.hash={{ .ShortCommit }} - -X github.com/SigNoz/signoz/pkg/version.hash={{ .ShortCommit }}
- -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }} - -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }}
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }} - -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud - -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1 - -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
- >- - >-
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }} {{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
mod_timestamp: "{{ .CommitTimestamp }}" mod_timestamp: "{{ .CommitTimestamp }}"
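The -X flags above set package-level string variables at link time. A small, hypothetical demonstration of the same mechanism (the variable and module path here are illustrative, not the ones referenced in the diff):

// Build with: go build -ldflags "-X main.zeusURL=https://api.signoz.cloud" .
package main

import "fmt"

// zeusURL defaults to a development value and is overridden at release time.
var zeusURL = "http://localhost:9999"

func main() {
	fmt.Println("zeus endpoint:", zeusURL)
}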

View File

@@ -1,4 +1,4 @@
FROM golang:1.23-bullseye FROM golang:1.22-bullseye
ARG OS="linux" ARG OS="linux"
ARG TARGETARCH ARG TARGETARCH

View File

@@ -5,7 +5,6 @@ import (
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2" querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder" "github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/valuer"
) )
type DailyProvider struct { type DailyProvider struct {
@@ -38,7 +37,7 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
return dp return dp
} }
func (p *DailyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) { func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityDaily req.Seasonality = SeasonalityDaily
return p.getAnomalies(ctx, orgID, req) return p.getAnomalies(ctx, req)
} }

View File

@@ -5,7 +5,6 @@ import (
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2" querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder" "github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/valuer"
) )
type HourlyProvider struct { type HourlyProvider struct {
@@ -38,7 +37,7 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
return hp return hp
} }
func (p *HourlyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) { func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityHourly req.Seasonality = SeasonalityHourly
return p.getAnomalies(ctx, orgID, req) return p.getAnomalies(ctx, req)
} }

View File

@@ -2,10 +2,8 @@ package anomaly
import ( import (
"context" "context"
"github.com/SigNoz/signoz/pkg/valuer"
) )
type Provider interface { type Provider interface {
GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
} }
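A hedged sketch of what satisfying the updated Provider interface looks like; noopProvider is hypothetical and only illustrates the new orgID parameter being threaded through (it would compile inside the anomaly package, where the request/response types are defined).

package anomaly

import (
	"context"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// noopProvider is a stand-in implementation used only for illustration.
type noopProvider struct{}

func (noopProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
	// Real providers set req.Seasonality and forward orgID to the querier;
	// a no-op simply returns an empty response.
	return &GetAnomaliesResponse{}, nil
}

// Compile-time assertion that noopProvider implements Provider.
var _ Provider = noopProvider{}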

View File

@@ -5,12 +5,11 @@ import (
"math" "math"
"time" "time"
"github.com/SigNoz/signoz/pkg/cache" "github.com/SigNoz/signoz/pkg/query-service/cache"
"github.com/SigNoz/signoz/pkg/query-service/interfaces" "github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/postprocess" "github.com/SigNoz/signoz/pkg/query-service/postprocess"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels" "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap" "go.uber.org/zap"
) )
@@ -60,9 +59,9 @@ func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomaly
return prepareAnomalyQueryParams(req.Params, req.Seasonality) return prepareAnomalyQueryParams(req.Params, req.Seasonality)
} }
func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID, params *anomalyQueryParams) (*anomalyQueryResults, error) { func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery)) zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery) currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -73,7 +72,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
} }
zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery)) zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery) pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -84,7 +83,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
} }
zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery)) zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery) currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -95,7 +94,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
} }
zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery)) zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery) pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -106,7 +105,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
} }
zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery)) zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery) past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past2SeasonQuery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -117,7 +116,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
} }
zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery)) zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery) past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past3SeasonQuery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -336,9 +335,9 @@ func (p *BaseSeasonalProvider) getAnomalyScores(
return anomalyScoreSeries return anomalyScoreSeries
} }
func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) { func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
anomalyParams := p.getQueryParams(req) anomalyParams := p.getQueryParams(req)
anomalyQueryResults, err := p.getResults(ctx, orgID, anomalyParams) anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@@ -5,7 +5,6 @@ import (
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2" querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder" "github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/valuer"
) )
type WeeklyProvider struct { type WeeklyProvider struct {
@@ -37,7 +36,7 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
return wp return wp
} }
func (p *WeeklyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) { func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityWeekly req.Seasonality = SeasonalityWeekly
return p.getAnomalies(ctx, orgID, req) return p.getAnomalies(ctx, req)
} }

View File

@@ -5,34 +5,43 @@ import (
"net/http/httputil" "net/http/httputil"
"time" "time"
"github.com/SigNoz/signoz/ee/licensing/httplicensing" "github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway" "github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces" "github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/usage" "github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager" "github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields" "github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/http/middleware" "github.com/SigNoz/signoz/pkg/modules/preference"
querierAPI "github.com/SigNoz/signoz/pkg/querier" preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app" baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations" "github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline" "github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model" basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules" rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
"github.com/SigNoz/signoz/pkg/version" "github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux" "github.com/gorilla/mux"
) )
type APIHandlerOptions struct { type APIHandlerOptions struct {
DataConnector interfaces.DataConnector DataConnector interfaces.DataConnector
SkipConfig *basemodel.SkipConfig
PreferSpanMetrics bool PreferSpanMetrics bool
AppDao dao.ModelDao
RulesManager *rules.Manager RulesManager *rules.Manager
UsageManager *usage.Manager UsageManager *usage.Manager
FeatureFlags baseint.FeatureLookup
LicenseManager *license.Manager
IntegrationsController *integrations.Controller IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Cache cache.Cache
Gateway *httputil.ReverseProxy Gateway *httputil.ReverseProxy
GatewayUrl string GatewayUrl string
// Querier Influx Interval // Querier Influx Interval
@@ -49,19 +58,26 @@ type APIHandler struct {
// NewAPIHandler returns an APIHandler // NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) { func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{ baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector, Reader: opts.DataConnector,
SkipConfig: opts.SkipConfig,
PreferSpanMetrics: opts.PreferSpanMetrics, PreferSpanMetrics: opts.PreferSpanMetrics,
AppDao: opts.AppDao,
RuleManager: opts.RulesManager, RuleManager: opts.RulesManager,
FeatureFlags: opts.FeatureFlags,
IntegrationsController: opts.IntegrationsController, IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController, CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController, LogsParsingPipelineController: opts.LogsParsingPipelineController,
Cache: opts.Cache,
FluxInterval: opts.FluxInterval, FluxInterval: opts.FluxInterval,
UseLogsNewSchema: opts.UseLogsNewSchema,
UseTraceNewSchema: opts.UseTraceNewSchema,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager), AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing), FieldsAPI: fields.NewAPI(signoz.TelemetryStore),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz, Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Querier), Preference: preference,
}) })
if err != nil { if err != nil {
@@ -75,39 +91,98 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
return ah, nil return ah, nil
} }
func (ah *APIHandler) FF() baseint.FeatureLookup {
return ah.opts.FeatureFlags
}
func (ah *APIHandler) RM() *rules.Manager { func (ah *APIHandler) RM() *rules.Manager {
return ah.opts.RulesManager return ah.opts.RulesManager
} }
func (ah *APIHandler) LM() *license.Manager {
return ah.opts.LicenseManager
}
func (ah *APIHandler) UM() *usage.Manager { func (ah *APIHandler) UM() *usage.Manager {
return ah.opts.UsageManager return ah.opts.UsageManager
} }
func (ah *APIHandler) AppDao() dao.ModelDao {
return ah.opts.AppDao
}
func (ah *APIHandler) Gateway() *httputil.ReverseProxy { func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
return ah.opts.Gateway return ah.opts.Gateway
} }
func (ah *APIHandler) CheckFeature(f string) bool {
err := ah.FF().CheckFeature(f)
return err == nil
}
// RegisterRoutes registers routes for this handler on the given router // RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddleware) {
// note: add ee override methods first // note: add ee override methods first
// routes available only in ee version // routes available only in ee version
router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/featureFlags",
am.OpenAccess(ah.getFeatureFlags)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/loginPrecheck",
am.OpenAccess(ah.precheckLogin)).
Methods(http.MethodGet)
// paid plans specific routes // paid plans specific routes
router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost) router.HandleFunc("/api/v1/complete/saml",
am.OpenAccess(ah.receiveSAML)).
Methods(http.MethodPost)
router.HandleFunc("/api/v1/complete/google",
am.OpenAccess(ah.receiveGoogleAuth)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/orgs/{orgId}/domains",
am.AdminAccess(ah.listDomainsByOrg)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/domains",
am.AdminAccess(ah.postDomain)).
Methods(http.MethodPost)
router.HandleFunc("/api/v1/domains/{id}",
am.AdminAccess(ah.putDomain)).
Methods(http.MethodPut)
router.HandleFunc("/api/v1/domains/{id}",
am.AdminAccess(ah.deleteDomain)).
Methods(http.MethodDelete)
// base overrides // base overrides
router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet) router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.LicensingAPI.Checkout)).Methods(http.MethodPost) // PAT APIs
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.getPATs)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.updatePAT)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.revokePAT)).Methods(http.MethodDelete)
router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet) router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost) router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.portalSession)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/dashboards/{uuid}/lock", am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut)
// v3 // v3
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost) router.HandleFunc("/api/v3/licenses", am.ViewAccess(ah.listLicensesV3)).Methods(http.MethodGet)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut) router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.applyLicenseV3)).Methods(http.MethodPost)
router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.LicensingAPI.GetActive)).Methods(http.MethodGet) router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.refreshLicensesV3)).Methods(http.MethodPut)
router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.getActiveLicenseV3)).Methods(http.MethodGet)
// v4 // v4
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost) router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
@@ -119,7 +194,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
} }
func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) { func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *baseapp.AuthMiddleware) {
ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am) ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am)

View File

@@ -3,16 +3,192 @@ package api
import ( import (
"context" "context"
"encoding/base64" "encoding/base64"
"encoding/json"
"fmt" "fmt"
"io"
"net/http" "net/http"
"net/url" "net/url"
"github.com/gorilla/mux"
"go.uber.org/zap" "go.uber.org/zap"
"github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/valuer" "github.com/SigNoz/signoz/ee/query-service/model"
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
) )
func parseRequest(r *http.Request, req interface{}) error {
defer r.Body.Close()
requestBody, err := io.ReadAll(r.Body)
if err != nil {
return err
}
err = json.Unmarshal(requestBody, &req)
return err
}
// loginUser overrides base handler and considers SSO case.
func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
req := basemodel.LoginRequest{}
err := parseRequest(r, &req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
ctx := context.Background()
if req.Email != "" && ah.CheckFeature(model.SSO) {
var apierr basemodel.BaseApiError
_, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
if apierr != nil && !apierr.IsNil() {
RespondError(w, apierr, nil)
}
}
// if all looks good, call auth
resp, err := baseauth.Login(ctx, &req, ah.opts.JWT)
if ah.HandleError(w, err, http.StatusUnauthorized) {
return
}
ah.WriteJSON(w, r, resp)
}
// registerUser registers a user and responds with a precheck
// so the front-end can decide the login method
func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(model.SSO) {
ah.APIHandler.Register(w, r)
return
}
ctx := context.Background()
var req *baseauth.RegisterRequest
defer r.Body.Close()
requestBody, err := io.ReadAll(r.Body)
if err != nil {
zap.L().Error("received no input in api", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
err = json.Unmarshal(requestBody, &req)
if err != nil {
zap.L().Error("received invalid user registration request", zap.Error(err))
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
return
}
// get invite object
invite, err := baseauth.ValidateInvite(ctx, req)
if err != nil {
zap.L().Error("failed to validate invite token", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
if invite == nil {
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
return
}
// get auth domain from email domain
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
return
}
precheckResp := &basemodel.PrecheckResponse{
SSO: false,
IsUser: false,
}
if domain != nil && domain.SsoEnabled {
// sso is enabled, create user and respond precheck data
user, apierr := baseauth.RegisterInvitedUser(ctx, req, true)
if apierr != nil {
RespondError(w, apierr, nil)
return
}
var precheckError basemodel.BaseApiError
precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
if precheckError != nil {
RespondError(w, precheckError, precheckResp)
}
} else {
// no-sso, validate password
if err := baseauth.ValidatePassword(req.Password); err != nil {
RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
return
}
_, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager)
if !registerError.IsNil() {
RespondError(w, registerError, nil)
return
}
precheckResp.IsUser = true
}
ah.Respond(w, precheckResp)
}
// getInvite returns the invite object details for the given invite token. We do not need to
// protect this API because the invite token itself is meant to be private.
func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
token := mux.Vars(r)["token"]
sourceUrl := r.URL.Query().Get("ref")
ctx := context.Background()
inviteObject, err := baseauth.GetInvite(context.Background(), token)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
resp := model.GettableInvitation{
InvitationResponseObject: inviteObject,
}
precheck, apierr := ah.AppDao().PrecheckLogin(ctx, inviteObject.Email, sourceUrl)
resp.Precheck = precheck
if apierr != nil {
RespondError(w, apierr, resp)
return
}
ah.WriteJSON(w, r, resp)
}
// precheckLogin enables the browser login page to display the appropriate
// login methods
func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
email := r.URL.Query().Get("email")
sourceUrl := r.URL.Query().Get("ref")
resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl)
if apierr != nil {
RespondError(w, apierr, resp)
return
}
ah.Respond(w, resp)
}
func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) { func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
ssoError := []byte("Login failed. Please contact your system administrator") ssoError := []byte("Login failed. Please contact your system administrator")
dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError))) dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
@@ -21,12 +197,84 @@ func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string)
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther) http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther)
} }
// receiveGoogleAuth completes google OAuth response and forwards a request
// to front-end to sign user in
func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) {
redirectUri := constants.GetDefaultSiteURL()
ctx := context.Background()
if !ah.CheckFeature(model.SSO) {
zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
q := r.URL.Query()
if errType := q.Get("error"); errType != "" {
zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
return
}
relayState := q.Get("state")
zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
// upgrade redirect url from the relay state for better accuracy
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
// fetch domain by parsing relay state.
domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
if err != nil {
handleSsoError(w, r, redirectUri)
return
}
// now that we have domain, use domain to fetch sso settings.
// prepare google callback handler using parsedState -
// which contains redirect URL (front-end endpoint)
callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState)
if err != nil {
zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
identity, err := callbackHandler.HandleCallback(r)
if err != nil {
zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT)
if err != nil {
zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
http.Redirect(w, r, nextPage, http.StatusSeeOther)
}
// receiveSAML completes a SAML request and gets user logged in // receiveSAML completes a SAML request and gets user logged in
func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) { func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
// this is the source url that initiated the login request // this is the source url that initiated the login request
redirectUri := constants.GetDefaultSiteURL() redirectUri := constants.GetDefaultSiteURL()
ctx := context.Background() ctx := context.Background()
if !ah.CheckFeature(model.SSO) {
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
err := r.ParseForm() err := r.ParseForm()
if err != nil { if err != nil {
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
@@ -50,25 +298,12 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login") redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
// fetch domain by parsing relay state. // fetch domain by parsing relay state.
domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState) domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
if err != nil { if err != nil {
handleSsoError(w, r, redirectUri) handleSsoError(w, r, redirectUri)
return return
} }
orgID, err := valuer.NewUUID(domain.OrgID)
if err != nil {
handleSsoError(w, r, redirectUri)
return
}
_, err = ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
sp, err := domain.PrepareSamlRequest(parsedState) sp, err := domain.PrepareSamlRequest(parsedState)
if err != nil { if err != nil {
zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err)) zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
@@ -96,7 +331,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
return return
} }
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT) nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
if err != nil { if err != nil {
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err)) zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri) handleSsoError(w, r, redirectUri)

View File

@@ -11,12 +11,12 @@ import (
"time" "time"
"github.com/SigNoz/signoz/ee/query-service/constants" "github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/errors" eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/http/render" "github.com/SigNoz/signoz/pkg/query-service/auth"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model" basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/gorilla/mux" "github.com/gorilla/mux"
"go.uber.org/zap" "go.uber.org/zap"
@@ -30,18 +30,6 @@ type CloudIntegrationConnectionParamsResponse struct {
} }
func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseWriter, r *http.Request) { func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
cloudProvider := mux.Vars(r)["cloudProvider"] cloudProvider := mux.Vars(r)["cloudProvider"]
if cloudProvider != "aws" { if cloudProvider != "aws" {
RespondError(w, basemodel.BadRequest(fmt.Errorf( RespondError(w, basemodel.BadRequest(fmt.Errorf(
@@ -50,7 +38,15 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return return
} }
apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider) currentUser, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, basemodel.UnauthorizedError(fmt.Errorf(
"couldn't deduce current user: %w", err,
)), nil)
return
}
apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), currentUser.OrgID, cloudProvider)
if apiErr != nil { if apiErr != nil {
RespondError(w, basemodel.WrapApiError( RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't provision PAT for cloud integration:", apiErr, "couldn't provision PAT for cloud integration:",
@@ -62,9 +58,11 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
SigNozAPIKey: apiKey, SigNozAPIKey: apiKey,
} }
license, err := ah.Signoz.Licensing.GetActive(r.Context(), orgID) license, apiErr := ah.LM().GetRepo().GetActiveLicense(r.Context())
if err != nil { if apiErr != nil {
render.Error(w, err) RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't look for active license",
), nil)
return return
} }
@@ -120,14 +118,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
return "", apiErr return "", apiErr
} }
orgIdUUID, err := valuer.NewUUID(orgId) allPats, err := ah.AppDao().ListPATs(ctx, orgId)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't parse orgId: %w", err,
))
}
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
if err != nil { if err != nil {
return "", basemodel.InternalError(fmt.Errorf( return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err, "couldn't list PATs: %w", err,
@@ -144,25 +135,19 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
zap.String("cloudProvider", cloudProvider), zap.String("cloudProvider", cloudProvider),
) )
newPAT, err := types.NewStorableAPIKey( newPAT := eeTypes.NewGettablePAT(
integrationPATName, integrationPATName,
baseconstants.ViewerGroup,
integrationUser.ID, integrationUser.ID,
types.RoleViewer,
0, 0,
) )
integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
if err != nil { if err != nil {
return "", basemodel.InternalError(fmt.Errorf( return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err, "couldn't create cloud integration PAT: %w", err,
)) ))
} }
return integrationPAT.Token, nil
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
return newPAT.Token, nil
} }
func (ah *APIHandler) getOrCreateCloudIntegrationUser( func (ah *APIHandler) getOrCreateCloudIntegrationUser(
@@ -171,9 +156,10 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider) cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser) email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email) // TODO(nitya): there should be orgId here
if err != nil && !errors.Ast(err, errors.TypeNotFound) { integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err)) if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
} }
if integrationUserResult != nil { if integrationUserResult != nil {
@@ -185,18 +171,33 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
zap.String("cloudProvider", cloudProvider), zap.String("cloudProvider", cloudProvider),
) )
newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId) newUser := &types.User{
if err != nil { ID: uuid.New().String(),
return nil, basemodel.InternalError(fmt.Errorf( Name: cloudIntegrationUser,
"couldn't create cloud integration user: %w", err, Email: email,
)) TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},
OrgID: orgId,
} }
password, err := types.NewFactorPassword(uuid.NewString()) viewerGroup, apiErr := dao.DB().GetGroupByName(ctx, baseconstants.ViewerGroup)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't get viewer group for creating integration user")
}
newUser.GroupID = viewerGroup.ID
integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password) passwordHash, err := auth.PasswordHash(uuid.NewString())
if err != nil { if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err)) return nil, basemodel.InternalError(fmt.Errorf(
"couldn't hash random password for cloud integration user: %w", err,
))
}
newUser.Password = passwordHash
integrationUser, apiErr := ah.AppDao().CreateUser(ctx, newUser, false)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't create cloud integration user")
} }
return integrationUser, nil return integrationUser, nil

View File

@@ -0,0 +1,63 @@
package api
import (
"net/http"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/gorilla/mux"
)
func (ah *APIHandler) lockDashboard(w http.ResponseWriter, r *http.Request) {
ah.lockUnlockDashboard(w, r, true)
}
func (ah *APIHandler) unlockDashboard(w http.ResponseWriter, r *http.Request) {
ah.lockUnlockDashboard(w, r, false)
}
func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request, lock bool) {
// Locking can only be done by the owner of the dashboard
// or an admin
// - Fetch the dashboard
// - Check if the user is the owner or an admin
// - If yes, lock/unlock the dashboard
// - If no, return 403
// Get the dashboard UUID from the request
uuid := mux.Vars(r)["uuid"]
if strings.HasPrefix(uuid, "integration") {
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "dashboards created by integrations cannot be modified"))
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated"))
return
}
dashboard, err := dashboards.GetDashboard(r.Context(), claims.OrgID, uuid)
if err != nil {
render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get dashboard"))
return
}
if !auth.IsAdminV2(claims) && (dashboard.CreatedBy != claims.Email) {
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "You are not authorized to lock/unlock this dashboard"))
return
}
// Lock/Unlock the dashboard
err = dashboards.LockUnlockDashboard(r.Context(), claims.OrgID, uuid, lock)
if err != nil {
render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to lock/unlock dashboard"))
return
}
ah.Respond(w, "Dashboard updated successfully")
}
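The authorization rule above boils down to "admins, or the dashboard's creator, may lock or unlock". A minimal sketch of that predicate in isolation (function name is illustrative):

package main

import "fmt"

// canLockDashboard mirrors the check in lockUnlockDashboard: allowed when the
// requester is an admin or is the user who created the dashboard.
func canLockDashboard(isAdmin bool, createdBy, requesterEmail string) bool {
	return isAdmin || createdBy == requesterEmail
}

func main() {
	fmt.Println(canLockDashboard(false, "owner@example.com", "viewer@example.com")) // false
	fmt.Println(canLockDashboard(true, "owner@example.com", "viewer@example.com"))  // true
}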

View File

@@ -0,0 +1,91 @@
package api
import (
"context"
"encoding/json"
"fmt"
"net/http"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
"github.com/google/uuid"
"github.com/gorilla/mux"
)
func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) {
orgId := mux.Vars(r)["orgId"]
domains, apierr := ah.AppDao().ListDomains(context.Background(), orgId)
if apierr != nil {
RespondError(w, apierr, domains)
return
}
ah.Respond(w, domains)
}
func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := types.GettableOrgDomain{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
if err := req.ValidNew(); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
if apierr := ah.AppDao().CreateDomain(ctx, &req); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, &req)
}
func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
domainIdStr := mux.Vars(r)["id"]
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
req := types.GettableOrgDomain{StorableOrgDomain: types.StorableOrgDomain{ID: domainId}}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
req.ID = domainId
if err := req.Valid(nil); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, &req)
}
func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) {
domainIdStr := mux.Vars(r)["id"]
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil)
return
}
apierr := ah.AppDao().DeleteDomain(context.Background(), domainId)
if apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, nil)
}

View File

@@ -9,29 +9,13 @@ import (
"time" "time"
"github.com/SigNoz/signoz/ee/query-service/constants" "github.com/SigNoz/signoz/ee/query-service/constants"
pkgError "github.com/SigNoz/signoz/pkg/errors" basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap" "go.uber.org/zap"
) )
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
ctx := r.Context() ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx) featureSet, err := ah.FF().GetFeatureFlags()
if err != nil {
render.Error(w, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(w, pkgError.Newf(pkgError.TypeInvalidInput, pkgError.CodeInvalidInput, "orgId is invalid"))
return
}
featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context(), orgID)
if err != nil { if err != nil {
ah.HandleError(w, err, http.StatusInternalServerError) ah.HandleError(w, err, http.StatusInternalServerError)
return return
@@ -39,7 +23,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
if constants.FetchFeatures == "true" { if constants.FetchFeatures == "true" {
zap.L().Debug("fetching license") zap.L().Debug("fetching license")
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID) license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
if err != nil { if err != nil {
zap.L().Error("failed to fetch license", zap.Error(err)) zap.L().Error("failed to fetch license", zap.Error(err))
} else if license == nil { } else if license == nil {
@@ -60,16 +44,9 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
} }
if ah.opts.PreferSpanMetrics { if ah.opts.PreferSpanMetrics {
for idx, feature := range featureSet { for idx := range featureSet {
if feature.Name == licensetypes.UseSpanMetrics { feature := &featureSet[idx]
featureSet[idx].Active = true if feature.Name == basemodel.UseSpanMetrics {
}
}
}
if constants.IsDotMetricsEnabled {
for idx, feature := range featureSet {
if feature.Name == licensetypes.DotMetricsEnabled {
featureSet[idx].Active = true featureSet[idx].Active = true
} }
} }
@@ -80,7 +57,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
// fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint // fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint
// and returns the FeatureSet. // and returns the FeatureSet.
func fetchZeusFeatures(url, licenseKey string) ([]*licensetypes.Feature, error) { func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
// Check if the URL is empty // Check if the URL is empty
if url == "" { if url == "" {
return nil, fmt.Errorf("url is empty") return nil, fmt.Errorf("url is empty")
@@ -139,28 +116,28 @@ func fetchZeusFeatures(url, licenseKey string) ([]*licensetypes.Feature, error)
} }
type ZeusFeaturesResponse struct { type ZeusFeaturesResponse struct {
Status string `json:"status"` Status string `json:"status"`
Data []*licensetypes.Feature `json:"data"` Data basemodel.FeatureSet `json:"data"`
} }
// MergeFeatureSets merges two FeatureSet arrays with precedence to zeusFeatures. // MergeFeatureSets merges two FeatureSet arrays with precedence to zeusFeatures.
func MergeFeatureSets(zeusFeatures, internalFeatures []*licensetypes.Feature) []*licensetypes.Feature { func MergeFeatureSets(zeusFeatures, internalFeatures basemodel.FeatureSet) basemodel.FeatureSet {
// Create a map to store the merged features // Create a map to store the merged features
featureMap := make(map[string]*licensetypes.Feature) featureMap := make(map[string]basemodel.Feature)
// Add all features from the internalFeatures set to the map // Add all features from the internalFeatures set to the map
for _, feature := range internalFeatures { for _, feature := range internalFeatures {
featureMap[feature.Name.StringValue()] = feature featureMap[feature.Name] = feature
} }
// Add all features from the zeusFeatures set to the map // Add all features from the zeusFeatures set to the map
// If a feature already exists (i.e., same name), the zeusFeature will overwrite it // If a feature already exists (i.e., same name), the zeusFeature will overwrite it
for _, feature := range zeusFeatures { for _, feature := range zeusFeatures {
featureMap[feature.Name.StringValue()] = feature featureMap[feature.Name] = feature
} }
// Convert the map back to a FeatureSet slice // Convert the map back to a FeatureSet slice
var mergedFeatures []*licensetypes.Feature var mergedFeatures basemodel.FeatureSet
for _, feature := range featureMap { for _, feature := range featureMap {
mergedFeatures = append(mergedFeatures, feature) mergedFeatures = append(mergedFeatures, feature)
} }
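Precedence here means a zeusFeatures entry with the same Name simply overwrites the internal one in the map, and everything else passes through; because the map is flattened back into a slice, the order of the result is not guaranteed. A quick usage sketch, using the basemodel.FeatureSet form of the signature:

```go
zeus := basemodel.FeatureSet{
	{Name: "SSO", Active: true},
}
internal := basemodel.FeatureSet{
	{Name: "SSO", Active: false},           // overridden by the zeus entry
	{Name: "UseSpanMetrics", Active: true}, // passes through unchanged
}
merged := MergeFeatureSets(zeus, internal)
// merged holds SSO (Active: true) and UseSpanMetrics (Active: true),
// in no particular order.
```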

View File

@@ -3,79 +3,78 @@ package api
import ( import (
"testing" "testing"
"github.com/SigNoz/signoz/pkg/types/licensetypes" basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func TestMergeFeatureSets(t *testing.T) { func TestMergeFeatureSets(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
zeusFeatures []*licensetypes.Feature zeusFeatures basemodel.FeatureSet
internalFeatures []*licensetypes.Feature internalFeatures basemodel.FeatureSet
expected []*licensetypes.Feature expected basemodel.FeatureSet
}{ }{
{ {
name: "empty zeusFeatures and internalFeatures", name: "empty zeusFeatures and internalFeatures",
zeusFeatures: []*licensetypes.Feature{}, zeusFeatures: basemodel.FeatureSet{},
internalFeatures: []*licensetypes.Feature{}, internalFeatures: basemodel.FeatureSet{},
expected: []*licensetypes.Feature{}, expected: basemodel.FeatureSet{},
}, },
{ {
name: "non-empty zeusFeatures and empty internalFeatures", name: "non-empty zeusFeatures and empty internalFeatures",
zeusFeatures: []*licensetypes.Feature{ zeusFeatures: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: true}, {Name: "Feature1", Active: true},
{Name: valuer.NewString("Feature2"), Active: false}, {Name: "Feature2", Active: false},
}, },
internalFeatures: []*licensetypes.Feature{}, internalFeatures: basemodel.FeatureSet{},
expected: []*licensetypes.Feature{ expected: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: true}, {Name: "Feature1", Active: true},
{Name: valuer.NewString("Feature2"), Active: false}, {Name: "Feature2", Active: false},
}, },
}, },
{ {
name: "empty zeusFeatures and non-empty internalFeatures", name: "empty zeusFeatures and non-empty internalFeatures",
zeusFeatures: []*licensetypes.Feature{}, zeusFeatures: basemodel.FeatureSet{},
internalFeatures: []*licensetypes.Feature{ internalFeatures: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: true}, {Name: "Feature1", Active: true},
{Name: valuer.NewString("Feature2"), Active: false}, {Name: "Feature2", Active: false},
}, },
expected: []*licensetypes.Feature{ expected: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: true}, {Name: "Feature1", Active: true},
{Name: valuer.NewString("Feature2"), Active: false}, {Name: "Feature2", Active: false},
}, },
}, },
{ {
name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts", name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts",
zeusFeatures: []*licensetypes.Feature{ zeusFeatures: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: true}, {Name: "Feature1", Active: true},
{Name: valuer.NewString("Feature3"), Active: false}, {Name: "Feature3", Active: false},
}, },
internalFeatures: []*licensetypes.Feature{ internalFeatures: basemodel.FeatureSet{
{Name: valuer.NewString("Feature2"), Active: true}, {Name: "Feature2", Active: true},
{Name: valuer.NewString("Feature4"), Active: false}, {Name: "Feature4", Active: false},
}, },
expected: []*licensetypes.Feature{ expected: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: true}, {Name: "Feature1", Active: true},
{Name: valuer.NewString("Feature2"), Active: true}, {Name: "Feature2", Active: true},
{Name: valuer.NewString("Feature3"), Active: false}, {Name: "Feature3", Active: false},
{Name: valuer.NewString("Feature4"), Active: false}, {Name: "Feature4", Active: false},
}, },
}, },
{ {
name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts", name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts",
zeusFeatures: []*licensetypes.Feature{ zeusFeatures: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: true}, {Name: "Feature1", Active: true},
{Name: valuer.NewString("Feature2"), Active: false}, {Name: "Feature2", Active: false},
}, },
internalFeatures: []*licensetypes.Feature{ internalFeatures: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: false}, {Name: "Feature1", Active: false},
{Name: valuer.NewString("Feature3"), Active: true}, {Name: "Feature3", Active: true},
}, },
expected: []*licensetypes.Feature{ expected: basemodel.FeatureSet{
{Name: valuer.NewString("Feature1"), Active: true}, {Name: "Feature1", Active: true},
{Name: valuer.NewString("Feature2"), Active: false}, {Name: "Feature2", Active: false},
{Name: valuer.NewString("Feature3"), Active: true}, {Name: "Feature3", Active: true},
}, },
}, },
} }
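The hunk ends before the loop that actually executes these cases. A typical runner for this table is sketched below; since MergeFeatureSets flattens a map, the output order is unspecified, so an order-insensitive assertion such as assert.ElementsMatch is the safer comparison (the original test may use something else):

```go
for _, tt := range tests {
	t.Run(tt.name, func(t *testing.T) {
		got := MergeFeatureSets(tt.zeusFeatures, tt.internalFeatures)
		// compare as sets rather than slices, because map iteration order varies
		assert.ElementsMatch(t, tt.expected, got)
	})
}
```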

View File

@@ -5,26 +5,10 @@ import (
"strings" "strings"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway" "github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
) )
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) { func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context() ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
validPath := false validPath := false
for _, allowedPrefix := range gateway.AllowedPrefix { for _, allowedPrefix := range gateway.AllowedPrefix {
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) { if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
@@ -38,9 +22,9 @@ func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request
return return
} }
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID) license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
if err != nil { if err != nil {
render.Error(rw, err) RespondError(rw, err, nil)
return return
} }

View File

@@ -6,7 +6,9 @@ import (
"net/http" "net/http"
"github.com/SigNoz/signoz/ee/query-service/constants" "github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/model" "github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/http/render"
) )
type DayWiseBreakdown struct { type DayWiseBreakdown struct {
@@ -45,6 +47,10 @@ type details struct {
BillTotal float64 `json:"billTotal"` BillTotal float64 `json:"billTotal"`
} }
type Redirect struct {
RedirectURL string `json:"redirectURL"`
}
type billingDetails struct { type billingDetails struct {
Status string `json:"status"` Status string `json:"status"`
Data struct { Data struct {
@@ -56,6 +62,93 @@ type billingDetails struct {
} `json:"data"` } `json:"data"`
} }
type ApplyLicenseRequest struct {
LicenseKey string `json:"key"`
}
func (ah *APIHandler) listLicensesV3(w http.ResponseWriter, r *http.Request) {
ah.listLicensesV2(w, r)
}
func (ah *APIHandler) getActiveLicenseV3(w http.ResponseWriter, r *http.Request) {
activeLicense, err := ah.LM().GetRepo().GetActiveLicenseV3(r.Context())
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
// return 404 not found if there is no active license
if activeLicense == nil {
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no active license found")}, nil)
return
}
// TODO deprecate this when we move away from key for stripe
activeLicense.Data["key"] = activeLicense.Key
render.Success(w, http.StatusOK, activeLicense.Data)
}
// this function is called by zeus when inserting licenses in the query-service
func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
var licenseKey ApplyLicenseRequest
if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
if licenseKey.LicenseKey == "" {
RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
return
}
_, apiError := ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
if apiError != nil {
RespondError(w, apiError, nil)
return
}
render.Success(w, http.StatusAccepted, nil)
}
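applyLicenseV3 expects a JSON body with a single key field and replies 202 Accepted on success. A hedged client-side sketch follows; the URL path is an assumption (route registration is not part of this hunk), and bytes, encoding/json, log and net/http are presumed imported:

```go
payload, err := json.Marshal(ApplyLicenseRequest{LicenseKey: "my-license-key"})
if err != nil {
	log.Fatal(err)
}
// path is illustrative only; the handler above does not show where it is mounted
resp, err := http.Post("http://localhost:8080/api/v3/licenses", "application/json", bytes.NewReader(payload))
if err != nil {
	log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusAccepted {
	log.Fatalf("license activation failed: %s", resp.Status)
}
```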
func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) {
apiError := ah.LM().RefreshLicense(r.Context())
if apiError != nil {
RespondError(w, apiError, nil)
return
}
render.Success(w, http.StatusNoContent, nil)
}
func getCheckoutPortalResponse(redirectURL string) *Redirect {
return &Redirect{RedirectURL: redirectURL}
}
func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
checkoutRequest := &model.CheckoutRequest{}
if err := json.NewDecoder(r.Body).Decode(checkoutRequest); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
license := ah.LM().GetActiveLicense()
if license == nil {
RespondError(w, model.BadRequestStr("cannot proceed with checkout without license key"), nil)
return
}
redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key)
if err != nil {
RespondError(w, err, nil)
return
}
ah.Respond(w, getCheckoutPortalResponse(redirectUrl))
}
func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) { func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
licenseKey := r.URL.Query().Get("licenseKey") licenseKey := r.URL.Query().Get("licenseKey")
@@ -89,3 +182,72 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
// TODO(srikanthccv):Fetch the current day usage and add it to the response // TODO(srikanthccv):Fetch the current day usage and add it to the response
ah.Respond(w, billingResponse.Data) ah.Respond(w, billingResponse.Data)
} }
func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
licensesV2 := []model.License{}
for _, l := range licenses {
planKeyFromPlanName, ok := model.MapOldPlanKeyToNewPlanName[l.PlanName]
if !ok {
planKeyFromPlanName = model.Basic
}
licenseV2 := model.License{
Key: l.Key,
ActivationId: "",
PlanDetails: "",
FeatureSet: l.Features,
ValidationMessage: "",
IsCurrent: l.IsCurrent,
LicensePlan: model.LicensePlan{
PlanKey: planKeyFromPlanName,
ValidFrom: l.ValidFrom,
ValidUntil: l.ValidUntil,
Status: l.Status},
}
licensesV2 = append(licensesV2, licenseV2)
}
return licensesV2
}
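One behaviour worth noting: a V3 plan name missing from model.MapOldPlanKeyToNewPlanName silently falls back to the Basic plan instead of returning an error. An illustrative snippet (field values trimmed to the relevant ones):

```go
v3 := []*model.LicenseV3{
	{Key: "abc", PlanName: "SOME_UNKNOWN_PLAN"},
}
v2 := convertLicenseV3ToLicenseV2(v3)
// v2[0].LicensePlan.PlanKey == model.Basic, because the unknown plan name
// has no entry in model.MapOldPlanKeyToNewPlanName
```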
func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
licensesV3, apierr := ah.LM().GetLicensesV3(r.Context())
if apierr != nil {
RespondError(w, apierr, nil)
return
}
licenses := convertLicenseV3ToLicenseV2(licensesV3)
resp := model.Licenses{
TrialStart: -1,
TrialEnd: -1,
OnTrial: false,
WorkSpaceBlock: false,
TrialConvertedToSubscription: false,
GracePeriodEnd: -1,
Licenses: licenses,
}
ah.Respond(w, resp)
}
func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
portalRequest := &model.PortalRequest{}
if err := json.NewDecoder(r.Body).Decode(portalRequest); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
license := ah.LM().GetActiveLicense()
if license == nil {
RespondError(w, model.BadRequestStr("cannot request the portal session without license key"), nil)
return
}
redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key)
if err != nil {
RespondError(w, err, nil)
return
}
ah.Respond(w, getCheckoutPortalResponse(redirectUrl))
}

View File

@@ -0,0 +1,199 @@
package api
import (
"context"
"encoding/json"
"fmt"
"net/http"
"slices"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/errors"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := model.CreatePATRequestBody{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
}
pat := eeTypes.NewGettablePAT(
req.Name,
req.Role,
user.ID,
req.ExpiresInDays,
)
err = validatePATRequest(pat)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
var apierr basemodel.BaseApiError
if pat, apierr = ah.AppDao().CreatePAT(ctx, user.OrgID, pat); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, &pat)
}
func validatePATRequest(req eeTypes.GettablePAT) error {
if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
return fmt.Errorf("valid role is required")
}
if req.ExpiresAt < 0 {
return fmt.Errorf("valid expiresAt is required")
}
if req.Name == "" {
return fmt.Errorf("valid name is required")
}
return nil
}
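validatePATRequest only guards three things: a recognised role, a non-negative expiry, and a non-empty name. As a sketch inside the createPAT handler above, a request like the following would pass validation (the literal values are examples only):

```go
pat := eeTypes.NewGettablePAT(
	"ci-token",                // name: must be non-empty
	baseconstants.ViewerGroup, // role: must be viewer, editor or admin
	user.ID,
	30,                        // expiresInDays: must not yield a negative ExpiresAt
)
if err := validatePATRequest(pat); err != nil {
	// only reachable if one of the three checks above fails
	RespondError(w, model.BadRequest(err), nil)
	return
}
```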
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := eeTypes.GettablePAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
}
// get the PAT
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
err = validatePATRequest(req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
req.UpdatedByUserID = user.ID
req.UpdatedAt = time.Now()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
if apierr = ah.AppDao().UpdatePAT(ctx, user.OrgID, req, id); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, map[string]string{"data": "pat updated successfully"})
}
func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
}
zap.L().Info("Get PATs for user", zap.String("user_id", user.ID))
pats, apierr := ah.AppDao().ListPATs(ctx, user.OrgID)
if apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, pats)
}
func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
}
// get the PAT
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be revoked"))
return
}
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, map[string]string{"data": "pat revoked successfully"})
}

View File

@@ -7,27 +7,14 @@ import (
"net/http" "net/http"
"github.com/SigNoz/signoz/ee/query-service/anomaly" "github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/http/render"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app" baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder" "github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap" "go.uber.org/zap"
) )
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(w, err)
return
}
bodyBytes, _ := io.ReadAll(r.Body) bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
@@ -42,7 +29,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
queryRangeParams.Version = "v4" queryRangeParams.Version = "v4"
// add temporality for each metric // add temporality for each metric
temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams) temporalityErr := aH.PopulateTemporality(r.Context(), queryRangeParams)
if temporalityErr != nil { if temporalityErr != nil {
zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr)) zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil) RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
@@ -98,30 +85,30 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
switch seasonality { switch seasonality {
case anomaly.SeasonalityWeekly: case anomaly.SeasonalityWeekly:
provider = anomaly.NewWeeklyProvider( provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](aH.Signoz.Cache), anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()), anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector), anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
) )
case anomaly.SeasonalityDaily: case anomaly.SeasonalityDaily:
provider = anomaly.NewDailyProvider( provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache), anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()), anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector), anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
) )
case anomaly.SeasonalityHourly: case anomaly.SeasonalityHourly:
provider = anomaly.NewHourlyProvider( provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](aH.Signoz.Cache), anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()), anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector), anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
) )
default: default:
provider = anomaly.NewDailyProvider( provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache), anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()), anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector), anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
) )
} }
anomalies, err := provider.GetAnomalies(r.Context(), orgID, &anomaly.GetAnomaliesRequest{Params: queryRangeParams}) anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
if err != nil { if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return return

View File

@@ -23,17 +23,15 @@ func NewDataConnector(
telemetryStore telemetrystore.TelemetryStore, telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus, prometheus prometheus.Prometheus,
cluster string, cluster string,
useLogsNewSchema bool,
useTraceNewSchema bool,
fluxIntervalForTraceDetail time.Duration, fluxIntervalForTraceDetail time.Duration,
cache cache.Cache, cache cache.Cache,
) *ClickhouseReader { ) *ClickhouseReader {
chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, fluxIntervalForTraceDetail, cache) chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
return &ClickhouseReader{ return &ClickhouseReader{
conn: telemetryStore.ClickhouseDB(), conn: telemetryStore.ClickhouseDB(),
appdb: sqlDB, appdb: sqlDB,
ClickHouseReader: chReader, ClickHouseReader: chReader,
} }
} }
func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
return r.appdb
}

View File

@@ -2,6 +2,7 @@ package app
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"net" "net"
"net/http" "net/http"
@@ -11,51 +12,69 @@ import (
"github.com/gorilla/handlers" "github.com/gorilla/handlers"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
eemiddleware "github.com/SigNoz/signoz/ee/http/middleware"
"github.com/SigNoz/signoz/ee/query-service/app/api" "github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/app/db" "github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/constants" "github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway" "github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules" "github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager" "github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware" "github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/prometheus" "github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore" "github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/web" "github.com/SigNoz/signoz/pkg/web"
"github.com/rs/cors" "github.com/rs/cors"
"github.com/soheilhy/cmux" "github.com/soheilhy/cmux"
licensepkg "github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/query-service/agentConf" "github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app" baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
baseexplorer "github.com/SigNoz/signoz/pkg/query-service/app/explorer"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations" "github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline" "github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp" "github.com/SigNoz/signoz/pkg/query-service/app/opamp"
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model" opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants" baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/healthcheck" "github.com/SigNoz/signoz/pkg/query-service/healthcheck"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces" baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules" baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/telemetry" "github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/query-service/utils" "github.com/SigNoz/signoz/pkg/query-service/utils"
"go.uber.org/zap" "go.uber.org/zap"
) )
const AppDbEngine = "sqlite"
type ServerOptions struct { type ServerOptions struct {
Config signoz.Config Config signoz.Config
SigNoz *signoz.SigNoz SigNoz *signoz.SigNoz
HTTPHostPort string PromConfigPath string
PrivateHostPort string SkipTopLvlOpsPath string
HTTPHostPort string
PrivateHostPort string
// alert specific params
DisableRules bool
RuleRepoURL string
PreferSpanMetrics bool PreferSpanMetrics bool
CacheConfigPath string
FluxInterval string FluxInterval string
FluxIntervalForTraceDetail string FluxIntervalForTraceDetail string
Cluster string Cluster string
GatewayUrl string GatewayUrl string
UseLogsNewSchema bool
UseTraceNewSchema bool
Jwt *authtypes.JWT Jwt *authtypes.JWT
} }
@@ -87,11 +106,33 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
// NewServer creates and initializes Server // NewServer creates and initializes Server
func NewServer(serverOptions *ServerOptions) (*Server, error) { func NewServer(serverOptions *ServerOptions) (*Server, error) {
modelDao, err := dao.InitDao(serverOptions.SigNoz.SQLStore)
if err != nil {
return nil, err
}
if err := baseexplorer.InitWithDSN(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix) gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// initiate license manager
lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore)
if err != nil {
return nil, err
}
// set license manager as feature flag provider in dao
modelDao.SetFlagProvider(lm)
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail) fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -102,19 +143,41 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
serverOptions.SigNoz.TelemetryStore, serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus, serverOptions.SigNoz.Prometheus,
serverOptions.Cluster, serverOptions.Cluster,
serverOptions.UseLogsNewSchema,
serverOptions.UseTraceNewSchema,
fluxIntervalForTraceDetail, fluxIntervalForTraceDetail,
serverOptions.SigNoz.Cache, serverOptions.SigNoz.Cache,
) )
skipConfig := &basemodel.SkipConfig{}
if serverOptions.SkipTopLvlOpsPath != "" {
// read skip config
skipConfig, err = basemodel.ReadSkipConfig(serverOptions.SkipTopLvlOpsPath)
if err != nil {
return nil, err
}
}
var c cache.Cache
if serverOptions.CacheConfigPath != "" {
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
if err != nil {
return nil, err
}
c = cache.NewCache(cacheOpts)
}
rm, err := makeRulesManager( rm, err := makeRulesManager(
serverOptions.RuleRepoURL,
serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore.SQLxDB(),
reader, reader,
serverOptions.SigNoz.Cache, c,
serverOptions.DisableRules,
serverOptions.UseLogsNewSchema,
serverOptions.UseTraceNewSchema,
serverOptions.SigNoz.Alertmanager, serverOptions.SigNoz.Alertmanager,
serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore, serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus, serverOptions.SigNoz.Prometheus,
serverOptions.SigNoz.Modules.OrgGetter,
) )
if err != nil { if err != nil {
@@ -159,23 +222,17 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
} }
// start the usagemanager // start the usagemanager
usageManager, err := usage.New(serverOptions.SigNoz.Licensing, serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus, serverOptions.SigNoz.Modules.OrgGetter) usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.Config.TelemetryStore.Clickhouse.DSN)
if err != nil { if err != nil {
return nil, err return nil, err
} }
err = usageManager.Start(context.Background()) err = usageManager.Start()
if err != nil { if err != nil {
return nil, err return nil, err
} }
telemetry.GetInstance().SetReader(reader) telemetry.GetInstance().SetReader(reader)
telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey) telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval) fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
if err != nil { if err != nil {
@@ -184,15 +241,22 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
apiOpts := api.APIHandlerOptions{ apiOpts := api.APIHandlerOptions{
DataConnector: reader, DataConnector: reader,
SkipConfig: skipConfig,
PreferSpanMetrics: serverOptions.PreferSpanMetrics, PreferSpanMetrics: serverOptions.PreferSpanMetrics,
AppDao: modelDao,
RulesManager: rm, RulesManager: rm,
UsageManager: usageManager, UsageManager: usageManager,
FeatureFlags: lm,
LicenseManager: lm,
IntegrationsController: integrationsController, IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController, CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController, LogsParsingPipelineController: logParsingPipelineController,
Cache: c,
FluxInterval: fluxInterval, FluxInterval: fluxInterval,
Gateway: gatewayProxy, Gateway: gatewayProxy,
GatewayUrl: serverOptions.GatewayUrl, GatewayUrl: serverOptions.GatewayUrl,
UseLogsNewSchema: serverOptions.UseLogsNewSchema,
UseTraceNewSchema: serverOptions.UseTraceNewSchema,
JWT: serverOptions.Jwt, JWT: serverOptions.Jwt,
} }
@@ -202,6 +266,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
} }
s := &Server{ s := &Server{
// logger: logger,
// tracer: tracer,
ruleManager: rm, ruleManager: rm,
serverOptions: serverOptions, serverOptions: serverOptions,
unavailableChannel: make(chan healthcheck.Status), unavailableChannel: make(chan healthcheck.Status),
@@ -227,32 +293,27 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
&opAmpModel.AllAgents, agentConfMgr, &opAmpModel.AllAgents, agentConfMgr,
) )
orgs, err := apiHandler.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background()) errorList := reader.PreloadMetricsMetadata(context.Background())
if err != nil { for _, er := range errorList {
return nil, err zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
for _, org := range orgs {
errorList := reader.PreloadMetricsMetadata(context.Background(), org.ID)
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
} }
return s, nil return s, nil
} }
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) { func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter() r := baseapp.NewRouter()
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap) r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap) r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(), r.Use(middleware.NewTimeout(zap.L(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes, s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default, s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max, s.serverOptions.Config.APIServer.Timeout.Max,
).Wrap) ).Wrap)
r.Use(middleware.NewAnalytics().Wrap) r.Use(middleware.NewAnalytics(zap.L()).Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterPrivateRoutes(r) apiHandler.RegisterPrivateRoutes(r)
@@ -273,18 +334,34 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
} }
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) { func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger())
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap) r := baseapp.NewRouter()
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(), // add auth middleware
getUserFromRequest := func(ctx context.Context) (*types.GettableUser, error) {
user, err := auth.GetUserFromReqContext(ctx)
if err != nil {
return nil, err
}
if user.User.OrgID == "" {
return nil, basemodel.UnauthorizedError(errors.New("orgId is missing in the claims"))
}
return user, nil
}
am := baseapp.NewAuthMiddleware(getUserFromRequest)
r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
r.Use(middleware.NewTimeout(zap.L(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes, s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default, s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max, s.serverOptions.Config.APIServer.Timeout.Max,
).Wrap) ).Wrap)
r.Use(middleware.NewAnalytics().Wrap) r.Use(middleware.NewAnalytics(zap.L()).Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterRoutes(r, am) apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am) apiHandler.RegisterLogsRoutes(r, am)
@@ -294,12 +371,10 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterQueryRangeV3Routes(r, am) apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am) apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am) apiHandler.RegisterQueryRangeV4Routes(r, am)
apiHandler.RegisterQueryRangeV5Routes(r, am)
apiHandler.RegisterWebSocketPaths(r, am) apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am) apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am) apiHandler.RegisterThirdPartyApiRoutes(r, am)
apiHandler.MetricExplorerRoutes(r, am) apiHandler.MetricExplorerRoutes(r, am)
apiHandler.RegisterTraceFunnelsRoutes(r, am)
c := cors.New(cors.Options{ c := cors.New(cors.Options{
AllowedOrigins: []string{"*"}, AllowedOrigins: []string{"*"},
@@ -355,7 +430,13 @@ func (s *Server) initListeners() error {
// Start listening on http and private http port concurrently // Start listening on http and private http port concurrently
func (s *Server) Start(ctx context.Context) error { func (s *Server) Start(ctx context.Context) error {
s.ruleManager.Start(ctx)
// initiate rule manager first
if !s.serverOptions.DisableRules {
s.ruleManager.Start(ctx)
} else {
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
}
err := s.initListeners() err := s.initListeners()
if err != nil { if err != nil {
@@ -420,15 +501,15 @@ func (s *Server) Start(ctx context.Context) error {
return nil return nil
} }
func (s *Server) Stop(ctx context.Context) error { func (s *Server) Stop() error {
if s.httpServer != nil { if s.httpServer != nil {
if err := s.httpServer.Shutdown(ctx); err != nil { if err := s.httpServer.Shutdown(context.Background()); err != nil {
return err return err
} }
} }
if s.privateHTTP != nil { if s.privateHTTP != nil {
if err := s.privateHTTP.Shutdown(ctx); err != nil { if err := s.privateHTTP.Shutdown(context.Background()); err != nil {
return err return err
} }
} }
@@ -436,40 +517,46 @@ func (s *Server) Stop(ctx context.Context) error {
s.opampServer.Stop() s.opampServer.Stop()
if s.ruleManager != nil { if s.ruleManager != nil {
s.ruleManager.Stop(ctx) s.ruleManager.Stop(context.Background())
} }
// stop usage manager // stop usage manager
s.usageManager.Stop(ctx) s.usageManager.Stop()
return nil return nil
} }
func makeRulesManager( func makeRulesManager(
ruleRepoURL string,
db *sqlx.DB, db *sqlx.DB,
ch baseint.Reader, ch baseint.Reader,
cache cache.Cache, cache cache.Cache,
disableRules bool,
useLogsNewSchema bool,
useTraceNewSchema bool,
alertmanager alertmanager.Alertmanager, alertmanager alertmanager.Alertmanager,
sqlstore sqlstore.SQLStore, sqlstore sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore, telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus, prometheus prometheus.Prometheus,
orgGetter organization.Getter,
) (*baserules.Manager, error) { ) (*baserules.Manager, error) {
// create manager opts // create manager opts
managerOpts := &baserules.ManagerOptions{ managerOpts := &baserules.ManagerOptions{
TelemetryStore: telemetryStore, TelemetryStore: telemetryStore,
Prometheus: prometheus, Prometheus: prometheus,
RepoURL: ruleRepoURL,
DBConn: db, DBConn: db,
Context: context.Background(), Context: context.Background(),
Logger: zap.L(), Logger: zap.L(),
DisableRules: disableRules,
Reader: ch, Reader: ch,
Cache: cache, Cache: cache,
EvalDelay: baseconst.GetEvalDelay(), EvalDelay: baseconst.GetEvalDelay(),
PrepareTaskFunc: rules.PrepareTaskFunc, PrepareTaskFunc: rules.PrepareTaskFunc,
UseLogsNewSchema: useLogsNewSchema,
UseTraceNewSchema: useTraceNewSchema,
PrepareTestRuleFunc: rules.TestNotification, PrepareTestRuleFunc: rules.TestNotification,
Alertmanager: alertmanager, Alertmanager: alertmanager,
SQLStore: sqlstore, SQLStore: sqlstore,
OrgGetter: orgGetter,
} }
// create Manager // create Manager

View File

@@ -33,13 +33,3 @@ func GetOrDefaultEnv(key string, fallback string) string {
func GetDefaultSiteURL() string { func GetDefaultSiteURL() string {
return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL) return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL)
} }
const DotMetricsEnabled = "DOT_METRICS_ENABLED"
var IsDotMetricsEnabled = false
func init() {
if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
IsDotMetricsEnabled = true
}
}

View File

@@ -0,0 +1,10 @@
package dao
import (
"github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
"github.com/SigNoz/signoz/pkg/sqlstore"
)
func InitDao(sqlStore sqlstore.SQLStore) (ModelDao, error) {
return sqlite.InitDB(sqlStore)
}

View File

@@ -0,0 +1,44 @@
package dao
import (
"context"
"net/url"
"github.com/SigNoz/signoz/ee/types"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/uptrace/bun"
)
type ModelDao interface {
basedao.ModelDao
// SetFlagProvider sets the feature lookup provider
SetFlagProvider(flags baseint.FeatureLookup)
DB() *bun.DB
// auth methods
CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError)
GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error)
// org domain (auth domains) CRUD ops
ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError)
GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError)
CreateDomain(ctx context.Context, d *types.GettableOrgDomain) basemodel.BaseApiError
UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)
CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
}

View File

@@ -0,0 +1,198 @@
package sqlite
import (
"context"
"fmt"
"net/url"
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/google/uuid"
"go.uber.org/zap"
)
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, basemodel.BaseApiError) {
// get auth domain from email domain
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, model.InternalErrorStr("failed to get domain from email")
}
if domain == nil {
zap.L().Error("email domain does not match any authenticated domain", zap.String("email", email))
return nil, model.InternalErrorStr("email domain does not match any authenticated domain")
}
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
return nil, model.InternalErrorStr("failed to generate password hash")
}
group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup)
if apiErr != nil {
zap.L().Error("GetGroupByName failed", zap.Error(apiErr))
return nil, apiErr
}
user := &types.User{
ID: uuid.New().String(),
Name: "",
Email: email,
Password: hash,
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},
ProfilePictureURL: "", // Currently unused
GroupID: group.ID,
OrgID: domain.OrgID,
}
user, apiErr = m.CreateUser(ctx, user, false)
if apiErr != nil {
zap.L().Error("CreateUser failed", zap.Error(apiErr))
return nil, apiErr
}
return user, nil
}
// PrepareSsoRedirect prepares the redirect link after the SSO response
// has been successfully parsed (i.e. a valid email is available)
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError) {
userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
return "", model.BadRequestStr("invalid user email received from the auth provider")
}
user := &types.User{}
if userPayload == nil {
newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
user = newUser
if apiErr != nil {
zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
return "", apiErr
}
} else {
user = &userPayload.User
}
tokenStore, err := baseauth.GenerateJWTForUser(user, jwt)
if err != nil {
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", model.InternalErrorStr("failed to generate token for the user")
}
return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
redirectUri,
tokenStore.AccessJwt,
user.ID,
tokenStore.RefreshJwt), nil
}
func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
return false, apierr
}
if domain != nil && domain.SsoEnabled {
// sso is enabled, check if the user has admin role
userPayload, baseapierr := m.GetUserByEmail(ctx, email)
if baseapierr != nil || userPayload == nil {
return false, baseapierr
}
if userPayload.Role != baseconst.AdminGroup {
return false, model.BadRequest(fmt.Errorf("auth method not supported"))
}
}
return true, nil
}
// PrecheckLogin is called when the login or signup page is loaded
// to check whether SSO login should be prompted
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) {
// assume user is valid unless proven otherwise
resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false}
// check if email is a valid user
userPayload, baseApiErr := m.GetUserByEmail(ctx, email)
if baseApiErr != nil {
return resp, baseApiErr
}
if userPayload == nil {
resp.IsUser = false
}
ssoAvailable := true
err := m.checkFeature(model.SSO)
if err != nil {
switch err.(type) {
case basemodel.ErrFeatureUnavailable:
// do nothing, just skip sso
ssoAvailable = false
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, model.BadRequestStr(err.Error())
}
}
if ssoAvailable {
resp.IsUser = true
// find domain from email
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
return resp, apierr
}
if orgDomain != nil && orgDomain.SsoEnabled {
// SAML is enabled for this domain, so prepare the SSO URL
if sourceUrl == "" {
sourceUrl = constants.GetDefaultSiteURL()
}
// parse source url that generated the login request
var err error
escapedUrl, _ := url.QueryUnescape(sourceUrl)
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
zap.L().Error("failed to parse referer", zap.Error(err))
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}
// build the IdP URL that will authenticate the user
// the front-end will redirect the user to this URL
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
if err != nil {
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return resp, model.InternalError(err)
}
// set SSO to true, as the url is generated correctly
resp.SSO = true
}
}
return resp, nil
}
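For an SSO-enabled domain the precheck therefore forces IsUser to true and hands back the IdP URL, so callers typically branch on SSO before showing a password form. A hedged sketch of the calling side (email and site URL are examples):

```go
resp, apierr := m.PrecheckLogin(ctx, "jane@example.com", "https://signoz.example.com/login")
if apierr != nil {
	// handle the error and stop here
}
if resp.SSO {
	// the front-end should redirect the browser to resp.SsoUrl
	// instead of rendering the password form
}
```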

View File

@@ -0,0 +1,272 @@
package sqlite
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"net/url"
"strings"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"go.uber.org/zap"
)
// GetDomainFromSsoResponse uses the relay state received from the IdP to fetch
// the user's org domain. The domain is then used to validate the response.
// When sending the login request to the IdP we send the relay state as a URL
// (the site URL) with domainId or domainName as a query parameter.
func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error) {
// derive domain id from relay state now
var domainIdStr string
var domainNameStr string
var domain *types.GettableOrgDomain
for k, v := range relayState.Query() {
if k == "domainId" && len(v) > 0 {
domainIdStr = strings.Replace(v[0], ":", "-", -1)
}
if k == "domainName" && len(v) > 0 {
domainNameStr = v[0]
}
}
if domainIdStr != "" {
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
zap.L().Error("failed to parse domainId from relay state", zap.Error(err))
return nil, fmt.Errorf("failed to parse domainId from IdP response")
}
domain, err = m.GetDomain(ctx, domainId)
if (err != nil) || domain == nil {
zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
}
if domainNameStr != "" {
domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
domain = domainFromDB
if (err != nil) || domain == nil {
zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
}
if domain != nil {
return domain, nil
}
return nil, fmt.Errorf("failed to find domain received in IdP response")
}
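The relay state is simply the site URL with a domainId or domainName query parameter; note that a domainId whose dashes were replaced by colons is also tolerated. A hedged illustration of a relay state this parser accepts (host, path and UUID are made up):

```go
relay, err := url.Parse("https://signoz.example.com/saml/complete?domainId=9f2c1a7e:3b4d:4c5e:8f6a:1b2c3d4e5f60")
if err != nil {
	zap.L().Error("failed to parse relay state", zap.Error(err))
}
// the colons above are rewritten to dashes before uuid.Parse, so this resolves
// to domain id 9f2c1a7e-3b4d-4c5e-8f6a-1b2c3d4e5f60
domain, err := m.GetDomainFromSsoResponse(ctx, relay)
if err != nil {
	// "invalid credentials" is returned when the domain is unknown
}
_ = domain
```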
// GetDomainByName returns org domain for a given domain name
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, basemodel.BaseApiError) {
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
Model(&stored).
Where("name = ?", name).
Limit(1).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return nil, model.BadRequest(fmt.Errorf("invalid domain name"))
}
return nil, model.InternalError(err)
}
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, model.InternalError(err)
}
return domain, nil
}
// GetDomain returns org domain for a given domain id
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError) {
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
Model(&stored).
Where("id = ?", id).
Limit(1).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return nil, model.BadRequest(fmt.Errorf("invalid domain id"))
}
return nil, model.InternalError(err)
}
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, model.InternalError(err)
}
return domain, nil
}
// ListDomains gets the list of auth domains by org id
func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError) {
domains := []types.GettableOrgDomain{}
stored := []types.StorableOrgDomain{}
err := m.DB().NewSelect().
Model(&stored).
Where("org_id = ?", orgId).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return domains, nil
}
return nil, model.InternalError(err)
}
for _, s := range stored {
domain := types.GettableOrgDomain{StorableOrgDomain: s}
if err := domain.LoadConfig(s.Data); err != nil {
zap.L().Error("ListDomains() failed", zap.Error(err))
}
domains = append(domains, domain)
}
return domains, nil
}
// CreateDomain creates a new auth domain
func (m *modelDao) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError {
if domain.ID == uuid.Nil {
domain.ID = uuid.New()
}
if domain.OrgID == "" || domain.Name == "" {
return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgID, Name "))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.L().Error("failed to unmarshal domain config", zap.Error(err))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
storableDomain := types.StorableOrgDomain{
ID: domain.ID,
Name: domain.Name,
OrgID: domain.OrgID,
Data: string(configJson),
TimeAuditable: ossTypes.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
}
_, err = m.DB().NewInsert().
Model(&storableDomain).
Exec(ctx)
if err != nil {
zap.L().Error("failed to insert domain in db", zap.Error(err))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
return nil
}
// UpdateDomain updates stored config params for a domain
func (m *modelDao) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError {
if domain.ID == uuid.Nil {
zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return model.InternalError(fmt.Errorf("domain update failed"))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.L().Error("domain update failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain update failed"))
}
storableDomain := &types.StorableOrgDomain{
ID: domain.ID,
Name: domain.Name,
OrgID: domain.OrgID,
Data: string(configJson),
TimeAuditable: ossTypes.TimeAuditable{UpdatedAt: time.Now()},
}
_, err = m.DB().NewUpdate().
Model(storableDomain).
Column("data", "updated_at").
WherePK().
Exec(ctx)
if err != nil {
zap.L().Error("domain update failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain update failed"))
}
return nil
}
// DeleteDomain deletes an org domain
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
if id == uuid.Nil {
zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
storableDomain := &types.StorableOrgDomain{ID: id}
_, err := m.DB().NewDelete().
Model(storableDomain).
WherePK().
Exec(ctx)
if err != nil {
zap.L().Error("domain delete failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
return nil
}
func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError) {
if email == "" {
return nil, model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email"))
}
components := strings.Split(email, "@")
if len(components) < 2 {
return nil, model.BadRequest(fmt.Errorf("invalid email address"))
}
parsedDomain := components[1]
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
Model(&stored).
Where("name = ?", parsedDomain).
Limit(1).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return nil, nil
}
return nil, model.InternalError(err)
}
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, model.InternalError(err)
}
return domain, nil
}

View File

@@ -0,0 +1,46 @@
package sqlite
import (
"fmt"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
basedsql "github.com/SigNoz/signoz/pkg/query-service/dao/sqlite"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)
type modelDao struct {
*basedsql.ModelDaoSqlite
flags baseint.FeatureLookup
}
// SetFlagProvider sets the feature lookup provider
func (m *modelDao) SetFlagProvider(flags baseint.FeatureLookup) {
m.flags = flags
}
// CheckFeature confirms if a feature is available
func (m *modelDao) checkFeature(key string) error {
if m.flags == nil {
return fmt.Errorf("flag provider not set")
}
return m.flags.CheckFeature(key)
}
// InitDB creates and extends base model DB repository
func InitDB(sqlStore sqlstore.SQLStore) (*modelDao, error) {
dao, err := basedsql.InitDB(sqlStore)
if err != nil {
return nil, err
}
// set package variable so dependent base methods (e.g. AuthCache) will work
basedao.SetDB(dao)
m := &modelDao{ModelDaoSqlite: dao}
return m, nil
}
func (m *modelDao) DB() *bun.DB {
return m.ModelDaoSqlite.DB()
}

View File

@@ -0,0 +1,198 @@
package sqlite
import (
"context"
"fmt"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
p.StorablePersonalAccessToken.OrgID = orgID
p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
_, err := m.DB().NewInsert().
Model(&p.StorablePersonalAccessToken).
Exec(ctx)
if err != nil {
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
createdByUser, _ := m.GetUser(ctx, p.UserID)
if createdByUser == nil {
p.CreatedByUser = types.PatUser{
NotFound: true,
}
} else {
p.CreatedByUser = types.PatUser{
User: ossTypes.User{
ID: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: createdByUser.CreatedAt,
UpdatedAt: createdByUser.UpdatedAt,
},
ProfilePictureURL: createdByUser.ProfilePictureURL,
},
NotFound: false,
}
}
return p, nil
}
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
_, err := m.DB().NewUpdate().
Model(&p.StorablePersonalAccessToken).
Column("role", "name", "updated_at", "updated_by_user_id").
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Where("revoked = false").
Exec(ctx)
if err != nil {
zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT update failed"))
}
return nil
}
func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
Model(&pats).
Where("revoked = false").
Where("org_id = ?", orgID).
Order("updated_at DESC").
Scan(ctx); err != nil {
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}
patsWithUsers := []types.GettablePAT{}
for i := range pats {
patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[i],
}
createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
if createdByUser == nil {
patWithUser.CreatedByUser = types.PatUser{
NotFound: true,
}
} else {
patWithUser.CreatedByUser = types.PatUser{
User: ossTypes.User{
ID: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: createdByUser.CreatedAt,
UpdatedAt: createdByUser.UpdatedAt,
},
ProfilePictureURL: createdByUser.ProfilePictureURL,
},
NotFound: false,
}
}
updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
if updatedByUser == nil {
patWithUser.UpdatedByUser = types.PatUser{
NotFound: true,
}
} else {
patWithUser.UpdatedByUser = types.PatUser{
User: ossTypes.User{
ID: updatedByUser.ID,
Name: updatedByUser.Name,
Email: updatedByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: updatedByUser.CreatedAt,
UpdatedAt: updatedByUser.UpdatedAt,
},
ProfilePictureURL: updatedByUser.ProfilePictureURL,
},
NotFound: false,
}
}
patsWithUsers = append(patsWithUsers, patWithUser)
}
return patsWithUsers, nil
}
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().NewUpdate().
Model(&types.StorablePersonalAccessToken{}).
Set("revoked = ?", true).
Set("updated_by_user_id = ?", userID).
Set("updated_at = ?", updatedAt).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Exec(ctx)
if err != nil {
zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT revoke failed"))
}
return nil
}
func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
Model(&pats).
Where("token = ?", token).
Where("revoked = false").
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
if len(pats) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token, %s", token),
}
}
patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[0],
}
return &patWithUser, nil
}
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
Model(&pats).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Where("revoked = false").
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
if len(pats) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token"),
}
}
patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[0],
}
return &patWithUser, nil
}
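The lookup methods above are what request authentication ultimately leans on: GetPAT already filters out revoked tokens, so a caller only has to map the token to its owning user. A minimal sketch under that assumption (the helper itself is illustrative, not part of the repository):

func resolveTokenUser(ctx context.Context, dao *modelDao, token string) (string, error) {
    pat, apiErr := dao.GetPAT(ctx, token)
    if apiErr != nil {
        // zero or multiple matches, or a revoked token: treat as an authentication failure
        return "", fmt.Errorf("invalid or revoked personal access token")
    }
    return pat.UserID, nil
}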

View File

@@ -0,0 +1,16 @@
package signozio
type status string
type ValidateLicenseResponse struct {
Status status `json:"status"`
Data map[string]interface{} `json:"data"`
}
type CheckoutSessionRedirect struct {
RedirectURL string `json:"url"`
}
type CheckoutResponse struct {
Status status `json:"status"`
Data CheckoutSessionRedirect `json:"data"`
}

View File

@@ -0,0 +1,223 @@
package signozio
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"time"
"github.com/pkg/errors"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
)
var C *Client
const (
POST = "POST"
APPLICATION_JSON = "application/json"
)
type Client struct {
Prefix string
GatewayUrl string
}
func New() *Client {
return &Client{
Prefix: constants.LicenseSignozIo,
GatewayUrl: constants.ZeusURL,
}
}
func init() {
C = New()
}
func ValidateLicenseV3(licenseKey string) (*model.LicenseV3, *model.ApiError) {
// Creating an HTTP client with a timeout for better control
client := &http.Client{
Timeout: 10 * time.Second,
}
req, err := http.NewRequest("GET", C.GatewayUrl+"/v2/licenses/me", nil)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to create request"))
}
// Setting the custom header
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
response, err := client.Do(req)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to make request"))
}
body, err := io.ReadAll(response.Body)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read validation response from %v", C.GatewayUrl)))
}
defer response.Body.Close()
switch response.StatusCode {
case 200:
a := ValidateLicenseResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to unmarshal license validation response"))
}
license, err := model.NewLicenseV3(a.Data)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to generate new license v3"))
}
return license, nil
case 400:
return nil, model.BadRequest(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return nil, model.Unauthorized(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return nil, model.InternalError(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}
}
func NewPostRequestWithCtx(ctx context.Context, url string, contentType string, body io.Reader) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, POST, url, body)
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", contentType)
return req, err
}
// SendUsage reports the usage of signoz to license server
func SendUsage(ctx context.Context, usage model.UsagePayload) *model.ApiError {
reqString, _ := json.Marshal(usage)
req, err := NewPostRequestWithCtx(ctx, C.Prefix+"/usage", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
return model.BadRequest(errors.Wrap(err, "unable to create http request"))
}
res, err := http.DefaultClient.Do(req)
if err != nil {
return model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
}
body, err := io.ReadAll(res.Body)
if err != nil {
return model.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io"))
}
defer res.Body.Close()
switch res.StatusCode {
case 200, 201:
return nil
case 400, 401:
return model.BadRequest(errors.Wrap(errors.New(string(body)),
"bad request error received from license.signoz.io"))
default:
return model.InternalError(errors.Wrap(errors.New(string(body)),
"internal error received from license.signoz.io"))
}
}
func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string) (string, *model.ApiError) {
hClient := &http.Client{}
reqString, err := json.Marshal(checkoutRequest)
if err != nil {
return "", model.BadRequest(err)
}
req, err := http.NewRequestWithContext(ctx, "POST", C.GatewayUrl+"/v2/subscriptions/me/sessions/checkout", bytes.NewBuffer(reqString))
if err != nil {
return "", model.BadRequest(err)
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
response, err := hClient.Do(req)
if err != nil {
return "", model.BadRequest(err)
}
body, err := io.ReadAll(response.Body)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read checkout response from %v", C.GatewayUrl)))
}
defer response.Body.Close()
switch response.StatusCode {
case 201:
a := CheckoutResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, "failed to unmarshal zeus checkout response"))
}
return a.Data.RedirectURL, nil
case 400:
return "", model.BadRequest(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return "", model.Unauthorized(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return "", model.InternalError(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}
}
func PortalSession(ctx context.Context, checkoutRequest *model.PortalRequest, licenseKey string) (string, *model.ApiError) {
hClient := &http.Client{}
reqString, err := json.Marshal(checkoutRequest)
if err != nil {
return "", model.BadRequest(err)
}
req, err := http.NewRequestWithContext(ctx, "POST", C.GatewayUrl+"/v2/subscriptions/me/sessions/portal", bytes.NewBuffer(reqString))
if err != nil {
return "", model.BadRequest(err)
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
response, err := hClient.Do(req)
if err != nil {
return "", model.BadRequest(err)
}
body, err := io.ReadAll(response.Body)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read portal response from %v", C.GatewayUrl)))
}
defer response.Body.Close()
switch response.StatusCode {
case 201:
a := CheckoutResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, "failed to unmarshal zeus portal response"))
}
return a.Data.RedirectURL, nil
case 400:
return "", model.BadRequest(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return "", model.Unauthorized(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return "", model.InternalError(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}
}
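The three exported calls above serve different flows: ValidateLicenseV3 backs activation and the periodic refresh, while CheckoutSession and PortalSession only produce billing redirect URLs. A rough caller-side sketch for the validation path, assuming it lives in this package (the helper name is illustrative):

func refreshPlanName(licenseKey string) (string, *model.ApiError) {
    license, apiErr := ValidateLicenseV3(licenseKey)
    if apiErr != nil {
        // apiErr already carries the bad-request / unauthorized / internal
        // classification chosen from the response status code above.
        return "", apiErr
    }
    return license.PlanName, nil
}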

View File

@@ -0,0 +1,248 @@
package license
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"time"
"github.com/jmoiron/sqlx"
"github.com/mattn/go-sqlite3"
"github.com/SigNoz/signoz/ee/query-service/model"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"go.uber.org/zap"
)
// Repo is the license repository; it stores license keys in a secured DB
type Repo struct {
db *sqlx.DB
store sqlstore.SQLStore
}
// NewLicenseRepo initiates a new license repo
func NewLicenseRepo(db *sqlx.DB, store sqlstore.SQLStore) Repo {
return Repo{
db: db,
store: store,
}
}
func (r *Repo) GetLicensesV3(ctx context.Context) ([]*model.LicenseV3, error) {
licensesData := []model.LicenseDB{}
licenseV3Data := []*model.LicenseV3{}
query := "SELECT id,key,data FROM licenses_v3"
err := r.db.Select(&licensesData, query)
if err != nil {
return nil, fmt.Errorf("failed to get licenses from db: %v", err)
}
for _, l := range licensesData {
var licenseData map[string]interface{}
err := json.Unmarshal([]byte(l.Data), &licenseData)
if err != nil {
return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
}
license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
if err != nil {
return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
}
licenseV3Data = append(licenseV3Data, license)
}
return licenseV3Data, nil
}
// GetActiveLicense fetches the latest active license from DB.
// If the license is not present, expect a nil license and a nil error in the output.
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
activeLicenseV3, err := r.GetActiveLicenseV3(ctx)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
}
if activeLicenseV3 == nil {
return nil, nil
}
activeLicenseV2 := model.ConvertLicenseV3ToLicenseV2(activeLicenseV3)
return activeLicenseV2, nil
}
func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) {
var err error
licenses := []model.LicenseDB{}
query := "SELECT id,key,data FROM licenses_v3"
err = r.db.Select(&licenses, query)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
}
var active *model.LicenseV3
for _, l := range licenses {
var licenseData map[string]interface{}
err := json.Unmarshal([]byte(l.Data), &licenseData)
if err != nil {
return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
}
license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
if err != nil {
return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
}
if active == nil &&
(license.ValidFrom != 0) &&
(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
active = license
}
if active != nil &&
license.ValidFrom > active.ValidFrom &&
(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
active = license
}
}
return active, nil
}
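The loop above always settles on the most recently issued license that has not yet expired, with a ValidUntil of -1 standing for a subscription with no end date. A compact restatement of that selection rule (illustrative and slightly simplified, e.g. it drops the ValidFrom != 0 guard on the first pick):

func pickActive(now int64, licenses []*model.LicenseV3) *model.LicenseV3 {
    var active *model.LicenseV3
    for _, l := range licenses {
        if l.ValidUntil != -1 && l.ValidUntil <= now {
            continue // already expired
        }
        if active == nil || l.ValidFrom > active.ValidFrom {
            active = l
        }
    }
    return active
}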
// InsertLicenseV3 inserts a new license v3 in db
func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) *model.ApiError {
query := `INSERT INTO licenses_v3 (id, key, data) VALUES ($1, $2, $3)`
// license is an entity owned by zeus, so we store the entire license payload here without defining a schema
licenseData, err := json.Marshal(l.Data)
if err != nil {
return &model.ApiError{Typ: basemodel.ErrorBadData, Err: err}
}
_, err = r.db.ExecContext(ctx,
query,
l.ID,
l.Key,
string(licenseData),
)
if err != nil {
if sqliteErr, ok := err.(sqlite3.Error); ok {
if sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique {
zap.L().Error("error in inserting license data: ", zap.Error(sqliteErr))
return &model.ApiError{Typ: model.ErrorConflict, Err: sqliteErr}
}
}
zap.L().Error("error in inserting license data: ", zap.Error(err))
return &model.ApiError{Typ: basemodel.ErrorExec, Err: err}
}
return nil
}
// UpdateLicenseV3 updates an existing license v3 in db
func (r *Repo) UpdateLicenseV3(ctx context.Context, l *model.LicenseV3) error {
// the key and id for the license can't change so only update the data here!
query := `UPDATE licenses_v3 SET data=$1 WHERE id=$2;`
license, err := json.Marshal(l.Data)
if err != nil {
return fmt.Errorf("insert license failed: license marshal error")
}
_, err = r.db.ExecContext(ctx,
query,
license,
l.ID,
)
if err != nil {
zap.L().Error("error in updating license data: ", zap.Error(err))
return fmt.Errorf("failed to update license in db: %v", err)
}
return nil
}
func (r *Repo) CreateFeature(req *types.FeatureStatus) *basemodel.ApiError {
_, err := r.store.BunDB().NewInsert().
Model(req).
Exec(context.Background())
if err != nil {
return &basemodel.ApiError{Typ: basemodel.ErrorInternal, Err: err}
}
return nil
}
func (r *Repo) GetFeature(featureName string) (types.FeatureStatus, error) {
var feature types.FeatureStatus
err := r.store.BunDB().NewSelect().
Model(&feature).
Where("name = ?", featureName).
Scan(context.Background())
if err != nil {
return feature, err
}
if feature.Name == "" {
return feature, basemodel.ErrFeatureUnavailable{Key: featureName}
}
return feature, nil
}
func (r *Repo) GetAllFeatures() ([]basemodel.Feature, error) {
var feature []basemodel.Feature
err := r.db.Select(&feature,
`SELECT * FROM feature_status;`)
if err != nil {
return feature, err
}
return feature, nil
}
func (r *Repo) UpdateFeature(req types.FeatureStatus) error {
_, err := r.store.BunDB().NewUpdate().
Model(&req).
Where("name = ?", req.Name).
Exec(context.Background())
if err != nil {
return err
}
return nil
}
func (r *Repo) InitFeatures(req []types.FeatureStatus) error {
// get a feature by name, if it doesn't exist, create it. If it does exist, update it.
for _, feature := range req {
currentFeature, err := r.GetFeature(feature.Name)
if err != nil && err == sql.ErrNoRows {
err := r.CreateFeature(&feature)
if err != nil {
return err
}
continue
} else if err != nil {
return err
}
feature.Usage = int(currentFeature.Usage)
if feature.Usage >= feature.UsageLimit && feature.UsageLimit != -1 {
feature.Active = false
}
err = r.UpdateFeature(feature)
if err != nil {
return err
}
}
return nil
}
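InitFeatures is effectively an upsert keyed on feature name, with the extra rule that a feature whose stored usage already meets its limit is force-deactivated. A hedged usage sketch, assuming repo is the Repo built by NewLicenseRepo (the seed slice and helper are illustrative; real callers derive the slice from a plan's FeatureSet):

func seedBasicFeatures(repo *Repo) error {
    seed := []types.FeatureStatus{
        {Name: "SSO", Active: false, Usage: 0, UsageLimit: -1, Route: ""},
        {Name: "ONBOARDING", Active: true, Usage: 0, UsageLimit: -1, Route: ""},
    }
    // Existing usage counters are preserved; only activation state and limits are refreshed.
    return repo.InitFeatures(seed)
}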

View File

@@ -0,0 +1,331 @@
package license
import (
"context"
"sync/atomic"
"time"
"github.com/jmoiron/sqlx"
"github.com/pkg/errors"
"sync"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
validate "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/model"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"go.uber.org/zap"
)
var LM *Manager
// validate and update license every 24 hours
var validationFrequency = 24 * time.Hour
type Manager struct {
repo *Repo
mutex sync.Mutex
validatorRunning bool
// done is used to end the license validation loop; this is important for
// gracefully stopping validation and protecting against inconsistent updates
done chan struct{}
// terminated waits for the validate go routine to end
terminated chan struct{}
// last time the license was validated
lastValidated int64
// keep track of validation failure attempts
failedAttempts uint64
// keep track of active license and features
activeLicenseV3 *model.LicenseV3
activeFeatures basemodel.FeatureSet
}
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, features ...basemodel.Feature) (*Manager, error) {
if LM != nil {
return LM, nil
}
repo := NewLicenseRepo(db, store)
m := &Manager{
repo: &repo,
}
if err := m.start(features...); err != nil {
return m, err
}
LM = m
return m, nil
}
// start loads active license in memory and initiates validator
func (lm *Manager) start(features ...basemodel.Feature) error {
return lm.LoadActiveLicenseV3(features...)
}
func (lm *Manager) Stop() {
close(lm.done)
<-lm.terminated
}
func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) {
lm.mutex.Lock()
defer lm.mutex.Unlock()
if l == nil {
return
}
lm.activeLicenseV3 = l
lm.activeFeatures = append(l.Features, features...)
// set default features
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.L().Panic("Couldn't activate features", zap.Error(err))
}
if !lm.validatorRunning {
// we want to make sure only one validator runs,
// we already have lock() so good to go
lm.validatorRunning = true
go lm.ValidatorV3(context.Background())
}
}
func setDefaultFeatures(lm *Manager) {
lm.activeFeatures = append(lm.activeFeatures, baseconstants.DEFAULT_FEATURE_SET...)
}
func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error {
active, err := lm.repo.GetActiveLicenseV3(context.Background())
if err != nil {
return err
}
if active != nil {
lm.SetActiveV3(active, features...)
} else {
zap.L().Info("No active license found, defaulting to basic plan")
// if no active license is found, we default to basic(free) plan with all default features
lm.activeFeatures = model.BasicPlan
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.L().Error("Couldn't initialize features", zap.Error(err))
return err
}
}
return nil
}
func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.LicenseV3, apiError *model.ApiError) {
licenses, err := lm.repo.GetLicensesV3(ctx)
if err != nil {
return nil, model.InternalError(err)
}
for _, l := range licenses {
if lm.activeLicenseV3 != nil && l.Key == lm.activeLicenseV3.Key {
l.IsCurrent = true
}
if l.ValidUntil == -1 {
// for subscriptions, there is no end date as such,
// but to show the user some validity we default to a one-year timespan
l.ValidUntil = l.ValidFrom + 31556926
}
response = append(response, l)
}
return response, nil
}
// ValidatorV3 revalidates the license at a fixed interval
func (lm *Manager) ValidatorV3(ctx context.Context) {
zap.L().Info("ValidatorV3 started!")
defer close(lm.terminated)
tick := time.NewTicker(validationFrequency)
defer tick.Stop()
_ = lm.ValidateV3(ctx)
for {
select {
case <-lm.done:
return
default:
select {
case <-lm.done:
return
case <-tick.C:
_ = lm.ValidateV3(ctx)
}
}
}
}
func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {
license, apiError := validate.ValidateLicenseV3(lm.activeLicenseV3.Key)
if apiError != nil {
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError
}
err := lm.repo.UpdateLicenseV3(ctx, license)
if err != nil {
return model.BadRequest(errors.Wrap(err, "failed to update the new license"))
}
lm.SetActiveV3(license)
return nil
}
func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
zap.L().Info("License validation started")
if lm.activeLicenseV3 == nil {
return nil
}
defer func() {
lm.mutex.Lock()
lm.lastValidated = time.Now().Unix()
if reterr != nil {
zap.L().Error("License validation completed with error", zap.Error(reterr))
atomic.AddUint64(&lm.failedAttempts, 1)
// default to basic plan if validation fails for three consecutive times
if atomic.LoadUint64(&lm.failedAttempts) > 3 {
zap.L().Error("License validation completed with error for three consecutive times, defaulting to basic plan", zap.String("license_id", lm.activeLicenseV3.ID), zap.Bool("license_validation", false))
lm.activeLicenseV3 = nil
lm.activeFeatures = model.BasicPlan
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.L().Error("Couldn't initialize features", zap.Error(err))
}
lm.done <- struct{}{}
lm.validatorRunning = false
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
map[string]interface{}{"err": reterr.Error()}, "", true, false)
} else {
// reset the failed attempts counter
atomic.StoreUint64(&lm.failedAttempts, 0)
zap.L().Info("License validation completed with no errors")
}
lm.mutex.Unlock()
}()
err := lm.RefreshLicense(ctx)
if err != nil {
return err
}
return nil
}
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseResponse *model.LicenseV3, errResponse *model.ApiError) {
defer func() {
if errResponse != nil {
claims, ok := authtypes.ClaimsFromContext(ctx)
if ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
map[string]interface{}{"err": errResponse.Err.Error()}, claims.Email, true, false)
}
}
}()
license, apiError := validate.ValidateLicenseV3(licenseKey)
if apiError != nil {
zap.L().Error("failed to get the license", zap.Error(apiError.Err))
return nil, apiError
}
// insert the new license to the sqlite db
err := lm.repo.InsertLicenseV3(ctx, license)
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, err
}
// license is valid, activate it
lm.SetActiveV3(license)
return license, nil
}
func (lm *Manager) GetActiveLicense() *model.LicenseV3 {
return lm.activeLicenseV3
}
// CheckFeature will be internally used by backend routines
// for feature gating
func (lm *Manager) CheckFeature(featureKey string) error {
feature, err := lm.repo.GetFeature(featureKey)
if err != nil {
return err
}
if feature.Active {
return nil
}
return basemodel.ErrFeatureUnavailable{Key: featureKey}
}
// GetFeatureFlags returns current active features
func (lm *Manager) GetFeatureFlags() (basemodel.FeatureSet, error) {
return lm.repo.GetAllFeatures()
}
func (lm *Manager) InitFeatures(features basemodel.FeatureSet) error {
featureStatus := make([]types.FeatureStatus, len(features))
for i, f := range features {
featureStatus[i] = types.FeatureStatus{
Name: f.Name,
Active: f.Active,
Usage: int(f.Usage),
UsageLimit: int(f.UsageLimit),
Route: f.Route,
}
}
return lm.repo.InitFeatures(featureStatus)
}
func (lm *Manager) UpdateFeatureFlag(feature basemodel.Feature) error {
return lm.repo.UpdateFeature(types.FeatureStatus{
Name: feature.Name,
Active: feature.Active,
Usage: int(feature.Usage),
UsageLimit: int(feature.UsageLimit),
Route: feature.Route,
})
}
func (lm *Manager) GetFeatureFlag(key string) (basemodel.Feature, error) {
featureStatus, err := lm.repo.GetFeature(key)
if err != nil {
return basemodel.Feature{}, err
}
return basemodel.Feature{
Name: featureStatus.Name,
Active: featureStatus.Active,
Usage: int64(featureStatus.Usage),
UsageLimit: int64(featureStatus.UsageLimit),
Route: featureStatus.Route,
}, nil
}
// GetRepo return the license repo
func (lm *Manager) GetRepo() *Repo {
return lm.repo
}
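A typical boot sequence wires the pieces above together: StartManager loads the active license (or falls back to the basic plan), and CheckFeature then gates optional behaviour. A rough sketch, with the wiring and helper name being illustrative rather than the repository's actual startup path:

func bootstrapLicensing(db *sqlx.DB, store sqlstore.SQLStore) (*license.Manager, error) {
    lm, err := license.StartManager(db, store)
    if err != nil {
        return nil, err
    }
    // Gate a paid capability; an error means the feature is inactive or unknown.
    if err := lm.CheckFeature(model.SSO); err != nil {
        zap.L().Info("SSO not available on the current plan", zap.Error(err))
    }
    return lm, nil
}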

View File

@@ -6,31 +6,22 @@ import (
"os" "os"
"time" "time"
"github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/app" "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore" "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/config" "github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider" "github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider" "github.com/SigNoz/signoz/pkg/config/fileprovider"
"github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/query-service/auth"
pkglicensing "github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants" baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook" "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version" "github.com/SigNoz/signoz/pkg/version"
pkgzeus "github.com/SigNoz/signoz/pkg/zeus"
"go.uber.org/zap" "go.uber.org/zap"
"go.uber.org/zap/zapcore" "go.uber.org/zap/zapcore"
) )
// Deprecated: Please use the logger from pkg/instrumentation.
func initZapLog() *zap.Logger { func initZapLog() *zap.Logger {
config := zap.NewProductionConfig() config := zap.NewProductionConfig()
config.EncoderConfig.TimeKey = "timestamp" config.EncoderConfig.TimeKey = "timestamp"
@@ -60,41 +51,29 @@ func main() {
var gatewayUrl string
var useLicensesV3 bool
-// Deprecated
flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
-// Deprecated
flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces")
-// Deprecated
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
-// Deprecated
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
-// Deprecated
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
-// Deprecated
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
-// Deprecated
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
-// Deprecated
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
-// Deprecated
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
-// Deprecated
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)")
flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
-// Deprecated
flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
flag.Parse()
loggerMgr := initZapLog()
zap.ReplaceGlobals(loggerMgr)
defer loggerMgr.Sync() // flushes buffer, if any
-ctx := context.Background()
-config, err := signoz.NewConfig(ctx, config.ResolverConfig{
+config, err := signoz.NewConfig(context.Background(), config.ResolverConfig{
Uris: []string{"env:"},
ProviderFactories: []config.ProviderFactory{
envprovider.NewFactory(),
@@ -117,6 +96,18 @@ func main() {
zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err)) zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
} }
signoz, err := signoz.New(
context.Background(),
config,
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
)
if err != nil {
zap.L().Fatal("Failed to create signoz", zap.Error(err))
}
jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET") jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
if len(jwtSecret) == 0 { if len(jwtSecret) == 0 {
@@ -127,36 +118,23 @@ func main() {
jwt := authtypes.NewJWT(jwtSecret, 30*time.Minute, 30*24*time.Hour)
-signoz, err := signoz.New(
-context.Background(),
-config,
-jwt,
-zeus.Config(),
-httpzeus.NewProviderFactory(),
-licensing.Config(24*time.Hour, 3),
-func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
-return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter)
-},
-signoz.NewEmailingProviderFactories(),
-signoz.NewCacheProviderFactories(),
-signoz.NewWebProviderFactories(),
-sqlStoreFactories,
-signoz.NewTelemetryStoreProviderFactories(),
-)
-if err != nil {
-zap.L().Fatal("Failed to create signoz", zap.Error(err))
-}
serverOptions := &app.ServerOptions{
Config: config,
SigNoz: signoz,
HTTPHostPort: baseconst.HTTPHostPort,
+PromConfigPath: promConfigPath,
+SkipTopLvlOpsPath: skipTopLvlOpsPath,
PreferSpanMetrics: preferSpanMetrics,
PrivateHostPort: baseconst.PrivateHostPort,
+DisableRules: disableRules,
+RuleRepoURL: ruleRepoURL,
+CacheConfigPath: cacheConfigPath,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
GatewayUrl: gatewayUrl,
+UseLogsNewSchema: useLogsNewSchema,
+UseTraceNewSchema: useTraceNewSchema,
Jwt: jwt,
}
@@ -165,22 +143,26 @@ func main() {
zap.L().Fatal("Failed to create server", zap.Error(err)) zap.L().Fatal("Failed to create server", zap.Error(err))
} }
if err := server.Start(ctx); err != nil { if err := server.Start(context.Background()); err != nil {
zap.L().Fatal("Could not start server", zap.Error(err)) zap.L().Fatal("Could not start server", zap.Error(err))
} }
signoz.Start(ctx) if err := auth.InitAuthCache(context.Background()); err != nil {
zap.L().Fatal("Failed to initialize auth cache", zap.Error(err))
}
if err := signoz.Wait(ctx); err != nil { signoz.Start(context.Background())
if err := signoz.Wait(context.Background()); err != nil {
zap.L().Fatal("Failed to start signoz", zap.Error(err)) zap.L().Fatal("Failed to start signoz", zap.Error(err))
} }
err = server.Stop(ctx) err = server.Stop()
if err != nil { if err != nil {
zap.L().Fatal("Failed to stop server", zap.Error(err)) zap.L().Fatal("Failed to stop server", zap.Error(err))
} }
err = signoz.Stop(ctx) err = signoz.Stop(context.Background())
if err != nil { if err != nil {
zap.L().Fatal("Failed to stop signoz", zap.Error(err)) zap.L().Fatal("Failed to stop signoz", zap.Error(err))
} }

View File

@@ -0,0 +1,12 @@
package model
import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
// GettableInvitation overrides base object and adds precheck into
// response
type GettableInvitation struct {
*basemodel.InvitationResponseObject
Precheck *basemodel.PrecheckResponse `json:"precheck"`
}

View File

@@ -0,0 +1,244 @@
package model
import (
"encoding/json"
"fmt"
"reflect"
"time"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/pkg/errors"
)
type License struct {
Key string `json:"key" db:"key"`
ActivationId string `json:"activationId" db:"activationId"`
CreatedAt time.Time `db:"created_at"`
// PlanDetails contains the encrypted plan info
PlanDetails string `json:"planDetails" db:"planDetails"`
// stores parsed license details
LicensePlan
FeatureSet basemodel.FeatureSet
// populated in case license has any errors
ValidationMessage string `db:"validationMessage"`
// used only for sending details to front-end
IsCurrent bool `json:"isCurrent"`
}
func (l *License) MarshalJSON() ([]byte, error) {
return json.Marshal(&struct {
Key string `json:"key" db:"key"`
ActivationId string `json:"activationId" db:"activationId"`
ValidationMessage string `db:"validationMessage"`
IsCurrent bool `json:"isCurrent"`
PlanKey string `json:"planKey"`
ValidFrom time.Time `json:"ValidFrom"`
ValidUntil time.Time `json:"ValidUntil"`
Status string `json:"status"`
}{
Key: l.Key,
ActivationId: l.ActivationId,
IsCurrent: l.IsCurrent,
PlanKey: l.PlanKey,
ValidFrom: time.Unix(l.ValidFrom, 0),
ValidUntil: time.Unix(l.ValidUntil, 0),
Status: l.Status,
ValidationMessage: l.ValidationMessage,
})
}
type LicensePlan struct {
PlanKey string `json:"planKey"`
ValidFrom int64 `json:"validFrom"`
ValidUntil int64 `json:"validUntil"`
Status string `json:"status"`
}
type Licenses struct {
TrialStart int64 `json:"trialStart"`
TrialEnd int64 `json:"trialEnd"`
OnTrial bool `json:"onTrial"`
WorkSpaceBlock bool `json:"workSpaceBlock"`
TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"`
GracePeriodEnd int64 `json:"gracePeriodEnd"`
Licenses []License `json:"licenses"`
}
type SubscriptionServerResp struct {
Status string `json:"status"`
Data Licenses `json:"data"`
}
type Plan struct {
Name string `json:"name"`
}
type LicenseDB struct {
ID string `json:"id"`
Key string `json:"key"`
Data string `json:"data"`
}
type LicenseV3 struct {
ID string
Key string
Data map[string]interface{}
PlanName string
Features basemodel.FeatureSet
Status string
IsCurrent bool
ValidFrom int64
ValidUntil int64
}
func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) {
var zeroValue T
if val, ok := data[key]; ok {
if value, ok := val.(T); ok {
return value, nil
}
return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue))
}
return zeroValue, fmt.Errorf("%s key is missing", key)
}
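Because encoding/json decodes JSON numbers into float64, the numeric fields below are first extracted as float64 and then truncated to int64; the generic helper makes that type expectation explicit and fails loudly on mismatches. A small illustration (values are made up):

func exampleExtract() {
    payload := map[string]interface{}{
        "id":         "lic-123",
        "valid_from": float64(1730899309), // how encoding/json represents a JSON number
    }
    id, err := extractKeyFromMapStringInterface[string](payload, "id")
    fmt.Println(id, err) // "lic-123" <nil>
    _, err = extractKeyFromMapStringInterface[string](payload, "valid_from")
    fmt.Println(err) // valid_from key is not a valid string
    _, err = extractKeyFromMapStringInterface[int64](payload, "missing")
    fmt.Println(err) // missing key is missing
}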
func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
var features basemodel.FeatureSet
// extract id from data
licenseID, err := extractKeyFromMapStringInterface[string](data, "id")
if err != nil {
return nil, err
}
delete(data, "id")
// extract key from data
licenseKey, err := extractKeyFromMapStringInterface[string](data, "key")
if err != nil {
return nil, err
}
delete(data, "key")
// extract status from data
status, err := extractKeyFromMapStringInterface[string](data, "status")
if err != nil {
return nil, err
}
planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan")
if err != nil {
return nil, err
}
planName, err := extractKeyFromMapStringInterface[string](planMap, "name")
if err != nil {
return nil, err
}
// if license status is invalid then default it to basic
if status == LicenseStatusInvalid {
planName = PlanNameBasic
}
featuresFromZeus := basemodel.FeatureSet{}
if _features, ok := data["features"]; ok {
featuresData, err := json.Marshal(_features)
if err != nil {
return nil, errors.Wrap(err, "failed to marshal features data")
}
if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil {
return nil, errors.Wrap(err, "failed to unmarshal features data")
}
}
switch planName {
case PlanNameEnterprise:
features = append(features, EnterprisePlan...)
case PlanNameBasic:
features = append(features, BasicPlan...)
default:
features = append(features, BasicPlan...)
}
if len(featuresFromZeus) > 0 {
for _, feature := range featuresFromZeus {
exists := false
for i, existingFeature := range features {
if existingFeature.Name == feature.Name {
features[i] = feature // Replace existing feature
exists = true
break
}
}
if !exists {
features = append(features, feature) // Append if it doesn't exist
}
}
}
data["features"] = features
_validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from")
if err != nil {
_validFrom = 0
}
validFrom := int64(_validFrom)
_validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until")
if err != nil {
_validUntil = 0
}
validUntil := int64(_validUntil)
return &LicenseV3{
ID: licenseID,
Key: licenseKey,
Data: data,
PlanName: planName,
Features: features,
ValidFrom: validFrom,
ValidUntil: validUntil,
Status: status,
}, nil
}
func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) {
licenseDataWithIdAndKey := data
licenseDataWithIdAndKey["id"] = id
licenseDataWithIdAndKey["key"] = key
return NewLicenseV3(licenseDataWithIdAndKey)
}
func ConvertLicenseV3ToLicenseV2(l *LicenseV3) *License {
planKeyFromPlanName, ok := MapOldPlanKeyToNewPlanName[l.PlanName]
if !ok {
planKeyFromPlanName = Basic
}
return &License{
Key: l.Key,
ActivationId: "",
PlanDetails: "",
FeatureSet: l.Features,
ValidationMessage: "",
IsCurrent: l.IsCurrent,
LicensePlan: LicensePlan{
PlanKey: planKeyFromPlanName,
ValidFrom: l.ValidFrom,
ValidUntil: l.ValidUntil,
Status: l.Status},
}
}
type CheckoutRequest struct {
SuccessURL string `json:"url"`
}
type PortalRequest struct {
SuccessURL string `json:"url"`
}

View File

@@ -0,0 +1,170 @@
package model
import (
"encoding/json"
"testing"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewLicenseV3(t *testing.T) {
testCases := []struct {
name string
data []byte
pass bool
expected *LicenseV3
error error
}{
{
name: "Error for missing license id",
data: []byte(`{}`),
pass: false,
error: errors.New("id key is missing"),
},
{
name: "Error for license id not being a valid string",
data: []byte(`{"id": 10}`),
pass: false,
error: errors.New("id key is not a valid string"),
},
{
name: "Error for missing license key",
data: []byte(`{"id":"does-not-matter"}`),
pass: false,
error: errors.New("key key is missing"),
},
{
name: "Error for invalid string license key",
data: []byte(`{"id":"does-not-matter","key":10}`),
pass: false,
error: errors.New("key key is not a valid string"),
},
{
name: "Error for missing license status",
data: []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`),
pass: false,
error: errors.New("status key is missing"),
},
{
name: "Error for invalid string license status",
data: []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`),
pass: false,
error: errors.New("status key is not a valid string"),
},
{
name: "Error for missing license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`),
pass: false,
error: errors.New("plan key is missing"),
},
{
name: "Error for invalid json license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`),
pass: false,
error: errors.New("plan key is not a valid map[string]interface {}"),
},
{
name: "Error for invalid license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`),
pass: false,
error: errors.New("name key is missing"),
},
{
name: "Parse the entire license properly",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
},
"category": "FREE",
"status": "ACTIVE",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameEnterprise,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "ACTIVE",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
{
name: "Fallback to basic plan if license status is invalid",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
},
"category": "FREE",
"status": "INVALID",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameBasic,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "INVALID",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
{
name: "fallback states for validFrom and validUntil",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
},
"valid_from": 1234.456,
"valid_until": 5678.567,
"category": "FREE",
"status": "ACTIVE",
},
PlanName: PlanNameEnterprise,
ValidFrom: 1234,
ValidUntil: 5678,
Status: "ACTIVE",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
}
for _, tc := range testCases {
var licensePayload map[string]interface{}
err := json.Unmarshal(tc.data, &licensePayload)
require.NoError(t, err)
license, err := NewLicenseV3(licensePayload)
if license != nil {
license.Features = make(model.FeatureSet, 0)
delete(license.Data, "features")
}
if tc.pass {
require.NoError(t, err)
require.NotNil(t, license)
assert.Equal(t, tc.expected, license)
} else {
require.Error(t, err)
assert.EqualError(t, err, tc.error.Error())
require.Nil(t, license)
}
}
}

View File

@@ -0,0 +1,7 @@
package model
type CreatePATRequestBody struct {
Name string `json:"name"`
Role string `json:"role"`
ExpiresInDays int64 `json:"expiresInDays"`
}

View File

@@ -0,0 +1,131 @@
package model
import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
const SSO = "SSO"
const Basic = "BASIC_PLAN"
const Enterprise = "ENTERPRISE_PLAN"
var (
PlanNameEnterprise = "ENTERPRISE"
PlanNameBasic = "BASIC"
)
var (
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
)
var (
LicenseStatusInvalid = "INVALID"
)
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
const PremiumSupport = "PREMIUM_SUPPORT"
var BasicPlan = basemodel.FeatureSet{
basemodel.Feature{
Name: SSO,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var EnterprisePlan = basemodel.FeatureSet{
basemodel.Feature{
Name: SSO,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Onboarding,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: ChatSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}

View File

@@ -12,11 +12,10 @@ import (
"go.uber.org/zap" "go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/anomaly" "github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache" "github.com/SigNoz/signoz/pkg/query-service/cache"
"github.com/SigNoz/signoz/pkg/query-service/common" "github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes" ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2" querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder" "github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -54,7 +53,6 @@ type AnomalyRule struct {
func NewAnomalyRule(
id string,
-orgID valuer.UUID,
p *ruletypes.PostableRule,
reader interfaces.Reader,
cache cache.Cache,
@@ -68,7 +66,7 @@ func NewAnomalyRule(
p.RuleCondition.Target = &target p.RuleCondition.Target = &target
} }
baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...) baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -160,18 +158,18 @@ func (r *AnomalyRule) GetSelectedQuery() string {
return r.Condition().GetSelectedQueryName()
}
-func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
+func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRange(ts)
if err != nil {
return nil, err
}
-err = r.PopulateTemporality(ctx, orgID, params)
+err = r.PopulateTemporality(ctx, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
-anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
+anomalies, err := r.provider.GetAnomalies(ctx, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
@@ -206,7 +204,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
prevState := r.State()
valueFormatter := formatter.FromUnit(r.Unit())
-res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
+res, err := r.buildAndRunQuery(ctx, ts)
if err != nil {
return nil, err

View File

@@ -9,7 +9,6 @@ import (
baserules "github.com/SigNoz/signoz/pkg/query-service/rules" baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels" "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes" ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid" "github.com/google/uuid"
"go.uber.org/zap" "go.uber.org/zap"
) )
@@ -24,9 +23,10 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
// create a threshold rule
tr, err := baserules.NewThresholdRule(
ruleId,
-opts.OrgID,
opts.Rule,
opts.Reader,
+opts.UseLogsNewSchema,
+opts.UseTraceNewSchema,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
baserules.WithSQLStore(opts.SQLStore),
)
@@ -45,7 +45,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
// create promql rule
pr, err := baserules.NewPromRule(
ruleId,
-opts.OrgID,
opts.Rule,
opts.Logger,
opts.Reader,
@@ -66,7 +65,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
// create anomaly rule
ar, err := NewAnomalyRule(
ruleId,
-opts.OrgID,
opts.Rule,
opts.Reader,
opts.Cache,
@@ -123,16 +121,17 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
// create a threshold rule
rule, err = baserules.NewThresholdRule(
alertname,
-opts.OrgID,
parsedRule,
opts.Reader,
+opts.UseLogsNewSchema,
+opts.UseTraceNewSchema,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
baserules.WithSQLStore(opts.SQLStore),
)
if err != nil {
-zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", alertname), zap.Error(err))
+zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
@@ -141,7 +140,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
// create promql rule // create promql rule
rule, err = baserules.NewPromRule( rule, err = baserules.NewPromRule(
alertname, alertname,
opts.OrgID,
parsedRule, parsedRule,
opts.Logger, opts.Logger,
opts.Reader, opts.Reader,
@@ -152,14 +150,13 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
) )
if err != nil { if err != nil {
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", alertname), zap.Error(err)) zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
return 0, basemodel.BadRequest(err) return 0, basemodel.BadRequest(err)
} }
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly { } else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule // create anomaly rule
rule, err = NewAnomalyRule( rule, err = NewAnomalyRule(
alertname, alertname,
opts.OrgID,
parsedRule, parsedRule,
opts.Reader, opts.Reader,
opts.Cache, opts.Cache,
@@ -168,7 +165,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
baserules.WithSQLStore(opts.SQLStore), baserules.WithSQLStore(opts.SQLStore),
) )
if err != nil { if err != nil {
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err)) zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", rule.Name()), zap.Error(err))
return 0, basemodel.BadRequest(err) return 0, basemodel.BadRequest(err)
} }
} else { } else {
@@ -194,7 +191,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
// newTask returns an appropriate group for // newTask returns an appropriate group for
// rule type // rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task { func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID string) baserules.Task {
if taskType == baserules.TaskTypeCh { if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID) return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
} }

View File

@@ -1,34 +1,34 @@
package ssotypes package sso
import ( import (
"context"
"errors"
"fmt" "fmt"
"errors"
"context"
"net/http" "net/http"
"github.com/coreos/go-oidc/v3/oidc" "github.com/coreos/go-oidc/v3/oidc"
"golang.org/x/oauth2" "golang.org/x/oauth2"
) )
type GoogleOAuthProvider struct { type GoogleOAuthProvider struct {
RedirectURI string RedirectURI string
OAuth2Config *oauth2.Config OAuth2Config *oauth2.Config
Verifier *oidc.IDTokenVerifier Verifier *oidc.IDTokenVerifier
Cancel context.CancelFunc Cancel context.CancelFunc
HostedDomain string HostedDomain string
} }
func (g *GoogleOAuthProvider) BuildAuthURL(state string) (string, error) { func (g *GoogleOAuthProvider) BuildAuthURL(state string) (string, error) {
var opts []oauth2.AuthCodeOption var opts []oauth2.AuthCodeOption
// set hosted domain. google supports multiple hosted domains but in our case // set hosted domain. google supports multiple hosted domains but in our case
// we have one config per host domain. // we have one config per host domain.
opts = append(opts, oauth2.SetAuthURLParam("hd", g.HostedDomain)) opts = append(opts, oauth2.SetAuthURLParam("hd", g.HostedDomain))
return g.OAuth2Config.AuthCodeURL(state, opts...), nil return g.OAuth2Config.AuthCodeURL(state, opts...), nil
} }
type oauth2Error struct { type oauth2Error struct{
error string error string
errorDescription string errorDescription string
} }
@@ -54,6 +54,7 @@ func (g *GoogleOAuthProvider) HandleCallback(r *http.Request) (identity *SSOIden
return g.createIdentity(r.Context(), token) return g.createIdentity(r.Context(), token)
} }
func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2.Token) (identity *SSOIdentity, err error) { func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2.Token) (identity *SSOIdentity, err error) {
rawIDToken, ok := token.Extra("id_token").(string) rawIDToken, ok := token.Extra("id_token").(string)
if !ok { if !ok {
@@ -75,7 +76,7 @@ func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2.
} }
if claims.HostedDomain != g.HostedDomain { if claims.HostedDomain != g.HostedDomain {
return identity, fmt.Errorf("oidc: unexpected hd claim %v", claims.HostedDomain) return identity, fmt.Errorf("oidc: unexpected hd claim %v", claims.HostedDomain)
} }
identity = &SSOIdentity{ identity = &SSOIdentity{
@@ -88,3 +89,4 @@ func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2.
return identity, nil return identity, nil
} }

View File

@@ -0,0 +1,31 @@
package sso
import (
"net/http"
)
// SSOIdentity contains details of user received from SSO provider
type SSOIdentity struct {
UserID string
Username string
PreferredUsername string
Email string
EmailVerified bool
ConnectorData []byte
}
// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth
// style redirect flow to determine user information.
type OAuthCallbackProvider interface {
// The initial URL user would be redirect to.
// OAuth2 implementations support various scopes but we only need profile and user as
// the roles are still being managed in SigNoz.
BuildAuthURL(state string) (string, error)
// Handle the callback to the server (after login at oauth provider site)
// and return a email identity.
// At the moment we dont support auto signup flow (based on domain), so
// the full identity (including name, group etc) is not required outside of the
// connector
HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}
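
A minimal sketch of how a connector implementing this interface gets wired into HTTP handlers is below; the route wording, the relay-state handling, and the local copies of the types are illustrative assumptions, not the actual SigNoz wiring.

package ssoexample

import (
	"fmt"
	"net/http"
)

// Local stand-ins for the sso types above, so the sketch compiles on its own.
type SSOIdentity struct {
	UserID        string
	Email         string
	EmailVerified bool
}

type OAuthCallbackProvider interface {
	BuildAuthURL(state string) (string, error)
	HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}

// LoginHandler redirects the browser to the provider's consent page.
func LoginHandler(p OAuthCallbackProvider) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// In practice the state is a signed, per-request relay token.
		authURL, err := p.BuildAuthURL("relay-state")
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		http.Redirect(w, r, authURL, http.StatusFound)
	}
}

// CallbackHandler turns the provider callback into an email identity.
func CallbackHandler(p OAuthCallbackProvider) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		identity, err := p.HandleCallback(r)
		if err != nil {
			http.Error(w, "sso callback failed", http.StatusUnauthorized)
			return
		}
		// Only the verified email is needed downstream; roles stay managed in SigNoz.
		fmt.Fprintf(w, "signed in as %s", identity.Email)
	}
}

A GoogleOAuthProvider (previous file) satisfies this interface, so a configured instance can be passed to either handler directly.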

View File

@@ -1,4 +1,4 @@
package ssotypes package saml
import ( import (
"crypto/x509" "crypto/x509"
@@ -7,10 +7,10 @@ import (
"fmt" "fmt"
"strings" "strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/query-service/constants"
saml2 "github.com/russellhaering/gosaml2" saml2 "github.com/russellhaering/gosaml2"
dsig "github.com/russellhaering/goxmldsig" dsig "github.com/russellhaering/goxmldsig"
"go.uber.org/zap"
) )
func LoadCertificateStore(certString string) (dsig.X509CertificateStore, error) { func LoadCertificateStore(certString string) (dsig.X509CertificateStore, error) {
@@ -20,12 +20,12 @@ func LoadCertificateStore(certString string) (dsig.X509CertificateStore, error)
certData, err := base64.StdEncoding.DecodeString(certString) certData, err := base64.StdEncoding.DecodeString(certString)
if err != nil { if err != nil {
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to read certificate: %v", err) return certStore, fmt.Errorf(fmt.Sprintf("failed to read certificate: %v", err))
} }
idpCert, err := x509.ParseCertificate(certData) idpCert, err := x509.ParseCertificate(certData)
if err != nil { if err != nil {
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to prepare saml request, invalid cert: %s", err.Error()) return certStore, fmt.Errorf(fmt.Sprintf("failed to prepare saml request, invalid cert: %s", err.Error()))
} }
certStore.Roots = append(certStore.Roots, idpCert) certStore.Roots = append(certStore.Roots, idpCert)
@@ -40,12 +40,12 @@ func LoadCertFromPem(certString string) (dsig.X509CertificateStore, error) {
block, _ := pem.Decode([]byte(certString)) block, _ := pem.Decode([]byte(certString))
if block == nil { if block == nil {
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no valid pem cert found") return certStore, fmt.Errorf("no valid pem cert found")
} }
idpCert, err := x509.ParseCertificate(block.Bytes) idpCert, err := x509.ParseCertificate(block.Bytes)
if err != nil { if err != nil {
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse pem cert: %s", err.Error()) return certStore, fmt.Errorf(fmt.Sprintf("failed to parse pem cert: %s", err.Error()))
} }
certStore.Roots = append(certStore.Roots, idpCert) certStore.Roots = append(certStore.Roots, idpCert)
@@ -102,6 +102,6 @@ func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (*
IDPCertificateStore: certStore, IDPCertificateStore: certStore,
SPKeyStore: randomKeyStore, SPKeyStore: randomKeyStore,
} }
zap.L().Debug("SAML request", zap.Any("sp", sp))
return sp, nil return sp, nil
} }

View File

@@ -4,6 +4,7 @@ import (
"context" "context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"regexp"
"strings" "strings"
"sync/atomic" "sync/atomic"
"time" "time"
@@ -14,11 +15,11 @@ import (
"go.uber.org/zap" "go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/dao"
licenseserver "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/model" "github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption" "github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
"github.com/SigNoz/signoz/pkg/zeus"
) )
const ( const (
@@ -35,68 +36,74 @@ var (
type Manager struct { type Manager struct {
clickhouseConn clickhouse.Conn clickhouseConn clickhouse.Conn
licenseService licensing.Licensing licenseRepo *license.Repo
scheduler *gocron.Scheduler scheduler *gocron.Scheduler
zeus zeus.Zeus modelDao dao.ModelDao
orgGetter organization.Getter tenantID string
} }
func New(licenseService licensing.Licensing, clickhouseConn clickhouse.Conn, zeus zeus.Zeus, orgGetter organization.Getter) (*Manager, error) { func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, chUrl string) (*Manager, error) {
hostNameRegex := regexp.MustCompile(`tcp://(?P<hostname>.*):`)
hostNameRegexMatches := hostNameRegex.FindStringSubmatch(chUrl)
tenantID := ""
if len(hostNameRegexMatches) == 2 {
tenantID = hostNameRegexMatches[1]
tenantID = strings.TrimSuffix(tenantID, "-clickhouse")
}
m := &Manager{ m := &Manager{
// repository: repo,
clickhouseConn: clickhouseConn, clickhouseConn: clickhouseConn,
licenseService: licenseService, licenseRepo: licenseRepo,
scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC
zeus: zeus, modelDao: modelDao,
orgGetter: orgGetter, tenantID: tenantID,
} }
return m, nil return m, nil
} }
// start loads collects and exports any exported snapshot and starts the exporter // start loads collects and exports any exported snapshot and starts the exporter
func (lm *Manager) Start(ctx context.Context) error { func (lm *Manager) Start() error {
// compares the locker and stateUnlocked if both are same lock is applied else returns error // compares the locker and stateUnlocked if both are same lock is applied else returns error
if !atomic.CompareAndSwapUint32(&locker, stateUnlocked, stateLocked) { if !atomic.CompareAndSwapUint32(&locker, stateUnlocked, stateLocked) {
return fmt.Errorf("usage exporter is locked") return fmt.Errorf("usage exporter is locked")
} }
// upload usage once when starting the service _, err := lm.scheduler.Do(func() { lm.UploadUsage() })
_, err := lm.scheduler.Do(func() { lm.UploadUsage(ctx) })
if err != nil { if err != nil {
return err return err
} }
lm.UploadUsage(ctx) // upload usage once when starting the service
lm.UploadUsage()
lm.scheduler.StartAsync() lm.scheduler.StartAsync()
return nil return nil
} }
func (lm *Manager) UploadUsage(ctx context.Context) { func (lm *Manager) UploadUsage() {
organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx) ctx := context.Background()
// check if license is present or not
license, err := lm.licenseRepo.GetActiveLicense(ctx)
if err != nil { if err != nil {
zap.L().Error("failed to get organizations", zap.Error(err)) zap.L().Error("failed to get active license", zap.Error(err))
return
}
if license == nil {
// we will not start the usage reporting if license is not present.
zap.L().Info("no license present, skipping usage reporting")
return return
} }
for _, organization := range organizations {
// check if license is present or not
license, err := lm.licenseService.GetActive(ctx, organization.ID)
if err != nil {
zap.L().Error("failed to get active license", zap.Error(err))
return
}
if license == nil {
// we will not start the usage reporting if license is not present.
zap.L().Info("no license present, skipping usage reporting")
return
}
usages := []model.UsageDB{} usages := []model.UsageDB{}
// get usage from clickhouse // get usage from clickhouse
dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"} dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"}
query := ` query := `
SELECT tenant, collector_id, exporter_id, timestamp, data SELECT tenant, collector_id, exporter_id, timestamp, data
FROM %s.distributed_usage as u1 FROM %s.distributed_usage as u1
GLOBAL INNER JOIN GLOBAL INNER JOIN
@@ -111,76 +118,91 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
order by timestamp order by timestamp
` `
for _, db := range dbs { for _, db := range dbs {
dbusages := []model.UsageDB{} dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour))) err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") { if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err)) zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
return return
}
for _, u := range dbusages {
u.Type = db
usages = append(usages, u)
}
} }
for _, u := range dbusages {
u.Type = db
usages = append(usages, u)
}
}
if len(usages) <= 0 { if len(usages) <= 0 {
zap.L().Info("no snapshots to upload, skipping.") zap.L().Info("no snapshots to upload, skipping.")
return
}
zap.L().Info("uploading usage data")
orgName := ""
orgNames, orgError := lm.modelDao.GetOrgs(ctx)
if orgError != nil {
zap.L().Error("failed to get org data: %v", zap.Error(orgError))
}
if len(orgNames) == 1 {
orgName = orgNames[0].Name
}
usagesPayload := []model.Usage{}
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
return return
} }
zap.L().Info("uploading usage data") usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
usagesPayload := []model.Usage{} if err != nil {
for _, usage := range usages { zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
return
}
usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
return
}
usageData.CollectorID = usage.CollectorID
usageData.ExporterID = usage.ExporterID
usageData.Type = usage.Type
usageData.Tenant = "default"
usageData.OrgName = "default"
usageData.TenantId = "default"
usagesPayload = append(usagesPayload, usageData)
}
key, _ := uuid.Parse(license.Key)
payload := model.UsagePayload{
LicenseKey: key,
Usage: usagesPayload,
}
body, errv2 := json.Marshal(payload)
if errv2 != nil {
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
return return
} }
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body) usageData.CollectorID = usage.CollectorID
if errv2 != nil { usageData.ExporterID = usage.ExporterID
zap.L().Error("failed to upload usage: %v", zap.Error(errv2)) usageData.Type = usage.Type
usageData.Tenant = "default"
usageData.OrgName = orgName
usageData.TenantId = lm.tenantID
usagesPayload = append(usagesPayload, usageData)
}
key, _ := uuid.Parse(license.Key)
payload := model.UsagePayload{
LicenseKey: key,
Usage: usagesPayload,
}
lm.UploadUsageWithExponentalBackOff(ctx, payload)
}
func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload model.UsagePayload) {
for i := 1; i <= MaxRetries; i++ {
apiErr := licenseserver.SendUsage(ctx, payload)
if apiErr != nil && i == MaxRetries {
zap.L().Error("retries stopped : %v", zap.Error(apiErr))
// not returning error here since it is captured in the failed count // not returning error here since it is captured in the failed count
return return
} else if apiErr != nil {
// sleeping for exponential backoff
sleepDuration := RetryInterval * time.Duration(i)
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)
} else {
break
} }
} }
} }
func (lm *Manager) Stop(ctx context.Context) { func (lm *Manager) Stop() {
lm.scheduler.Stop() lm.scheduler.Stop()
zap.L().Info("sending usage data before shutting down") zap.L().Info("sending usage data before shutting down")
// send usage before shutting down // send usage before shutting down
lm.UploadUsage(ctx) lm.UploadUsage()
atomic.StoreUint32(&locker, stateUnlocked) atomic.StoreUint32(&locker, stateUnlocked)
} }
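
Below is a standalone sketch of the retry loop inside UploadUsageWithExponentalBackOff, assuming MaxRetries and RetryInterval constants like the ones declared at the top of this manager; send() stands in for the real upload call (licenseserver.SendUsage / zeus.PutMeters).

package main

import (
	"errors"
	"fmt"
	"time"
)

const (
	maxRetries    = 3               // assumed value, mirrors MaxRetries
	retryInterval = 5 * time.Second // assumed value, mirrors RetryInterval
)

// send stands in for the real usage upload.
func send() error { return errors.New("upstream unavailable") }

func uploadWithBackoff() {
	for i := 1; i <= maxRetries; i++ {
		err := send()
		if err == nil {
			return
		}
		if i == maxRetries {
			// Give up without returning an error; the failure is tracked elsewhere.
			fmt.Println("retries stopped:", err)
			return
		}
		// Wait before the next attempt; the delay grows with the attempt number
		// (interval × attempt), so attempts here are spaced 5s, 10s, ... apart.
		sleep := retryInterval * time.Duration(i)
		fmt.Printf("upload failed, retrying in %s: %v\n", sleep, err)
		time.Sleep(sleep)
	}
}

func main() { uploadWithBackoff() }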

View File

@@ -19,22 +19,19 @@ var (
var ( var (
Org = "org" Org = "org"
User = "user" User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration" CloudIntegration = "cloud_integration"
) )
var ( var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")` OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE` UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE` CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
) )
type dialect struct{} type dialect struct {
}
func (dialect *dialect) IntToTimestamp(ctx context.Context, bun bun.IDB, table string, column string) error { func (dialect *dialect) MigrateIntToTimestamp(ctx context.Context, bun bun.IDB, table string, column string) error {
columnType, err := dialect.GetColumnType(ctx, bun, table, column) columnType, err := dialect.GetColumnType(ctx, bun, table, column)
if err != nil { if err != nil {
return err return err
@@ -81,15 +78,7 @@ func (dialect *dialect) IntToTimestamp(ctx context.Context, bun bun.IDB, table s
return nil return nil
} }
func (dialect *dialect) IntToBoolean(ctx context.Context, bun bun.IDB, table string, column string) error { func (dialect *dialect) MigrateIntToBoolean(ctx context.Context, bun bun.IDB, table string, column string) error {
columnExists, err := dialect.ColumnExists(ctx, bun, table, column)
if err != nil {
return err
}
if !columnExists {
return nil
}
columnType, err := dialect.GetColumnType(ctx, bun, table, column) columnType, err := dialect.GetColumnType(ctx, bun, table, column)
if err != nil { if err != nil {
return err return err
@@ -162,26 +151,6 @@ func (dialect *dialect) ColumnExists(ctx context.Context, bun bun.IDB, table str
return count > 0, nil return count > 0, nil
} }
func (dialect *dialect) AddColumn(ctx context.Context, bun bun.IDB, table string, column string, columnExpr string) error {
exists, err := dialect.ColumnExists(ctx, bun, table, column)
if err != nil {
return err
}
if !exists {
_, err = bun.
NewAddColumn().
Table(table).
ColumnExpr(column + " " + columnExpr).
Exec(ctx)
if err != nil {
return err
}
}
return nil
}
func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) { func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) {
oldColumnExists, err := dialect.ColumnExists(ctx, bun, table, oldColumnName) oldColumnExists, err := dialect.ColumnExists(ctx, bun, table, oldColumnName)
if err != nil { if err != nil {
@@ -193,14 +162,10 @@ func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table str
return false, err return false, err
} }
if newColumnExists { if !oldColumnExists && newColumnExists {
return true, nil return true, nil
} }
if !oldColumnExists {
return false, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "old column: %s doesn't exist", oldColumnName)
}
_, err = bun. _, err = bun.
ExecContext(ctx, "ALTER TABLE "+table+" RENAME COLUMN "+oldColumnName+" TO "+newColumnName) ExecContext(ctx, "ALTER TABLE "+table+" RENAME COLUMN "+oldColumnName+" TO "+newColumnName)
if err != nil { if err != nil {
@@ -209,26 +174,6 @@ func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table str
return true, nil return true, nil
} }
func (dialect *dialect) DropColumn(ctx context.Context, bun bun.IDB, table string, column string) error {
exists, err := dialect.ColumnExists(ctx, bun, table, column)
if err != nil {
return err
}
if exists {
_, err = bun.
NewDropColumn().
Table(table).
Column(column).
Exec(ctx)
if err != nil {
return err
}
}
return nil
}
func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table interface{}) (bool, error) { func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table interface{}) (bool, error) {
count := 0 count := 0
@@ -268,10 +213,6 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, OrgReference) fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) { } else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference) fkReferences = append(fkReferences, UserReference)
} else if reference == UserNoCascade && !slices.Contains(fkReferences, UserReferenceNoCascade) {
fkReferences = append(fkReferences, UserReferenceNoCascade)
} else if reference == FactorPassword && !slices.Contains(fkReferences, FactorPasswordReference) {
fkReferences = append(fkReferences, FactorPasswordReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) { } else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference) fkReferences = append(fkReferences, CloudIntegrationReference)
} }
@@ -427,26 +368,3 @@ func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel
return nil return nil
} }
func (dialect *dialect) DropColumnWithForeignKeyConstraint(ctx context.Context, bunIDB bun.IDB, model interface{}, column string) error {
existingTable := bunIDB.Dialect().Tables().Get(reflect.TypeOf(model))
columnExists, err := dialect.ColumnExists(ctx, bunIDB, existingTable.Name, column)
if err != nil {
return err
}
if !columnExists {
return nil
}
_, err = bunIDB.
NewDropColumn().
Model(model).
Column(column).
Exec(ctx)
if err != nil {
return err
}
return nil
}
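
The helpers in this dialect (AddColumn, DropColumn, DropColumnWithForeignKeyConstraint) all lean on the same existence check so that re-running a migration is a no-op. A sketch of that guard is below; the table and column names are placeholders and columnExists is passed in rather than taken from the dialect.

package migrationsketch

import (
	"context"

	"github.com/uptrace/bun"
)

// addColumnIfMissing adds a column only when it is not already there, keeping
// the migration idempotent. columnExists stands in for dialect.ColumnExists.
func addColumnIfMissing(
	ctx context.Context,
	db bun.IDB,
	table, column, columnExpr string,
	columnExists func(ctx context.Context, db bun.IDB, table, column string) (bool, error),
) error {
	exists, err := columnExists(ctx, db, table, column)
	if err != nil {
		return err
	}
	if exists {
		// Nothing to do; a second run of the migration must not fail.
		return nil
	}
	_, err = db.NewAddColumn().
		Table(table).
		ColumnExpr(column + " " + columnExpr).
		Exec(ctx)
	return err
}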

View File

@@ -4,10 +4,8 @@ import (
"context" "context"
"database/sql" "database/sql"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/jackc/pgx/v5/pgconn"
"github.com/jackc/pgx/v5/pgxpool" "github.com/jackc/pgx/v5/pgxpool"
"github.com/jackc/pgx/v5/stdlib" "github.com/jackc/pgx/v5/stdlib"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
@@ -89,24 +87,3 @@ func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
func (provider *provider) RunInTxCtx(ctx context.Context, opts *sql.TxOptions, cb func(ctx context.Context) error) error { func (provider *provider) RunInTxCtx(ctx context.Context, opts *sql.TxOptions, cb func(ctx context.Context) error) error {
return provider.bundb.RunInTxCtx(ctx, opts, cb) return provider.bundb.RunInTxCtx(ctx, opts, cb)
} }
func (provider *provider) WrapNotFoundErrf(err error, code errors.Code, format string, args ...any) error {
if err == sql.ErrNoRows {
return errors.Wrapf(err, errors.TypeNotFound, code, format, args...)
}
return err
}
func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error {
var pgErr *pgconn.PgError
if errors.As(err, &pgErr) && pgErr.Code == "23505" {
return errors.Wrapf(err, errors.TypeAlreadyExists, code, format, args...)
}
return err
}
func (dialect *dialect) ToggleForeignKeyConstraint(ctx context.Context, bun *bun.DB, enable bool) error {
return nil
}

View File

@@ -6,17 +6,20 @@ import (
"net/url" "net/url"
"strings" "strings"
"github.com/SigNoz/signoz/pkg/types/ssotypes" "github.com/SigNoz/signoz/ee/query-service/sso"
"github.com/SigNoz/signoz/ee/query-service/sso/saml"
"github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/pkg/errors" "github.com/pkg/errors"
saml2 "github.com/russellhaering/gosaml2" saml2 "github.com/russellhaering/gosaml2"
"github.com/uptrace/bun" "github.com/uptrace/bun"
"go.uber.org/zap"
) )
type StorableOrgDomain struct { type StorableOrgDomain struct {
bun.BaseModel `bun:"table:org_domains"` bun.BaseModel `bun:"table:org_domains"`
TimeAuditable types.TimeAuditable
ID uuid.UUID `json:"id" bun:"id,pk,type:text"` ID uuid.UUID `json:"id" bun:"id,pk,type:text"`
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"` OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
Name string `json:"name" bun:"name,type:varchar(50),notnull,unique"` Name string `json:"name" bun:"name,type:varchar(50),notnull,unique"`
@@ -37,10 +40,10 @@ type GettableOrgDomain struct {
SsoEnabled bool `json:"ssoEnabled"` SsoEnabled bool `json:"ssoEnabled"`
SsoType SSOType `json:"ssoType"` SsoType SSOType `json:"ssoType"`
SamlConfig *ssotypes.SamlConfig `json:"samlConfig"` SamlConfig *SamlConfig `json:"samlConfig"`
GoogleAuthConfig *ssotypes.GoogleOAuthConfig `json:"googleAuthConfig"` GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"`
Org *Organization Org *types.Organization
} }
func (od *GettableOrgDomain) String() string { func (od *GettableOrgDomain) String() string {
@@ -102,16 +105,14 @@ func (od *GettableOrgDomain) GetSAMLIdpURL() string {
func (od *GettableOrgDomain) GetSAMLCert() string { func (od *GettableOrgDomain) GetSAMLCert() string {
if od.SamlConfig != nil { if od.SamlConfig != nil {
// remove any whitespaces from the cert return od.SamlConfig.SamlCert
cert := strings.ReplaceAll(od.SamlConfig.SamlCert, " ", "")
return cert
} }
return "" return ""
} }
// PrepareGoogleOAuthProvider creates GoogleProvider that is used in // PrepareGoogleOAuthProvider creates GoogleProvider that is used in
// requesting OAuth and also used in processing response from google // requesting OAuth and also used in processing response from google
func (od *GettableOrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (ssotypes.OAuthCallbackProvider, error) { func (od *GettableOrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {
if od.GoogleAuthConfig == nil { if od.GoogleAuthConfig == nil {
return nil, fmt.Errorf("GOOGLE OAUTH is not setup correctly for this domain") return nil, fmt.Errorf("GOOGLE OAUTH is not setup correctly for this domain")
} }
@@ -142,7 +143,7 @@ func (od *GettableOrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLSe
// currently we default it to host from window.location (received from browser) // currently we default it to host from window.location (received from browser)
issuer := siteUrl.Host issuer := siteUrl.Host
return ssotypes.PrepareRequest(issuer, acs, sourceUrl, od.GetSAMLEntityID(), od.GetSAMLIdpURL(), od.GetSAMLCert()) return saml.PrepareRequest(issuer, acs, sourceUrl, od.GetSAMLEntityID(), od.GetSAMLIdpURL(), od.GetSAMLCert())
} }
func (od *GettableOrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) { func (od *GettableOrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
@@ -183,6 +184,7 @@ func (od *GettableOrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err e
return googleProvider.BuildAuthURL(relayState) return googleProvider.BuildAuthURL(relayState)
default: default:
zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
return "", fmt.Errorf("unsupported SSO config for the domain") return "", fmt.Errorf("unsupported SSO config for the domain")
} }

View File

@@ -0,0 +1,76 @@
package types
import (
"crypto/rand"
"encoding/base64"
"time"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
type GettablePAT struct {
CreatedByUser PatUser `json:"createdByUser"`
UpdatedByUser PatUser `json:"updatedByUser"`
StorablePersonalAccessToken
}
type PatUser struct {
types.User
NotFound bool `json:"notFound"`
}
func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
return GettablePAT{
StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
}
}
type StorablePersonalAccessToken struct {
bun.BaseModel `bun:"table:personal_access_token"`
types.Identifiable
types.TimeAuditable
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
Token string `json:"token" bun:"token,type:text,notnull,unique"`
Name string `json:"name" bun:"name,type:text,notnull"`
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
}
func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
now := time.Now()
if expiresAt != 0 {
// convert expiresAt to unix timestamp from days
expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
}
// Generate a 32-byte random token.
token := make([]byte, 32)
rand.Read(token)
// Encode the token in base64.
encodedToken := base64.StdEncoding.EncodeToString(token)
return StorablePersonalAccessToken{
Token: encodedToken,
Name: name,
Role: role,
UserID: userID,
ExpiresAt: expiresAt,
LastUsed: 0,
Revoked: false,
UpdatedByUserID: "",
TimeAuditable: types.TimeAuditable{
CreatedAt: now,
UpdatedAt: now,
},
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
}
}
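
A self-contained sketch of the token and expiry derivation used by NewStorablePersonalAccessToken follows; the 30-day input is only an example, and as in the code above a zero value means the token never expires.

package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
	"time"
)

// newToken returns a random PAT value and its absolute expiry as a unix timestamp.
func newToken(expiresInDays int64) (token string, expiresAt int64) {
	now := time.Now()
	if expiresInDays != 0 {
		// The caller passes days; storage keeps an absolute unix timestamp.
		expiresAt = now.Unix() + expiresInDays*24*60*60
	}
	raw := make([]byte, 32)
	if _, err := rand.Read(raw); err != nil {
		panic(err) // crypto/rand failing is unrecoverable here
	}
	// Base64 keeps the 32 random bytes printable for storage and headers.
	return base64.StdEncoding.EncodeToString(raw), expiresAt
}

func main() {
	tok, exp := newToken(30)
	fmt.Println(tok, time.Unix(exp, 0).UTC())
}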

View File

@@ -1,41 +1,15 @@
package ssotypes package types
import ( import (
"context" "context"
"fmt" "fmt"
"net/http"
"net/url" "net/url"
"github.com/SigNoz/signoz/ee/query-service/sso"
"github.com/coreos/go-oidc/v3/oidc" "github.com/coreos/go-oidc/v3/oidc"
"golang.org/x/oauth2" "golang.org/x/oauth2"
) )
// SSOIdentity contains details of user received from SSO provider
type SSOIdentity struct {
UserID string
Username string
PreferredUsername string
Email string
EmailVerified bool
ConnectorData []byte
}
// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth
// style redirect flow to determine user information.
type OAuthCallbackProvider interface {
// The initial URL user would be redirect to.
// OAuth2 implementations support various scopes but we only need profile and user as
// the roles are still being managed in SigNoz.
BuildAuthURL(state string) (string, error)
// Handle the callback to the server (after login at oauth provider site)
// and return a email identity.
// At the moment we dont support auto signup flow (based on domain), so
// the full identity (including name, group etc) is not required outside of the
// connector
HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}
type SamlConfig struct { type SamlConfig struct {
SamlEntity string `json:"samlEntity"` SamlEntity string `json:"samlEntity"`
SamlIdp string `json:"samlIdp"` SamlIdp string `json:"samlIdp"`
@@ -53,7 +27,7 @@ const (
googleIssuerURL = "https://accounts.google.com" googleIssuerURL = "https://accounts.google.com"
) )
func (g *GoogleOAuthConfig) GetProvider(domain string, siteUrl *url.URL) (OAuthCallbackProvider, error) { func (g *GoogleOAuthConfig) GetProvider(domain string, siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {
ctx, cancel := context.WithCancel(context.Background()) ctx, cancel := context.WithCancel(context.Background())
@@ -73,7 +47,7 @@ func (g *GoogleOAuthConfig) GetProvider(domain string, siteUrl *url.URL) (OAuthC
siteUrl.Host, siteUrl.Host,
"api/v1/complete/google") "api/v1/complete/google")
return &GoogleOAuthProvider{ return &sso.GoogleOAuthProvider{
RedirectURI: g.RedirectURI, RedirectURI: g.RedirectURI,
OAuth2Config: &oauth2.Config{ OAuth2Config: &oauth2.Config{
ClientID: g.ClientID, ClientID: g.ClientID,

View File

@@ -1,42 +0,0 @@
package zeus
import (
"fmt"
neturl "net/url"
"sync"
"github.com/SigNoz/signoz/pkg/zeus"
)
// This will be set via ldflags at build time.
var (
url string = "<unset>"
deprecatedURL string = "<unset>"
)
var (
config zeus.Config
once sync.Once
)
// initializes the Zeus configuration
func Config() zeus.Config {
once.Do(func() {
parsedURL, err := neturl.Parse(url)
if err != nil {
panic(fmt.Errorf("invalid zeus URL: %w", err))
}
deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
if err != nil {
panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
}
config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
if err := config.Validate(); err != nil {
panic(fmt.Errorf("invalid zeus config: %w", err))
}
})
return config
}
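
This file relies on the url variables being injected at build time. A trimmed sketch of that pattern, a package-level string overridden via -ldflags -X and parsed exactly once, is shown below; the default URL and the module path in the comment are placeholders.

package zeusconfigsketch

import (
	"fmt"
	neturl "net/url"
	"sync"
)

// Overridden at build time, e.g.:
//   go build -ldflags "-X example.com/mod/zeusconfigsketch.url=https://zeus.example"
var url = "https://zeus.invalid"

var (
	parsed *neturl.URL
	once   sync.Once
)

// URL parses the injected value once and panics early on a misconfigured build.
func URL() *neturl.URL {
	once.Do(func() {
		u, err := neturl.Parse(url)
		if err != nil {
			panic(fmt.Errorf("invalid zeus URL: %w", err))
		}
		parsed = u
	})
	return parsed
}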

View File

@@ -1,189 +0,0 @@
package httpzeus
import (
"bytes"
"context"
"io"
"net/http"
"net/url"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
type Provider struct {
settings factory.ScopedProviderSettings
config zeus.Config
httpClient *client.Client
}
func NewProviderFactory() factory.ProviderFactory[zeus.Zeus, zeus.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
return New(ctx, providerSettings, config)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/zeus/httpzeus")
httpClient, err := client.New(
settings.Logger(),
providerSettings.TracerProvider,
providerSettings.MeterProvider,
client.WithRequestResponseLog(true),
client.WithRetryCount(3),
)
if err != nil {
return nil, err
}
return &Provider{
settings: settings,
config: config,
httpClient: httpClient,
}, nil
}
func (provider *Provider) GetLicense(ctx context.Context, key string) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/licenses/me"),
http.MethodGet,
key,
nil,
)
if err != nil {
return nil, err
}
return []byte(gjson.GetBytes(response, "data").String()), nil
}
func (provider *Provider) GetCheckoutURL(ctx context.Context, key string, body []byte) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/subscriptions/me/sessions/checkout"),
http.MethodPost,
key,
body,
)
if err != nil {
return nil, err
}
return []byte(gjson.GetBytes(response, "data").String()), nil
}
func (provider *Provider) GetPortalURL(ctx context.Context, key string, body []byte) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/subscriptions/me/sessions/portal"),
http.MethodPost,
key,
body,
)
if err != nil {
return nil, err
}
return []byte(gjson.GetBytes(response, "data").String()), nil
}
func (provider *Provider) GetDeployment(ctx context.Context, key string) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/deployments/me"),
http.MethodGet,
key,
nil,
)
if err != nil {
return nil, err
}
return []byte(gjson.GetBytes(response, "data").String()), nil
}
func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte) error {
_, err := provider.do(
ctx,
provider.config.DeprecatedURL.JoinPath("/api/v1/usage"),
http.MethodPost,
key,
data,
)
return err
}
func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/profiles/me"),
http.MethodPut,
key,
body,
)
return err
}
func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
http.MethodPut,
key,
body,
)
return err
}
func (provider *Provider) do(ctx context.Context, url *url.URL, method string, key string, requestBody []byte) ([]byte, error) {
request, err := http.NewRequestWithContext(ctx, method, url.String(), bytes.NewBuffer(requestBody))
if err != nil {
return nil, err
}
request.Header.Set("X-Signoz-Cloud-Api-Key", key)
request.Header.Set("Content-Type", "application/json")
response, err := provider.httpClient.Do(request)
if err != nil {
return nil, err
}
defer func() {
_ = response.Body.Close()
}()
body, err := io.ReadAll(response.Body)
if err != nil {
return nil, err
}
if response.StatusCode/100 == 2 {
return body, nil
}
return nil, provider.errFromStatusCode(response.StatusCode)
}
// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int) error {
switch statusCode {
case http.StatusBadRequest:
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
case http.StatusUnauthorized:
return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
case http.StatusForbidden:
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
case http.StatusNotFound:
return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
}
return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
}
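
A standard-library sketch of the do() helper pattern above is below: one request path that sets the API-key header, reads the body, and treats only 2xx as success. The typed errFromStatusCode mapping is collapsed into a plain error, and the URL and key are placeholders.

package main

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"net/http"
	"time"
)

func do(ctx context.Context, client *http.Client, method, url, key string, body []byte) ([]byte, error) {
	req, err := http.NewRequestWithContext(ctx, method, url, bytes.NewBuffer(body))
	if err != nil {
		return nil, err
	}
	// Same headers as the provider above; the key identifies the deployment.
	req.Header.Set("X-Signoz-Cloud-Api-Key", key)
	req.Header.Set("Content-Type", "application/json")

	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	respBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode/100 != 2 {
		// The real provider maps this to typed errors (unauthenticated, forbidden, ...).
		return nil, fmt.Errorf("zeus returned status %d", resp.StatusCode)
	}
	return respBody, nil
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	_, err := do(ctx, http.DefaultClient, http.MethodGet, "https://zeus.invalid/v2/licenses/me", "license-key", nil)
	fmt.Println(err)
}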

View File

@@ -110,8 +110,6 @@ module.exports = {
// eslint rules need to remove // eslint rules need to remove
'@typescript-eslint/no-shadow': 'off', '@typescript-eslint/no-shadow': 'off',
'import/no-cycle': 'off', 'import/no-cycle': 'off',
// https://typescript-eslint.io/rules/consistent-return/ check the warning for details
'consistent-return': 'off',
'prettier/prettier': [ 'prettier/prettier': [
'error', 'error',
{}, {},

frontend/.gitignore vendored
View File

@@ -1,4 +1,3 @@
# Sentry Config File # Sentry Config File
.env.sentry-build-plugin .env.sentry-build-plugin
.qodo

View File

@@ -1,10 +1,6 @@
# Ignore artifacts: # Ignore artifacts:
build build
coverage coverage
public/
# Ignore all MD files: # Ignore all MD files:
**/*.md **/*.md
# Ignore all JSON files:
**/*.json

View File

@@ -1,7 +1,7 @@
NODE_ENV="development" NODE_ENV="development"
BUNDLE_ANALYSER="true" BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/" FRONTEND_API_ENDPOINT="http://localhost:8080/"
PYLON_APP_ID="pylon-app-id" INTERCOM_APP_ID="intercom-app-id"
APPCUES_APP_ID="appcess-app-id"
PLAYWRIGHT_TEST_BASE_URL="http://localhost:8080"
CI="1" CI="1"

View File

@@ -15,7 +15,6 @@ const config: Config.InitialOptions = {
extensionsToTreatAsEsm: ['.ts'], extensionsToTreatAsEsm: ['.ts'],
'ts-jest': { 'ts-jest': {
useESM: true, useESM: true,
isolatedModules: true,
}, },
}, },
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'], testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
@@ -31,6 +30,11 @@ const config: Config.InitialOptions = {
testPathIgnorePatterns: ['/node_modules/', '/public/'], testPathIgnorePatterns: ['/node_modules/', '/public/'],
moduleDirectories: ['node_modules', 'src'], moduleDirectories: ['node_modules', 'src'],
testEnvironment: 'jest-environment-jsdom', testEnvironment: 'jest-environment-jsdom',
testEnvironmentOptions: {
'jest-playwright': {
browsers: ['chromium', 'firefox', 'webkit'],
},
},
coverageThreshold: { coverageThreshold: {
global: { global: {
statements: 80, statements: 80,

View File

@@ -15,6 +15,10 @@
"jest:coverage": "jest --coverage", "jest:coverage": "jest --coverage",
"jest:watch": "jest --watch", "jest:watch": "jest --watch",
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)", "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
"playwright": "NODE_ENV=testing playwright test --config=./playwright.config.ts",
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
"playwright:codegen:local": "playwright codegen http://localhost:3301",
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*", "husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
"commitlint": "commitlint --edit $1", "commitlint": "commitlint --edit $1",
"test": "jest", "test": "jest",
@@ -31,7 +35,6 @@
"@dnd-kit/core": "6.1.0", "@dnd-kit/core": "6.1.0",
"@dnd-kit/modifiers": "7.0.0", "@dnd-kit/modifiers": "7.0.0",
"@dnd-kit/sortable": "8.0.0", "@dnd-kit/sortable": "8.0.0",
"@dnd-kit/utilities": "3.2.2",
"@grafana/data": "^11.2.3", "@grafana/data": "^11.2.3",
"@mdx-js/loader": "2.3.0", "@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0", "@mdx-js/react": "2.3.0",
@@ -52,7 +55,7 @@
"ansi-to-html": "0.7.2", "ansi-to-html": "0.7.2",
"antd": "5.11.0", "antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1", "antd-table-saveas-excel": "2.2.1",
"axios": "1.8.2", "axios": "1.7.7",
"babel-eslint": "^10.1.0", "babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4", "babel-jest": "^29.6.4",
"babel-loader": "9.1.3", "babel-loader": "9.1.3",
@@ -76,10 +79,8 @@
"eventemitter3": "5.0.1", "eventemitter3": "5.0.1",
"file-loader": "6.1.1", "file-loader": "6.1.1",
"fontfaceobserver": "2.3.0", "fontfaceobserver": "2.3.0",
"history": "4.10.1",
"html-webpack-plugin": "5.5.0", "html-webpack-plugin": "5.5.0",
"http-proxy-middleware": "3.0.3", "http-proxy-middleware": "3.0.3",
"http-status-codes": "2.3.0",
"i18next": "^21.6.12", "i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3", "i18next-browser-languagedetector": "^6.1.3",
"i18next-http-backend": "^1.3.2", "i18next-http-backend": "^1.3.2",
@@ -88,7 +89,7 @@
"less": "^4.1.2", "less": "^4.1.2",
"less-loader": "^10.2.0", "less-loader": "^10.2.0",
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21",
"lucide-react": "0.498.0", "lucide-react": "0.427.0",
"mini-css-extract-plugin": "2.4.5", "mini-css-extract-plugin": "2.4.5",
"motion": "12.4.13", "motion": "12.4.13",
"overlayscrollbars": "^2.8.1", "overlayscrollbars": "^2.8.1",
@@ -113,8 +114,7 @@
"react-markdown": "8.0.7", "react-markdown": "8.0.7",
"react-query": "3.39.3", "react-query": "3.39.3",
"react-redux": "^7.2.2", "react-redux": "^7.2.2",
"react-router-dom": "^5.2.0", "react-router": "7.5.1",
"react-router-dom-v5-compat": "6.27.0",
"react-syntax-highlighter": "15.5.0", "react-syntax-highlighter": "15.5.0",
"react-use": "^17.3.2", "react-use": "^17.3.2",
"react-virtuoso": "4.0.3", "react-virtuoso": "4.0.3",
@@ -130,11 +130,10 @@
"tsconfig-paths-webpack-plugin": "^3.5.1", "tsconfig-paths-webpack-plugin": "^3.5.1",
"typescript": "^4.0.5", "typescript": "^4.0.5",
"uplot": "1.6.31", "uplot": "1.6.31",
"userpilot": "1.3.9",
"uuid": "^8.3.2", "uuid": "^8.3.2",
"web-vitals": "^0.2.4", "web-vitals": "^0.2.4",
"webpack": "5.94.0", "webpack": "5.94.0",
"webpack-dev-server": "^5.2.1", "webpack-dev-server": "^4.15.2",
"webpack-retry-chunk-load-plugin": "3.1.1", "webpack-retry-chunk-load-plugin": "3.1.1",
"xstate": "^4.31.0" "xstate": "^4.31.0"
}, },
@@ -161,6 +160,7 @@
"@commitlint/config-conventional": "^16.2.4", "@commitlint/config-conventional": "^16.2.4",
"@faker-js/faker": "9.3.0", "@faker-js/faker": "9.3.0",
"@jest/globals": "^27.5.1", "@jest/globals": "^27.5.1",
"@playwright/test": "^1.22.0",
"@testing-library/jest-dom": "5.16.5", "@testing-library/jest-dom": "5.16.5",
"@testing-library/react": "13.4.0", "@testing-library/react": "13.4.0",
"@testing-library/user-event": "14.4.3", "@testing-library/user-event": "14.4.3",
@@ -184,7 +184,7 @@
"@types/react-lottie": "1.2.10", "@types/react-lottie": "1.2.10",
"@types/react-redux": "^7.1.11", "@types/react-redux": "^7.1.11",
"@types/react-resizable": "3.0.3", "@types/react-resizable": "3.0.3",
"@types/react-router-dom": "^5.1.6", "@types/react-router": "^5.1.20",
"@types/react-syntax-highlighter": "15.5.7", "@types/react-syntax-highlighter": "15.5.7",
"@types/redux-mock-store": "1.0.4", "@types/redux-mock-store": "1.0.4",
"@types/styled-components": "^5.1.4", "@types/styled-components": "^5.1.4",
@@ -196,7 +196,8 @@
"autoprefixer": "10.4.19", "autoprefixer": "10.4.19",
"babel-plugin-styled-components": "^1.12.0", "babel-plugin-styled-components": "^1.12.0",
"compression-webpack-plugin": "9.0.0", "compression-webpack-plugin": "9.0.0",
"copy-webpack-plugin": "^11.0.0", "copy-webpack-plugin": "^8.1.0",
"critters-webpack-plugin": "^3.0.1",
"eslint": "^7.32.0", "eslint": "^7.32.0",
"eslint-config-airbnb": "^19.0.4", "eslint-config-airbnb": "^19.0.4",
"eslint-config-airbnb-typescript": "^16.1.4", "eslint-config-airbnb-typescript": "^16.1.4",
@@ -234,7 +235,7 @@
"ts-node": "^10.2.1", "ts-node": "^10.2.1",
"typescript-plugin-css-modules": "5.0.1", "typescript-plugin-css-modules": "5.0.1",
"webpack-bundle-analyzer": "^4.5.0", "webpack-bundle-analyzer": "^4.5.0",
"webpack-cli": "^5.1.4" "webpack-cli": "^4.9.2"
}, },
"lint-staged": { "lint-staged": {
"*.(js|jsx|ts|tsx)": [ "*.(js|jsx|ts|tsx)": [
@@ -252,8 +253,6 @@
"body-parser": "1.20.3", "body-parser": "1.20.3",
"http-proxy-middleware": "3.0.3", "http-proxy-middleware": "3.0.3",
"cross-spawn": "7.0.5", "cross-spawn": "7.0.5",
"cookie": "^0.7.1", "cookie": "^0.7.1"
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0"
} }
} }

View File

@@ -0,0 +1,23 @@
import { PlaywrightTestConfig } from '@playwright/test';
import dotenv from 'dotenv';
dotenv.config();
const config: PlaywrightTestConfig = {
forbidOnly: !!process.env.CI,
retries: process.env.CI ? 2 : 0,
preserveOutput: 'always',
name: 'Signoz',
testDir: './tests',
use: {
trace: 'retain-on-failure',
baseURL: process.env.PLAYWRIGHT_TEST_BASE_URL || 'http://localhost:3301',
},
updateSnapshots: 'all',
fullyParallel: !!process.env.CI,
quiet: false,
testMatch: ['**/*.spec.ts'],
reporter: process.env.CI ? 'github' : 'list',
};
export default config;

File diff suppressed because one or more lines are too long


View File

@@ -1 +0,0 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="m12.192 3.18-1.167 2.33-.583 1.165M7.31 12.74a.583.583 0 0 1-.835-.24L1.808 3.179"/><path d="M7 1.167c2.9 0 5.25.783 5.25 1.75 0 .966-2.35 1.75-5.25 1.75s-5.25-.784-5.25-1.75c0-.967 2.35-1.75 5.25-1.75ZM8.75 10.5h3.5M10.5 12.25v-3.5"/></g></svg>


View File

@@ -1 +0,0 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#a)" stroke-linecap="round" stroke-linejoin="round"><path d="M8 14.666A6.667 6.667 0 1 0 8 1.333a6.667 6.667 0 0 0 0 13.333Z" fill="#C0C1C3" stroke="#C0C1C3" stroke-width="2"/><path d="M8 11.333v-4H6.333M8 4.667h.007" stroke="#121317" stroke-width="1.333"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="80" height="80"><defs><linearGradient id="a" x1="0%" x2="100%" y1="100%" y2="0%"><stop offset="0%" stop-color="#4D27A8"/><stop offset="100%" stop-color="#A166FF"/></linearGradient></defs><g fill="none" fill-rule="evenodd"><path fill="url(#a)" d="M0 0h80v80H0z"/><path fill="#FFF" d="M36.008 36.977v-8.99h1.996v3.995l2.694-3.996h2.21l-3.123 4.204 3.38 4.787h-2.368l-2.793-4.345v4.345zM66 49.117l-.669.579c-1.347 1.18-3.16 1.77-4.973 1.77-1.815 0-3.63-.59-4.98-1.773l-1.333-1.155c-1.956-1.714-5.38-1.714-7.332-.004l-1.341 1.162c-2.693 2.36-7.255 2.362-9.953-.003l-1.333-1.155c-1.956-1.714-5.38-1.714-7.332-.004l-1.341 1.162c-2.694 2.36-7.255 2.362-9.953-.003l-.665-.576 1.307-1.511.668.58c1.957 1.714 5.381 1.714 7.332.003l1.342-1.162c2.693-2.36 7.255-2.36 9.952.004l1.334 1.154c1.957 1.715 5.38 1.715 7.332.004l1.341-1.162c2.693-2.36 7.255-2.36 9.953.004l1.333 1.154c1.956 1.715 5.38 1.715 7.332.004l.673-.583zM63.898 61.14l1.307 1.51-.669.58c-1.346 1.18-3.159 1.769-4.972 1.769-1.815 0-3.63-.59-4.98-1.773L53.25 62.07c-1.956-1.712-5.38-1.712-7.332-.004l-1.341 1.163c-2.692 2.36-7.254 2.361-9.953-.004l-1.333-1.155c-1.955-1.712-5.38-1.712-7.332-.004l-1.342 1.163c-2.692 2.36-7.254 2.361-9.952-.004L14 62.65l1.306-1.51.669.58c1.957 1.712 5.381 1.713 7.332.002l1.341-1.161c2.695-2.36 7.255-2.361 9.953.002l1.333 1.156c1.957 1.713 5.381 1.714 7.332.003l1.342-1.161c2.694-2.36 7.254-2.361 9.952.002l1.334 1.156c1.957 1.713 5.38 1.714 7.332.003zm0-6.333 1.307 1.509-.669.58c-2.693 2.36-7.254 2.362-9.953-.004l-1.333-1.154c-1.956-1.715-5.38-1.714-7.332-.004l-1.341 1.162c-1.347 1.18-3.16 1.77-4.972 1.77-1.815 0-3.63-.59-4.981-1.774l-1.333-1.154c-1.955-1.715-5.38-1.714-7.332-.004l-1.342 1.162c-2.691 2.36-7.253 2.36-9.952-.004L14 56.316l1.306-1.51.669.58c1.957 1.714 5.381 1.713 7.332.003l1.341-1.162c2.695-2.36 7.257-2.361 9.953.003l1.333 1.155c1.957 1.715 5.381 1.714 7.332.004l1.342-1.162c2.694-2.36 7.256-2.361 9.952.003l1.334 1.155c1.957 1.715 5.38 1.714 7.332.004zM24.033 37.976a1.999 1.999 0 0 1 0 3.996 1.999 1.999 0 0 1 0-3.996m14.969-20.978a1.999 1.999 0 0 1 0 3.996 1.999 1.999 0 0 1 0-3.996m16.965 22.976a2 2 0 0 1-1.995 1.998 1.999 1.999 0 0 1 0-3.996c1.1 0 1.995.896 1.995 1.998M39.002 22.992c.665 0 1.283-.18 1.835-.469l10.168 14.804a3.95 3.95 0 0 0-.883 1.648h-22.24A3.95 3.95 0 0 0 27 37.327l10.167-14.804c.552.29 1.17.469 1.835.469M24.032 43.97c1.855 0 3.405-1.279 3.85-2.997h22.24c.445 1.718 1.995 2.997 3.85 2.997a4 4 0 0 0 3.991-3.996 4 4 0 0 0-3.991-3.996c-.46 0-.896.094-1.309.238L42.337 21.18a3.96 3.96 0 0 0 .657-2.185A4 4 0 0 0 39.002 15a4 4 0 0 0-3.992 3.996c0 .807.244 1.556.657 2.185L25.34 36.216a4 4 0 0 0-1.308-.238 4 4 0 0 0-3.992 3.996 4 4 0 0 0 3.992 3.996"/></g></svg>


View File

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="80px" height="80px" viewBox="0 0 80 80" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<title>Icon-Architecture/64/Arch_ Amazon-API-Gateway_64</title>
<defs>
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
<stop stop-color="#B0084D" offset="0%"></stop>
<stop stop-color="#FF4F8B" offset="100%"></stop>
</linearGradient>
</defs>
<g id="Icon-Architecture/64/Arch_-Amazon-API-Gateway_64" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="Rectangle" fill="url(#linearGradient-1)">
<rect x="0" y="0" width="80" height="80"></rect>
</g>
<g id="Icon-Service/64/Amazon-API-Gateway_64" transform="translate(8.000000, 8.000000)" fill="#FFFFFF">
<path d="M26.065,47.6323642 L29,47.6323642 L29,45.6152989 L26.065,45.6152989 L26.065,47.6323642 Z M31,47.6323642 L34,47.6323642 L34,45.6152989 L31,45.6152989 L31,47.6323642 Z M19,6.62946112 L6,13.082053 L6,53.1934132 L19,57.589607 L19,6.62946112 Z M21,19.3934502 L21,45.6152989 L24,45.6152989 L24,47.6323642 L21,47.6323642 L21,58.9914674 C21,59.3162149 20.845,59.6207917 20.584,59.8103959 C20.412,59.9354539 20.207,60 20,60 C19.894,60 19.786,59.9828549 19.682,59.9475563 L4.682,54.8756456 C4.274,54.7374767 4,54.3522172 4,53.9195567 L4,12.4537371 C4,12.0704947 4.217,11.7185168 4.559,11.5490833 L19.559,4.10409539 C19.868,3.94978989 20.235,3.96794348 20.528,4.15250495 C20.821,4.33605789 21,4.65979687 21,5.00874917 L21,17.376385 L24,17.376385 L24,19.3934502 L21,19.3934502 Z M36,47.6323642 L39,47.6323642 L39,45.6152989 L36,45.6152989 L36,47.6323642 Z M36.065,19.3934502 L39,19.3934502 L39,17.376385 L36.065,17.376385 L36.065,19.3934502 Z M31.065,19.3934502 L34,19.3934502 L34,17.376385 L31.065,17.376385 L31.065,19.3934502 Z M26.065,19.3934502 L29,19.3934502 L29,17.376385 L26.065,17.376385 L26.065,19.3934502 Z M58,13.082053 L45,6.62946112 L45,57.589607 L58,53.1934132 L58,13.082053 Z M60,53.9195567 C60,54.3522172 59.726,54.7374767 59.318,54.8756456 L44.318,59.9475563 C44.214,59.9828549 44.106,60 44,60 C43.793,60 43.588,59.9354539 43.416,59.8103959 C43.155,59.6207917 43,59.3162149 43,58.9914674 L43,47.6323642 L41.065,47.6323642 L41.065,45.6152989 L43,45.6152989 L43,19.3934502 L41.065,19.3934502 L41.065,17.376385 L43,17.376385 L43,5.00874917 C43,4.65979687 43.179,4.33605789 43.472,4.15250495 C43.765,3.96794348 44.131,3.94978989 44.441,4.10409539 L59.441,11.5490833 C59.783,11.7185168 60,12.0704947 60,12.4537371 L60,53.9195567 Z M34.934,25.8067093 L33.066,25.0825829 L28.066,38.1935072 L29.934,38.9176337 L34.934,25.8067093 Z M41.707,32.2088745 C42.098,31.8145383 42.098,31.1761371 41.707,30.7828094 L37.707,26.7486788 L36.293,28.174744 L39.586,31.4958419 L36.293,34.8169399 L37.707,36.2430051 L41.707,32.2088745 Z M26.293,36.2430051 L22.293,32.2088745 C21.902,31.8145383 21.902,31.1761371 22.293,30.7828094 L26.293,26.7486788 L27.707,28.174744 L24.414,31.4958419 L27.707,34.8169399 L26.293,36.2430051 Z" id="Amazon-API-Gateway_Icon_64_Squid"></path>
</g>
</g>
</svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 85 85" fill="#fff" fill-rule="evenodd" stroke="#000" stroke-linecap="round" stroke-linejoin="round"><use xlink:href="#A" x="2.5" y="2.5"/><symbol id="A" overflow="visible"><g stroke="none"><path d="M0 41.579C0 20.293 17.84 3.157 40 3.157s40 17.136 40 38.422S62.16 80 40 80 0 62.864 0 41.579z" fill="#9d5025"/><path d="M0 38.422C0 17.136 17.84 0 40 0s40 17.136 40 38.422-17.84 38.422-40 38.422S0 59.707 0 38.422z" fill="#f58536"/><path d="M51.672 7.387v13.952H28.327V7.387zm18.061 40.378v11.364h-11.83V47.765zm-14.958 0v11.364h-11.83V47.765zm-18.206 0v11.364h-11.83V47.765zm-14.959 0v11.364H9.78V47.765z"/><path d="M14.63 37.929h2.13v11.149h-2.13z"/><path d="M14.63 37.929h17.088v2.045H14.63z"/><path d="M29.589 37.929h2.13v11.149H29.59zm18.206 0h2.13v11.149h-2.13z"/><path d="M47.795 37.929h17.088v2.045H47.795z"/><path d="M62.754 37.929h2.13v11.149h-2.129zm-40.631-7.954h2.13v8.977h-2.13zM38.935 19.28h2.13v10.859h-2.129z"/><path d="M22.123 29.116h35.32v2.045h-35.32z"/><path d="M55.314 29.975h2.13v8.977h-2.129z"/></g></symbol></svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" id="图层_1" x="0" y="0" version="1.1" viewBox="0 0 519.27 214.14"><style>.st2{fill:#a9cc54}</style><path d="M493.54 171.25c-35.63 0-64.52-28.89-64.52-64.52s28.89-64.52 64.52-64.52c3.85 0 7.62.36 11.29 1L493.54 5.45H104.46v.02C49.24 6.51 4.78 51.59 4.78 107.07c0 55.47 44.45 100.56 99.68 101.59v.02h389.07l11.29-38.44c-3.66.65-7.43 1.01-11.28 1.01" style="fill:#b6de64"/><defs><path id="SVGID_1_" d="M493.54 171.25c-35.63 0-64.52-28.89-64.52-64.52s28.89-64.52 64.52-64.52c3.85 0 7.62.36 11.29 1V5.45H104.46v.02C49.24 6.51 4.78 51.59 4.78 107.07c0 55.47 44.45 100.56 99.68 101.59v.02h389.07l11.29-38.44c-3.66.65-7.43 1.01-11.28 1.01"/></defs><clipPath id="SVGID_2_"><use xlink:href="#SVGID_1_" style="overflow:visible"/></clipPath><g style="clip-path:url(#SVGID_2_)"><path d="M-23.16 55.58H428.5v51.1H-23.16zM-23.16 157.59h530.98v72.35H-23.16z" class="st2"/></g><path d="M493.54 184.23c-43.94 0-79.56-33.62-79.56-77.56s35.62-76.56 79.56-76.56" style="fill:none;stroke:#ddf4a4;stroke-width:49;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10"/></svg>


View File

@@ -1,31 +0,0 @@
<svg version="1.1" id="Layer_1" xmlns:x="ns_extend;" xmlns:i="ns_ai;" xmlns:graph="ns_graphs;" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 50.6 50.6" style="enable-background:new 0 0 50.6 50.6;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<metadata>
<sfw xmlns="ns_sfw;">
<slices>
</slices>
<sliceSourceBounds bottomLeftOrigin="true" height="50.6" width="50.6" x="0" y="0">
</sliceSourceBounds>
</sfw>
</metadata>
<g>
<g>
<path class="st0" d="M0.6,0H5c0.3,0,0.6,0.3,0.6,0.6V50c0,0.3-0.3,0.6-0.6,0.6H0.6C0.3,50.6,0,50.4,0,50V0.6C0,0.3,0.3,0,0.6,0z">
</path>
<path class="st0" d="M11.8,0h4.4c0.3,0,0.6,0.3,0.6,0.6V50c0,0.3-0.3,0.6-0.6,0.6h-4.4c-0.3,0-0.6-0.3-0.6-0.6V0.6
C11.3,0.3,11.5,0,11.8,0z">
</path>
<path class="st0" d="M23.1,0h4.4c0.3,0,0.6,0.3,0.6,0.6V50c0,0.3-0.3,0.6-0.6,0.6h-4.4c-0.3,0-0.6-0.3-0.6-0.6V0.6
C22.5,0.3,22.8,0,23.1,0z">
</path>
<path class="st0" d="M34.3,0h4.4c0.3,0,0.6,0.3,0.6,0.6V50c0,0.3-0.3,0.6-0.6,0.6h-4.4c-0.3,0-0.6-0.3-0.6-0.6V0.6
C33.7,0.3,34,0,34.3,0z">
</path>
<path class="st0" d="M45.6,19.7H50c0.3,0,0.6,0.3,0.6,0.6v10.1c0,0.3-0.3,0.6-0.6,0.6h-4.4c-0.3,0-0.6-0.3-0.6-0.6V20.3
C45,20,45.3,19.7,45.6,19.7z">
</path>
</g>
</g>
</svg>


Some files were not shown because too many files have changed in this diff.