Compare commits

187 commits — `chore/depr...testing-qb`

Commit SHAs:

```
8d03e35d84 f267ed8ad1 1542b9d6e9 8455349459 c488a24d09 9091cf61fd eeb2ab3212
3f128f0f1d 59ff7ed1e1 d236b6ce1e 44b118a212 3fc6f7ee63 f1016baf03 e5c0d9e44a
e51056c804 7d8dad4550 c477e0ef16 fff7f8fc76 8cfeef4521 d85a1a21ac 17f48d656d
2d6774da68 62a9d7e602 3a2c7a7a68 33e70d1f37 85f04e4bae 53f9e7d811 ad46e22561
e79195ccf1 f77bb888a8 baa15baea9 316e6821f1 a1fa2769e4 decb660992 0acbcf8322
11eabdc2ac eb94554f5a e8280dbea4 44ea237039 72b0214d1d 386a215324 ba0ba4bbc9
d60c9ab36b 90770b90bd a19874c1dd 65ff460d63 b9d542a294 e75e5bdbdb 0d03203977
28f6f42ac4 92f8e4d5b9 037eea5262 cd4df6280f ad2d4ed56c 7955497a8d 6ed30318bd
c32dd9f17e c58cf67eb0 440c3d8386 d683b94344 6a629623bc 982688ccc9 74bbb26033
3bb9e05681 61b2f8cb31 9d397d0867 5fb4206a99 dd11ba9f48 f9cb9f10be b6180f6957
51d3ca16f7 91cbd17275 68effaf232 c08d1bccaf 1d77780c70 80ded899c7 4733af974e
1ab6c7177f c3123a4fa4 5a602bbeb7 f487f088bd 1cb01e8dd2 595a500be4 bec52c3d3e
0a6a7ba729 3d758d4358 9c8435119d d732f8ba42 83b8eaf623 ae7364f098 0ec1be1ddf
93de4681a9 69e94cbd38 62810428d8 b921a2280b 8990fb7a73 aaeffae1bd d1d7da6c9b
28a01bf042 fb1f320346 4d484b225f 3a396602a8 650cf81329 cdbf23d053 3ca3db2567
0925ae73a9 cffa511cf3 2ba693f040 403630ad31 93ca3fee33 b1c78c2f12 7feb94e5eb
47dc2b98f1 f4dc2a8fb8 77d1492aac 6090a6be6e eabddf87d2 9e13245d1b a1c7a948fa
0bfe53a93c 16140991be aadf2a3ac7 824302be38 8d4c4dc5f2 91fae8c0f3 4bbe8c0ee7
0f7d226b9b e03342e001 57f96574ff 354e4b4b8f d7102f69a9 040c45b144 207d7602ab
018346ca18 7290ab3602 88239cec4d 10ba0e6b4f 88e1e42bf0 a0d896557e 2b28c5f2e2
6dbcc5fb9d 175e9a4c5e 33506cafce e34e61a20d da084b4686 6821efeb99 c5d5c84a0e
9c298e83a5 9383b6576d f10f7a806f 03600f4d6f 9fbf111976 b8dff86a56 f525647b40
0a2b7ca1d8 16938c6cc0 81b8f93177 96cfb607d1 f526e887cc 03ab6e704b 9c0134da54
175b059268 dfca5b13c0 ad392e81ff 92ceefccee 9cc4e1b56f 3758ee7451 02b605d109
eb86aabf3e 8810693bda 6334e09a60 1d379931b2 815a6d13c5 59af9d1c2f 19d24da147
cd1c9ddf11 7ff3286c9c 27830742f9 39f07e7477 0ab50da7b0 c03541cd6c 727a039eb9
c7db85f44c 08d9a74055 503e4cdf00 224f952da7 0c28067f89
```
```diff
@@ -40,7 +40,7 @@ services:
 timeout: 5s
 retries: 3
 schema-migrator-sync:
-image: signoz/signoz-schema-migrator:v0.111.41
+image: signoz/signoz-schema-migrator:v0.111.42
 container_name: schema-migrator-sync
 command:
 - sync
@@ -53,7 +53,7 @@ services:
 condition: service_healthy
 restart: on-failure
 schema-migrator-async:
-image: signoz/signoz-schema-migrator:v0.111.41
+image: signoz/signoz-schema-migrator:v0.111.42
 container_name: schema-migrator-async
 command:
 - async
```
**.github/CODEOWNERS** (8 changes)

```diff
@@ -2,7 +2,7 @@
 # Owners are automatically requested for review for PRs that changes code
 # that they own.

-/frontend/ @YounixM
+/frontend/ @SigNoz/frontend @YounixM
 /frontend/src/container/MetricsApplication @srikanthccv
 /frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
 /deploy/ @SigNoz/devops
@@ -11,4 +11,10 @@
 /pkg/errors/ @grandwizard28
 /pkg/factory/ @grandwizard28
 /pkg/types/ @grandwizard28
 .golangci.yml @grandwizard28
+/pkg/zeus/ @vikrantgupta25
+/pkg/licensing/ @vikrantgupta25
+/pkg/sqlmigration/ @vikrantgupta25
+/ee/zeus/ @vikrantgupta25
+/ee/licensing/ @vikrantgupta25
+/ee/sqlmigration/ @vikrantgupta25
```
**.github/pull_request_template.md** (73 changes)

```diff
@@ -1,17 +1,72 @@
-### Summary
+## 📄 Summary

-<!-- ✍️ A clear and concise description...-->
+<!-- Describe the purpose of the PR in a few sentences. What does it fix/add/update? -->

-#### Related Issues / PR's
+---

-<!-- ✍️ Add the issues being resolved here and related PR's where applicable -->
+## ✅ Changes

-#### Screenshots
+- [ ] Feature: Brief description
+- [ ] Bug fix: Brief description

-NA
+---

-<!-- ✍️ Add screenshots of before and after changes where applicable-->
+## 🏷️ Required: Add Relevant Labels

-#### Affected Areas and Manually Tested Areas
+> ⚠️ **Manually add appropriate labels in the PR sidebar**
+Please select one or more labels (as applicable):

-<!-- ✍️ Add details of blast radius and dev testing areas where applicable-->
-ex:

+- `frontend`
+- `backend`
+- `devops`
+- `bug`
+- `enhancement`
+- `ui`
+- `test`

+---

+## 👥 Reviewers

+> Tag the relevant teams for review:

+- frontend / backend / devops

+---

+## 🧪 How to Test

+<!-- Describe how reviewers can test this PR -->
+1. ...
+2. ...
+3. ...

+---

+## 🔍 Related Issues

+<!-- Reference any related issues (e.g. Fixes #123, Closes #456) -->
+Closes #

+---

+## 📸 Screenshots / Screen Recording (if applicable / mandatory for UI related changes)

+<!-- Add screenshots or GIFs to help visualize changes -->

+---

+## 📋 Checklist

+- [ ] Dev Review
+- [ ] Test cases added (Unit/ Integration / E2E)
+- [ ] Manually tested the changes

+---

+## 👀 Notes for Reviewers

+<!-- Anything reviewers should keep in mind while reviewing -->
```
**.github/workflows/build-community.yaml** (4 changes)

```diff
@@ -62,6 +62,7 @@ jobs:
 secrets: inherit
 with:
 PRIMUS_REF: main
+GO_VERSION: 1.23
 GO_NAME: signoz-community
 GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
 GO_INPUT_ARTIFACT_PATH: frontend/build
@@ -73,7 +74,8 @@
 -X github.com/SigNoz/signoz/pkg/version.variant=community
 -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
 -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
- -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
+ -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
+ -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
 GO_CGO_ENABLED: 1
 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
 DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
```
**.github/workflows/build-enterprise.yaml** (9 changes)

```diff
@@ -67,9 +67,8 @@ jobs:
 echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
 echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
 echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
 echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
 echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
 echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> frontend/.env
 echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
 echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
 - name: cache-dotenv
 uses: actions/cache@v4
 with:
@@ -94,6 +93,7 @@
 secrets: inherit
 with:
 PRIMUS_REF: main
+GO_VERSION: 1.23
 GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
 GO_INPUT_ARTIFACT_PATH: frontend/build
 GO_BUILD_CONTEXT: ./ee/query-service
@@ -108,7 +108,8 @@
 -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
 -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
 -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
+ -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
+ -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
 GO_CGO_ENABLED: 1
 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
 DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
```
**.github/workflows/build-staging.yaml** (7 changes)

```diff
@@ -66,7 +66,8 @@ jobs:
 echo 'CI=1' > frontend/.env
 echo 'TUNNEL_URL="${{ secrets.NP_TUNNEL_URL }}"' >> frontend/.env
 echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
 echo 'USERPILOT_KEY="${{ secrets.NP_USERPILOT_KEY }}"' >> frontend/.env
 echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
 echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
 - name: cache-dotenv
 uses: actions/cache@v4
 with:
@@ -91,6 +92,7 @@
 secrets: inherit
 with:
 PRIMUS_REF: main
+GO_VERSION: 1.23
 GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
 GO_INPUT_ARTIFACT_PATH: frontend/build
 GO_BUILD_CONTEXT: ./ee/query-service
@@ -105,7 +107,8 @@
 -X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
 -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
 -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
+ -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
+ -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
 GO_CGO_ENABLED: 1
 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
 DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
```
**.github/workflows/goci.yaml** (14 changes)

```diff
@@ -18,6 +18,7 @@ jobs:
 with:
 PRIMUS_REF: main
 GO_TEST_CONTEXT: ./...
+GO_VERSION: 1.23
 fmt:
 if: |
 (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -26,6 +27,7 @@
 secrets: inherit
 with:
 PRIMUS_REF: main
+GO_VERSION: 1.23
 lint:
 if: |
 (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -34,6 +36,16 @@
 secrets: inherit
 with:
 PRIMUS_REF: main
+GO_VERSION: 1.23
+deps:
+if: |
+(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
+(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
+uses: signoz/primus.workflows/.github/workflows/go-deps.yaml@main
+secrets: inherit
+with:
+PRIMUS_REF: main
+GO_VERSION: 1.23
 build:
 if: |
 (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -45,7 +57,7 @@
 - name: go-install
 uses: actions/setup-go@v5
 with:
-go-version: "1.22"
+go-version: "1.23"
 - name: qemu-install
 uses: docker/setup-qemu-action@v3
 - name: aarch64-install
```
**.github/workflows/gor-signoz-community.yaml** (4 changes)

```diff
@@ -58,7 +58,7 @@ jobs:
 - name: setup-go
 uses: actions/setup-go@v5
 with:
-go-version: "1.22"
+go-version: "1.23"
 - name: cross-compilation-tools
 if: matrix.os == 'ubuntu-latest'
 run: |
@@ -122,7 +122,7 @@
 - name: setup-go
 uses: actions/setup-go@v5
 with:
-go-version: "1.22"
+go-version: "1.23"

 # copy the caches from build
 - name: get-sha
```
**.github/workflows/gor-signoz.yaml** (9 changes)

```diff
@@ -33,9 +33,8 @@ jobs:
 echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> .env
 echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> .env
 echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
 echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env
 echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env
 echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> .env
 echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
 echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
 - name: build-frontend
 run: make js-build
 - name: upload-frontend-artifact
@@ -73,7 +72,7 @@
 - name: setup-go
 uses: actions/setup-go@v5
 with:
-go-version: "1.22"
+go-version: "1.23"
 - name: cross-compilation-tools
 if: matrix.os == 'ubuntu-latest'
 run: |
@@ -136,7 +135,7 @@
 - name: setup-go
 uses: actions/setup-go@v5
 with:
-go-version: "1.22"
+go-version: "1.23"

 # copy the caches from build
 - name: get-sha
```
**.gitignore** (3 changes)

```diff
@@ -60,14 +60,13 @@ ee/query-service/db

 e2e/node_modules/
 e2e/test-results/
 e2e/playwright-report/
 e2e/blob-report/
 e2e/playwright/.cache/
 e2e/.auth

 # go
 vendor/
 **/main/**
 __debug_bin**

 # git-town
 .git-branches.toml
```
**.golangci.yml** (new file, 33 lines)

```yaml
linters:
  default: standard
  enable:
    - bodyclose
    - misspell
    - nilnil
    - sloglint
    - depguard
    - iface

linters-settings:
  sloglint:
    no-mixed-args: true
    kv-only: true
    no-global: all
    context: all
    static-msg: true
    msg-style: lowercased
    key-naming-case: snake
  depguard:
    rules:
      nozap:
        deny:
          - pkg: "go.uber.org/zap"
            desc: "Do not use zap logger. Use slog instead."
  iface:
    enable:
      - identical
issues:
  exclude-dirs:
    - "pkg/query-service"
    - "ee/query-service"
    - "scripts/"
```
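The sloglint and depguard settings above effectively standardize logging on `slog`: key/value pairs only, static lowercase messages, snake_case keys, context-aware calls, and no global logger or zap. A minimal sketch of a log call that satisfies those rules (the logger wiring and field names are illustrative assumptions, not code from this PR):

```go
package main

import (
	"context"
	"log/slog"
	"os"
)

// processOrg uses a context-aware call, a static lowercase message,
// and snake_case key/value pairs — the shape the sloglint rules enforce.
func processOrg(ctx context.Context, logger *slog.Logger, orgID string) {
	logger.InfoContext(ctx, "processing organization", "org_id", orgID)
}

func main() {
	// Passing the logger explicitly keeps the code clear of the no-global rule.
	logger := slog.New(slog.NewJSONHandler(os.Stderr, nil))
	processOrg(context.Background(), logger, "org_123")
}
```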
```diff
@@ -8,7 +8,7 @@
 <p align="center">All your logs, metrics, and traces in one place. Monitor your application, spot issues before they occur and troubleshoot downtime quickly with rich context. SigNoz is a cost-effective open-source alternative to Datadog and New Relic. Visit <a href="https://signoz.io" target="_blank">signoz.io</a> for the full documentation, tutorials, and guide.</p>

 <p align="center">
-<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/query-service?label=Docker Downloads"> </a>
+<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/signoz.svg?label=Docker%20Downloads"> </a>
 <img alt="GitHub issues" src="https://img.shields.io/github/issues/signoz/signoz"> </a>
 <a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNozHQ&hashtags=opensource,signoz,observability">
 <img alt="tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"> </a>
```
```diff
@@ -164,3 +164,63 @@ alertmanager:
   maintenance_interval: 15m
   # Retention of the notification logs.
   retention: 120h
+
+##################### Emailing #####################
+emailing:
+  # Whether to enable emailing.
+  enabled: false
+  templates:
+    # The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
+    directory: /opt/signoz/conf/templates/email
+  smtp:
+    # The SMTP server address.
+    address: localhost:25
+    # The email address to use for the SMTP server.
+    from:
+    # The hello message to use for the SMTP server.
+    hello:
+    # The static headers to send with the email.
+    headers: {}
+    auth:
+      # The username to use for the SMTP server.
+      username:
+      # The password to use for the SMTP server.
+      password:
+      # The secret to use for the SMTP server.
+      secret:
+      # The identity to use for the SMTP server.
+      identity:
+    tls:
+      # Whether to enable TLS. It should be false in most cases since the authentication mechanism should use the STARTTLS extension instead.
+      enabled: false
+      # Whether to skip TLS verification.
+      insecure_skip_verify: false
+      # The path to the CA file.
+      ca_file_path:
+      # The path to the key file.
+      key_file_path:
+      # The path to the certificate file.
+      cert_file_path:
+
+##################### Sharder (experimental) #####################
+sharder:
+  # Specifies the sharder provider to use.
+  provider: noop
+  single:
+    # The org id to which this instance belongs to.
+    org_id: org_id
+
+##################### Analytics #####################
+analytics:
+  # Whether to enable analytics.
+  enabled: false
+  segment:
+    # The key to use for segment.
+    key: ""
+
+##################### StatsReporter #####################
+statsreporter:
+  # Whether to enable stats reporter. This is used to provide valuable insights to the SigNoz team. It does not collect any sensitive/PII data.
+  enabled: true
+  # The interval at which the stats are collected.
+  interval: 6h
```
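For orientation, config sections like these are decoded onto Go structs via `mapstructure` tags in the codebase's provider pattern (see the provider docs added later in this diff). A rough, hypothetical sketch of what a struct for the `statsreporter` block could look like — the field and type names are assumptions, not the actual SigNoz types:

```go
package statsreporter

import "time"

// Config mirrors the statsreporter block of the YAML above.
// The exact struct in the repository may differ; this is only a sketch.
type Config struct {
	// Whether to enable the stats reporter.
	Enabled bool `mapstructure:"enabled"`
	// The interval at which the stats are collected.
	Interval time.Duration `mapstructure:"interval"`
}
```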
```diff
@@ -174,7 +174,7 @@ services:
 # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
 signoz:
 !!merge <<: *db-depend
-image: signoz/signoz:v0.82.0
+image: signoz/signoz:v0.87.0
 command:
 - --config=/root/config/prometheus.yml
 ports:
@@ -206,7 +206,7 @@ services:
 retries: 3
 otel-collector:
 !!merge <<: *db-depend
-image: signoz/signoz-otel-collector:v0.111.41
+image: signoz/signoz-otel-collector:v0.111.42
 command:
 - --config=/etc/otel-collector-config.yaml
 - --manager-config=/etc/manager-config.yaml
@@ -230,7 +230,7 @@ services:
 - signoz
 schema-migrator:
 !!merge <<: *common
-image: signoz/signoz-schema-migrator:v0.111.41
+image: signoz/signoz-schema-migrator:v0.111.42
 deploy:
 restart_policy:
 condition: on-failure
```
```diff
@@ -100,26 +100,33 @@ services:
 # - "9000:9000"
 # - "8123:8123"
 # - "9181:9181"

 configs:
 - source: clickhouse-config
 target: /etc/clickhouse-server/config.xml
 - source: clickhouse-users
 target: /etc/clickhouse-server/users.xml
 - source: clickhouse-custom-function
 target: /etc/clickhouse-server/custom-function.xml
 - source: clickhouse-cluster
 target: /etc/clickhouse-server/config.d/cluster.xml

 volumes:
 - ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
 - ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
 - ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
 - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
 - ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
 - clickhouse:/var/lib/clickhouse/
 # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
 signoz:
 !!merge <<: *db-depend
-image: signoz/signoz:v0.82.0
+image: signoz/signoz:v0.87.0
 command:
 - --config=/root/config/prometheus.yml
 ports:
 - "8080:8080" # signoz port
 # - "6060:6060" # pprof port
 volumes:
 - ../common/signoz/prometheus.yml:/root/config/prometheus.yml
 - ../common/dashboards:/root/config/dashboards
 - sqlite:/var/lib/signoz/
 configs:
 - source: signoz-prometheus-config
 target: /root/config/prometheus.yml
 environment:
 - SIGNOZ_ALERTMANAGER_PROVIDER=signoz
 - SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
@@ -141,15 +148,17 @@ services:
 retries: 3
 otel-collector:
 !!merge <<: *db-depend
-image: signoz/signoz-otel-collector:v0.111.41
+image: signoz/signoz-otel-collector:v0.111.42
 command:
 - --config=/etc/otel-collector-config.yaml
 - --manager-config=/etc/manager-config.yaml
 - --copy-path=/var/tmp/collector-config.yaml
 - --feature-gates=-pkg.translator.prometheus.NormalizeName
 volumes:
 - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
 - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
 configs:
 - source: otel-collector-config
 target: /etc/otel-collector-config.yaml
 - source: otel-manager-config
 target: /etc/manager-config.yaml
 environment:
 - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
 - LOW_CARDINAL_EXCEPTION_GROUPING=false
@@ -165,7 +174,7 @@ services:
 - signoz
 schema-migrator:
 !!merge <<: *common
-image: signoz/signoz-schema-migrator:v0.111.41
+image: signoz/signoz-schema-migrator:v0.111.42
 deploy:
 restart_policy:
 condition: on-failure
@@ -186,3 +195,26 @@ volumes:
 name: signoz-sqlite
 zookeeper-1:
 name: signoz-zookeeper-1
+
+configs:
+clickhouse-config:
+file: ../common/clickhouse/config.xml
+clickhouse-users:
+file: ../common/clickhouse/users.xml
+clickhouse-custom-function:
+file: ../common/clickhouse/custom-function.xml
+clickhouse-cluster:
+file: ../common/clickhouse/cluster.xml
+
+signoz-prometheus-config:
+file: ../common/signoz/prometheus.yml
+# If you have multiple dashboard files, you can list them individually:
+# dashboard-foo:
+# file: ../common/dashboards/foo.json
+# dashboard-bar:
+# file: ../common/dashboards/bar.json
+
+otel-collector-config:
+file: ./otel-collector-config.yaml
+otel-manager-config:
+file: ../common/signoz/otel-collector-opamp-config.yaml
```
```diff
@@ -177,7 +177,7 @@ services:
 # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
 signoz:
 !!merge <<: *db-depend
-image: signoz/signoz:${VERSION:-v0.82.0}
+image: signoz/signoz:${VERSION:-v0.87.0}
 container_name: signoz
 command:
 - --config=/root/config/prometheus.yml
@@ -210,7 +210,7 @@ services:
 # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
 otel-collector:
 !!merge <<: *db-depend
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
 container_name: signoz-otel-collector
 command:
 - --config=/etc/otel-collector-config.yaml
@@ -236,7 +236,7 @@ services:
 condition: service_healthy
 schema-migrator-sync:
 !!merge <<: *common
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
 container_name: schema-migrator-sync
 command:
 - sync
@@ -247,7 +247,7 @@ services:
 condition: service_healthy
 schema-migrator-async:
 !!merge <<: *db-depend
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
 container_name: schema-migrator-async
 command:
 - async
```
```diff
@@ -110,7 +110,7 @@ services:
 # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
 signoz:
 !!merge <<: *db-depend
-image: signoz/signoz:${VERSION:-v0.82.0}
+image: signoz/signoz:${VERSION:-v0.87.0}
 container_name: signoz
 command:
 - --config=/root/config/prometheus.yml
@@ -142,7 +142,7 @@ services:
 retries: 3
 otel-collector:
 !!merge <<: *db-depend
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
 container_name: signoz-otel-collector
 command:
 - --config=/etc/otel-collector-config.yaml
@@ -164,7 +164,7 @@ services:
 condition: service_healthy
 schema-migrator-sync:
 !!merge <<: *common
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
 container_name: schema-migrator-sync
 command:
 - sync
@@ -176,7 +176,7 @@ services:
 restart: on-failure
 schema-migrator-async:
 !!merge <<: *db-depend
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
 container_name: schema-migrator-async
 command:
 - async
```
**docs/contributing/go/endpoint.md** (new file, 51 lines)

# Endpoint

This guide outlines the recommended approach for designing endpoints, with a focus on entity relationships, RESTful structure, and examples from the codebase.

## How do we design an endpoint?

### Understand the core entities and their relationships

Start with understanding the core entities and their relationships. For example:

- **Organization**: an organization can have multiple users

### Structure Endpoints RESTfully

Endpoints should reflect the resource hierarchy and follow RESTful conventions. Use clear, **pluralized resource names** and versioning. For example:

- `POST /v1/organizations` — Create an organization
- `GET /v1/organizations/:id` — Get an organization by id
- `DELETE /v1/organizations/:id` — Delete an organization by id
- `PUT /v1/organizations/:id` — Update an organization by id
- `GET /v1/organizations/:id/users` — Get all users in an organization
- `GET /v1/organizations/me/users` — Get all users in my organization

Think in terms of resource navigation in a file system. For example, to find your organization, you would navigate to the root of the file system and then to the `organizations` directory. To find a user in an organization, you would navigate to the `organizations` directory and then to the `id` directory.

```bash
v1/
├── organizations/
│   └── 123/
│       └── users/
```

`me` endpoints are special. They are used to determine the actual id via some auth/external mechanism. For `me` endpoints, think of the `me` directory being symlinked to your organization directory. For example, if you are a part of the organization `123`, the `me` directory will be symlinked to `/v1/organizations/123`:

```bash
v1/
├── organizations/
│   └── me/ -> symlink to /v1/organizations/123
│       └── users/
│   └── 123/
│       └── users/
```

> 💡 **Note**: There are various ways to structure endpoints. Some prefer to use singular resource names instead of `me`. Others prefer to use singular resource names for all endpoints. We have, however, chosen to standardize our endpoints in the manner described above.

## What should I remember?

- Use clear, **plural resource names**
- Use `me` endpoints for determining the actual id via some auth mechanism

> 💡 **Note**: When in doubt, diagram the relationships and walk through the user flows as if navigating a file system. This will help you design endpoints that are both logical and user-friendly.
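As a rough illustration of the convention this new doc describes, here is a hedged Go sketch of how such routes might be registered with gorilla/mux; the handler wiring and the `me`-resolution middleware are assumptions for the example, not code from this PR:

```go
package api

import (
	"net/http"

	"github.com/gorilla/mux"
)

// RegisterOrganizationRoutes wires the plural, versioned resource paths
// described above. resolveMe is a hypothetical middleware that rewrites
// /v1/organizations/me/... to the caller's own organization id using auth claims.
func RegisterOrganizationRoutes(r *mux.Router, h http.Handler, resolveMe mux.MiddlewareFunc) {
	v1 := r.PathPrefix("/v1").Subrouter()
	v1.Use(resolveMe)

	v1.Handle("/organizations", h).Methods(http.MethodPost)
	v1.Handle("/organizations/{id}", h).Methods(http.MethodGet, http.MethodPut, http.MethodDelete)
	v1.Handle("/organizations/{id}/users", h).Methods(http.MethodGet)
}
```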
**docs/contributing/go/provider.md** (new file, 106 lines)

# Provider

SigNoz is built on the provider pattern, a design approach where code is organized into providers that handle specific application responsibilities. Providers act as adapter components that integrate with external services and deliver required functionality to the application.

> 💡 **Note**: Coming from a DDD background? Providers are similar (not exactly the same) to adapter/infrastructure services.

## How to create a new provider?

To create a new provider, create a directory in the `pkg/` directory named after your provider. The provider package consists of four key components:

- **Interface** (`pkg/<name>/<name>.go`): Defines the provider's interface. Other packages should import this interface to use the provider.
- **Config** (`pkg/<name>/config.go`): Contains provider configuration, implementing the `factory.Config` interface from [factory/config.go](/pkg/factory/config.go).
- **Implementation** (`pkg/<name>/<implname><name>/provider.go`): Contains the provider implementation, including a `NewProvider` function that returns a `factory.Provider` interface from [factory/provider.go](/pkg/factory/provider.go).
- **Mock** (`pkg/<name>/<name>test.go`): Provides mocks for the provider, typically used by dependent packages for unit testing.

For example, the [prometheus](/pkg/prometheus) provider delivers a prometheus engine to the application:

- `pkg/prometheus/prometheus.go` - Interface definition
- `pkg/prometheus/config.go` - Configuration
- `pkg/prometheus/clickhouseprometheus/provider.go` - Clickhouse-powered implementation
- `pkg/prometheus/prometheustest/provider.go` - Mock implementation

## How to wire it up?

The `pkg/signoz` package contains the inversion of control container responsible for wiring providers. It handles instantiation, configuration, and assembly of providers based on configuration metadata.

> 💡 **Note**: Coming from a Java background? Providers are similar to Spring beans.

Wiring up a provider involves three steps:

1. Wiring up the configuration
   Add your config from `pkg/<name>/config.go` to the `pkg/signoz/config.Config` struct and in new factories:

   ```go
   type Config struct {
       ...
       MyProvider myprovider.Config `mapstructure:"myprovider"`
       ...
   }

   func NewConfig(ctx context.Context, resolverConfig config.ResolverConfig, ....) (Config, error) {
       ...
       configFactories := []factory.ConfigFactory{
           myprovider.NewConfigFactory(),
       }
       ...
   }
   ```

2. Wiring up the provider
   Add available provider implementations in `pkg/signoz/provider.go`:

   ```go
   func NewMyProviderFactories() factory.NamedMap[factory.ProviderFactory[myprovider.MyProvider, myprovider.Config]] {
       return factory.MustNewNamedMap(
           myproviderone.NewFactory(),
           myprovidertwo.NewFactory(),
       )
   }
   ```

3. Instantiate the provider by adding it to the `SigNoz` struct in `pkg/signoz/signoz.go`:

   ```go
   type SigNoz struct {
       ...
       MyProvider myprovider.MyProvider
       ...
   }

   func New(...) (*SigNoz, error) {
       ...
       myprovider, err := myproviderone.New(ctx, settings, config.MyProvider, "one/two")
       if err != nil {
           return nil, err
       }
       ...
   }
   ```

## How to use it?

To use a provider, import its interface. For example, to use the prometheus provider, import `pkg/prometheus/prometheus.go`:

```go
import "github.com/SigNoz/signoz/pkg/prometheus/prometheus"

func CreateSomething(ctx context.Context, prometheus prometheus.Prometheus) {
    ...
    prometheus.DoSomething()
    ...
}
```

## Why do we need this?

Like any dependency injection framework, providers decouple the codebase from implementation details. This is especially valuable in SigNoz's large codebase, where we need to swap implementations without changing dependent code. The provider pattern offers several benefits apart from the obvious one of decoupling:

- Configuration is **defined with each provider and centralized in one place**, making it easier to understand and manage through various methods (environment variables, config files, etc.)
- Provider mocking is **straightforward for unit testing**, with a consistent pattern for locating mocks
- **Multiple implementations** of the same provider are **supported**, as demonstrated by our sqlstore provider

## What should I remember?

- Use the provider pattern wherever applicable.
- Always create a provider **irrespective of the number of implementations**. This makes it easier to add new implementations in the future.
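To make the four components above concrete, here is a minimal, hypothetical skeleton for a toy `pinger` provider. All names are illustrative; only the factory shape mirrors `NewProviderFactory` as used by `httplicensing` later in this diff, and the `factory.Config` wiring is elided:

```go
// pkg/pinger/pinger.go — the interface and config other packages import (hypothetical).
package pinger

import "context"

// Pinger is the provider interface.
type Pinger interface {
	Ping(ctx context.Context) error
}

// Config would implement factory.Config in the real layout (elided in this sketch).
type Config struct {
	TimeoutSeconds int `mapstructure:"timeout_seconds"`
}
```

```go
// pkg/pinger/nooppinger/provider.go — one implementation exposed via a factory (hypothetical).
package nooppinger

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/pinger"
)

type provider struct{}

// NewFactory mirrors the factory.NewProviderFactory shape used elsewhere in this PR.
func NewFactory() factory.ProviderFactory[pinger.Pinger, pinger.Config] {
	return factory.NewProviderFactory(factory.MustNewName("noop"), func(ctx context.Context, settings factory.ProviderSettings, config pinger.Config) (pinger.Pinger, error) {
		return &provider{}, nil
	})
}

// Ping does nothing; a real implementation would reach an external service.
func (p *provider) Ping(ctx context.Context) error { return nil }
```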
Removed file — @@ -1,91 +0,0 @@

```go
package middleware

import (
	"net/http"
	"time"

	eeTypes "github.com/SigNoz/signoz/ee/types"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"go.uber.org/zap"
)

type Pat struct {
	store   sqlstore.SQLStore
	uuid    *authtypes.UUID
	headers []string
}

func NewPat(store sqlstore.SQLStore, headers []string) *Pat {
	return &Pat{store: store, uuid: authtypes.NewUUID(), headers: headers}
}

func (p *Pat) Wrap(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		var values []string
		var patToken string
		var pat eeTypes.StorablePersonalAccessToken

		for _, header := range p.headers {
			values = append(values, r.Header.Get(header))
		}

		ctx, err := p.uuid.ContextFromRequest(r.Context(), values...)
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}
		patToken, ok := authtypes.UUIDFromContext(ctx)
		if !ok {
			next.ServeHTTP(w, r)
			return
		}

		err = p.store.BunDB().NewSelect().Model(&pat).Where("token = ?", patToken).Scan(r.Context())
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
			next.ServeHTTP(w, r)
			return
		}

		// get user from db
		user := types.User{}
		err = p.store.BunDB().NewSelect().Model(&user).Where("id = ?", pat.UserID).Scan(r.Context())
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		role, err := authtypes.NewRole(user.Role)
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		jwt := authtypes.Claims{
			UserID: user.ID,
			Role:   role,
			Email:  user.Email,
			OrgID:  user.OrgID,
		}

		ctx = authtypes.NewContextWithClaims(ctx, jwt)

		r = r.WithContext(ctx)

		next.ServeHTTP(w, r)

		pat.LastUsed = time.Now().Unix()
		_, err = p.store.BunDB().NewUpdate().Model(&pat).Column("last_used").Where("token = ?", patToken).Where("revoked = false").Exec(r.Context())
		if err != nil {
			zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
		}

	})

}
```
**ee/licensing/config.go** (new file, 26 lines)

```go
package licensing

import (
	"fmt"
	"sync"
	"time"

	"github.com/SigNoz/signoz/pkg/licensing"
)

var (
	config licensing.Config
	once   sync.Once
)

// initializes the licensing configuration
func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
	once.Do(func() {
		config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
		if err := config.Validate(); err != nil {
			panic(fmt.Errorf("invalid licensing config: %w", err))
		}
	})

	return config
}
```
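A small hedged usage sketch of the once-initialized accessor above; the interval and threshold values are arbitrary examples and assume they pass `Validate`:

```go
package main

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/ee/licensing"
)

func main() {
	// The first call builds and validates the config; later calls return the same value.
	cfg := licensing.Config(24*time.Hour, 3)
	_ = licensing.Config(time.Minute, 99) // ignored: sync.Once keeps the first result
	fmt.Println(cfg.PollInterval, cfg.FailureThreshold)
}
```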
**ee/licensing/httplicensing/api.go** (new file, 168 lines)

```go
package httplicensing

import (
	"context"
	"encoding/json"
	"net/http"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type licensingAPI struct {
	licensing licensing.Licensing
}

func NewLicensingAPI(licensing licensing.Licensing) licensing.API {
	return &licensingAPI{licensing: licensing}
}

func (api *licensingAPI) Activate(rw http.ResponseWriter, r *http.Request) {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	req := new(licensetypes.PostableLicense)
	err = json.NewDecoder(r.Body).Decode(&req)
	if err != nil {
		render.Error(rw, err)
		return
	}

	err = api.licensing.Activate(r.Context(), orgID, req.Key)
	if err != nil {
		render.Error(rw, err)
		return
	}

	render.Success(rw, http.StatusAccepted, nil)
}

func (api *licensingAPI) GetActive(rw http.ResponseWriter, r *http.Request) {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	license, err := api.licensing.GetActive(r.Context(), orgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	gettableLicense := licensetypes.NewGettableLicense(license.Data, license.Key)
	render.Success(rw, http.StatusOK, gettableLicense)
}

func (api *licensingAPI) Refresh(rw http.ResponseWriter, r *http.Request) {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	err = api.licensing.Refresh(r.Context(), orgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	render.Success(rw, http.StatusNoContent, nil)
}

func (api *licensingAPI) Checkout(rw http.ResponseWriter, r *http.Request) {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	req := new(licensetypes.PostableSubscription)
	if err := json.NewDecoder(r.Body).Decode(req); err != nil {
		render.Error(rw, err)
		return
	}

	gettableSubscription, err := api.licensing.Checkout(ctx, orgID, req)
	if err != nil {
		render.Error(rw, err)
		return
	}

	render.Success(rw, http.StatusCreated, gettableSubscription)
}

func (api *licensingAPI) Portal(rw http.ResponseWriter, r *http.Request) {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	req := new(licensetypes.PostableSubscription)
	if err := json.NewDecoder(r.Body).Decode(req); err != nil {
		render.Error(rw, err)
		return
	}

	gettableSubscription, err := api.licensing.Portal(ctx, orgID, req)
	if err != nil {
		render.Error(rw, err)
		return
	}

	render.Success(rw, http.StatusCreated, gettableSubscription)
}
```
**ee/licensing/httplicensing/provider.go** (new file, 249 lines)

```go
package httplicensing

import (
	"context"
	"encoding/json"
	"time"

	"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
	"github.com/SigNoz/signoz/pkg/analytics"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types/analyticstypes"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/SigNoz/signoz/pkg/zeus"
	"github.com/tidwall/gjson"
)

type provider struct {
	store     licensetypes.Store
	zeus      zeus.Zeus
	config    licensing.Config
	settings  factory.ScopedProviderSettings
	orgGetter organization.Getter
	analytics analytics.Analytics
	stopChan  chan struct{}
}

func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
	return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
		return New(ctx, providerSettings, config, store, zeus, orgGetter, analytics)
	})
}

func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) (licensing.Licensing, error) {
	settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing")
	licensestore := sqllicensingstore.New(sqlstore)
	return &provider{
		store:     licensestore,
		zeus:      zeus,
		config:    config,
		settings:  settings,
		orgGetter: orgGetter,
		stopChan:  make(chan struct{}),
		analytics: analytics,
	}, nil
}

func (provider *provider) Start(ctx context.Context) error {
	tick := time.NewTicker(provider.config.PollInterval)
	defer tick.Stop()

	err := provider.Validate(ctx)
	if err != nil {
		provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
	}

	for {
		select {
		case <-provider.stopChan:
			return nil
		case <-tick.C:
			err := provider.Validate(ctx)
			if err != nil {
				provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
			}
		}
	}
}

func (provider *provider) Stop(ctx context.Context) error {
	provider.settings.Logger().DebugContext(ctx, "license validation stopped")
	close(provider.stopChan)
	return nil
}

func (provider *provider) Validate(ctx context.Context) error {
	organizations, err := provider.orgGetter.ListByOwnedKeyRange(ctx)
	if err != nil {
		return err
	}

	for _, organization := range organizations {
		err := provider.Refresh(ctx, organization.ID)
		if err != nil {
			return err
		}
	}

	return nil
}

func (provider *provider) Activate(ctx context.Context, organizationID valuer.UUID, key string) error {
	data, err := provider.zeus.GetLicense(ctx, key)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to fetch license data with upstream server")
	}

	license, err := licensetypes.NewLicense(data, organizationID)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity")
	}

	storableLicense := licensetypes.NewStorableLicenseFromLicense(license)
	err = provider.store.Create(ctx, storableLicense)
	if err != nil {
		return err
	}

	return nil
}

func (provider *provider) GetActive(ctx context.Context, organizationID valuer.UUID) (*licensetypes.License, error) {
	storableLicenses, err := provider.store.GetAll(ctx, organizationID)
	if err != nil {
		return nil, err
	}

	activeLicense, err := licensetypes.GetActiveLicenseFromStorableLicenses(storableLicenses, organizationID)
	if err != nil {
		return nil, err
	}

	return activeLicense, nil
}

func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUID) error {
	activeLicense, err := provider.GetActive(ctx, organizationID)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			return nil
		}
		provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue())
		return err
	}

	data, err := provider.zeus.GetLicense(ctx, activeLicense.Key)
	if err != nil {
		if time.Since(activeLicense.LastValidatedAt) > time.Duration(provider.config.FailureThreshold)*provider.config.PollInterval {
			activeLicense.UpdateFeatures(licensetypes.BasicPlan)
			updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
			err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
			if err != nil {
				return err
			}

			return nil
		}
		return err
	}

	err = activeLicense.Update(data)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity from license data")
	}

	updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
	err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
	if err != nil {
		return err
	}

	stats := licensetypes.NewStatsFromLicense(activeLicense)
	provider.analytics.Send(ctx,
		analyticstypes.Track{
			UserId:     "stats_" + organizationID.String(),
			Event:      "License Updated",
			Properties: analyticstypes.NewPropertiesFromMap(stats),
			Context: &analyticstypes.Context{
				Extra: map[string]interface{}{
					analyticstypes.KeyGroupID: organizationID.String(),
				},
			},
		},
		analyticstypes.Group{
			UserId:  "stats_" + organizationID.String(),
			GroupId: organizationID.String(),
			Traits:  analyticstypes.NewTraitsFromMap(stats),
		},
	)

	return nil
}

func (provider *provider) Checkout(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) {
	activeLicense, err := provider.GetActive(ctx, organizationID)
	if err != nil {
		return nil, err
	}

	body, err := json.Marshal(postableSubscription)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal checkout payload")
	}

	response, err := provider.zeus.GetCheckoutURL(ctx, activeLicense.Key, body)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate checkout session")
	}

	return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}

func (provider *provider) Portal(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) {
	activeLicense, err := provider.GetActive(ctx, organizationID)
	if err != nil {
		return nil, err
	}

	body, err := json.Marshal(postableSubscription)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal portal payload")
	}

	response, err := provider.zeus.GetPortalURL(ctx, activeLicense.Key, body)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate portal session")
	}

	return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}

func (provider *provider) GetFeatureFlags(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.Feature, error) {
	license, err := provider.GetActive(ctx, organizationID)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			return licensetypes.BasicPlan, nil
		}
		return nil, err
	}

	return license.Features, nil
}

func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
	activeLicense, err := provider.GetActive(ctx, orgID)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			return map[string]any{}, nil
		}

		return nil, err
	}

	return licensetypes.NewStatsFromLicense(activeLicense), nil
}
```
81	ee/licensing/licensingstore/sqllicensingstore/store.go	Normal file
@@ -0,0 +1,81 @@
package sqllicensingstore

import (
	"context"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type store struct {
	sqlstore sqlstore.SQLStore
}

func New(sqlstore sqlstore.SQLStore) licensetypes.Store {
	return &store{sqlstore}
}

func (store *store) Create(ctx context.Context, storableLicense *licensetypes.StorableLicense) error {
	_, err := store.
		sqlstore.
		BunDB().
		NewInsert().
		Model(storableLicense).
		Exec(ctx)
	if err != nil {
		return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "license with ID: %s already exists", storableLicense.ID)
	}

	return nil
}

func (store *store) Get(ctx context.Context, organizationID valuer.UUID, licenseID valuer.UUID) (*licensetypes.StorableLicense, error) {
	storableLicense := new(licensetypes.StorableLicense)
	err := store.
		sqlstore.
		BunDB().
		NewSelect().
		Model(storableLicense).
		Where("org_id = ?", organizationID).
		Where("id = ?", licenseID).
		Scan(ctx)
	if err != nil {
		return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "license with ID: %s does not exist", licenseID)
	}

	return storableLicense, nil
}

func (store *store) GetAll(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.StorableLicense, error) {
	storableLicenses := make([]*licensetypes.StorableLicense, 0)
	err := store.
		sqlstore.
		BunDB().
		NewSelect().
		Model(&storableLicenses).
		Where("org_id = ?", organizationID).
		Scan(ctx)
	if err != nil {
		return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "licenses for organizationID: %s do not exist", organizationID)
	}

	return storableLicenses, nil
}

func (store *store) Update(ctx context.Context, organizationID valuer.UUID, storableLicense *licensetypes.StorableLicense) error {
	_, err := store.
		sqlstore.
		BunDB().
		NewUpdate().
		Model(storableLicense).
		WherePK().
		Where("org_id = ?", organizationID).
		Exec(ctx)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to update license with ID: %s", storableLicense.ID)
	}

	return nil
}
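Reviewer note: a sketch of how this store is meant to be consumed. The sqlstore value is assumed to come from the application's existing pkg/sqlstore provider; nothing here is new API beyond the file above.

package licensingexample

import (
	"context"

	"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// loadLicenses wires the bun-backed store and reads every stored license for
// one org. Failures come back wrapped with the error codes used in the store above.
func loadLicenses(ctx context.Context, sqlStore sqlstore.SQLStore, orgID valuer.UUID) ([]*licensetypes.StorableLicense, error) {
	store := sqllicensingstore.New(sqlStore)
	return store.GetAll(ctx, orgID)
}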
@@ -39,6 +39,7 @@ builds:
      - -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
      - -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
      - -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
      - -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
      - >-
        {{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
    mod_timestamp: "{{ .CommitTimestamp }}"
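Reviewer note: the new analytics.key entry works like the other -X flags: the linker writes a value into a package-level string variable at build time. A hypothetical package to show the mechanism (not SigNoz's real declaration):

package buildinfo

// key is empty in source and injected at link time, e.g.
//   go build -ldflags "-X example.com/app/buildinfo.key=abc123"
var key string

// Key exposes the injected value to the rest of the program.
func Key() string { return key }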
@@ -1,4 +1,4 @@
FROM golang:1.22-bullseye
FROM golang:1.23-bullseye

ARG OS="linux"
ARG TARGETARCH
@@ -5,21 +5,18 @@ import (
	"net/http/httputil"
	"time"

	"github.com/SigNoz/signoz/ee/query-service/dao"
	"github.com/SigNoz/signoz/ee/licensing/httplicensing"
	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/ee/query-service/interfaces"
	"github.com/SigNoz/signoz/ee/query-service/license"
	"github.com/SigNoz/signoz/ee/query-service/usage"
	"github.com/SigNoz/signoz/pkg/alertmanager"
	"github.com/SigNoz/signoz/pkg/apis/fields"
	"github.com/SigNoz/signoz/pkg/http/middleware"
	"github.com/SigNoz/signoz/pkg/modules/quickfilter"
	quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core"
	querierAPI "github.com/SigNoz/signoz/pkg/querier"
	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
	baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	rules "github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/signoz"
@@ -31,11 +28,8 @@ import (
type APIHandlerOptions struct {
	DataConnector                 interfaces.DataConnector
	PreferSpanMetrics             bool
	AppDao                        dao.ModelDao
	RulesManager                  *rules.Manager
	UsageManager                  *usage.Manager
	FeatureFlags                  baseint.FeatureLookup
	LicenseManager                *license.Manager
	IntegrationsController        *integrations.Controller
	CloudIntegrationsController   *cloudintegrations.Controller
	LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
@@ -55,23 +49,19 @@ type APIHandler struct {

// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
	quickfiltermodule := quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(signoz.SQLStore))
	quickFilter := quickfilter.NewAPI(quickfiltermodule)
	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
		Reader:                        opts.DataConnector,
		PreferSpanMetrics:             opts.PreferSpanMetrics,
		AppDao:                        opts.AppDao,
		RuleManager:                   opts.RulesManager,
		FeatureFlags:                  opts.FeatureFlags,
		IntegrationsController:        opts.IntegrationsController,
		CloudIntegrationsController:   opts.CloudIntegrationsController,
		LogsParsingPipelineController: opts.LogsParsingPipelineController,
		FluxInterval:                  opts.FluxInterval,
		AlertmanagerAPI:               alertmanager.NewAPI(signoz.Alertmanager),
		FieldsAPI:                     fields.NewAPI(signoz.TelemetryStore),
		LicensingAPI:                  httplicensing.NewLicensingAPI(signoz.Licensing),
		FieldsAPI:                     fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
		Signoz:                        signoz,
		QuickFilters:                  quickFilter,
		QuickFilterModule:             quickfiltermodule,
		QuerierAPI:                    querierAPI.NewAPI(signoz.Querier),
	})

	if err != nil {
@@ -85,98 +75,39 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
	return ah, nil
}

func (ah *APIHandler) FF() baseint.FeatureLookup {
	return ah.opts.FeatureFlags
}

func (ah *APIHandler) RM() *rules.Manager {
	return ah.opts.RulesManager
}

func (ah *APIHandler) LM() *license.Manager {
	return ah.opts.LicenseManager
}

func (ah *APIHandler) UM() *usage.Manager {
	return ah.opts.UsageManager
}

func (ah *APIHandler) AppDao() dao.ModelDao {
	return ah.opts.AppDao
}

func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
	return ah.opts.Gateway
}

func (ah *APIHandler) CheckFeature(f string) bool {
	err := ah.FF().CheckFeature(f)
	return err == nil
}

// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
	// note: add ee override methods first

	// routes available only in ee version

	router.HandleFunc("/api/v1/featureFlags",
		am.OpenAccess(ah.getFeatureFlags)).
		Methods(http.MethodGet)

	router.HandleFunc("/api/v1/loginPrecheck",
		am.OpenAccess(ah.precheckLogin)).
		Methods(http.MethodGet)
	router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)

	// paid plans specific routes
	router.HandleFunc("/api/v1/complete/saml",
		am.OpenAccess(ah.receiveSAML)).
		Methods(http.MethodPost)

	router.HandleFunc("/api/v1/complete/google",
		am.OpenAccess(ah.receiveGoogleAuth)).
		Methods(http.MethodGet)

	router.HandleFunc("/api/v1/orgs/{orgId}/domains",
		am.AdminAccess(ah.listDomainsByOrg)).
		Methods(http.MethodGet)

	router.HandleFunc("/api/v1/domains",
		am.AdminAccess(ah.postDomain)).
		Methods(http.MethodPost)

	router.HandleFunc("/api/v1/domains/{id}",
		am.AdminAccess(ah.putDomain)).
		Methods(http.MethodPut)

	router.HandleFunc("/api/v1/domains/{id}",
		am.AdminAccess(ah.deleteDomain)).
		Methods(http.MethodDelete)
	router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)

	// base overrides
	router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)

	// PAT APIs
	router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.getPATs)).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.updatePAT)).Methods(http.MethodPut)
	router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.revokePAT)).Methods(http.MethodDelete)

	router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.LicensingAPI.Checkout)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.portalSession)).Methods(http.MethodPost)

	router.HandleFunc("/api/v1/dashboards/{uuid}/lock", am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut)
	router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut)
	router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)

	// v3
	router.HandleFunc("/api/v3/licenses", am.ViewAccess(ah.listLicensesV3)).Methods(http.MethodGet)
	router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.applyLicenseV3)).Methods(http.MethodPost)
	router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.refreshLicensesV3)).Methods(http.MethodPut)
	router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.getActiveLicenseV3)).Methods(http.MethodGet)
	router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
	router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)
	router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.LicensingAPI.GetActive)).Methods(http.MethodGet)

	// v4
	router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
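Reviewer note: every route above follows the same shape: wrap the handler in an authz check, register it on gorilla/mux, and pin the HTTP method. A reduced sketch with a stand-in middleware (not the real middleware.AuthZ type):

package routesexample

import (
	"net/http"

	"github.com/gorilla/mux"
)

type authz struct{}

// AdminAccess stands in for the real role check; it only shows the wrapping shape.
func (a *authz) AdminAccess(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// a real implementation would reject non-admin claims here
		next(w, r)
	}
}

// register mirrors: router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
func register(router *mux.Router, am *authz, activate http.HandlerFunc) {
	router.HandleFunc("/api/v3/licenses", am.AdminAccess(activate)).Methods(http.MethodPost)
}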
@@ -3,191 +3,16 @@ package api
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
func parseRequest(r *http.Request, req interface{}) error {
|
||||
defer r.Body.Close()
|
||||
requestBody, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(requestBody, &req)
|
||||
return err
|
||||
}
|
||||
|
||||
// loginUser overrides base handler and considers SSO case.
|
||||
func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
req := basemodel.LoginRequest{}
|
||||
err := parseRequest(r, &req)
|
||||
if err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
if req.Email != "" && ah.CheckFeature(model.SSO) {
|
||||
var apierr basemodel.BaseApiError
|
||||
_, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
|
||||
if apierr != nil && !apierr.IsNil() {
|
||||
RespondError(w, apierr, nil)
|
||||
}
|
||||
}
|
||||
|
||||
// if all looks good, call auth
|
||||
resp, err := baseauth.Login(ctx, &req, ah.opts.JWT)
|
||||
if ah.HandleError(w, err, http.StatusUnauthorized) {
|
||||
return
|
||||
}
|
||||
|
||||
ah.WriteJSON(w, r, resp)
|
||||
}
|
||||
|
||||
// registerUser registers a user and responds with a precheck
|
||||
// so the front-end can decide the login method
|
||||
func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
if !ah.CheckFeature(model.SSO) {
|
||||
ah.APIHandler.Register(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
var req *baseauth.RegisterRequest
|
||||
|
||||
defer r.Body.Close()
|
||||
requestBody, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
zap.L().Error("received no input in api", zap.Error(err))
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
err = json.Unmarshal(requestBody, &req)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("received invalid user registration request", zap.Error(err))
|
||||
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
|
||||
return
|
||||
}
|
||||
|
||||
// get invite object
|
||||
invite, err := baseauth.ValidateInvite(ctx, req)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to validate invite token", zap.Error(err))
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
if invite == nil {
|
||||
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
|
||||
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
|
||||
return
|
||||
}
|
||||
|
||||
// get auth domain from email domain
|
||||
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
|
||||
if apierr != nil {
|
||||
zap.L().Error("failed to get domain from email", zap.Error(apierr))
|
||||
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
|
||||
}
|
||||
|
||||
precheckResp := &basemodel.PrecheckResponse{
|
||||
SSO: false,
|
||||
IsUser: false,
|
||||
}
|
||||
|
||||
if domain != nil && domain.SsoEnabled {
|
||||
// sso is enabled, create user and respond precheck data
|
||||
user, apierr := baseauth.RegisterInvitedUser(ctx, req, true)
|
||||
if apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var precheckError basemodel.BaseApiError
|
||||
|
||||
precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
|
||||
if precheckError != nil {
|
||||
RespondError(w, precheckError, precheckResp)
|
||||
}
|
||||
|
||||
} else {
|
||||
// no-sso, validate password
|
||||
if err := baseauth.ValidatePassword(req.Password); err != nil {
|
||||
RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
|
||||
return
|
||||
}
|
||||
|
||||
_, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager, ah.Signoz.Modules.Organization, ah.QuickFilterModule)
|
||||
if !registerError.IsNil() {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
precheckResp.IsUser = true
|
||||
}
|
||||
|
||||
ah.Respond(w, precheckResp)
|
||||
}
|
||||
|
||||
// getInvite returns the invite object details for the given invite token. We do not need to
|
||||
// protect this API because invite token itself is meant to be private.
|
||||
func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
|
||||
token := mux.Vars(r)["token"]
|
||||
sourceUrl := r.URL.Query().Get("ref")
|
||||
|
||||
inviteObject, err := baseauth.GetInvite(r.Context(), token, ah.Signoz.Modules.Organization)
|
||||
if err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
resp := model.GettableInvitation{
|
||||
InvitationResponseObject: inviteObject,
|
||||
}
|
||||
|
||||
precheck, apierr := ah.AppDao().PrecheckLogin(r.Context(), inviteObject.Email, sourceUrl)
|
||||
resp.Precheck = precheck
|
||||
|
||||
if apierr != nil {
|
||||
RespondError(w, apierr, resp)
|
||||
}
|
||||
|
||||
ah.WriteJSON(w, r, resp)
|
||||
}
|
||||
|
||||
// PrecheckLogin enables browser login page to display appropriate
|
||||
// login methods
|
||||
func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := context.Background()
|
||||
|
||||
email := r.URL.Query().Get("email")
|
||||
sourceUrl := r.URL.Query().Get("ref")
|
||||
|
||||
resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl)
|
||||
if apierr != nil {
|
||||
RespondError(w, apierr, resp)
|
||||
}
|
||||
|
||||
ah.Respond(w, resp)
|
||||
}
|
||||
|
||||
func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
|
||||
ssoError := []byte("Login failed. Please contact your system administrator")
|
||||
dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
|
||||
@@ -196,84 +21,12 @@ func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string)
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther)
|
||||
}
|
||||
|
||||
// receiveGoogleAuth completes google OAuth response and forwards a request
|
||||
// to front-end to sign user in
|
||||
func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) {
|
||||
redirectUri := constants.GetDefaultSiteURL()
|
||||
ctx := context.Background()
|
||||
|
||||
if !ah.CheckFeature(model.SSO) {
|
||||
zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
q := r.URL.Query()
|
||||
if errType := q.Get("error"); errType != "" {
|
||||
zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
relayState := q.Get("state")
|
||||
zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
|
||||
|
||||
parsedState, err := url.Parse(relayState)
|
||||
if err != nil || relayState == "" {
|
||||
zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
// upgrade redirect url from the relay state for better accuracy
|
||||
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
|
||||
|
||||
// fetch domain by parsing relay state.
|
||||
domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
|
||||
if err != nil {
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
// now that we have domain, use domain to fetch sso settings.
|
||||
// prepare google callback handler using parsedState -
|
||||
// which contains redirect URL (front-end endpoint)
|
||||
callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
identity, err := callbackHandler.HandleCallback(r)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
http.Redirect(w, r, nextPage, http.StatusSeeOther)
|
||||
}
|
||||
|
||||
// receiveSAML completes a SAML request and gets user logged in
|
||||
func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
|
||||
// this is the source url that initiated the login request
|
||||
redirectUri := constants.GetDefaultSiteURL()
|
||||
ctx := context.Background()
|
||||
|
||||
if !ah.CheckFeature(model.SSO) {
|
||||
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
|
||||
@@ -297,12 +50,25 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
|
||||
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
|
||||
|
||||
// fetch domain by parsing relay state.
|
||||
domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
|
||||
domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState)
|
||||
if err != nil {
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(domain.OrgID)
|
||||
if err != nil {
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
_, err = ah.Signoz.Licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
sp, err := domain.PrepareSamlRequest(parsedState)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
|
||||
@@ -330,7 +96,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
|
||||
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
|
||||
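Reviewer note: the SAML change above swaps the old feature-flag check for an explicit license lookup keyed by the domain's org. A reduced sketch of that gate; the getActive argument abstracts Licensing.GetActive so the exact license type stays out of the example:

package ssoexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// ssoAllowed parses the domain's org ID and treats any error from the license
// lookup as "SSO not available", matching the redirect-with-error behaviour above.
func ssoAllowed(ctx context.Context, rawOrgID string, getActive func(context.Context, valuer.UUID) error) bool {
	orgID, err := valuer.NewUUID(rawOrgID)
	if err != nil {
		return false
	}
	return getActive(ctx, orgID) == nil
}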
@@ -11,12 +11,12 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
eeTypes "github.com/SigNoz/signoz/ee/types"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/mux"
|
||||
"go.uber.org/zap"
|
||||
@@ -36,6 +36,12 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
|
||||
return
|
||||
}
|
||||
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
if cloudProvider != "aws" {
|
||||
RespondError(w, basemodel.BadRequest(fmt.Errorf(
|
||||
@@ -56,11 +62,9 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
||||
SigNozAPIKey: apiKey,
|
||||
}
|
||||
|
||||
license, apiErr := ah.LM().GetRepo().GetActiveLicense(r.Context())
|
||||
if apiErr != nil {
|
||||
RespondError(w, basemodel.WrapApiError(
|
||||
apiErr, "couldn't look for active license",
|
||||
), nil)
|
||||
license, err := ah.Signoz.Licensing.GetActive(r.Context(), orgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -116,7 +120,14 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
|
||||
return "", apiErr
|
||||
}
|
||||
|
||||
allPats, err := ah.AppDao().ListPATs(ctx, orgId)
|
||||
orgIdUUID, err := valuer.NewUUID(orgId)
|
||||
if err != nil {
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't parse orgId: %w", err,
|
||||
))
|
||||
}
|
||||
|
||||
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
|
||||
if err != nil {
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't list PATs: %w", err,
|
||||
@@ -133,19 +144,25 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
|
||||
zap.String("cloudProvider", cloudProvider),
|
||||
)
|
||||
|
||||
newPAT := eeTypes.NewGettablePAT(
|
||||
newPAT, err := types.NewStorableAPIKey(
|
||||
integrationPATName,
|
||||
authtypes.RoleViewer.String(),
|
||||
integrationUser.ID,
|
||||
types.RoleViewer,
|
||||
0,
|
||||
)
|
||||
integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
|
||||
if err != nil {
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't create cloud integration PAT: %w", err,
|
||||
))
|
||||
}
|
||||
return integrationPAT.Token, nil
|
||||
|
||||
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
|
||||
if err != nil {
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't create cloud integration PAT: %w", err,
|
||||
))
|
||||
}
|
||||
return newPAT.Token, nil
|
||||
}
|
||||
|
||||
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
||||
@@ -154,10 +171,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
||||
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
|
||||
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
|
||||
|
||||
// TODO(nitya): there should be orgId here
|
||||
integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
|
||||
if apiErr != nil {
|
||||
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
|
||||
integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email)
|
||||
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
|
||||
return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err))
|
||||
}
|
||||
|
||||
if integrationUserResult != nil {
|
||||
@@ -169,29 +185,18 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
||||
zap.String("cloudProvider", cloudProvider),
|
||||
)
|
||||
|
||||
newUser := &types.User{
|
||||
ID: uuid.New().String(),
|
||||
Name: cloudIntegrationUser,
|
||||
Email: email,
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: time.Now(),
|
||||
},
|
||||
OrgID: orgId,
|
||||
}
|
||||
|
||||
newUser.Role = authtypes.RoleViewer.String()
|
||||
|
||||
passwordHash, err := auth.PasswordHash(uuid.NewString())
|
||||
newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't hash random password for cloud integration user: %w", err,
|
||||
"couldn't create cloud integration user: %w", err,
|
||||
))
|
||||
}
|
||||
newUser.Password = passwordHash
|
||||
|
||||
integrationUser, apiErr := ah.AppDao().CreateUser(ctx, newUser, false)
|
||||
if apiErr != nil {
|
||||
return nil, basemodel.WrapApiError(apiErr, "couldn't create cloud integration user")
|
||||
password, err := types.NewFactorPassword(uuid.NewString())
|
||||
|
||||
integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
|
||||
}
|
||||
|
||||
return integrationUser, nil
|
||||
|
||||
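Reviewer note: the cloud-integration changes above repeat a lookup-then-create pattern: a NotFound error means "create it", any other error aborts. A generic sketch of that flow; the lookup and create functions are placeholders for the user-module calls in the diff:

package integrationexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/errors"
)

// getOrCreate returns the existing value when the lookup succeeds, creates one
// when the lookup fails with a NotFound error, and propagates anything else.
func getOrCreate[T any](ctx context.Context, lookup, create func(context.Context) (T, error)) (T, error) {
	existing, err := lookup(ctx)
	if err == nil {
		return existing, nil
	}
	if !errors.Ast(err, errors.TypeNotFound) {
		var zero T
		return zero, err
	}
	return create(ctx)
}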
@@ -1,63 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) lockDashboard(w http.ResponseWriter, r *http.Request) {
|
||||
ah.lockUnlockDashboard(w, r, true)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) unlockDashboard(w http.ResponseWriter, r *http.Request) {
|
||||
ah.lockUnlockDashboard(w, r, false)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request, lock bool) {
|
||||
// Locking can only be done by the owner of the dashboard
|
||||
// or an admin
|
||||
|
||||
// - Fetch the dashboard
|
||||
// - Check if the user is the owner or an admin
|
||||
// - If yes, lock/unlock the dashboard
|
||||
// - If no, return 403
|
||||
|
||||
// Get the dashboard UUID from the request
|
||||
uuid := mux.Vars(r)["uuid"]
|
||||
if strings.HasPrefix(uuid, "integration") {
|
||||
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "dashboards created by integrations cannot be modified"))
|
||||
return
|
||||
}
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
|
||||
dashboard, err := dashboards.GetDashboard(r.Context(), claims.OrgID, uuid)
|
||||
if err != nil {
|
||||
render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get dashboard"))
|
||||
return
|
||||
}
|
||||
|
||||
if err := claims.IsAdmin(); err != nil && (dashboard.CreatedBy != claims.Email) {
|
||||
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "You are not authorized to lock/unlock this dashboard"))
|
||||
return
|
||||
}
|
||||
|
||||
// Lock/Unlock the dashboard
|
||||
err = dashboards.LockUnlockDashboard(r.Context(), claims.OrgID, uuid, lock)
|
||||
if err != nil {
|
||||
render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to lock/unlock dashboard"))
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, "Dashboard updated successfully")
|
||||
}
|
||||
@@ -1,91 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
"github.com/SigNoz/signoz/ee/types"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) {
|
||||
orgId := mux.Vars(r)["orgId"]
|
||||
domains, apierr := ah.AppDao().ListDomains(context.Background(), orgId)
|
||||
if apierr != nil {
|
||||
RespondError(w, apierr, domains)
|
||||
return
|
||||
}
|
||||
ah.Respond(w, domains)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := context.Background()
|
||||
|
||||
req := types.GettableOrgDomain{}
|
||||
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
if err := req.ValidNew(); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
if apierr := ah.AppDao().CreateDomain(ctx, &req); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, &req)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := context.Background()
|
||||
|
||||
domainIdStr := mux.Vars(r)["id"]
|
||||
domainId, err := uuid.Parse(domainIdStr)
|
||||
if err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
req := types.GettableOrgDomain{StorableOrgDomain: types.StorableOrgDomain{ID: domainId}}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
req.ID = domainId
|
||||
if err := req.Valid(nil); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
}
|
||||
|
||||
if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, &req)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) {
|
||||
domainIdStr := mux.Vars(r)["id"]
|
||||
|
||||
domainId, err := uuid.Parse(domainIdStr)
|
||||
if err != nil {
|
||||
RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil)
|
||||
return
|
||||
}
|
||||
|
||||
apierr := ah.AppDao().DeleteDomain(context.Background(), domainId)
|
||||
if apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
ah.Respond(w, nil)
|
||||
}
|
||||
@@ -9,13 +9,29 @@ import (
	"time"

	"github.com/SigNoz/signoz/ee/query-service/constants"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	pkgError "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"go.uber.org/zap"
)

func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	featureSet, err := ah.FF().GetFeatureFlags()
	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(w, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(w, pkgError.Newf(pkgError.TypeInvalidInput, pkgError.CodeInvalidInput, "orgId is invalid"))
		return
	}

	featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context(), orgID)
	if err != nil {
		ah.HandleError(w, err, http.StatusInternalServerError)
		return
@@ -23,7 +39,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {

	if constants.FetchFeatures == "true" {
		zap.L().Debug("fetching license")
		license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
		license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
		if err != nil {
			zap.L().Error("failed to fetch license", zap.Error(err))
		} else if license == nil {
@@ -44,9 +60,16 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
	}

	if ah.opts.PreferSpanMetrics {
		for idx := range featureSet {
			feature := &featureSet[idx]
			if feature.Name == basemodel.UseSpanMetrics {
		for idx, feature := range featureSet {
			if feature.Name == licensetypes.UseSpanMetrics {
				featureSet[idx].Active = true
			}
		}
	}

	if constants.IsDotMetricsEnabled {
		for idx, feature := range featureSet {
			if feature.Name == licensetypes.DotMetricsEnabled {
				featureSet[idx].Active = true
			}
		}
@@ -57,7 +80,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {

// fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint
// and returns the FeatureSet.
func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
func fetchZeusFeatures(url, licenseKey string) ([]*licensetypes.Feature, error) {
	// Check if the URL is empty
	if url == "" {
		return nil, fmt.Errorf("url is empty")
@@ -116,28 +139,28 @@ func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
}

type ZeusFeaturesResponse struct {
	Status string               `json:"status"`
	Data   basemodel.FeatureSet `json:"data"`
	Status string                  `json:"status"`
	Data   []*licensetypes.Feature `json:"data"`
}

// MergeFeatureSets merges two FeatureSet arrays with precedence to zeusFeatures.
func MergeFeatureSets(zeusFeatures, internalFeatures basemodel.FeatureSet) basemodel.FeatureSet {
func MergeFeatureSets(zeusFeatures, internalFeatures []*licensetypes.Feature) []*licensetypes.Feature {
	// Create a map to store the merged features
	featureMap := make(map[string]basemodel.Feature)
	featureMap := make(map[string]*licensetypes.Feature)

	// Add all features from the otherFeatures set to the map
	for _, feature := range internalFeatures {
		featureMap[feature.Name] = feature
		featureMap[feature.Name.StringValue()] = feature
	}

	// Add all features from the zeusFeatures set to the map
	// If a feature already exists (i.e., same name), the zeusFeature will overwrite it
	for _, feature := range zeusFeatures {
		featureMap[feature.Name] = feature
		featureMap[feature.Name.StringValue()] = feature
	}

	// Convert the map back to a FeatureSet slice
	var mergedFeatures basemodel.FeatureSet
	var mergedFeatures []*licensetypes.Feature
	for _, feature := range featureMap {
		mergedFeatures = append(mergedFeatures, feature)
	}
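Reviewer note: a one-case sketch of the precedence rule, written as it would sit next to the table-driven test below; on a name conflict the zeus copy wins.

package api

import (
	"testing"

	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func TestMergeFeatureSetsZeusWinsOnConflict(t *testing.T) {
	zeus := []*licensetypes.Feature{{Name: valuer.NewString("SSO"), Active: true}}
	internal := []*licensetypes.Feature{{Name: valuer.NewString("SSO"), Active: false}}

	merged := MergeFeatureSets(zeus, internal)
	if len(merged) != 1 || !merged[0].Active {
		t.Fatalf("expected a single active SSO feature, got %+v", merged)
	}
}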
@@ -3,78 +3,79 @@ package api
|
||||
import (
|
||||
"testing"
|
||||
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestMergeFeatureSets(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
zeusFeatures basemodel.FeatureSet
|
||||
internalFeatures basemodel.FeatureSet
|
||||
expected basemodel.FeatureSet
|
||||
zeusFeatures []*licensetypes.Feature
|
||||
internalFeatures []*licensetypes.Feature
|
||||
expected []*licensetypes.Feature
|
||||
}{
|
||||
{
|
||||
name: "empty zeusFeatures and internalFeatures",
|
||||
zeusFeatures: basemodel.FeatureSet{},
|
||||
internalFeatures: basemodel.FeatureSet{},
|
||||
expected: basemodel.FeatureSet{},
|
||||
zeusFeatures: []*licensetypes.Feature{},
|
||||
internalFeatures: []*licensetypes.Feature{},
|
||||
expected: []*licensetypes.Feature{},
|
||||
},
|
||||
{
|
||||
name: "non-empty zeusFeatures and empty internalFeatures",
|
||||
zeusFeatures: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: true},
|
||||
{Name: "Feature2", Active: false},
|
||||
zeusFeatures: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: true},
|
||||
{Name: valuer.NewString("Feature2"), Active: false},
|
||||
},
|
||||
internalFeatures: basemodel.FeatureSet{},
|
||||
expected: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: true},
|
||||
{Name: "Feature2", Active: false},
|
||||
internalFeatures: []*licensetypes.Feature{},
|
||||
expected: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: true},
|
||||
{Name: valuer.NewString("Feature2"), Active: false},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "empty zeusFeatures and non-empty internalFeatures",
|
||||
zeusFeatures: basemodel.FeatureSet{},
|
||||
internalFeatures: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: true},
|
||||
{Name: "Feature2", Active: false},
|
||||
zeusFeatures: []*licensetypes.Feature{},
|
||||
internalFeatures: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: true},
|
||||
{Name: valuer.NewString("Feature2"), Active: false},
|
||||
},
|
||||
expected: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: true},
|
||||
{Name: "Feature2", Active: false},
|
||||
expected: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: true},
|
||||
{Name: valuer.NewString("Feature2"), Active: false},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts",
|
||||
zeusFeatures: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: true},
|
||||
{Name: "Feature3", Active: false},
|
||||
zeusFeatures: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: true},
|
||||
{Name: valuer.NewString("Feature3"), Active: false},
|
||||
},
|
||||
internalFeatures: basemodel.FeatureSet{
|
||||
{Name: "Feature2", Active: true},
|
||||
{Name: "Feature4", Active: false},
|
||||
internalFeatures: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature2"), Active: true},
|
||||
{Name: valuer.NewString("Feature4"), Active: false},
|
||||
},
|
||||
expected: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: true},
|
||||
{Name: "Feature2", Active: true},
|
||||
{Name: "Feature3", Active: false},
|
||||
{Name: "Feature4", Active: false},
|
||||
expected: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: true},
|
||||
{Name: valuer.NewString("Feature2"), Active: true},
|
||||
{Name: valuer.NewString("Feature3"), Active: false},
|
||||
{Name: valuer.NewString("Feature4"), Active: false},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts",
|
||||
zeusFeatures: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: true},
|
||||
{Name: "Feature2", Active: false},
|
||||
zeusFeatures: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: true},
|
||||
{Name: valuer.NewString("Feature2"), Active: false},
|
||||
},
|
||||
internalFeatures: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: false},
|
||||
{Name: "Feature3", Active: true},
|
||||
internalFeatures: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: false},
|
||||
{Name: valuer.NewString("Feature3"), Active: true},
|
||||
},
|
||||
expected: basemodel.FeatureSet{
|
||||
{Name: "Feature1", Active: true},
|
||||
{Name: "Feature2", Active: false},
|
||||
{Name: "Feature3", Active: true},
|
||||
expected: []*licensetypes.Feature{
|
||||
{Name: valuer.NewString("Feature1"), Active: true},
|
||||
{Name: valuer.NewString("Feature2"), Active: false},
|
||||
{Name: valuer.NewString("Feature3"), Active: true},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -5,10 +5,26 @@ import (
	"strings"

	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()
	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	validPath := false
	for _, allowedPrefix := range gateway.AllowedPrefix {
		if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
@@ -22,9 +38,9 @@ func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request
		return
	}

	license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
	license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
	if err != nil {
		RespondError(rw, err, nil)
		render.Error(rw, err)
		return
	}
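Reviewer note: the gateway handler only proxies paths under an allow-listed prefix before it even checks the license. The check in isolation (prefix values are illustrative, not the real allow-list):

package gatewayexample

import "strings"

// allowed mirrors the validPath loop above: a request path must start with the
// gateway route prefix plus one of the allow-listed sub-prefixes.
func allowed(path, routePrefix string, allowedPrefixes []string) bool {
	for _, prefix := range allowedPrefixes {
		if strings.HasPrefix(path, routePrefix+prefix) {
			return true
		}
	}
	return false
}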
@@ -6,11 +6,7 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
)
|
||||
|
||||
type DayWiseBreakdown struct {
|
||||
@@ -49,10 +45,6 @@ type details struct {
|
||||
BillTotal float64 `json:"billTotal"`
|
||||
}
|
||||
|
||||
type Redirect struct {
|
||||
RedirectURL string `json:"redirectURL"`
|
||||
}
|
||||
|
||||
type billingDetails struct {
|
||||
Status string `json:"status"`
|
||||
Data struct {
|
||||
@@ -64,97 +56,6 @@ type billingDetails struct {
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type ApplyLicenseRequest struct {
|
||||
LicenseKey string `json:"key"`
|
||||
}
|
||||
|
||||
func (ah *APIHandler) listLicensesV3(w http.ResponseWriter, r *http.Request) {
|
||||
ah.listLicensesV2(w, r)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) getActiveLicenseV3(w http.ResponseWriter, r *http.Request) {
|
||||
activeLicense, err := ah.LM().GetRepo().GetActiveLicenseV3(r.Context())
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
// return 404 not found if there is no active license
|
||||
if activeLicense == nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no active license found")}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
// TODO deprecate this when we move away from key for stripe
|
||||
activeLicense.Data["key"] = activeLicense.Key
|
||||
render.Success(w, http.StatusOK, activeLicense.Data)
|
||||
}
|
||||
|
||||
// this function is called by zeus when inserting licenses in the query-service
|
||||
func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
var licenseKey ApplyLicenseRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
if licenseKey.LicenseKey == "" {
|
||||
RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
|
||||
return
|
||||
}
|
||||
|
||||
_, err = ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
|
||||
if err != nil {
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, map[string]interface{}{"err": err.Error()}, claims.Email, true, false)
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusAccepted, nil)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) {
|
||||
err := ah.LM().RefreshLicense(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func getCheckoutPortalResponse(redirectURL string) *Redirect {
|
||||
return &Redirect{RedirectURL: redirectURL}
|
||||
}
|
||||
|
||||
func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
|
||||
checkoutRequest := &model.CheckoutRequest{}
|
||||
if err := json.NewDecoder(r.Body).Decode(checkoutRequest); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
license := ah.LM().GetActiveLicense()
|
||||
if license == nil {
|
||||
RespondError(w, model.BadRequestStr("cannot proceed with checkout without license key"), nil)
|
||||
return
|
||||
}
|
||||
|
||||
redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key, ah.Signoz.Zeus)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, getCheckoutPortalResponse(redirectUrl))
|
||||
}
|
||||
|
||||
func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
|
||||
licenseKey := r.URL.Query().Get("licenseKey")
|
||||
|
||||
@@ -188,71 +89,3 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
|
||||
// TODO(srikanthccv):Fetch the current day usage and add it to the response
|
||||
ah.Respond(w, billingResponse.Data)
|
||||
}
|
||||
|
||||
func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
|
||||
licensesV2 := []model.License{}
|
||||
for _, l := range licenses {
|
||||
planKeyFromPlanName, ok := model.MapOldPlanKeyToNewPlanName[l.PlanName]
|
||||
if !ok {
|
||||
planKeyFromPlanName = model.Basic
|
||||
}
|
||||
licenseV2 := model.License{
|
||||
Key: l.Key,
|
||||
ActivationId: "",
|
||||
PlanDetails: "",
|
||||
FeatureSet: l.Features,
|
||||
ValidationMessage: "",
|
||||
IsCurrent: l.IsCurrent,
|
||||
LicensePlan: model.LicensePlan{
|
||||
PlanKey: planKeyFromPlanName,
|
||||
ValidFrom: l.ValidFrom,
|
||||
ValidUntil: l.ValidUntil,
|
||||
Status: l.Status},
|
||||
}
|
||||
licensesV2 = append(licensesV2, licenseV2)
|
||||
}
|
||||
return licensesV2
|
||||
}
|
||||
|
||||
func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
|
||||
licensesV3, apierr := ah.LM().GetLicensesV3(r.Context())
|
||||
if apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
licenses := convertLicenseV3ToLicenseV2(licensesV3)
|
||||
|
||||
resp := model.Licenses{
|
||||
TrialStart: -1,
|
||||
TrialEnd: -1,
|
||||
OnTrial: false,
|
||||
WorkSpaceBlock: false,
|
||||
TrialConvertedToSubscription: false,
|
||||
GracePeriodEnd: -1,
|
||||
Licenses: licenses,
|
||||
}
|
||||
|
||||
ah.Respond(w, resp)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
|
||||
portalRequest := &model.PortalRequest{}
|
||||
if err := json.NewDecoder(r.Body).Decode(portalRequest); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
license := ah.LM().GetActiveLicense()
|
||||
if license == nil {
|
||||
RespondError(w, model.BadRequestStr("cannot request the portal session without license key"), nil)
|
||||
return
|
||||
}
|
||||
|
||||
redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key, ah.Signoz.Zeus)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, getCheckoutPortalResponse(redirectUrl))
|
||||
}
|
||||
|
||||
@@ -1,187 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"slices"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
eeTypes "github.com/SigNoz/signoz/ee/types"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := model.CreatePATRequestBody{}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
pat := eeTypes.NewGettablePAT(
|
||||
req.Name,
|
||||
req.Role,
|
||||
claims.UserID,
|
||||
req.ExpiresInDays,
|
||||
)
|
||||
err = validatePATRequest(pat)
|
||||
if err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
|
||||
var apierr basemodel.BaseApiError
|
||||
if pat, apierr = ah.AppDao().CreatePAT(r.Context(), claims.OrgID, pat); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, &pat)
|
||||
}
|
||||
|
||||
func validatePATRequest(req eeTypes.GettablePAT) error {
|
||||
_, err := authtypes.NewRole(req.Role)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if req.ExpiresAt < 0 {
|
||||
return fmt.Errorf("valid expiresAt is required")
|
||||
}
|
||||
|
||||
if req.Name == "" {
|
||||
return fmt.Errorf("valid name is required")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := eeTypes.GettablePAT{}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
idStr := mux.Vars(r)["id"]
|
||||
id, err := valuer.NewUUID(idStr)
|
||||
if err != nil {
|
||||
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||
return
|
||||
}
|
||||
|
||||
//get the pat
|
||||
existingPAT, paterr := ah.AppDao().GetPATByID(r.Context(), claims.OrgID, id)
|
||||
if paterr != nil {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
|
||||
return
|
||||
}
|
||||
|
||||
// get the user
|
||||
createdByUser, usererr := ah.AppDao().GetUser(r.Context(), existingPAT.UserID)
|
||||
if usererr != nil {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
|
||||
return
|
||||
}
|
||||
|
||||
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
|
||||
return
|
||||
}
|
||||
|
||||
err = validatePATRequest(req)
|
||||
if err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
req.UpdatedByUserID = claims.UserID
|
||||
req.UpdatedAt = time.Now()
|
||||
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
|
||||
var apierr basemodel.BaseApiError
|
||||
if apierr = ah.AppDao().UpdatePAT(r.Context(), claims.OrgID, req, id); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, map[string]string{"data": "pat updated successfully"})
|
||||
}
|
||||
|
||||
func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
pats, apierr := ah.AppDao().ListPATs(r.Context(), claims.OrgID)
|
||||
if apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, pats)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
idStr := mux.Vars(r)["id"]
|
||||
id, err := valuer.NewUUID(idStr)
|
||||
if err != nil {
|
||||
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||
return
|
||||
}
|
||||
|
||||
//get the pat
|
||||
existingPAT, paterr := ah.AppDao().GetPATByID(r.Context(), claims.OrgID, id)
|
||||
if paterr != nil {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
|
||||
return
|
||||
}
|
||||
|
||||
// get the user
|
||||
createdByUser, usererr := ah.AppDao().GetUser(r.Context(), existingPAT.UserID)
|
||||
if usererr != nil {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
|
||||
return
|
||||
}
|
||||
|
||||
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
|
||||
return
|
||||
}
|
||||
|
||||
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
|
||||
if apierr := ah.AppDao().RevokePAT(r.Context(), claims.OrgID, id, claims.UserID); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
}
|
||||
ah.Respond(w, map[string]string{"data": "pat revoked successfully"})
|
||||
}
|
||||
@@ -33,3 +33,7 @@ func NewDataConnector(
		ClickHouseReader: chReader,
	}
}

func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
	return r.appdb
}
@@ -11,16 +11,16 @@ import (
|
||||
"github.com/gorilla/handlers"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
eemiddleware "github.com/SigNoz/signoz/ee/http/middleware"
|
||||
"github.com/SigNoz/signoz/ee/query-service/app/api"
|
||||
"github.com/SigNoz/signoz/ee/query-service/app/db"
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/ee/query-service/dao"
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/ee/query-service/rules"
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
@@ -30,14 +30,9 @@ import (
|
||||
"github.com/rs/cors"
|
||||
"github.com/soheilhy/cmux"
|
||||
|
||||
licensepkg "github.com/SigNoz/signoz/ee/query-service/license"
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
|
||||
baseexplorer "github.com/SigNoz/signoz/pkg/query-service/app/explorer"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||
@@ -92,33 +87,11 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
|
||||
|
||||
// NewServer creates and initializes Server
|
||||
func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
modelDao, err := dao.InitDao(serverOptions.SigNoz.SQLStore)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := baseexplorer.InitWithDSN(serverOptions.SigNoz.SQLStore); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// initiate license manager
|
||||
lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Zeus)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// set license manager as feature flag provider in dao
|
||||
modelDao.SetFlagProvider(lm)
|
||||
|
||||
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -141,6 +114,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
serverOptions.SigNoz.Prometheus,
|
||||
serverOptions.SigNoz.Modules.OrgGetter,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
@@ -148,10 +122,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
// initiate opamp
|
||||
_, err = opAmpModel.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
opAmpModel.InitDB(serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Instrumentation.Logger(), serverOptions.SigNoz.Modules.OrgGetter)
|
||||
|
||||
integrationsController, err := integrations.NewController(serverOptions.SigNoz.SQLStore)
|
||||
if err != nil {
|
||||
@@ -169,7 +140,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
|
||||
// ingestion pipelines manager
|
||||
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
|
||||
serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
integrationsController.GetPipelinesForInstalledIntegrations,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -177,7 +149,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
|
||||
// initiate agent config handler
|
||||
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
|
||||
DB: serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
Store: serverOptions.SigNoz.SQLStore,
|
||||
AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController},
|
||||
})
|
||||
if err != nil {
|
||||
@@ -185,17 +157,23 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
// start the usagemanager
|
||||
usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus)
|
||||
usageManager, err := usage.New(serverOptions.SigNoz.Licensing, serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus, serverOptions.SigNoz.Modules.OrgGetter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = usageManager.Start()
|
||||
err = usageManager.Start(context.Background())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
telemetry.GetInstance().SetReader(reader)
|
||||
telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
|
||||
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
|
||||
telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
|
||||
telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
|
||||
telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
|
||||
telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
|
||||
telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)
|
||||
|
||||
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
|
||||
if err != nil {
|
||||
@@ -205,11 +183,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
apiOpts := api.APIHandlerOptions{
|
||||
DataConnector: reader,
|
||||
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
|
||||
AppDao: modelDao,
|
||||
RulesManager: rm,
|
||||
UsageManager: usageManager,
|
||||
FeatureFlags: lm,
|
||||
LicenseManager: lm,
|
||||
IntegrationsController: integrationsController,
|
||||
CloudIntegrationsController: cloudIntegrationsController,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
@@ -250,7 +225,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
&opAmpModel.AllAgents, agentConfMgr,
|
||||
)
|
||||
|
||||
orgs, err := apiHandler.Signoz.Modules.Organization.GetAll(context.Background())
|
||||
orgs, err := apiHandler.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -265,18 +240,17 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
|
||||
|
||||
r := baseapp.NewRouter()
|
||||
|
||||
r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
|
||||
r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
|
||||
r.Use(middleware.NewTimeout(zap.L(),
|
||||
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
|
||||
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
|
||||
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
|
||||
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
|
||||
s.serverOptions.Config.APIServer.Timeout.Default,
|
||||
s.serverOptions.Config.APIServer.Timeout.Max,
|
||||
).Wrap)
|
||||
r.Use(middleware.NewAnalytics(zap.L()).Wrap)
|
||||
r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
|
||||
r.Use(middleware.NewAnalytics().Wrap)
|
||||
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
|
||||
|
||||
apiHandler.RegisterPrivateRoutes(r)
|
||||
|
||||
@@ -300,15 +274,15 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
r := baseapp.NewRouter()
|
||||
am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger())
|
||||
|
||||
r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
|
||||
r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
|
||||
r.Use(middleware.NewTimeout(zap.L(),
|
||||
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
|
||||
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
|
||||
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
|
||||
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
|
||||
s.serverOptions.Config.APIServer.Timeout.Default,
|
||||
s.serverOptions.Config.APIServer.Timeout.Max,
|
||||
).Wrap)
|
||||
r.Use(middleware.NewAnalytics(zap.L()).Wrap)
|
||||
r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
|
||||
r.Use(middleware.NewAnalytics().Wrap)
|
||||
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
|
||||
|
||||
apiHandler.RegisterRoutes(r, am)
|
||||
apiHandler.RegisterLogsRoutes(r, am)
|
||||
@@ -318,10 +292,12 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
apiHandler.RegisterQueryRangeV3Routes(r, am)
|
||||
apiHandler.RegisterInfraMetricsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV4Routes(r, am)
|
||||
apiHandler.RegisterQueryRangeV5Routes(r, am)
|
||||
apiHandler.RegisterWebSocketPaths(r, am)
|
||||
apiHandler.RegisterMessagingQueuesRoutes(r, am)
|
||||
apiHandler.RegisterThirdPartyApiRoutes(r, am)
|
||||
apiHandler.MetricExplorerRoutes(r, am)
|
||||
apiHandler.RegisterTraceFunnelsRoutes(r, am)
|
||||
|
||||
c := cors.New(cors.Options{
|
||||
AllowedOrigins: []string{"*"},
|
||||
@@ -442,15 +418,15 @@ func (s *Server) Start(ctx context.Context) error {
     return nil
 }
 
-func (s *Server) Stop() error {
+func (s *Server) Stop(ctx context.Context) error {
     if s.httpServer != nil {
-        if err := s.httpServer.Shutdown(context.Background()); err != nil {
+        if err := s.httpServer.Shutdown(ctx); err != nil {
             return err
         }
     }
 
     if s.privateHTTP != nil {
-        if err := s.privateHTTP.Shutdown(context.Background()); err != nil {
+        if err := s.privateHTTP.Shutdown(ctx); err != nil {
             return err
         }
     }
@@ -458,11 +434,11 @@ func (s *Server) Stop() error {
     s.opampServer.Stop()
 
     if s.ruleManager != nil {
-        s.ruleManager.Stop(context.Background())
+        s.ruleManager.Stop(ctx)
     }
 
     // stop usage manager
-    s.usageManager.Stop()
+    s.usageManager.Stop(ctx)
 
     return nil
 }
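Stop now threads a caller-supplied context into every Shutdown call, so the caller controls how long graceful shutdown may take. A minimal sketch of that caller, assuming it lives next to the Server type and reuses its context, time and zap imports; the 30-second budget is illustrative.

func shutdownWithTimeout(server *Server) {
    // Bound the whole shutdown sequence; http.Server.Shutdown returns
    // once in-flight requests finish or the context deadline expires.
    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
    defer cancel()

    if err := server.Stop(ctx); err != nil {
        zap.L().Error("graceful shutdown did not complete cleanly", zap.Error(err))
    }
}

Passing a short-lived context trades completeness of in-flight requests for a bounded stop time, which is usually what a SIGTERM handler wants.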
@@ -475,6 +451,7 @@ func makeRulesManager(
|
||||
sqlstore sqlstore.SQLStore,
|
||||
telemetryStore telemetrystore.TelemetryStore,
|
||||
prometheus prometheus.Prometheus,
|
||||
orgGetter organization.Getter,
|
||||
) (*baserules.Manager, error) {
|
||||
// create manager opts
|
||||
managerOpts := &baserules.ManagerOptions{
|
||||
@@ -490,6 +467,7 @@ func makeRulesManager(
|
||||
PrepareTestRuleFunc: rules.TestNotification,
|
||||
Alertmanager: alertmanager,
|
||||
SQLStore: sqlstore,
|
||||
OrgGetter: orgGetter,
|
||||
}
|
||||
|
||||
// create Manager
|
||||
|
||||
@@ -33,3 +33,13 @@ func GetOrDefaultEnv(key string, fallback string) string {
 func GetDefaultSiteURL() string {
     return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL)
 }
+
+const DotMetricsEnabled = "DOT_METRICS_ENABLED"
+
+var IsDotMetricsEnabled = false
+
+func init() {
+    if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
+        IsDotMetricsEnabled = true
+    }
+}
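The flag is read once at process start, so call sites can treat IsDotMetricsEnabled as a constant. The snippet below is a hypothetical call site: the two attribute spellings are examples of dot versus underscore naming, not names taken from the codebase, and the package is assumed to be imported as constants.

// Hypothetical consumer of the flag: choose between dot-separated and
// underscore-separated attribute names. Example names only.
func podNameAttribute() string {
    if constants.IsDotMetricsEnabled {
        return "k8s.pod.name"
    }
    return "k8s_pod_name"
}

Enabling it is a deployment concern: export DOT_METRICS_ENABLED=true before starting query-service.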
@@ -1,10 +0,0 @@
-package dao
-
-import (
-    "github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
-    "github.com/SigNoz/signoz/pkg/sqlstore"
-)
-
-func InitDao(sqlStore sqlstore.SQLStore) (ModelDao, error) {
-    return sqlite.InitDB(sqlStore)
-}
@@ -1,44 +0,0 @@
-package dao
-
-import (
-    "context"
-    "net/url"
-
-    "github.com/SigNoz/signoz/ee/types"
-    basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
-    baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
-    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
-    "github.com/SigNoz/signoz/pkg/types/authtypes"
-    "github.com/SigNoz/signoz/pkg/valuer"
-    "github.com/google/uuid"
-    "github.com/uptrace/bun"
-)
-
-type ModelDao interface {
-    basedao.ModelDao
-
-    // SetFlagProvider sets the feature lookup provider
-    SetFlagProvider(flags baseint.FeatureLookup)
-
-    DB() *bun.DB
-
-    // auth methods
-    CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
-    PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError)
-    GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error)
-
-    // org domain (auth domains) CRUD ops
-    ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError)
-    GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError)
-    CreateDomain(ctx context.Context, d *types.GettableOrgDomain) basemodel.BaseApiError
-    UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError
-    DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
-    GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)
-
-    CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
-    UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
-    GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
-    GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
-    ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
-    RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
-}
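Before its removal, callers consumed the PAT storage through this interface. A sketch of such a consumer is kept below for reference; it assumes the returned tokens expose a Name field (suggested by the name column that UpdatePAT writes elsewhere in this diff) and reuses the package's existing imports.

// Illustrative consumer of the removed interface: list an org's active
// PATs and collect their names.
func activePATNames(ctx context.Context, dao ModelDao, orgID string) ([]string, error) {
    pats, apiErr := dao.ListPATs(ctx, orgID)
    if apiErr != nil {
        return nil, apiErr.ToError()
    }
    names := make([]string, 0, len(pats))
    for _, p := range pats {
        names = append(names, p.Name) // Name is assumed from the "name" column
    }
    return names, nil
}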
@@ -1,191 +0,0 @@
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/google/uuid"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, basemodel.BaseApiError) {
|
||||
// get auth domain from email domain
|
||||
domain, apierr := m.GetDomainByEmail(ctx, email)
|
||||
if apierr != nil {
|
||||
zap.L().Error("failed to get domain from email", zap.Error(apierr))
|
||||
return nil, model.InternalErrorStr("failed to get domain from email")
|
||||
}
|
||||
if domain == nil {
|
||||
zap.L().Error("email domain does not match any authenticated domain", zap.String("email", email))
|
||||
return nil, model.InternalErrorStr("email domain does not match any authenticated domain")
|
||||
}
|
||||
|
||||
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
|
||||
if err != nil {
|
||||
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
|
||||
return nil, model.InternalErrorStr("failed to generate password hash")
|
||||
}
|
||||
|
||||
user := &types.User{
|
||||
ID: uuid.New().String(),
|
||||
Name: "",
|
||||
Email: email,
|
||||
Password: hash,
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: time.Now(),
|
||||
},
|
||||
ProfilePictureURL: "", // Currently unused
|
||||
Role: authtypes.RoleViewer.String(),
|
||||
OrgID: domain.OrgID,
|
||||
}
|
||||
|
||||
user, apiErr := m.CreateUser(ctx, user, false)
|
||||
if apiErr != nil {
|
||||
zap.L().Error("CreateUser failed", zap.Error(apiErr))
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
return user, nil
|
||||
|
||||
}
|
||||
|
||||
// PrepareSsoRedirect prepares redirect page link after SSO response
|
||||
// is successfully parsed (i.e. valid email is available)
|
||||
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError) {
|
||||
|
||||
userPayload, apierr := m.GetUserByEmail(ctx, email)
|
||||
if !apierr.IsNil() {
|
||||
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
|
||||
return "", model.BadRequestStr("invalid user email received from the auth provider")
|
||||
}
|
||||
|
||||
user := &types.User{}
|
||||
|
||||
if userPayload == nil {
|
||||
newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
|
||||
user = newUser
|
||||
if apiErr != nil {
|
||||
zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
|
||||
return "", apiErr
|
||||
}
|
||||
} else {
|
||||
user = &userPayload.User
|
||||
}
|
||||
|
||||
tokenStore, err := baseauth.GenerateJWTForUser(user, jwt)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
|
||||
return "", model.InternalErrorStr("failed to generate token for the user")
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
|
||||
redirectUri,
|
||||
tokenStore.AccessJwt,
|
||||
user.ID,
|
||||
tokenStore.RefreshJwt), nil
|
||||
}
|
||||
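The redirect URL built above carries the access token, user id and refresh token as jwt, usr and refreshjwt query parameters. A small standalone sketch of the receiving side, useful in tests:

package main

import "net/url"

// parseSSORedirect pulls the values PrepareSsoRedirect encodes into the
// redirect URL (?jwt=...&usr=...&refreshjwt=...).
func parseSSORedirect(raw string) (accessJWT, userID, refreshJWT string, err error) {
    u, err := url.Parse(raw)
    if err != nil {
        return "", "", "", err
    }
    q := u.Query()
    return q.Get("jwt"), q.Get("usr"), q.Get("refreshjwt"), nil
}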
|
||||
func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
|
||||
domain, apierr := m.GetDomainByEmail(ctx, email)
|
||||
if apierr != nil {
|
||||
return false, apierr
|
||||
}
|
||||
|
||||
if domain != nil && domain.SsoEnabled {
|
||||
// sso is enabled, check if the user has admin role
|
||||
userPayload, baseapierr := m.GetUserByEmail(ctx, email)
|
||||
|
||||
if baseapierr != nil || userPayload == nil {
|
||||
return false, baseapierr
|
||||
}
|
||||
|
||||
if userPayload.Role != authtypes.RoleAdmin.String() {
|
||||
return false, model.BadRequest(fmt.Errorf("auth method not supported"))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// PrecheckLogin is called when the login or signup page is loaded
|
||||
// to check sso login is to be prompted
|
||||
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) {
|
||||
|
||||
// assume user is valid unless proven otherwise
|
||||
resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false}
|
||||
|
||||
// check if email is a valid user
|
||||
userPayload, baseApiErr := m.GetUserByEmail(ctx, email)
|
||||
if baseApiErr != nil {
|
||||
return resp, baseApiErr
|
||||
}
|
||||
|
||||
if userPayload == nil {
|
||||
resp.IsUser = false
|
||||
}
|
||||
|
||||
ssoAvailable := true
|
||||
err := m.checkFeature(model.SSO)
|
||||
if err != nil {
|
||||
switch err.(type) {
|
||||
case basemodel.ErrFeatureUnavailable:
|
||||
// do nothing, just skip sso
|
||||
ssoAvailable = false
|
||||
default:
|
||||
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
|
||||
return resp, model.BadRequestStr(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
if ssoAvailable {
|
||||
|
||||
resp.IsUser = true
|
||||
|
||||
// find domain from email
|
||||
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
|
||||
if apierr != nil {
|
||||
zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
|
||||
return resp, apierr
|
||||
}
|
||||
|
||||
if orgDomain != nil && orgDomain.SsoEnabled {
|
||||
// saml is enabled for this domain, lets prepare sso url
|
||||
|
||||
if sourceUrl == "" {
|
||||
sourceUrl = constants.GetDefaultSiteURL()
|
||||
}
|
||||
|
||||
// parse source url that generated the login request
|
||||
var err error
|
||||
escapedUrl, _ := url.QueryUnescape(sourceUrl)
|
||||
siteUrl, err := url.Parse(escapedUrl)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to parse referer", zap.Error(err))
|
||||
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
|
||||
}
|
||||
|
||||
// build Idp URL that will authenticat the user
|
||||
// the front-end will redirect user to this url
|
||||
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
|
||||
return resp, model.InternalError(err)
|
||||
}
|
||||
|
||||
// set SSO to true, as the url is generated correctly
|
||||
resp.SSO = true
|
||||
}
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
@@ -1,272 +0,0 @@
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
"github.com/SigNoz/signoz/ee/types"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
ossTypes "github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/google/uuid"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// GetDomainFromSsoResponse uses relay state received from IdP to fetch
|
||||
// user domain. The domain is further used to process validity of the response.
|
||||
// when sending login request to IdP we send relay state as URL (site url)
|
||||
// with domainId or domainName as query parameter.
|
||||
func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error) {
|
||||
// derive domain id from relay state now
|
||||
var domainIdStr string
|
||||
var domainNameStr string
|
||||
var domain *types.GettableOrgDomain
|
||||
|
||||
for k, v := range relayState.Query() {
|
||||
if k == "domainId" && len(v) > 0 {
|
||||
domainIdStr = strings.Replace(v[0], ":", "-", -1)
|
||||
}
|
||||
if k == "domainName" && len(v) > 0 {
|
||||
domainNameStr = v[0]
|
||||
}
|
||||
}
|
||||
|
||||
if domainIdStr != "" {
|
||||
domainId, err := uuid.Parse(domainIdStr)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to parse domainId from relay state", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse domainId from IdP response")
|
||||
}
|
||||
|
||||
domain, err = m.GetDomain(ctx, domainId)
|
||||
if (err != nil) || domain == nil {
|
||||
zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
|
||||
return nil, fmt.Errorf("invalid credentials")
|
||||
}
|
||||
}
|
||||
|
||||
if domainNameStr != "" {
|
||||
|
||||
domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
|
||||
domain = domainFromDB
|
||||
if (err != nil) || domain == nil {
|
||||
zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
|
||||
return nil, fmt.Errorf("invalid credentials")
|
||||
}
|
||||
}
|
||||
if domain != nil {
|
||||
return domain, nil
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("failed to find domain received in IdP response")
|
||||
}
|
||||
|
||||
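GetDomainFromSsoResponse undoes an encoding applied on the login side: the relay state URL carries either a domainName or a domainId whose dashes were swapped for colons. The sender below is a reconstruction based only on that Replace call, so treat the exact encoding as an assumption.

package main

import (
    "net/url"
    "strings"
)

// buildRelayState sketches the sending side implied by the parser above:
// attach the domain id to the site URL with "-" encoded as ":".
func buildRelayState(siteURL, domainID string) (string, error) {
    u, err := url.Parse(siteURL)
    if err != nil {
        return "", err
    }
    q := u.Query()
    q.Set("domainId", strings.Replace(domainID, "-", ":", -1))
    u.RawQuery = q.Encode()
    return u.String(), nil
}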
// GetDomainByName returns org domain for a given domain name
|
||||
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, basemodel.BaseApiError) {
|
||||
|
||||
stored := types.StorableOrgDomain{}
|
||||
err := m.DB().NewSelect().
|
||||
Model(&stored).
|
||||
Where("name = ?", name).
|
||||
Limit(1).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, model.BadRequest(fmt.Errorf("invalid domain name"))
|
||||
}
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
|
||||
if err := domain.LoadConfig(stored.Data); err != nil {
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
return domain, nil
|
||||
}
|
||||
|
||||
// GetDomain returns org domain for a given domain id
|
||||
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError) {
|
||||
|
||||
stored := types.StorableOrgDomain{}
|
||||
err := m.DB().NewSelect().
|
||||
Model(&stored).
|
||||
Where("id = ?", id).
|
||||
Limit(1).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, model.BadRequest(fmt.Errorf("invalid domain id"))
|
||||
}
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
|
||||
if err := domain.LoadConfig(stored.Data); err != nil {
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
return domain, nil
|
||||
}
|
||||
|
||||
// ListDomains gets the list of auth domains by org id
|
||||
func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError) {
|
||||
domains := []types.GettableOrgDomain{}
|
||||
|
||||
stored := []types.StorableOrgDomain{}
|
||||
err := m.DB().NewSelect().
|
||||
Model(&stored).
|
||||
Where("org_id = ?", orgId).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return domains, nil
|
||||
}
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
for _, s := range stored {
|
||||
domain := types.GettableOrgDomain{StorableOrgDomain: s}
|
||||
if err := domain.LoadConfig(s.Data); err != nil {
|
||||
zap.L().Error("ListDomains() failed", zap.Error(err))
|
||||
}
|
||||
domains = append(domains, domain)
|
||||
}
|
||||
|
||||
return domains, nil
|
||||
}
|
||||
|
||||
// CreateDomain creates a new auth domain
|
||||
func (m *modelDao) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError {
|
||||
|
||||
if domain.ID == uuid.Nil {
|
||||
domain.ID = uuid.New()
|
||||
}
|
||||
|
||||
if domain.OrgID == "" || domain.Name == "" {
|
||||
return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgID, Name "))
|
||||
}
|
||||
|
||||
configJson, err := json.Marshal(domain)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to unmarshal domain config", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain creation failed"))
|
||||
}
|
||||
|
||||
storableDomain := types.StorableOrgDomain{
|
||||
ID: domain.ID,
|
||||
Name: domain.Name,
|
||||
OrgID: domain.OrgID,
|
||||
Data: string(configJson),
|
||||
TimeAuditable: ossTypes.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
|
||||
}
|
||||
|
||||
_, err = m.DB().NewInsert().
|
||||
Model(&storableDomain).
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to insert domain in db", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain creation failed"))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateDomain updates stored config params for a domain
|
||||
func (m *modelDao) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError {
|
||||
|
||||
if domain.ID == uuid.Nil {
|
||||
zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
|
||||
return model.InternalError(fmt.Errorf("domain update failed"))
|
||||
}
|
||||
|
||||
configJson, err := json.Marshal(domain)
|
||||
if err != nil {
|
||||
zap.L().Error("domain update failed", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain update failed"))
|
||||
}
|
||||
|
||||
storableDomain := &types.StorableOrgDomain{
|
||||
ID: domain.ID,
|
||||
Name: domain.Name,
|
||||
OrgID: domain.OrgID,
|
||||
Data: string(configJson),
|
||||
TimeAuditable: ossTypes.TimeAuditable{UpdatedAt: time.Now()},
|
||||
}
|
||||
|
||||
_, err = m.DB().NewUpdate().
|
||||
Model(storableDomain).
|
||||
Column("data", "updated_at").
|
||||
WherePK().
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("domain update failed", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain update failed"))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteDomain deletes an org domain
|
||||
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
|
||||
|
||||
if id == uuid.Nil {
|
||||
zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
|
||||
return model.InternalError(fmt.Errorf("domain delete failed"))
|
||||
}
|
||||
|
||||
storableDomain := &types.StorableOrgDomain{ID: id}
|
||||
_, err := m.DB().NewDelete().
|
||||
Model(storableDomain).
|
||||
WherePK().
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("domain delete failed", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain delete failed"))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError) {
|
||||
|
||||
if email == "" {
|
||||
return nil, model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email "))
|
||||
}
|
||||
|
||||
components := strings.Split(email, "@")
|
||||
if len(components) < 2 {
|
||||
return nil, model.BadRequest(fmt.Errorf("invalid email address"))
|
||||
}
|
||||
|
||||
parsedDomain := components[1]
|
||||
|
||||
stored := types.StorableOrgDomain{}
|
||||
err := m.DB().NewSelect().
|
||||
Model(&stored).
|
||||
Where("name = ?", parsedDomain).
|
||||
Limit(1).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
|
||||
if err := domain.LoadConfig(stored.Data); err != nil {
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
return domain, nil
|
||||
}
|
||||
@@ -1,46 +0,0 @@
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
|
||||
basedsql "github.com/SigNoz/signoz/pkg/query-service/dao/sqlite"
|
||||
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
type modelDao struct {
|
||||
*basedsql.ModelDaoSqlite
|
||||
flags baseint.FeatureLookup
|
||||
}
|
||||
|
||||
// SetFlagProvider sets the feature lookup provider
|
||||
func (m *modelDao) SetFlagProvider(flags baseint.FeatureLookup) {
|
||||
m.flags = flags
|
||||
}
|
||||
|
||||
// CheckFeature confirms if a feature is available
|
||||
func (m *modelDao) checkFeature(key string) error {
|
||||
if m.flags == nil {
|
||||
return fmt.Errorf("flag provider not set")
|
||||
}
|
||||
|
||||
return m.flags.CheckFeature(key)
|
||||
}
|
||||
|
||||
// InitDB creates and extends base model DB repository
|
||||
func InitDB(sqlStore sqlstore.SQLStore) (*modelDao, error) {
|
||||
dao, err := basedsql.InitDB(sqlStore)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// set package variable so dependent base methods (e.g. AuthCache) will work
|
||||
basedao.SetDB(dao)
|
||||
m := &modelDao{ModelDaoSqlite: dao}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
func (m *modelDao) DB() *bun.DB {
|
||||
return m.ModelDaoSqlite.DB()
|
||||
}
|
||||
@@ -1,198 +0,0 @@
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
"github.com/SigNoz/signoz/ee/types"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
ossTypes "github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
|
||||
p.StorablePersonalAccessToken.OrgID = orgID
|
||||
p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
|
||||
_, err := m.DB().NewInsert().
|
||||
Model(&p.StorablePersonalAccessToken).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
|
||||
return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
|
||||
}
|
||||
|
||||
createdByUser, _ := m.GetUser(ctx, p.UserID)
|
||||
if createdByUser == nil {
|
||||
p.CreatedByUser = types.PatUser{
|
||||
NotFound: true,
|
||||
}
|
||||
} else {
|
||||
p.CreatedByUser = types.PatUser{
|
||||
User: ossTypes.User{
|
||||
ID: createdByUser.ID,
|
||||
Name: createdByUser.Name,
|
||||
Email: createdByUser.Email,
|
||||
TimeAuditable: ossTypes.TimeAuditable{
|
||||
CreatedAt: createdByUser.CreatedAt,
|
||||
UpdatedAt: createdByUser.UpdatedAt,
|
||||
},
|
||||
ProfilePictureURL: createdByUser.ProfilePictureURL,
|
||||
},
|
||||
NotFound: false,
|
||||
}
|
||||
}
|
||||
return p, nil
|
||||
}
|
||||
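A hedged sketch of driving CreatePAT from the caller's side. Only the UserID, OrgID and ID fields of the storable token are visible in this diff, so the snippet fills UserID alone; real callers presumably also set the token value, name, role and expiry, whose Go field names are not shown here. It assumes the ModelDao interface and the package's existing imports.

// Assumption-heavy sketch: CreatePAT assigns OrgID and a fresh UUID
// itself, so the caller only fills the fields it owns.
func createTokenFor(ctx context.Context, dao ModelDao, orgID, userID string) (types.GettablePAT, error) {
    p := types.GettablePAT{
        StorablePersonalAccessToken: types.StorablePersonalAccessToken{
            UserID: userID,
            // token value, name, role and expiry omitted: their exact
            // field names are not visible in this diff.
        },
    }
    created, apiErr := dao.CreatePAT(ctx, orgID, p)
    if apiErr != nil {
        return types.GettablePAT{}, apiErr.ToError()
    }
    return created, nil
}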
|
||||
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
|
||||
_, err := m.DB().NewUpdate().
|
||||
Model(&p.StorablePersonalAccessToken).
|
||||
Column("role", "name", "updated_at", "updated_by_user_id").
|
||||
Where("id = ?", id.StringValue()).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("revoked = false").
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("PAT update failed"))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
|
||||
pats := []types.StorablePersonalAccessToken{}
|
||||
|
||||
if err := m.DB().NewSelect().
|
||||
Model(&pats).
|
||||
Where("revoked = false").
|
||||
Where("org_id = ?", orgID).
|
||||
Order("updated_at DESC").
|
||||
Scan(ctx); err != nil {
|
||||
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
|
||||
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
|
||||
}
|
||||
|
||||
patsWithUsers := []types.GettablePAT{}
|
||||
for i := range pats {
|
||||
patWithUser := types.GettablePAT{
|
||||
StorablePersonalAccessToken: pats[i],
|
||||
}
|
||||
|
||||
createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
|
||||
if createdByUser == nil {
|
||||
patWithUser.CreatedByUser = types.PatUser{
|
||||
NotFound: true,
|
||||
}
|
||||
} else {
|
||||
patWithUser.CreatedByUser = types.PatUser{
|
||||
User: ossTypes.User{
|
||||
ID: createdByUser.ID,
|
||||
Name: createdByUser.Name,
|
||||
Email: createdByUser.Email,
|
||||
TimeAuditable: ossTypes.TimeAuditable{
|
||||
CreatedAt: createdByUser.CreatedAt,
|
||||
UpdatedAt: createdByUser.UpdatedAt,
|
||||
},
|
||||
ProfilePictureURL: createdByUser.ProfilePictureURL,
|
||||
},
|
||||
NotFound: false,
|
||||
}
|
||||
}
|
||||
|
||||
updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
|
||||
if updatedByUser == nil {
|
||||
patWithUser.UpdatedByUser = types.PatUser{
|
||||
NotFound: true,
|
||||
}
|
||||
} else {
|
||||
patWithUser.UpdatedByUser = types.PatUser{
|
||||
User: ossTypes.User{
|
||||
ID: updatedByUser.ID,
|
||||
Name: updatedByUser.Name,
|
||||
Email: updatedByUser.Email,
|
||||
TimeAuditable: ossTypes.TimeAuditable{
|
||||
CreatedAt: updatedByUser.CreatedAt,
|
||||
UpdatedAt: updatedByUser.UpdatedAt,
|
||||
},
|
||||
ProfilePictureURL: updatedByUser.ProfilePictureURL,
|
||||
},
|
||||
NotFound: false,
|
||||
}
|
||||
}
|
||||
|
||||
patsWithUsers = append(patsWithUsers, patWithUser)
|
||||
}
|
||||
return patsWithUsers, nil
|
||||
}
|
||||
|
||||
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
|
||||
updatedAt := time.Now().Unix()
|
||||
_, err := m.DB().NewUpdate().
|
||||
Model(&types.StorablePersonalAccessToken{}).
|
||||
Set("revoked = ?", true).
|
||||
Set("updated_by_user_id = ?", userID).
|
||||
Set("updated_at = ?", updatedAt).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Where("org_id = ?", orgID).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("PAT revoke failed"))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
|
||||
pats := []types.StorablePersonalAccessToken{}
|
||||
|
||||
if err := m.DB().NewSelect().
|
||||
Model(&pats).
|
||||
Where("token = ?", token).
|
||||
Where("revoked = false").
|
||||
Scan(ctx); err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
|
||||
}
|
||||
|
||||
if len(pats) != 1 {
|
||||
return nil, &model.ApiError{
|
||||
Typ: model.ErrorInternal,
|
||||
Err: fmt.Errorf("found zero or multiple PATs with same token, %s", token),
|
||||
}
|
||||
}
|
||||
|
||||
patWithUser := types.GettablePAT{
|
||||
StorablePersonalAccessToken: pats[0],
|
||||
}
|
||||
|
||||
return &patWithUser, nil
|
||||
}
|
||||
|
||||
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
|
||||
pats := []types.StorablePersonalAccessToken{}
|
||||
|
||||
if err := m.DB().NewSelect().
|
||||
Model(&pats).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("revoked = false").
|
||||
Scan(ctx); err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
|
||||
}
|
||||
|
||||
if len(pats) != 1 {
|
||||
return nil, &model.ApiError{
|
||||
Typ: model.ErrorInternal,
|
||||
Err: fmt.Errorf("found zero or multiple PATs with same token"),
|
||||
}
|
||||
}
|
||||
|
||||
patWithUser := types.GettablePAT{
|
||||
StorablePersonalAccessToken: pats[0],
|
||||
}
|
||||
|
||||
return &patWithUser, nil
|
||||
}
|
||||
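GetPAT is the token-to-record lookup that an API-key middleware needs. The sketch below shows that shape; the SIGNOZ-API-KEY header name matches the middleware wiring earlier in this diff, while the rejection behaviour and any claims handling that would follow are assumptions.

// Sketch of resolving the SIGNOZ-API-KEY header to a PAT record. Reuses
// the ModelDao interface and assumes net/http is imported.
func resolveAPIKey(ctx context.Context, dao ModelDao, r *http.Request) (*types.GettablePAT, error) {
    token := r.Header.Get("SIGNOZ-API-KEY")
    if token == "" {
        return nil, fmt.Errorf("missing SIGNOZ-API-KEY header")
    }
    pat, apiErr := dao.GetPAT(ctx, token)
    if apiErr != nil {
        return nil, apiErr.ToError()
    }
    return pat, nil
}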
@@ -1,67 +0,0 @@
|
||||
package signozio
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
func ValidateLicenseV3(ctx context.Context, licenseKey string, zeus zeus.Zeus) (*model.LicenseV3, error) {
|
||||
data, err := zeus.GetLicense(ctx, licenseKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var m map[string]any
|
||||
if err = json.Unmarshal(data, &m); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
license, err := model.NewLicenseV3(m)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return license, nil
|
||||
}
|
||||
|
||||
// SendUsage reports the usage of signoz to license server
|
||||
func SendUsage(ctx context.Context, usage model.UsagePayload, zeus zeus.Zeus) error {
|
||||
body, err := json.Marshal(usage)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return zeus.PutMeters(ctx, usage.LicenseKey.String(), body)
|
||||
}
|
||||
|
||||
func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
|
||||
body, err := json.Marshal(checkoutRequest)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
response, err := zeus.GetCheckoutURL(ctx, licenseKey, body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return gjson.GetBytes(response, "url").String(), nil
|
||||
}
|
||||
|
||||
func PortalSession(ctx context.Context, portalRequest *model.PortalRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
|
||||
body, err := json.Marshal(portalRequest)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
response, err := zeus.GetPortalURL(ctx, licenseKey, body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return gjson.GetBytes(response, "url").String(), nil
|
||||
}
|
||||
@@ -1,248 +0,0 @@
|
||||
package license
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/mattn/go-sqlite3"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// Repo is license repo. stores license keys in a secured DB
|
||||
type Repo struct {
|
||||
db *sqlx.DB
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
// NewLicenseRepo initiates a new license repo
|
||||
func NewLicenseRepo(db *sqlx.DB, store sqlstore.SQLStore) Repo {
|
||||
return Repo{
|
||||
db: db,
|
||||
store: store,
|
||||
}
|
||||
}
|
||||
|
||||
func (r *Repo) GetLicensesV3(ctx context.Context) ([]*model.LicenseV3, error) {
|
||||
licensesData := []model.LicenseDB{}
|
||||
licenseV3Data := []*model.LicenseV3{}
|
||||
|
||||
query := "SELECT id,key,data FROM licenses_v3"
|
||||
|
||||
err := r.db.Select(&licensesData, query)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get licenses from db: %v", err)
|
||||
}
|
||||
|
||||
for _, l := range licensesData {
|
||||
var licenseData map[string]interface{}
|
||||
err := json.Unmarshal([]byte(l.Data), &licenseData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
|
||||
}
|
||||
|
||||
license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
|
||||
}
|
||||
licenseV3Data = append(licenseV3Data, license)
|
||||
}
|
||||
|
||||
return licenseV3Data, nil
|
||||
}
|
||||
|
||||
// GetActiveLicense fetches the latest active license from DB.
|
||||
// If the license is not present, expect a nil license and a nil error in the output.
|
||||
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
|
||||
activeLicenseV3, err := r.GetActiveLicenseV3(ctx)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
|
||||
}
|
||||
|
||||
if activeLicenseV3 == nil {
|
||||
return nil, nil
|
||||
}
|
||||
activeLicenseV2 := model.ConvertLicenseV3ToLicenseV2(activeLicenseV3)
|
||||
return activeLicenseV2, nil
|
||||
}
|
||||
|
||||
func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) {
|
||||
var err error
|
||||
licenses := []model.LicenseDB{}
|
||||
|
||||
query := "SELECT id,key,data FROM licenses_v3"
|
||||
|
||||
err = r.db.Select(&licenses, query)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
|
||||
}
|
||||
|
||||
var active *model.LicenseV3
|
||||
for _, l := range licenses {
|
||||
var licenseData map[string]interface{}
|
||||
err := json.Unmarshal([]byte(l.Data), &licenseData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
|
||||
}
|
||||
|
||||
license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
|
||||
}
|
||||
|
||||
if active == nil &&
|
||||
(license.ValidFrom != 0) &&
|
||||
(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
|
||||
active = license
|
||||
}
|
||||
if active != nil &&
|
||||
license.ValidFrom > active.ValidFrom &&
|
||||
(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
|
||||
active = license
|
||||
}
|
||||
}
|
||||
|
||||
return active, nil
|
||||
}
|
||||
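The selection loop above keeps the license with the latest ValidFrom among those still valid, where ValidUntil == -1 marks a subscription with no fixed end date. The predicate it applies to each row, pulled out for clarity (a sketch, reusing the package's model import):

// isCurrentlyValid restates the validity check used by GetActiveLicenseV3.
// Call it as isCurrentlyValid(license, time.Now().Unix()).
func isCurrentlyValid(l *model.LicenseV3, now int64) bool {
    return l.ValidFrom != 0 && (l.ValidUntil == -1 || l.ValidUntil > now)
}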
|
||||
// InsertLicenseV3 inserts a new license v3 in db
|
||||
func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) *model.ApiError {
|
||||
|
||||
query := `INSERT INTO licenses_v3 (id, key, data) VALUES ($1, $2, $3)`
|
||||
|
||||
// licsense is the entity of zeus so putting the entire license here without defining schema
|
||||
licenseData, err := json.Marshal(l.Data)
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: basemodel.ErrorBadData, Err: err}
|
||||
}
|
||||
|
||||
_, err = r.db.ExecContext(ctx,
|
||||
query,
|
||||
l.ID,
|
||||
l.Key,
|
||||
string(licenseData),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
if sqliteErr, ok := err.(sqlite3.Error); ok {
|
||||
if sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique {
|
||||
zap.L().Error("error in inserting license data: ", zap.Error(sqliteErr))
|
||||
return &model.ApiError{Typ: model.ErrorConflict, Err: sqliteErr}
|
||||
}
|
||||
}
|
||||
zap.L().Error("error in inserting license data: ", zap.Error(err))
|
||||
return &model.ApiError{Typ: basemodel.ErrorExec, Err: err}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateLicenseV3 updates a new license v3 in db
|
||||
func (r *Repo) UpdateLicenseV3(ctx context.Context, l *model.LicenseV3) error {
|
||||
|
||||
// the key and id for the license can't change so only update the data here!
|
||||
query := `UPDATE licenses_v3 SET data=$1 WHERE id=$2;`
|
||||
|
||||
license, err := json.Marshal(l.Data)
|
||||
if err != nil {
|
||||
return fmt.Errorf("insert license failed: license marshal error")
|
||||
}
|
||||
_, err = r.db.ExecContext(ctx,
|
||||
query,
|
||||
license,
|
||||
l.ID,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("error in updating license data: ", zap.Error(err))
|
||||
return fmt.Errorf("failed to update license in db: %v", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Repo) CreateFeature(req *types.FeatureStatus) *basemodel.ApiError {
|
||||
|
||||
_, err := r.store.BunDB().NewInsert().
|
||||
Model(req).
|
||||
Exec(context.Background())
|
||||
if err != nil {
|
||||
return &basemodel.ApiError{Typ: basemodel.ErrorInternal, Err: err}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Repo) GetFeature(featureName string) (types.FeatureStatus, error) {
|
||||
var feature types.FeatureStatus
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&feature).
|
||||
Where("name = ?", featureName).
|
||||
Scan(context.Background())
|
||||
|
||||
if err != nil {
|
||||
return feature, err
|
||||
}
|
||||
if feature.Name == "" {
|
||||
return feature, basemodel.ErrFeatureUnavailable{Key: featureName}
|
||||
}
|
||||
return feature, nil
|
||||
}
|
||||
|
||||
func (r *Repo) GetAllFeatures() ([]basemodel.Feature, error) {
|
||||
|
||||
var feature []basemodel.Feature
|
||||
|
||||
err := r.db.Select(&feature,
|
||||
`SELECT * FROM feature_status;`)
|
||||
if err != nil {
|
||||
return feature, err
|
||||
}
|
||||
|
||||
return feature, nil
|
||||
}
|
||||
|
||||
func (r *Repo) UpdateFeature(req types.FeatureStatus) error {
|
||||
|
||||
_, err := r.store.BunDB().NewUpdate().
|
||||
Model(&req).
|
||||
Where("name = ?", req.Name).
|
||||
Exec(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Repo) InitFeatures(req []types.FeatureStatus) error {
|
||||
// get a feature by name, if it doesn't exist, create it. If it does exist, update it.
|
||||
for _, feature := range req {
|
||||
currentFeature, err := r.GetFeature(feature.Name)
|
||||
if err != nil && err == sql.ErrNoRows {
|
||||
err := r.CreateFeature(&feature)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
} else if err != nil {
|
||||
return err
|
||||
}
|
||||
feature.Usage = int(currentFeature.Usage)
|
||||
if feature.Usage >= feature.UsageLimit && feature.UsageLimit != -1 {
|
||||
feature.Active = false
|
||||
}
|
||||
err = r.UpdateFeature(feature)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -1,318 +0,0 @@
|
||||
package license
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"sync"
|
||||
|
||||
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
|
||||
validate "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
var LM *Manager
|
||||
|
||||
// validate and update license every 24 hours
|
||||
var validationFrequency = 24 * 60 * time.Minute
|
||||
|
||||
type Manager struct {
|
||||
repo *Repo
|
||||
zeus zeus.Zeus
|
||||
mutex sync.Mutex
|
||||
validatorRunning bool
|
||||
// end the license validation, this is important to gracefully
|
||||
// stopping validation and protect in-consistent updates
|
||||
done chan struct{}
|
||||
// terminated waits for the validate go routine to end
|
||||
terminated chan struct{}
|
||||
// last time the license was validated
|
||||
lastValidated int64
|
||||
// keep track of validation failure attempts
|
||||
failedAttempts uint64
|
||||
// keep track of active license and features
|
||||
activeLicenseV3 *model.LicenseV3
|
||||
activeFeatures basemodel.FeatureSet
|
||||
}
|
||||
|
||||
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, zeus zeus.Zeus, features ...basemodel.Feature) (*Manager, error) {
|
||||
if LM != nil {
|
||||
return LM, nil
|
||||
}
|
||||
|
||||
repo := NewLicenseRepo(db, store)
|
||||
m := &Manager{
|
||||
repo: &repo,
|
||||
zeus: zeus,
|
||||
}
|
||||
if err := m.start(features...); err != nil {
|
||||
return m, err
|
||||
}
|
||||
|
||||
LM = m
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// start loads active license in memory and initiates validator
|
||||
func (lm *Manager) start(features ...basemodel.Feature) error {
|
||||
return lm.LoadActiveLicenseV3(features...)
|
||||
}
|
||||
|
||||
func (lm *Manager) Stop() {
|
||||
close(lm.done)
|
||||
<-lm.terminated
|
||||
}
|
||||
|
||||
func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) {
|
||||
lm.mutex.Lock()
|
||||
defer lm.mutex.Unlock()
|
||||
|
||||
if l == nil {
|
||||
return
|
||||
}
|
||||
|
||||
lm.activeLicenseV3 = l
|
||||
lm.activeFeatures = append(l.Features, features...)
|
||||
// set default features
|
||||
setDefaultFeatures(lm)
|
||||
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.L().Panic("Couldn't activate features", zap.Error(err))
|
||||
}
|
||||
if !lm.validatorRunning {
|
||||
// we want to make sure only one validator runs,
|
||||
// we already have lock() so good to go
|
||||
lm.validatorRunning = true
|
||||
go lm.ValidatorV3(context.Background())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func setDefaultFeatures(lm *Manager) {
|
||||
lm.activeFeatures = append(lm.activeFeatures, baseconstants.DEFAULT_FEATURE_SET...)
|
||||
}
|
||||
|
||||
func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error {
|
||||
active, err := lm.repo.GetActiveLicenseV3(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if active != nil {
|
||||
lm.SetActiveV3(active, features...)
|
||||
} else {
|
||||
zap.L().Info("No active license found, defaulting to basic plan")
|
||||
// if no active license is found, we default to basic(free) plan with all default features
|
||||
lm.activeFeatures = model.BasicPlan
|
||||
setDefaultFeatures(lm)
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.L().Error("Couldn't initialize features", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.LicenseV3, apiError *model.ApiError) {
|
||||
|
||||
licenses, err := lm.repo.GetLicensesV3(ctx)
|
||||
if err != nil {
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
for _, l := range licenses {
|
||||
if lm.activeLicenseV3 != nil && l.Key == lm.activeLicenseV3.Key {
|
||||
l.IsCurrent = true
|
||||
}
|
||||
if l.ValidUntil == -1 {
|
||||
// for subscriptions, there is no end-date as such
|
||||
// but for showing user some validity we default one year timespan
|
||||
l.ValidUntil = l.ValidFrom + 31556926
|
||||
}
|
||||
response = append(response, l)
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Validator validates license after an epoch of time
|
||||
func (lm *Manager) ValidatorV3(ctx context.Context) {
|
||||
zap.L().Info("ValidatorV3 started!")
|
||||
defer close(lm.terminated)
|
||||
|
||||
tick := time.NewTicker(validationFrequency)
|
||||
defer tick.Stop()
|
||||
|
||||
_ = lm.ValidateV3(ctx)
|
||||
for {
|
||||
select {
|
||||
case <-lm.done:
|
||||
return
|
||||
default:
|
||||
select {
|
||||
case <-lm.done:
|
||||
return
|
||||
case <-tick.C:
|
||||
_ = lm.ValidateV3(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
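The nested select is what makes shutdown win over a pending tick. The same pattern, stripped to its essentials as a standalone sketch:

package main

import "time"

// runEvery runs work immediately and then on every tick, and returns as
// soon as done is closed, even if a tick is already pending.
func runEvery(interval time.Duration, done <-chan struct{}, work func()) {
    tick := time.NewTicker(interval)
    defer tick.Stop()

    work() // mirrors the initial ValidateV3 call above
    for {
        select {
        case <-done:
            return
        default:
            select {
            case <-done:
                return
            case <-tick.C:
                work()
            }
        }
    }
}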
|
||||
func (lm *Manager) RefreshLicense(ctx context.Context) error {
|
||||
license, err := validate.ValidateLicenseV3(ctx, lm.activeLicenseV3.Key, lm.zeus)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = lm.repo.UpdateLicenseV3(ctx, license)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
lm.SetActiveV3(license)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
|
||||
if lm.activeLicenseV3 == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
defer func() {
|
||||
lm.mutex.Lock()
|
||||
|
||||
lm.lastValidated = time.Now().Unix()
|
||||
if reterr != nil {
|
||||
zap.L().Error("License validation completed with error", zap.Error(reterr))
|
||||
|
||||
atomic.AddUint64(&lm.failedAttempts, 1)
|
||||
// default to basic plan if validation fails for three consecutive times
|
||||
if atomic.LoadUint64(&lm.failedAttempts) > 3 {
|
||||
zap.L().Error("License validation completed with error for three consecutive times, defaulting to basic plan", zap.String("license_id", lm.activeLicenseV3.ID), zap.Bool("license_validation", false))
|
||||
lm.activeLicenseV3 = nil
|
||||
lm.activeFeatures = model.BasicPlan
|
||||
setDefaultFeatures(lm)
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.L().Error("Couldn't initialize features", zap.Error(err))
|
||||
}
|
||||
lm.done <- struct{}{}
|
||||
lm.validatorRunning = false
|
||||
}
|
||||
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
|
||||
map[string]interface{}{"err": reterr.Error()}, "", true, false)
|
||||
} else {
|
||||
// reset the failed attempts counter
|
||||
atomic.StoreUint64(&lm.failedAttempts, 0)
|
||||
zap.L().Info("License validation completed with no errors")
|
||||
}
|
||||
|
||||
lm.mutex.Unlock()
|
||||
}()
|
||||
|
||||
err := lm.RefreshLicense(ctx)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (*model.LicenseV3, error) {
|
||||
license, err := validate.ValidateLicenseV3(ctx, licenseKey, lm.zeus)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// insert the new license to the sqlite db
|
||||
modelErr := lm.repo.InsertLicenseV3(ctx, license)
|
||||
if modelErr != nil {
|
||||
zap.L().Error("failed to activate license", zap.Error(modelErr))
|
||||
return nil, modelErr
|
||||
}
|
||||
|
||||
// license is valid, activate it
|
||||
lm.SetActiveV3(license)
|
||||
return license, nil
|
||||
}
|
||||
|
||||
func (lm *Manager) GetActiveLicense() *model.LicenseV3 {
|
||||
return lm.activeLicenseV3
|
||||
}
|
||||
|
||||
// CheckFeature will be internally used by backend routines
|
||||
// for feature gating
|
||||
func (lm *Manager) CheckFeature(featureKey string) error {
|
||||
feature, err := lm.repo.GetFeature(featureKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if feature.Active {
|
||||
return nil
|
||||
}
|
||||
return basemodel.ErrFeatureUnavailable{Key: featureKey}
|
||||
}
|
||||
|
||||
// GetFeatureFlags returns current active features
|
||||
func (lm *Manager) GetFeatureFlags() (basemodel.FeatureSet, error) {
|
||||
return lm.repo.GetAllFeatures()
|
||||
}
|
||||
|
||||
func (lm *Manager) InitFeatures(features basemodel.FeatureSet) error {
	featureStatus := make([]types.FeatureStatus, len(features))
	for i, f := range features {
		featureStatus[i] = types.FeatureStatus{
			Name:       f.Name,
			Active:     f.Active,
			Usage:      int(f.Usage),
			UsageLimit: int(f.UsageLimit),
			Route:      f.Route,
		}
	}
	return lm.repo.InitFeatures(featureStatus)
}

func (lm *Manager) UpdateFeatureFlag(feature basemodel.Feature) error {
	return lm.repo.UpdateFeature(types.FeatureStatus{
		Name:       feature.Name,
		Active:     feature.Active,
		Usage:      int(feature.Usage),
		UsageLimit: int(feature.UsageLimit),
		Route:      feature.Route,
	})
}

func (lm *Manager) GetFeatureFlag(key string) (basemodel.Feature, error) {
	featureStatus, err := lm.repo.GetFeature(key)
	if err != nil {
		return basemodel.Feature{}, err
	}
	return basemodel.Feature{
		Name:       featureStatus.Name,
		Active:     featureStatus.Active,
		Usage:      int64(featureStatus.Usage),
		UsageLimit: int64(featureStatus.UsageLimit),
		Route:      featureStatus.Route,
	}, nil
}

// GetRepo return the license repo
func (lm *Manager) GetRepo() *Repo {
	return lm.repo
}

@@ -6,18 +6,26 @@ import (
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/licensing"
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
"github.com/SigNoz/signoz/ee/query-service/app"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
"github.com/SigNoz/signoz/ee/zeus"
|
||||
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/config"
|
||||
"github.com/SigNoz/signoz/pkg/config/envprovider"
|
||||
"github.com/SigNoz/signoz/pkg/config/fileprovider"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
pkglicensing "github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
pkgzeus "github.com/SigNoz/signoz/pkg/zeus"
|
||||
|
||||
"go.uber.org/zap"
|
||||
"go.uber.org/zap/zapcore"
|
||||
@@ -85,8 +93,9 @@ func main() {
|
||||
loggerMgr := initZapLog()
|
||||
zap.ReplaceGlobals(loggerMgr)
|
||||
defer loggerMgr.Sync() // flushes buffer, if any
|
||||
ctx := context.Background()
|
||||
|
||||
config, err := signoz.NewConfig(context.Background(), config.ResolverConfig{
|
||||
config, err := signoz.NewConfig(ctx, config.ResolverConfig{
|
||||
Uris: []string{"env:"},
|
||||
ProviderFactories: []config.ProviderFactory{
|
||||
envprovider.NewFactory(),
|
||||
@@ -109,20 +118,6 @@ func main() {
|
||||
zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
|
||||
}
|
||||
|
||||
signoz, err := signoz.New(
|
||||
context.Background(),
|
||||
config,
|
||||
zeus.Config(),
|
||||
httpzeus.NewProviderFactory(),
|
||||
signoz.NewCacheProviderFactories(),
|
||||
signoz.NewWebProviderFactories(),
|
||||
sqlStoreFactories,
|
||||
signoz.NewTelemetryStoreProviderFactories(),
|
||||
)
|
||||
if err != nil {
|
||||
zap.L().Fatal("Failed to create signoz", zap.Error(err))
|
||||
}
|
||||
|
||||
jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
|
||||
|
||||
if len(jwtSecret) == 0 {
|
||||
@@ -133,6 +128,26 @@ func main() {
|
||||
|
||||
jwt := authtypes.NewJWT(jwtSecret, 30*time.Minute, 30*24*time.Hour)
|
||||
|
||||
signoz, err := signoz.New(
|
||||
context.Background(),
|
||||
config,
|
||||
jwt,
|
||||
zeus.Config(),
|
||||
httpzeus.NewProviderFactory(),
|
||||
licensing.Config(24*time.Hour, 3),
|
||||
func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
|
||||
return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics)
|
||||
},
|
||||
signoz.NewEmailingProviderFactories(),
|
||||
signoz.NewCacheProviderFactories(),
|
||||
signoz.NewWebProviderFactories(),
|
||||
sqlStoreFactories,
|
||||
signoz.NewTelemetryStoreProviderFactories(),
|
||||
)
|
||||
if err != nil {
|
||||
zap.L().Fatal("Failed to create signoz", zap.Error(err))
|
||||
}
|
||||
|
||||
serverOptions := &app.ServerOptions{
|
||||
Config: config,
|
||||
SigNoz: signoz,
|
||||
@@ -151,22 +166,22 @@ func main() {
|
||||
zap.L().Fatal("Failed to create server", zap.Error(err))
|
||||
}
|
||||
|
||||
if err := server.Start(context.Background()); err != nil {
|
||||
if err := server.Start(ctx); err != nil {
|
||||
zap.L().Fatal("Could not start server", zap.Error(err))
|
||||
}
|
||||
|
||||
signoz.Start(context.Background())
|
||||
signoz.Start(ctx)
|
||||
|
||||
if err := signoz.Wait(context.Background()); err != nil {
|
||||
if err := signoz.Wait(ctx); err != nil {
|
||||
zap.L().Fatal("Failed to start signoz", zap.Error(err))
|
||||
}
|
||||
|
||||
err = server.Stop()
|
||||
err = server.Stop(ctx)
|
||||
if err != nil {
|
||||
zap.L().Fatal("Failed to stop server", zap.Error(err))
|
||||
}
|
||||
|
||||
err = signoz.Stop(context.Background())
|
||||
err = signoz.Stop(ctx)
|
||||
if err != nil {
|
||||
zap.L().Fatal("Failed to stop signoz", zap.Error(err))
|
||||
}
|
||||
|
||||
@@ -1,12 +0,0 @@
package model

import (
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)

// GettableInvitation overrides base object and adds precheck into
// response
type GettableInvitation struct {
	*basemodel.InvitationResponseObject
	Precheck *basemodel.PrecheckResponse `json:"precheck"`
}

@@ -1,244 +0,0 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"time"
|
||||
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type License struct {
|
||||
Key string `json:"key" db:"key"`
|
||||
ActivationId string `json:"activationId" db:"activationId"`
|
||||
CreatedAt time.Time `db:"created_at"`
|
||||
|
||||
// PlanDetails contains the encrypted plan info
|
||||
PlanDetails string `json:"planDetails" db:"planDetails"`
|
||||
|
||||
// stores parsed license details
|
||||
LicensePlan
|
||||
|
||||
FeatureSet basemodel.FeatureSet
|
||||
|
||||
// populated in case license has any errors
|
||||
ValidationMessage string `db:"validationMessage"`
|
||||
|
||||
// used only for sending details to front-end
|
||||
IsCurrent bool `json:"isCurrent"`
|
||||
}
|
||||
|
||||
func (l *License) MarshalJSON() ([]byte, error) {
|
||||
|
||||
return json.Marshal(&struct {
|
||||
Key string `json:"key" db:"key"`
|
||||
ActivationId string `json:"activationId" db:"activationId"`
|
||||
ValidationMessage string `db:"validationMessage"`
|
||||
IsCurrent bool `json:"isCurrent"`
|
||||
PlanKey string `json:"planKey"`
|
||||
ValidFrom time.Time `json:"ValidFrom"`
|
||||
ValidUntil time.Time `json:"ValidUntil"`
|
||||
Status string `json:"status"`
|
||||
}{
|
||||
Key: l.Key,
|
||||
ActivationId: l.ActivationId,
|
||||
IsCurrent: l.IsCurrent,
|
||||
PlanKey: l.PlanKey,
|
||||
ValidFrom: time.Unix(l.ValidFrom, 0),
|
||||
ValidUntil: time.Unix(l.ValidUntil, 0),
|
||||
Status: l.Status,
|
||||
ValidationMessage: l.ValidationMessage,
|
||||
})
|
||||
}
|
||||
|
||||
type LicensePlan struct {
|
||||
PlanKey string `json:"planKey"`
|
||||
ValidFrom int64 `json:"validFrom"`
|
||||
ValidUntil int64 `json:"validUntil"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
type Licenses struct {
|
||||
TrialStart int64 `json:"trialStart"`
|
||||
TrialEnd int64 `json:"trialEnd"`
|
||||
OnTrial bool `json:"onTrial"`
|
||||
WorkSpaceBlock bool `json:"workSpaceBlock"`
|
||||
TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"`
|
||||
GracePeriodEnd int64 `json:"gracePeriodEnd"`
|
||||
Licenses []License `json:"licenses"`
|
||||
}
|
||||
|
||||
type SubscriptionServerResp struct {
|
||||
Status string `json:"status"`
|
||||
Data Licenses `json:"data"`
|
||||
}
|
||||
|
||||
type Plan struct {
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type LicenseDB struct {
|
||||
ID string `json:"id"`
|
||||
Key string `json:"key"`
|
||||
Data string `json:"data"`
|
||||
}
|
||||
type LicenseV3 struct {
|
||||
ID string
|
||||
Key string
|
||||
Data map[string]interface{}
|
||||
PlanName string
|
||||
Features basemodel.FeatureSet
|
||||
Status string
|
||||
IsCurrent bool
|
||||
ValidFrom int64
|
||||
ValidUntil int64
|
||||
}
|
||||
|
||||
func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) {
	var zeroValue T
	if val, ok := data[key]; ok {
		if value, ok := val.(T); ok {
			return value, nil
		}
		return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue))
	}
	return zeroValue, fmt.Errorf("%s key is missing", key)
}

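The generic helper above drives all of the field extraction in NewLicenseV3: the caller names the expected Go type and gets either the typed value or a descriptive error. A small runnable restatement of the same pattern follows; the payload is hypothetical, shaped like the test fixtures later in this diff, and it also shows why valid_from is read as float64 (JSON numbers decode to float64).

```go
package main

import (
	"fmt"
	"reflect"
)

// extractKey restates the generic helper above so the example runs on its own.
func extractKey[T any](data map[string]interface{}, key string) (T, error) {
	var zero T
	val, ok := data[key]
	if !ok {
		return zero, fmt.Errorf("%s key is missing", key)
	}
	typed, ok := val.(T)
	if !ok {
		return zero, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zero))
	}
	return typed, nil
}

func main() {
	// Hypothetical license payload.
	payload := map[string]interface{}{"id": "lic-123", "valid_from": float64(1730899309)}

	id, err := extractKey[string](payload, "id")
	fmt.Println(id, err) // lic-123 <nil>

	// Asking for a string where a JSON number was decoded fails with a typed error.
	_, err = extractKey[string](payload, "valid_from")
	fmt.Println(err) // valid_from key is not a valid string

	_, err = extractKey[string](payload, "status")
	fmt.Println(err) // status key is missing
}
```
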
func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
|
||||
var features basemodel.FeatureSet
|
||||
|
||||
// extract id from data
|
||||
licenseID, err := extractKeyFromMapStringInterface[string](data, "id")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
delete(data, "id")
|
||||
|
||||
// extract key from data
|
||||
licenseKey, err := extractKeyFromMapStringInterface[string](data, "key")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
delete(data, "key")
|
||||
|
||||
// extract status from data
|
||||
status, err := extractKeyFromMapStringInterface[string](data, "status")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
planName, err := extractKeyFromMapStringInterface[string](planMap, "name")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// if license status is invalid then default it to basic
|
||||
if status == LicenseStatusInvalid {
|
||||
planName = PlanNameBasic
|
||||
}
|
||||
|
||||
featuresFromZeus := basemodel.FeatureSet{}
|
||||
if _features, ok := data["features"]; ok {
|
||||
featuresData, err := json.Marshal(_features)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to marshal features data")
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil {
|
||||
return nil, errors.Wrap(err, "failed to unmarshal features data")
|
||||
}
|
||||
}
|
||||
|
||||
switch planName {
|
||||
case PlanNameEnterprise:
|
||||
features = append(features, EnterprisePlan...)
|
||||
case PlanNameBasic:
|
||||
features = append(features, BasicPlan...)
|
||||
default:
|
||||
features = append(features, BasicPlan...)
|
||||
}
|
||||
|
||||
if len(featuresFromZeus) > 0 {
|
||||
for _, feature := range featuresFromZeus {
|
||||
exists := false
|
||||
for i, existingFeature := range features {
|
||||
if existingFeature.Name == feature.Name {
|
||||
features[i] = feature // Replace existing feature
|
||||
exists = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !exists {
|
||||
features = append(features, feature) // Append if it doesn't exist
|
||||
}
|
||||
}
|
||||
}
|
||||
data["features"] = features
|
||||
|
||||
_validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from")
|
||||
if err != nil {
|
||||
_validFrom = 0
|
||||
}
|
||||
validFrom := int64(_validFrom)
|
||||
|
||||
_validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until")
|
||||
if err != nil {
|
||||
_validUntil = 0
|
||||
}
|
||||
validUntil := int64(_validUntil)
|
||||
|
||||
return &LicenseV3{
|
||||
ID: licenseID,
|
||||
Key: licenseKey,
|
||||
Data: data,
|
||||
PlanName: planName,
|
||||
Features: features,
|
||||
ValidFrom: validFrom,
|
||||
ValidUntil: validUntil,
|
||||
Status: status,
|
||||
}, nil
|
||||
|
||||
}
|
||||
|
||||
func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) {
|
||||
licenseDataWithIdAndKey := data
|
||||
licenseDataWithIdAndKey["id"] = id
|
||||
licenseDataWithIdAndKey["key"] = key
|
||||
return NewLicenseV3(licenseDataWithIdAndKey)
|
||||
}
|
||||
|
||||
func ConvertLicenseV3ToLicenseV2(l *LicenseV3) *License {
|
||||
planKeyFromPlanName, ok := MapOldPlanKeyToNewPlanName[l.PlanName]
|
||||
if !ok {
|
||||
planKeyFromPlanName = Basic
|
||||
}
|
||||
return &License{
|
||||
Key: l.Key,
|
||||
ActivationId: "",
|
||||
PlanDetails: "",
|
||||
FeatureSet: l.Features,
|
||||
ValidationMessage: "",
|
||||
IsCurrent: l.IsCurrent,
|
||||
LicensePlan: LicensePlan{
|
||||
PlanKey: planKeyFromPlanName,
|
||||
ValidFrom: l.ValidFrom,
|
||||
ValidUntil: l.ValidUntil,
|
||||
Status: l.Status},
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
type CheckoutRequest struct {
|
||||
SuccessURL string `json:"url"`
|
||||
}
|
||||
|
||||
type PortalRequest struct {
|
||||
SuccessURL string `json:"url"`
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestNewLicenseV3(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
data []byte
|
||||
pass bool
|
||||
expected *LicenseV3
|
||||
error error
|
||||
}{
|
||||
{
|
||||
name: "Error for missing license id",
|
||||
data: []byte(`{}`),
|
||||
pass: false,
|
||||
error: errors.New("id key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Error for license id not being a valid string",
|
||||
data: []byte(`{"id": 10}`),
|
||||
pass: false,
|
||||
error: errors.New("id key is not a valid string"),
|
||||
},
|
||||
{
|
||||
name: "Error for missing license key",
|
||||
data: []byte(`{"id":"does-not-matter"}`),
|
||||
pass: false,
|
||||
error: errors.New("key key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Error for invalid string license key",
|
||||
data: []byte(`{"id":"does-not-matter","key":10}`),
|
||||
pass: false,
|
||||
error: errors.New("key key is not a valid string"),
|
||||
},
|
||||
{
|
||||
name: "Error for missing license status",
|
||||
data: []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`),
|
||||
pass: false,
|
||||
error: errors.New("status key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Error for invalid string license status",
|
||||
data: []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`),
|
||||
pass: false,
|
||||
error: errors.New("status key is not a valid string"),
|
||||
},
|
||||
{
|
||||
name: "Error for missing license plan",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`),
|
||||
pass: false,
|
||||
error: errors.New("plan key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Error for invalid json license plan",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`),
|
||||
pass: false,
|
||||
error: errors.New("plan key is not a valid map[string]interface {}"),
|
||||
},
|
||||
{
|
||||
name: "Error for invalid license plan",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`),
|
||||
pass: false,
|
||||
error: errors.New("name key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Parse the entire license properly",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
},
|
||||
"category": "FREE",
|
||||
"status": "ACTIVE",
|
||||
"valid_from": float64(1730899309),
|
||||
"valid_until": float64(-1),
|
||||
},
|
||||
PlanName: PlanNameEnterprise,
|
||||
ValidFrom: 1730899309,
|
||||
ValidUntil: -1,
|
||||
Status: "ACTIVE",
|
||||
IsCurrent: false,
|
||||
Features: model.FeatureSet{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Fallback to basic plan if license status is invalid",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
},
|
||||
"category": "FREE",
|
||||
"status": "INVALID",
|
||||
"valid_from": float64(1730899309),
|
||||
"valid_until": float64(-1),
|
||||
},
|
||||
PlanName: PlanNameBasic,
|
||||
ValidFrom: 1730899309,
|
||||
ValidUntil: -1,
|
||||
Status: "INVALID",
|
||||
IsCurrent: false,
|
||||
Features: model.FeatureSet{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "fallback states for validFrom and validUntil",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
},
|
||||
"valid_from": 1234.456,
|
||||
"valid_until": 5678.567,
|
||||
"category": "FREE",
|
||||
"status": "ACTIVE",
|
||||
},
|
||||
PlanName: PlanNameEnterprise,
|
||||
ValidFrom: 1234,
|
||||
ValidUntil: 5678,
|
||||
Status: "ACTIVE",
|
||||
IsCurrent: false,
|
||||
Features: model.FeatureSet{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
var licensePayload map[string]interface{}
|
||||
err := json.Unmarshal(tc.data, &licensePayload)
|
||||
require.NoError(t, err)
|
||||
license, err := NewLicenseV3(licensePayload)
|
||||
if license != nil {
|
||||
license.Features = make(model.FeatureSet, 0)
|
||||
delete(license.Data, "features")
|
||||
}
|
||||
|
||||
if tc.pass {
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, license)
|
||||
assert.Equal(t, tc.expected, license)
|
||||
} else {
|
||||
require.Error(t, err)
|
||||
assert.EqualError(t, err, tc.error.Error())
|
||||
require.Nil(t, license)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
package model

type CreatePATRequestBody struct {
	Name          string `json:"name"`
	Role          string `json:"role"`
	ExpiresInDays int64  `json:"expiresInDays"`
}

@@ -1,131 +0,0 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
)
|
||||
|
||||
const SSO = "SSO"
|
||||
const Basic = "BASIC_PLAN"
|
||||
const Enterprise = "ENTERPRISE_PLAN"
|
||||
|
||||
var (
|
||||
PlanNameEnterprise = "ENTERPRISE"
|
||||
PlanNameBasic = "BASIC"
|
||||
)
|
||||
|
||||
var (
|
||||
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
|
||||
)
|
||||
|
||||
var (
|
||||
LicenseStatusInvalid = "INVALID"
|
||||
)
|
||||
|
||||
const Onboarding = "ONBOARDING"
|
||||
const ChatSupport = "CHAT_SUPPORT"
|
||||
const Gateway = "GATEWAY"
|
||||
const PremiumSupport = "PREMIUM_SUPPORT"
|
||||
|
||||
var BasicPlan = basemodel.FeatureSet{
|
||||
basemodel.Feature{
|
||||
Name: SSO,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: Gateway,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: PremiumSupport,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AnomalyDetection,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.TraceFunnels,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
|
||||
|
||||
var EnterprisePlan = basemodel.FeatureSet{
|
||||
basemodel.Feature{
|
||||
Name: SSO,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: Onboarding,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: ChatSupport,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: Gateway,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: PremiumSupport,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AnomalyDetection,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.TraceFunnels,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
|
||||
@@ -1,31 +0,0 @@
package sso

import (
	"net/http"
)

// SSOIdentity contains details of user received from SSO provider
type SSOIdentity struct {
	UserID            string
	Username          string
	PreferredUsername string
	Email             string
	EmailVerified     bool
	ConnectorData     []byte
}

// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth
// style redirect flow to determine user information.
type OAuthCallbackProvider interface {
	// The initial URL user would be redirect to.
	// OAuth2 implementations support various scopes but we only need profile and user as
	// the roles are still being managed in SigNoz.
	BuildAuthURL(state string) (string, error)

	// Handle the callback to the server (after login at oauth provider site)
	// and return a email identity.
	// At the moment we dont support auto signup flow (based on domain), so
	// the full identity (including name, group etc) is not required outside of the
	// connector
	HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}

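The OAuthCallbackProvider contract above has only two methods, so a connector can be stubbed in a few lines. The following is a hedged sketch of a minimal implementation; the connector type, endpoints, and the trusted "email" query parameter are illustrative only and not an actual SigNoz connector, and the two types are restated so the sketch compiles on its own.

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
)

// Restated from the deleted ee/query-service/sso package shown above,
// trimmed to the fields this sketch uses.
type SSOIdentity struct {
	UserID        string
	Email         string
	EmailVerified bool
}

type OAuthCallbackProvider interface {
	BuildAuthURL(state string) (string, error)
	HandleCallback(r *http.Request) (*SSOIdentity, error)
}

// stubConnector is a toy connector: it appends the state to an authorize
// URL and trusts an "email" query parameter on the callback request.
type stubConnector struct{ authorizeEndpoint string }

func (c stubConnector) BuildAuthURL(state string) (string, error) {
	u, err := url.Parse(c.authorizeEndpoint)
	if err != nil {
		return "", err
	}
	q := u.Query()
	q.Set("state", state)
	u.RawQuery = q.Encode()
	return u.String(), nil
}

func (c stubConnector) HandleCallback(r *http.Request) (*SSOIdentity, error) {
	email := r.URL.Query().Get("email")
	if email == "" {
		return nil, fmt.Errorf("callback is missing the email parameter")
	}
	return &SSOIdentity{UserID: email, Email: email, EmailVerified: true}, nil
}

func main() {
	var p OAuthCallbackProvider = stubConnector{authorizeEndpoint: "https://idp.example.com/authorize"}
	authURL, _ := p.BuildAuthURL("random-state")
	fmt.Println(authURL)

	req, _ := http.NewRequest(http.MethodGet, "https://signoz.example.com/callback?email=jane@example.com", nil)
	id, err := p.HandleCallback(req)
	fmt.Println(id, err)
}
```
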
@@ -14,9 +14,9 @@ import (
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/dao"
|
||||
"github.com/SigNoz/signoz/ee/query-service/license"
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
)
|
||||
@@ -35,64 +35,68 @@ var (
|
||||
type Manager struct {
|
||||
clickhouseConn clickhouse.Conn
|
||||
|
||||
licenseRepo *license.Repo
|
||||
licenseService licensing.Licensing
|
||||
|
||||
scheduler *gocron.Scheduler
|
||||
|
||||
modelDao dao.ModelDao
|
||||
|
||||
zeus zeus.Zeus
|
||||
|
||||
orgGetter organization.Getter
|
||||
}
|
||||
|
||||
func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, zeus zeus.Zeus) (*Manager, error) {
|
||||
func New(licenseService licensing.Licensing, clickhouseConn clickhouse.Conn, zeus zeus.Zeus, orgGetter organization.Getter) (*Manager, error) {
|
||||
m := &Manager{
|
||||
clickhouseConn: clickhouseConn,
|
||||
licenseRepo: licenseRepo,
|
||||
licenseService: licenseService,
|
||||
scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC
|
||||
modelDao: modelDao,
|
||||
zeus: zeus,
|
||||
orgGetter: orgGetter,
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// start loads collects and exports any exported snapshot and starts the exporter
|
||||
func (lm *Manager) Start() error {
|
||||
func (lm *Manager) Start(ctx context.Context) error {
|
||||
// compares the locker and stateUnlocked if both are same lock is applied else returns error
|
||||
if !atomic.CompareAndSwapUint32(&locker, stateUnlocked, stateLocked) {
|
||||
return fmt.Errorf("usage exporter is locked")
|
||||
}
|
||||
|
||||
_, err := lm.scheduler.Do(func() { lm.UploadUsage() })
|
||||
// upload usage once when starting the service
|
||||
|
||||
_, err := lm.scheduler.Do(func() { lm.UploadUsage(ctx) })
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// upload usage once when starting the service
|
||||
lm.UploadUsage()
|
||||
|
||||
lm.UploadUsage(ctx)
|
||||
lm.scheduler.StartAsync()
|
||||
|
||||
return nil
|
||||
}
|
||||
func (lm *Manager) UploadUsage() {
|
||||
ctx := context.Background()
|
||||
// check if license is present or not
|
||||
license, err := lm.licenseRepo.GetActiveLicense(ctx)
|
||||
func (lm *Manager) UploadUsage(ctx context.Context) {
|
||||
organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to get active license", zap.Error(err))
|
||||
return
|
||||
}
|
||||
if license == nil {
|
||||
// we will not start the usage reporting if license is not present.
|
||||
zap.L().Info("no license present, skipping usage reporting")
|
||||
zap.L().Error("failed to get organizations", zap.Error(err))
|
||||
return
|
||||
}
|
||||
for _, organization := range organizations {
|
||||
// check if license is present or not
|
||||
license, err := lm.licenseService.GetActive(ctx, organization.ID)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to get active license", zap.Error(err))
|
||||
return
|
||||
}
|
||||
if license == nil {
|
||||
// we will not start the usage reporting if license is not present.
|
||||
zap.L().Info("no license present, skipping usage reporting")
|
||||
return
|
||||
}
|
||||
|
||||
usages := []model.UsageDB{}
|
||||
usages := []model.UsageDB{}
|
||||
|
||||
// get usage from clickhouse
|
||||
dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"}
|
||||
query := `
|
||||
// get usage from clickhouse
|
||||
dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"}
|
||||
query := `
|
||||
SELECT tenant, collector_id, exporter_id, timestamp, data
|
||||
FROM %s.distributed_usage as u1
|
||||
GLOBAL INNER JOIN
|
||||
@@ -107,76 +111,76 @@ func (lm *Manager) UploadUsage() {
|
||||
order by timestamp
|
||||
`
|
||||
|
||||
for _, db := range dbs {
|
||||
dbusages := []model.UsageDB{}
|
||||
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
|
||||
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
|
||||
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
|
||||
return
|
||||
for _, db := range dbs {
|
||||
dbusages := []model.UsageDB{}
|
||||
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
|
||||
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
|
||||
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
for _, u := range dbusages {
|
||||
u.Type = db
|
||||
usages = append(usages, u)
|
||||
}
|
||||
}
|
||||
for _, u := range dbusages {
|
||||
u.Type = db
|
||||
usages = append(usages, u)
|
||||
}
|
||||
}
|
||||
|
||||
if len(usages) <= 0 {
|
||||
zap.L().Info("no snapshots to upload, skipping.")
|
||||
return
|
||||
}
|
||||
|
||||
zap.L().Info("uploading usage data")
|
||||
|
||||
usagesPayload := []model.Usage{}
|
||||
for _, usage := range usages {
|
||||
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
|
||||
if err != nil {
|
||||
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
|
||||
if len(usages) <= 0 {
|
||||
zap.L().Info("no snapshots to upload, skipping.")
|
||||
return
|
||||
}
|
||||
|
||||
usageData := model.Usage{}
|
||||
err = json.Unmarshal(usageDataBytes, &usageData)
|
||||
if err != nil {
|
||||
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
|
||||
zap.L().Info("uploading usage data")
|
||||
|
||||
usagesPayload := []model.Usage{}
|
||||
for _, usage := range usages {
|
||||
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
|
||||
if err != nil {
|
||||
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
usageData := model.Usage{}
|
||||
err = json.Unmarshal(usageDataBytes, &usageData)
|
||||
if err != nil {
|
||||
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
usageData.CollectorID = usage.CollectorID
|
||||
usageData.ExporterID = usage.ExporterID
|
||||
usageData.Type = usage.Type
|
||||
usageData.Tenant = "default"
|
||||
usageData.OrgName = "default"
|
||||
usageData.TenantId = "default"
|
||||
usagesPayload = append(usagesPayload, usageData)
|
||||
}
|
||||
|
||||
key, _ := uuid.Parse(license.Key)
|
||||
payload := model.UsagePayload{
|
||||
LicenseKey: key,
|
||||
Usage: usagesPayload,
|
||||
}
|
||||
|
||||
body, errv2 := json.Marshal(payload)
|
||||
if errv2 != nil {
|
||||
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
|
||||
return
|
||||
}
|
||||
|
||||
usageData.CollectorID = usage.CollectorID
|
||||
usageData.ExporterID = usage.ExporterID
|
||||
usageData.Type = usage.Type
|
||||
usageData.Tenant = "default"
|
||||
usageData.OrgName = "default"
|
||||
usageData.TenantId = "default"
|
||||
usagesPayload = append(usagesPayload, usageData)
|
||||
}
|
||||
|
||||
key, _ := uuid.Parse(license.Key)
|
||||
payload := model.UsagePayload{
|
||||
LicenseKey: key,
|
||||
Usage: usagesPayload,
|
||||
}
|
||||
|
||||
body, errv2 := json.Marshal(payload)
|
||||
if errv2 != nil {
|
||||
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
|
||||
return
|
||||
}
|
||||
|
||||
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
|
||||
if errv2 != nil {
|
||||
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
|
||||
// not returning error here since it is captured in the failed count
|
||||
return
|
||||
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
|
||||
if errv2 != nil {
|
||||
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
|
||||
// not returning error here since it is captured in the failed count
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (lm *Manager) Stop() {
|
||||
func (lm *Manager) Stop(ctx context.Context) {
|
||||
lm.scheduler.Stop()
|
||||
|
||||
zap.L().Info("sending usage data before shutting down")
|
||||
// send usage before shutting down
|
||||
lm.UploadUsage()
|
||||
|
||||
lm.UploadUsage(ctx)
|
||||
atomic.StoreUint32(&locker, stateUnlocked)
|
||||
}
|
||||
|
||||
@@ -17,15 +17,21 @@ var (
|
||||
)
|
||||
|
||||
var (
|
||||
Org = "org"
|
||||
User = "user"
|
||||
CloudIntegration = "cloud_integration"
|
||||
Org = "org"
|
||||
User = "user"
|
||||
UserNoCascade = "user_no_cascade"
|
||||
FactorPassword = "factor_password"
|
||||
CloudIntegration = "cloud_integration"
|
||||
AgentConfigVersion = "agent_config_version"
|
||||
)
|
||||
|
||||
var (
|
||||
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
||||
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
|
||||
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
||||
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
|
||||
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
|
||||
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
|
||||
AgentConfigVersionReference = `("version_id") REFERENCES "agent_config_version" ("id")`
|
||||
)
|
||||
|
||||
type dialect struct{}
|
||||
@@ -264,8 +270,14 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
|
||||
fkReferences = append(fkReferences, OrgReference)
|
||||
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
|
||||
fkReferences = append(fkReferences, UserReference)
|
||||
} else if reference == UserNoCascade && !slices.Contains(fkReferences, UserReferenceNoCascade) {
|
||||
fkReferences = append(fkReferences, UserReferenceNoCascade)
|
||||
} else if reference == FactorPassword && !slices.Contains(fkReferences, FactorPasswordReference) {
|
||||
fkReferences = append(fkReferences, FactorPasswordReference)
|
||||
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
|
||||
fkReferences = append(fkReferences, CloudIntegrationReference)
|
||||
} else if reference == AgentConfigVersion && !slices.Contains(fkReferences, AgentConfigVersionReference) {
|
||||
fkReferences = append(fkReferences, AgentConfigVersionReference)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
type GettablePAT struct {
|
||||
CreatedByUser PatUser `json:"createdByUser"`
|
||||
UpdatedByUser PatUser `json:"updatedByUser"`
|
||||
|
||||
StorablePersonalAccessToken
|
||||
}
|
||||
|
||||
type PatUser struct {
|
||||
types.User
|
||||
NotFound bool `json:"notFound"`
|
||||
}
|
||||
|
||||
func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
|
||||
return GettablePAT{
|
||||
StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
|
||||
}
|
||||
}
|
||||
|
||||
type StorablePersonalAccessToken struct {
|
||||
bun.BaseModel `bun:"table:personal_access_token"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
|
||||
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
|
||||
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
|
||||
Token string `json:"token" bun:"token,type:text,notnull,unique"`
|
||||
Name string `json:"name" bun:"name,type:text,notnull"`
|
||||
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
|
||||
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
|
||||
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
|
||||
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
|
||||
}
|
||||
|
||||
func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
|
||||
now := time.Now()
|
||||
if expiresAt != 0 {
|
||||
// convert expiresAt to unix timestamp from days
|
||||
expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
|
||||
}
|
||||
|
||||
// Generate a 32-byte random token.
|
||||
token := make([]byte, 32)
|
||||
rand.Read(token)
|
||||
// Encode the token in base64.
|
||||
encodedToken := base64.StdEncoding.EncodeToString(token)
|
||||
|
||||
return StorablePersonalAccessToken{
|
||||
Token: encodedToken,
|
||||
Name: name,
|
||||
Role: role,
|
||||
UserID: userID,
|
||||
ExpiresAt: expiresAt,
|
||||
LastUsed: 0,
|
||||
Revoked: false,
|
||||
UpdatedByUserID: "",
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
},
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
}
|
||||
}
|
||||
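NewStorablePersonalAccessToken above treats a non-zero expiresAt argument as a number of days and converts it to an absolute unix timestamp (now + days * 24 * 60 * 60), while zero is kept as a non-expiring token. A short sketch of that conversion in isolation; the function name is illustrative.

```go
package main

import (
	"fmt"
	"time"
)

// expiryUnix mirrors the conversion above: a positive expiresInDays becomes
// an absolute unix timestamp, while 0 means the token never expires.
func expiryUnix(now time.Time, expiresInDays int64) int64 {
	if expiresInDays == 0 {
		return 0
	}
	return now.Unix() + expiresInDays*24*60*60
}

func main() {
	now := time.Date(2025, time.January, 1, 0, 0, 0, 0, time.UTC)
	fmt.Println(expiryUnix(now, 30)) // now + 2,592,000 seconds
	fmt.Println(expiryUnix(now, 0))  // 0, token never expires
}
```
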
@@ -110,6 +110,8 @@ module.exports = {
|
||||
// eslint rules need to remove
|
||||
'@typescript-eslint/no-shadow': 'off',
|
||||
'import/no-cycle': 'off',
|
||||
// https://typescript-eslint.io/rules/consistent-return/ check the warning for details
|
||||
'consistent-return': 'off',
|
||||
'prettier/prettier': [
|
||||
'error',
|
||||
{},
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
# Ignore artifacts:
|
||||
build
|
||||
coverage
|
||||
public/
|
||||
|
||||
# Ignore all MD files:
|
||||
**/*.md
|
||||
**/*.md
|
||||
|
||||
# Ignore all JSON files:
|
||||
**/*.json
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
NODE_ENV="development"
|
||||
BUNDLE_ANALYSER="true"
|
||||
FRONTEND_API_ENDPOINT="http://localhost:8080/"
|
||||
INTERCOM_APP_ID="intercom-app-id"
|
||||
PYLON_APP_ID="pylon-app-id"
|
||||
APPCUES_APP_ID="appcess-app-id"
|
||||
|
||||
PLAYWRIGHT_TEST_BASE_URL="http://localhost:8080"
|
||||
CI="1"
|
||||
@@ -15,6 +15,7 @@ const config: Config.InitialOptions = {
|
||||
extensionsToTreatAsEsm: ['.ts'],
|
||||
'ts-jest': {
|
||||
useESM: true,
|
||||
isolatedModules: true,
|
||||
},
|
||||
},
|
||||
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
|
||||
@@ -30,11 +31,6 @@ const config: Config.InitialOptions = {
|
||||
testPathIgnorePatterns: ['/node_modules/', '/public/'],
|
||||
moduleDirectories: ['node_modules', 'src'],
|
||||
testEnvironment: 'jest-environment-jsdom',
|
||||
testEnvironmentOptions: {
|
||||
'jest-playwright': {
|
||||
browsers: ['chromium', 'firefox', 'webkit'],
|
||||
},
|
||||
},
|
||||
coverageThreshold: {
|
||||
global: {
|
||||
statements: 80,
|
||||
|
||||
@@ -15,10 +15,6 @@
|
||||
"jest:coverage": "jest --coverage",
|
||||
"jest:watch": "jest --watch",
|
||||
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
|
||||
"playwright": "NODE_ENV=testing playwright test --config=./playwright.config.ts",
|
||||
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
|
||||
"playwright:codegen:local": "playwright codegen http://localhost:3301",
|
||||
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
|
||||
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
|
||||
"commitlint": "commitlint --edit $1",
|
||||
"test": "jest",
|
||||
@@ -35,6 +31,7 @@
|
||||
"@dnd-kit/core": "6.1.0",
|
||||
"@dnd-kit/modifiers": "7.0.0",
|
||||
"@dnd-kit/sortable": "8.0.0",
|
||||
"@dnd-kit/utilities": "3.2.2",
|
||||
"@grafana/data": "^11.2.3",
|
||||
"@mdx-js/loader": "2.3.0",
|
||||
"@mdx-js/react": "2.3.0",
|
||||
@@ -81,7 +78,8 @@
|
||||
"fontfaceobserver": "2.3.0",
|
||||
"history": "4.10.1",
|
||||
"html-webpack-plugin": "5.5.0",
|
||||
"http-proxy-middleware": "3.0.3",
|
||||
"http-proxy-middleware": "3.0.5",
|
||||
"http-status-codes": "2.3.0",
|
||||
"i18next": "^21.6.12",
|
||||
"i18next-browser-languagedetector": "^6.1.3",
|
||||
"i18next-http-backend": "^1.3.2",
|
||||
@@ -90,7 +88,7 @@
|
||||
"less": "^4.1.2",
|
||||
"less-loader": "^10.2.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"lucide-react": "0.427.0",
|
||||
"lucide-react": "0.498.0",
|
||||
"mini-css-extract-plugin": "2.4.5",
|
||||
"motion": "12.4.13",
|
||||
"overlayscrollbars": "^2.8.1",
|
||||
@@ -136,7 +134,7 @@
|
||||
"uuid": "^8.3.2",
|
||||
"web-vitals": "^0.2.4",
|
||||
"webpack": "5.94.0",
|
||||
"webpack-dev-server": "^4.15.2",
|
||||
"webpack-dev-server": "^5.2.1",
|
||||
"webpack-retry-chunk-load-plugin": "3.1.1",
|
||||
"xstate": "^4.31.0"
|
||||
},
|
||||
@@ -163,7 +161,6 @@
|
||||
"@commitlint/config-conventional": "^16.2.4",
|
||||
"@faker-js/faker": "9.3.0",
|
||||
"@jest/globals": "^27.5.1",
|
||||
"@playwright/test": "^1.22.0",
|
||||
"@testing-library/jest-dom": "5.16.5",
|
||||
"@testing-library/react": "13.4.0",
|
||||
"@testing-library/user-event": "14.4.3",
|
||||
@@ -200,7 +197,6 @@
|
||||
"babel-plugin-styled-components": "^1.12.0",
|
||||
"compression-webpack-plugin": "9.0.0",
|
||||
"copy-webpack-plugin": "^11.0.0",
|
||||
"critters-webpack-plugin": "^3.0.1",
|
||||
"eslint": "^7.32.0",
|
||||
"eslint-config-airbnb": "^19.0.4",
|
||||
"eslint-config-airbnb-typescript": "^16.1.4",
|
||||
@@ -238,7 +234,7 @@
|
||||
"ts-node": "^10.2.1",
|
||||
"typescript-plugin-css-modules": "5.0.1",
|
||||
"webpack-bundle-analyzer": "^4.5.0",
|
||||
"webpack-cli": "^4.9.2"
|
||||
"webpack-cli": "^5.1.4"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.(js|jsx|ts|tsx)": [
|
||||
@@ -254,9 +250,10 @@
|
||||
"xml2js": "0.5.0",
|
||||
"phin": "^3.7.1",
|
||||
"body-parser": "1.20.3",
|
||||
"http-proxy-middleware": "3.0.3",
|
||||
"http-proxy-middleware": "3.0.5",
|
||||
"cross-spawn": "7.0.5",
|
||||
"cookie": "^0.7.1",
|
||||
"serialize-javascript": "6.0.2"
|
||||
"serialize-javascript": "6.0.2",
|
||||
"prismjs": "1.30.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
import { PlaywrightTestConfig } from '@playwright/test';
|
||||
import dotenv from 'dotenv';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const config: PlaywrightTestConfig = {
|
||||
forbidOnly: !!process.env.CI,
|
||||
retries: process.env.CI ? 2 : 0,
|
||||
preserveOutput: 'always',
|
||||
name: 'Signoz',
|
||||
testDir: './tests',
|
||||
use: {
|
||||
trace: 'retain-on-failure',
|
||||
baseURL: process.env.PLAYWRIGHT_TEST_BASE_URL || 'http://localhost:3301',
|
||||
},
|
||||
updateSnapshots: 'all',
|
||||
fullyParallel: !!process.env.CI,
|
||||
quiet: false,
|
||||
testMatch: ['**/*.spec.ts'],
|
||||
reporter: process.env.CI ? 'github' : 'list',
|
||||
};
|
||||
|
||||
export default config;
|
||||
2  frontend/public/Icons/empty-funnel-icon.svg  Normal file
File diff suppressed because one or more lines are too long
After size: 5.9 KiB

1  frontend/public/Icons/funnel-add.svg  Normal file
@@ -0,0 +1 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="m12.192 3.18-1.167 2.33-.583 1.165M7.31 12.74a.583.583 0 0 1-.835-.24L1.808 3.179"/><path d="M7 1.167c2.9 0 5.25.783 5.25 1.75 0 .966-2.35 1.75-5.25 1.75s-5.25-.784-5.25-1.75c0-.967 2.35-1.75 5.25-1.75ZM8.75 10.5h3.5M10.5 12.25v-3.5"/></g></svg>
After size: 418 B

1  frontend/public/Icons/solid-info-circle.svg  Normal file
@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#a)" stroke-linecap="round" stroke-linejoin="round"><path d="M8 14.666A6.667 6.667 0 1 0 8 1.333a6.667 6.667 0 0 0 0 13.333Z" fill="#C0C1C3" stroke="#C0C1C3" stroke-width="2"/><path d="M8 11.333v-4H6.333M8 4.667h.007" stroke="#121317" stroke-width="1.333"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>
After size: 439 B

@@ -9,8 +9,8 @@
|
||||
"tooltip_notification_channels": "More details on how to setting notification channels",
|
||||
"sending_channels_note": "The alerts will be sent to all the configured channels.",
|
||||
"loading_channels_message": "Loading Channels..",
|
||||
"page_title_create": "New Notification Channels",
|
||||
"page_title_edit": "Edit Notification Channels",
|
||||
"page_title_create": "New Notification Channel",
|
||||
"page_title_edit": "Edit Notification Channel",
|
||||
"button_save_channel": "Save",
|
||||
"button_test_channel": "Test",
|
||||
"button_return": "Back",
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
|
||||
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via chat support or "
|
||||
}
|
||||
|
||||
@@ -9,8 +9,8 @@
|
||||
"tooltip_notification_channels": "More details on how to setting notification channels",
|
||||
"sending_channels_note": "The alerts will be sent to all the configured channels.",
|
||||
"loading_channels_message": "Loading Channels..",
|
||||
"page_title_create": "New Notification Channels",
|
||||
"page_title_edit": "Edit Notification Channels",
|
||||
"page_title_create": "New Notification Channel",
|
||||
"page_title_edit": "Edit Notification Channel",
|
||||
"button_save_channel": "Save",
|
||||
"button_test_channel": "Test",
|
||||
"button_return": "Back",
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
|
||||
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via chat support or "
|
||||
}
|
||||
|
||||
@@ -69,5 +69,5 @@
|
||||
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
|
||||
"API_MONITORING": "SigNoz | API Monitoring"
|
||||
"API_MONITORING": "SigNoz | External APIs"
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ done
|
||||
# create temporary tsconfig which includes only passed files
|
||||
str="{
|
||||
\"extends\": \"./tsconfig.json\",
|
||||
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./conf/default.conf\",\"./public\",\"./tests\",\"./playwright.config.ts\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
|
||||
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./public\",\"./tests\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
|
||||
}"
|
||||
echo $str > tsconfig.tmp
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
import getOrgUser from 'api/user/getOrgUser';
|
||||
import getAll from 'api/v1/user/get';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { ORG_PREFERENCES } from 'constants/orgPreferences';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
|
||||
import history from 'lib/history';
|
||||
@@ -11,8 +12,12 @@ import { useAppContext } from 'providers/App/App';
|
||||
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { matchPath, useLocation } from 'react-router-dom';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import APIError from 'types/api/error';
|
||||
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
|
||||
import { OrgPreference } from 'types/api/preferences/preference';
|
||||
import { Organization } from 'types/api/user/getOrganization';
|
||||
import { UserResponse } from 'types/api/user/getUser';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
import { routePermission } from 'utils/permission';
|
||||
|
||||
@@ -33,8 +38,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
user,
|
||||
isLoggedIn: isLoggedInState,
|
||||
isFetchingOrgPreferences,
|
||||
activeLicenseV3,
|
||||
isFetchingActiveLicenseV3,
|
||||
activeLicense,
|
||||
isFetchingActiveLicense,
|
||||
trialInfo,
|
||||
featureFlags,
|
||||
} = useAppContext();
|
||||
@@ -58,12 +63,13 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
|
||||
const [orgData, setOrgData] = useState<Organization | undefined>(undefined);
|
||||
|
||||
const { data: orgUsers, isFetching: isFetchingOrgUsers } = useQuery({
|
||||
const { data: usersData, isFetching: isFetchingUsers } = useQuery<
|
||||
SuccessResponseV2<UserResponse[]> | undefined,
|
||||
APIError
|
||||
>({
|
||||
queryFn: () => {
|
||||
if (orgData && orgData.id !== undefined) {
|
||||
return getOrgUser({
|
||||
orgId: orgData.id,
|
||||
});
|
||||
return getAll();
|
||||
}
|
||||
return undefined;
|
||||
},
|
||||
@@ -72,26 +78,27 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
});
|
||||
|
||||
const checkFirstTimeUser = useCallback((): boolean => {
|
||||
const users = orgUsers?.payload || [];
|
||||
const users = usersData?.data || [];
|
||||
|
||||
const remainingUsers = users.filter(
|
||||
const remainingUsers = (Array.isArray(users) ? users : []).filter(
|
||||
(user) => user.email !== 'admin@signoz.cloud',
|
||||
);
|
||||
|
||||
return remainingUsers.length === 1;
|
||||
}, [orgUsers?.payload]);
|
||||
}, [usersData?.data]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
isCloudUserVal &&
|
||||
!isFetchingOrgPreferences &&
|
||||
orgPreferences &&
|
||||
!isFetchingOrgUsers &&
|
||||
orgUsers &&
|
||||
orgUsers.payload
|
||||
!isFetchingUsers &&
|
||||
usersData &&
|
||||
usersData.data
|
||||
) {
|
||||
const isOnboardingComplete = orgPreferences?.find(
|
||||
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
|
||||
(preference: OrgPreference) =>
|
||||
preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
|
||||
)?.value;
|
||||
|
||||
const isFirstUser = checkFirstTimeUser();
|
||||
@@ -108,9 +115,9 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
checkFirstTimeUser,
|
||||
isCloudUserVal,
|
||||
isFetchingOrgPreferences,
|
||||
isFetchingOrgUsers,
|
||||
isFetchingUsers,
|
||||
orgPreferences,
|
||||
orgUsers,
|
||||
usersData,
|
||||
pathname,
|
||||
]);
|
||||
|
||||
@@ -141,16 +148,16 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
|
||||
if (!isFetchingActiveLicense && activeLicense) {
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
|
||||
const isTerminated = activeLicenseV3.state === LicenseState.TERMINATED;
|
||||
const isExpired = activeLicenseV3.state === LicenseState.EXPIRED;
|
||||
const isCancelled = activeLicenseV3.state === LicenseState.CANCELLED;
|
||||
const isTerminated = activeLicense.state === LicenseState.TERMINATED;
|
||||
const isExpired = activeLicense.state === LicenseState.EXPIRED;
|
||||
const isCancelled = activeLicense.state === LicenseState.CANCELLED;
|
||||
|
||||
const isWorkspaceAccessRestricted = isTerminated || isExpired || isCancelled;
|
||||
|
||||
const { platform } = activeLicenseV3;
|
||||
const { platform } = activeLicense;
|
||||
|
||||
if (
|
||||
isWorkspaceAccessRestricted &&
|
||||
@@ -160,26 +167,26 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
navigateToWorkSpaceAccessRestricted(currentRoute);
|
||||
}
|
||||
}
|
||||
}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
|
||||
}, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingActiveLicenseV3) {
|
||||
if (!isFetchingActiveLicense) {
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
const shouldBlockWorkspace = trialInfo?.workSpaceBlock;
|
||||
|
||||
if (
|
||||
shouldBlockWorkspace &&
|
||||
currentRoute &&
|
||||
activeLicenseV3?.platform === LicensePlatform.CLOUD
|
||||
activeLicense?.platform === LicensePlatform.CLOUD
|
||||
) {
|
||||
navigateToWorkSpaceBlocked(currentRoute);
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [
|
||||
isFetchingActiveLicenseV3,
|
||||
isFetchingActiveLicense,
|
||||
trialInfo?.workSpaceBlock,
|
||||
activeLicenseV3?.platform,
|
||||
activeLicense?.platform,
|
||||
mapRoutes,
|
||||
pathname,
|
||||
]);
|
||||
@@ -193,20 +200,20 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
|
||||
if (!isFetchingActiveLicense && activeLicense) {
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
const shouldSuspendWorkspace =
|
||||
activeLicenseV3.state === LicenseState.DEFAULTED;
|
||||
activeLicense.state === LicenseState.DEFAULTED;
|
||||
|
||||
if (
|
||||
shouldSuspendWorkspace &&
|
||||
currentRoute &&
|
||||
activeLicenseV3.platform === LicensePlatform.CLOUD
activeLicense.platform === LicensePlatform.CLOUD
) {
navigateToWorkSpaceSuspended(currentRoute);
}
}
}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
}, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]);

useEffect(() => {
if (org && org.length > 0 && org[0].id !== undefined) {

@@ -5,6 +5,7 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
@@ -12,9 +13,9 @@ import AppLayout from 'container/AppLayout';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import { StatusCodes } from 'http-status-codes';
import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js';
@@ -22,10 +23,12 @@ import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { LicenseStatus } from 'types/api/licensesV3/getActive';
import { Userpilot } from 'userpilot';
import { extractDomain } from 'utils/app';

@@ -40,14 +43,13 @@ import defaultRoutes, {
function App(): JSX.Element {
const themeConfig = useThemeConfig();
const {
licenses,
user,
isFetchingUser,
isFetchingLicenses,
isFetchingFeatureFlags,
trialInfo,
activeLicenseV3,
isFetchingActiveLicenseV3,
activeLicense,
isFetchingActiveLicense,
activeLicenseFetchError,
userFetchError,
featureFlagsFetchError,
isLoggedIn: isLoggedInState,
@@ -65,18 +67,18 @@ function App(): JSX.Element {
const enableAnalytics = useCallback(
(user: IUser): void => {
// wait for the required data to be loaded before doing init for anything!
if (!isFetchingActiveLicenseV3 && activeLicenseV3 && org) {
if (!isFetchingActiveLicense && activeLicense && org) {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].displayName : '';

const { name, email, role } = user;
const { displayName, email, role } = user;

const domain = extractDomain(email);
const hostNameParts = hostname.split('.');

const identifyPayload = {
email,
name,
name: displayName,
company_name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -102,10 +104,24 @@ function App(): JSX.Element {
if (domain) {
logEvent('Domain Identified', groupTraits, 'group');
}
if (window && window.Appcues) {
window.Appcues.identify(email, {
name: displayName,

tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,

companyName: orgName,
email,
paidUser: !!trialInfo?.trialConvertedToSubscription,
});
}

Userpilot.identify(email, {
email,
name,
name: displayName,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -117,7 +133,7 @@ function App(): JSX.Element {

posthog?.identify(email, {
email,
name,
name: displayName,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -136,24 +152,12 @@ function App(): JSX.Element {
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});

if (
window.cioanalytics &&
typeof window.cioanalytics.identify === 'function'
) {
window.cioanalytics.reset();
window.cioanalytics.identify(email, {
name: user.name,
email,
role: user.role,
});
}
}
},
[
hostname,
isFetchingActiveLicenseV3,
activeLicenseV3,
isFetchingActiveLicense,
activeLicense,
org,
trialInfo?.trialConvertedToSubscription,
],
@@ -162,18 +166,19 @@ function App(): JSX.Element {
// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
if (
!isFetchingLicenses &&
licenses &&
!isFetchingActiveLicense &&
(activeLicense || activeLicenseFetchError) &&
!isFetchingUser &&
user &&
!!user.email
) {
// either the active API returns error with 404 or 501 and if it returns a terminated license means it's on basic plan
const isOnBasicPlan =
licenses.licenses?.some(
(license) =>
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
) || licenses.licenses === null;

(activeLicenseFetchError &&
[StatusCodes.NOT_FOUND, StatusCodes.NOT_IMPLEMENTED].includes(
activeLicenseFetchError?.getHttpStatusCode(),
)) ||
(activeLicense?.status && activeLicense.status === LicenseStatus.INVALID);
const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER);

if (isLoggedInState && user && user.id && user.email && !isIdentifiedUser) {
@@ -189,6 +194,11 @@ function App(): JSX.Element {
(route) => route?.path !== ROUTES.BILLING,
);
}

if (isEnterpriseSelfHostedUser) {
updatedRoutes.push(LIST_LICENSES);
}

// always add support route for cloud users
updatedRoutes = [...updatedRoutes, SUPPORT_ROUTE];
} else {
@@ -203,22 +213,23 @@ function App(): JSX.Element {
}, [
isLoggedInState,
user,
licenses,
isCloudUser,
isEnterpriseSelfHostedUser,
isFetchingLicenses,
isFetchingActiveLicense,
isFetchingUser,
activeLicense,
activeLicenseFetchError,
]);

useEffect(() => {
if (pathname === ROUTES.ONBOARDING) {
window.Intercom('update', {
hide_default_launcher: true,
});
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.Pylon('hideChatBubble');
} else {
window.Intercom('update', {
hide_default_launcher: false,
});
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.Pylon('showChatBubble');
}
}, [pathname]);

@@ -230,8 +241,7 @@ function App(): JSX.Element {
if (
!isFetchingFeatureFlags &&
(featureFlags || featureFlagsFetchError) &&
licenses &&
activeLicenseV3 &&
activeLicense &&
trialInfo
) {
let isChatSupportEnabled = false;
@@ -254,11 +264,13 @@ function App(): JSX.Element {
!showAddCreditCardModal &&
(isCloudUser || isEnterpriseSelfHostedUser)
) {
window.Intercom('boot', {
app_id: process.env.INTERCOM_APP_ID,
email: user?.email || '',
name: user?.name || '',
});
window.pylon = {
chat_settings: {
app_id: process.env.PYLON_APP_ID,
email: user.email,
name: user.displayName,
},
};
}
}
}, [
@@ -269,8 +281,7 @@ function App(): JSX.Element {
featureFlags,
isFetchingFeatureFlags,
featureFlagsFetchError,
licenses,
activeLicenseV3,
activeLicense,
trialInfo,
isCloudUser,
isEnterpriseSelfHostedUser,
@@ -321,10 +332,6 @@ function App(): JSX.Element {
} else {
posthog.reset();
Sentry.close();

if (window.cioanalytics && typeof window.cioanalytics.reset === 'function') {
window.cioanalytics.reset();
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isCloudUser, isEnterpriseSelfHostedUser]);
@@ -332,7 +339,7 @@ function App(): JSX.Element {
// if the user is in logged in state
if (isLoggedInState) {
// if the setup calls are loading then return a spinner
if (isFetchingLicenses || isFetchingUser || isFetchingFeatureFlags) {
if (isFetchingActiveLicense || isFetchingUser || isFetchingFeatureFlags) {
return <Spinner tip="Loading..." />;
}

@@ -344,7 +351,11 @@ function App(): JSX.Element {
}

// if all of the data is not set then return a spinner, this is required because there is some gap between loading states and data setting
if ((!licenses || !user.email || !featureFlags) && !userFetchError) {
if (
(!activeLicense || !user.email || !featureFlags) &&
!userFetchError &&
!activeLicenseFetchError
) {
return <Spinner tip="Loading..." />;
}
}
@@ -354,35 +365,38 @@ function App(): JSX.Element {
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<UserpilotRouteTracker />
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
<ErrorModalProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</ErrorModalProvider>
</NotificationProvider>
</CompatRouter>
</Router>

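In the App.tsx hunks above, the basic-plan check no longer scans the licenses list; it is derived from the active-license call itself: a 404 or 501 from the endpoint, or an active license whose status is LicenseStatus.INVALID, marks the workspace as basic. A standalone sketch of that predicate, with illustrative stand-in types for activeLicense and activeLicenseFetchError:

```ts
// Sketch only: the new basic-plan condition as a pure function, mirroring the
// effect above. ActiveLicenseLike / FetchErrorLike are assumed shapes, not the
// app's real types.
import { StatusCodes } from 'http-status-codes';
import { LicenseStatus } from 'types/api/licensesV3/getActive';

interface ActiveLicenseLike {
	status?: LicenseStatus;
}

interface FetchErrorLike {
	getHttpStatusCode(): number;
}

// HTTP statuses that mean "no paid active license" for this check.
const basicPlanStatuses: number[] = [
	StatusCodes.NOT_FOUND,
	StatusCodes.NOT_IMPLEMENTED,
];

export function isOnBasicPlan(
	activeLicense: ActiveLicenseLike | null,
	activeLicenseFetchError: FetchErrorLike | null,
): boolean {
	return (
		(!!activeLicenseFetchError &&
			basicPlanStatuses.includes(activeLicenseFetchError.getHttpStatusCode())) ||
		activeLicense?.status === LicenseStatus.INVALID
	);
}
```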
@@ -47,9 +47,10 @@ export const TracesFunnels = Loadable(
import(/* webpackChunkName: "Traces Funnels" */ 'pages/TracesModulePage'),
);
export const TracesFunnelDetails = Loadable(
// eslint-disable-next-line sonarjs/no-identical-functions
() =>
import(
/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesFunnelDetails'
/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesModulePage'
),
);

@@ -127,12 +128,11 @@ export const AlertOverview = Loadable(
);

export const CreateAlertChannelAlerts = Loadable(
() =>
import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'),
() => import(/* webpackChunkName: "Create Channels" */ 'pages/Settings'),
);

export const EditAlertChannelsAlerts = Loadable(
() => import(/* webpackChunkName: "Edit Channels" */ 'pages/ChannelsEdit'),
() => import(/* webpackChunkName: "Edit Channels" */ 'pages/Settings'),
);

export const AllAlertChannels = Loadable(
@@ -164,7 +164,7 @@ export const APIKeys = Loadable(
);

export const MySettings = Loadable(
() => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'),
() => import(/* webpackChunkName: "All MySettings" */ 'pages/Settings'),
);

export const CustomDomainSettings = Loadable(
@@ -221,7 +221,7 @@ export const LogsIndexToFields = Loadable(
);

export const BillingPage = Loadable(
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Billing'),
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Settings'),
);

export const SupportPage = Loadable(
@@ -248,7 +248,7 @@ export const WorkspaceAccessRestricted = Loadable(
);

export const ShortcutsPage = Loadable(
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'),
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Settings'),
);

export const InstalledIntegrations = Loadable(

@@ -7,12 +7,9 @@ import {
AlertOverview,
AllAlertChannels,
AllErrors,
APIKeys,
ApiMonitoring,
BillingPage,
CreateAlertChannelAlerts,
CreateNewAlerts,
CustomDomainSettings,
DashboardPage,
DashboardWidget,
EditAlertChannelsAlerts,
@@ -20,7 +17,6 @@ import {
ErrorDetails,
Home,
InfrastructureMonitoring,
IngestionSettings,
InstalledIntegrations,
LicensePage,
ListAllALertsPage,
@@ -31,12 +27,10 @@ import {
LogsIndexToFields,
LogsSaveViews,
MetricsExplorer,
MySettings,
NewDashboardPage,
OldLogsExplorer,
Onboarding,
OnboardingV2,
OrganizationSettings,
OrgOnboarding,
PasswordReset,
PipelinePage,
@@ -45,7 +39,6 @@ import {
ServicesTablePage,
ServiceTopLevelOperationsPage,
SettingsPage,
ShortcutsPage,
SignupPage,
SomethingWentWrong,
StatusPage,
@@ -150,7 +143,7 @@ const routes: AppRoutes[] = [
},
{
path: ROUTES.SETTINGS,
exact: true,
exact: false,
component: SettingsPage,
isPrivate: true,
key: 'SETTINGS',
@@ -295,41 +288,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'VERSION',
},
{
path: ROUTES.ORG_SETTINGS,
exact: true,
component: OrganizationSettings,
isPrivate: true,
key: 'ORG_SETTINGS',
},
{
path: ROUTES.INGESTION_SETTINGS,
exact: true,
component: IngestionSettings,
isPrivate: true,
key: 'INGESTION_SETTINGS',
},
{
path: ROUTES.API_KEYS,
exact: true,
component: APIKeys,
isPrivate: true,
key: 'API_KEYS',
},
{
path: ROUTES.MY_SETTINGS,
exact: true,
component: MySettings,
isPrivate: true,
key: 'MY_SETTINGS',
},
{
path: ROUTES.CUSTOM_DOMAIN_SETTINGS,
exact: true,
component: CustomDomainSettings,
isPrivate: true,
key: 'CUSTOM_DOMAIN_SETTINGS',
},
{
path: ROUTES.LOGS,
exact: true,
@@ -393,13 +351,6 @@ const routes: AppRoutes[] = [
key: 'SOMETHING_WENT_WRONG',
isPrivate: false,
},
{
path: ROUTES.BILLING,
exact: true,
component: BillingPage,
key: 'BILLING',
isPrivate: true,
},
{
path: ROUTES.WORKSPACE_LOCKED,
exact: true,
@@ -421,13 +372,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'WORKSPACE_ACCESS_RESTRICTED',
},
{
path: ROUTES.SHORTCUTS,
exact: true,
component: ShortcutsPage,
isPrivate: true,
key: 'SHORTCUTS',
},
{
path: ROUTES.INTEGRATIONS,
exact: true,

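The route table above drops the standalone ORG_SETTINGS, INGESTION_SETTINGS, API_KEYS, MY_SETTINGS, CUSTOM_DOMAIN_SETTINGS, BILLING and SHORTCUTS entries and flips the SETTINGS route to `exact: false`, so a single Settings page now owns every nested settings URL. A minimal react-router v5 sketch of that idea; the sub-paths and placeholder components are illustrative, not taken from this diff:

```tsx
// Sketch only: why the SETTINGS route is registered with `exact: false`.
// The parent route matches any '/settings/*' URL and the page resolves the
// sub-section itself. Sub-paths and components below are assumptions.
import { Route, Switch, useRouteMatch } from 'react-router-dom';

function GeneralSettings(): JSX.Element {
	return <div>General</div>;
}

function ApiKeysSettings(): JSX.Element {
	return <div>API Keys</div>;
}

export default function SettingsPageSketch(): JSX.Element {
	// '/settings' when mounted by the non-exact parent route
	const { path } = useRouteMatch();

	return (
		<Switch>
			<Route exact path={path} component={GeneralSettings} />
			<Route path={`${path}/api-keys`} component={ApiKeysSettings} />
		</Switch>
	);
}
```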
@@ -1,26 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { APIKeyProps, CreateAPIKeyProps } from 'types/api/pat/types';

const createAPIKey = async (
props: CreateAPIKeyProps,
): Promise<SuccessResponse<APIKeyProps> | ErrorResponse> => {
try {
const response = await axios.post('/pats', {
...props,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default createAPIKey;
@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllAPIKeyProps } from 'types/api/pat/types';

const deleteAPIKey = async (
id: string,
): Promise<SuccessResponse<AllAPIKeyProps> | ErrorResponse> => {
try {
const response = await axios.delete(`/pats/${id}`);

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default deleteAPIKey;
@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/get';

const get = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(`/pats/${props.id}`);

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default get;
@@ -1,6 +0,0 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import { AllAPIKeyProps } from 'types/api/pat/types';

export const getAllAPIKeys = (): Promise<AxiosResponse<AllAPIKeyProps>> =>
axios.get(`/pats`);
@@ -1,26 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, UpdateAPIKeyProps } from 'types/api/pat/types';

const updateAPIKey = async (
props: UpdateAPIKeyProps,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/pats/${props.id}`, {
...props.data,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default updateAPIKey;
frontend/src/api/ErrorResponseHandlerV2.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import APIError from 'types/api/error';

// reference - https://axios-http.com/docs/handling_errors
export function ErrorResponseHandlerV2(error: AxiosError<ErrorV2Resp>): never {
const { response, request } = error;
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
if (response) {
throw new APIError({
httpStatusCode: response.status || 500,
error: {
code: response.data.error.code,
message: response.data.error.message,
url: response.data.error.url,
errors: response.data.error.errors,
},
});
}
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
if (request) {
throw new APIError({
httpStatusCode: error.status || 500,
error: {
code: error.code || error.name,
message: error.message,
url: '',
errors: [],
},
});
}

// Something happened in setting up the request that triggered an Error
throw new APIError({
httpStatusCode: error.status || 500,
error: {
code: error.name,
message: error.message,
url: '',
errors: [],
},
});
}
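Because ErrorResponseHandlerV2 is typed `never` and always throws an APIError, call sites no longer branch on an error-shaped return value; they catch. A minimal caller-side sketch under that contract; the import paths and logging are assumptions for illustration, not part of this diff:

```ts
// Sketch: consuming a wrapper converted to ErrorResponseHandlerV2, which
// resolves with { httpStatusCode, data } and throws APIError on failure.
// The 'api/channels/createEmail' path is assumed.
import createEmailChannel from 'api/channels/createEmail';
import APIError from 'types/api/error';
import { Props } from 'types/api/channels/createEmail';

export async function saveEmailChannel(props: Props): Promise<boolean> {
	try {
		const { httpStatusCode, data } = await createEmailChannel(props);
		console.log('channel created', httpStatusCode, data);
		return true;
	} catch (error) {
		if (error instanceof APIError) {
			// Structured error built by ErrorResponseHandlerV2; exact accessor
			// names depend on types/api/error and are assumed here.
			console.error('create channel failed', error);
		}
		return false;
	}
}
```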
@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/SAML/deleteDomain';

const deleteDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.delete(`/domains/${props.id}`);

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default deleteDomain;
@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/SAML/listDomain';

const listAllDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(`/orgs/${props.orgId}/domains`);

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default listAllDomain;
@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/SAML/updateDomain';

const updateDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/domains/${props.id}`, props);

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default updateDomain;
@@ -1,29 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CheckoutRequestPayloadProps,
CheckoutSuccessPayloadProps,
} from 'types/api/billing/checkout';

const updateCreditCardApi = async (
props: CheckoutRequestPayloadProps,
): Promise<SuccessResponse<CheckoutSuccessPayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/checkout', {
url: props.url,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default updateCreditCardApi;
@@ -1,29 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CheckoutRequestPayloadProps,
CheckoutSuccessPayloadProps,
} from 'types/api/billing/checkout';

const manageCreditCardApi = async (
props: CheckoutRequestPayloadProps,
): Promise<SuccessResponse<CheckoutSuccessPayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/portal', {
url: props.url,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default manageCreditCardApi;
@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createEmail';

const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
email_configs: [
{
@@ -21,13 +21,12 @@ const create = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

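The conversion above changes the wrapper's resolved shape from the old SuccessResponse/ErrorResponse union to SuccessResponseV2, which carries only the HTTP status and the typed body. A rough TypeScript sketch of the two contracts as they appear in these hunks; the real definitions live in types/api and may differ in detail:

```ts
// Inferred from the call sites in this diff, not copied from types/api.
// Old contract: resolve with a success envelope, or an error envelope on failure.
interface SuccessResponseLike<T> {
	statusCode: number;
	error: string | null;
	message: string;
	payload: T;
}

// New contract: resolve with status + data only; failures are thrown as
// APIError by ErrorResponseHandlerV2 instead of being returned.
interface SuccessResponseV2Like<T> {
	httpStatusCode: number;
	data: T;
}
```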
@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createMsTeams';

const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
msteamsv2_configs: [
{
@@ -21,13 +21,12 @@ const create = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createOpsgenie';

const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
opsgenie_configs: [
{
@@ -24,13 +24,12 @@ const create = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createPager';

const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
pagerduty_configs: [
{
@@ -29,13 +29,12 @@ const create = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createSlack';

const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
slack_configs: [
{
@@ -22,13 +22,12 @@ const create = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,12 +1,12 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createWebhook';

const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
let httpConfig = {};
const username = props.username ? props.username.trim() : '';
@@ -28,7 +28,7 @@ const create = async (
};
}

const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
webhook_configs: [
{
@@ -40,13 +40,12 @@ const create = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,23 +1,22 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/delete';

const deleteChannel = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.delete(`/channels/${props.id}`);
const response = await axios.delete<PayloadProps>(`/channels/${props.id}`);

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editEmail';

const editEmail = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
email_configs: [
{
@@ -21,13 +21,12 @@ const editEmail = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editMsTeams';

const editMsTeams = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
msteamsv2_configs: [
{
@@ -21,13 +21,12 @@ const editMsTeams = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorResponse, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editOpsgenie';

const editOpsgenie = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
opsgenie_configs: [
{
@@ -25,13 +25,12 @@ const editOpsgenie = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editPager';

const editPager = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
pagerduty_configs: [
{
@@ -29,13 +29,12 @@ const editPager = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editSlack';

const editSlack = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
slack_configs: [
{
@@ -22,13 +22,12 @@ const editSlack = async (
});

return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};
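All of the channel helpers above now resolve with SuccessResponseV2 and throw APIError on failure, which pairs naturally with mutation-style data fetching. A hedged sketch of one way a component could wire this up with react-query, assuming the app's existing react-query setup; the hook name, import path and callbacks are illustrative, and Props is assumed to be just the channel id:

```ts
// Sketch only: consuming the migrated deleteChannel wrapper from a component.
// 'api/channels/delete' is an assumed path.
import deleteChannel from 'api/channels/delete';
import { useMutation } from 'react-query';
import APIError from 'types/api/error';

export function useDeleteChannel(onDeleted: () => void) {
	return useMutation((id: string) => deleteChannel({ id }), {
		onSuccess: (result) => {
			// result is SuccessResponseV2<PayloadProps>: { httpStatusCode, data }
			if (result.httpStatusCode === 200) {
				onDeleted();
			}
		},
		onError: (error) => {
			// failures surface as APIError thrown by ErrorResponseHandlerV2
			if (error instanceof APIError) {
				console.error('delete channel failed', error);
			}
		},
	});
}
```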