Compare commits

222 Commits
v0.82.0...feat/drill

Commit SHA1s:

b23820f392, 32b087d2ae, bf4def51e7, 4200bf0a93, 2886220122, 6c6793ed78, 9941a0032a, db1bb6b6fb, 43f296600d, 97aeaf4363,
5829a9506c, e642273486, da0934fafa, 6a3044ff65, e69bec34ed, 6f9f9dc843, cf247b98c8, fbbbd9883a, cd7473f218, e437525185,
f8dbe60aca, 0a5cbfadd9, ad82dcf308, af55325403, db807d5836, 803aa5bb85, 97b10046ee, 479d32d09e, 0809ee0518, 3609cc92f7,
e18a78a013, 3bd867ce88, 3071e331f3, 46a143a747, 8eec2f79ac, d5f476b73d, e59bd7a698, b5952fcce1, b5ab1e683b, cb3cb6a55b,
b96fd03342, afbd177b66, 885b79e581, b6fd4bf882, 72f5f86f69, 94179e567e, 7af9b1aff1, e840138d7b, 130ed51776, 0169c7d6f8,
9dd53e93e1, 1fcfdf84b7, 839636617d, 6308c17bdf, 4822198759, 8a4f23bd4d, fe097ac6d9, 56a4c47127, 203333729b, 309ace824e,
4544813e64, f9dcdca64d, 4fd917c752, 5f434b7031, e98597fecb, 5c6a49eb90, a7170ef928, 665c815e73, 269ef25682, ec54e5c0b2,
e8280dbea4, 44ea237039, 72b0214d1d, 386a215324, ba0ba4bbc9, d60c9ab36b, 90770b90bd, a19874c1dd, 65ff460d63, b9d542a294,
e75e5bdbdb, 0d03203977, 28f6f42ac4, 92f8e4d5b9, 037eea5262, cd4df6280f, ad2d4ed56c, 7955497a8d, 6ed30318bd, c32dd9f17e,
c58cf67eb0, 440c3d8386, d683b94344, 6a629623bc, 982688ccc9, 74bbb26033, 3bb9e05681, 61b2f8cb31, 9d397d0867, 5fb4206a99,
dd11ba9f48, f9cb9f10be, b6180f6957, 51d3ca16f7, 91cbd17275, 68effaf232, c08d1bccaf, 1d77780c70, 80ded899c7, 4733af974e,
1ab6c7177f, c3123a4fa4, 5a602bbeb7, f487f088bd, 1cb01e8dd2, 595a500be4, bec52c3d3e, 0a6a7ba729, 3d758d4358, 9c8435119d,
d732f8ba42, 83b8eaf623, ae7364f098, 0ec1be1ddf, 93de4681a9, 69e94cbd38, 62810428d8, b921a2280b, 8990fb7a73, aaeffae1bd,
d1d7da6c9b, 28a01bf042, fb1f320346, 4d484b225f, 3a396602a8, 650cf81329, cdbf23d053, 3ca3db2567, 0925ae73a9, cffa511cf3,
2ba693f040, 403630ad31, 93ca3fee33, b1c78c2f12, 7feb94e5eb, 47dc2b98f1, f4dc2a8fb8, 77d1492aac, 6090a6be6e, eabddf87d2,
9e13245d1b, a1c7a948fa, 0bfe53a93c, 16140991be, aadf2a3ac7, 824302be38, 8d4c4dc5f2, 91fae8c0f3, 4bbe8c0ee7, 0f7d226b9b,
e03342e001, 57f96574ff, 354e4b4b8f, d7102f69a9, 040c45b144, 207d7602ab, 018346ca18, 7290ab3602, 88239cec4d, 10ba0e6b4f,
88e1e42bf0, a0d896557e, 2b28c5f2e2, 6dbcc5fb9d, 175e9a4c5e, 33506cafce, e34e61a20d, da084b4686, 6821efeb99, c5d5c84a0e,
9c298e83a5, 9383b6576d, f10f7a806f, 03600f4d6f, 9fbf111976, b8dff86a56, f525647b40, 0a2b7ca1d8, 16938c6cc0, 81b8f93177,
96cfb607d1, f526e887cc, 03ab6e704b, 9c0134da54, 175b059268, dfca5b13c0, ad392e81ff, 92ceefccee, 9cc4e1b56f, 3758ee7451,
02b605d109, eb86aabf3e, 8810693bda, 6334e09a60, 1d379931b2, 815a6d13c5, 59af9d1c2f, 19d24da147, cd1c9ddf11, 7ff3286c9c,
27830742f9, 39f07e7477, 0ab50da7b0, c03541cd6c, 727a039eb9, c7db85f44c, 08d9a74055, 503e4cdf00, 224f952da7, 0c28067f89,
8dc749b9dd, 82a111e5b1
@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.111.41
image: signoz/signoz-schema-migrator:v0.111.42
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.111.41
image: signoz/signoz-schema-migrator:v0.111.42
container_name: schema-migrator-async
command:
- async
5  .github/CODEOWNERS (vendored)

@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.

/frontend/ @YounixM
/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/deploy/ @SigNoz/devops
@@ -11,4 +11,5 @@
/pkg/errors/ @grandwizard28
/pkg/factory/ @grandwizard28
/pkg/types/ @grandwizard28
/pkg/sqlmigration/ @vikrantgupta25
.golangci.yml @grandwizard28
**/(zeus|licensing|sqlmigration)/ @vikrantgupta25
73  .github/pull_request_template.md (vendored)

@@ -1,17 +1,72 @@
### Summary
## 📄 Summary

<!-- ✍️ A clear and concise description...-->
<!-- Describe the purpose of the PR in a few sentences. What does it fix/add/update? -->

#### Related Issues / PR's
---

<!-- ✍️ Add the issues being resolved here and related PR's where applicable -->
## ✅ Changes

#### Screenshots
- [ ] Feature: Brief description
- [ ] Bug fix: Brief description

NA
---

<!-- ✍️ Add screenshots of before and after changes where applicable-->
## 🏷️ Required: Add Relevant Labels

#### Affected Areas and Manually Tested Areas
> ⚠️ **Manually add appropriate labels in the PR sidebar**
Please select one or more labels (as applicable):

<!-- ✍️ Add details of blast radius and dev testing areas where applicable-->
ex:

- `frontend`
- `backend`
- `devops`
- `bug`
- `enhancement`
- `ui`
- `test`

---

## 👥 Reviewers

> Tag the relevant teams for review:

- frontend / backend / devops

---

## 🧪 How to Test

<!-- Describe how reviewers can test this PR -->
1. ...
2. ...
3. ...

---

## 🔍 Related Issues

<!-- Reference any related issues (e.g. Fixes #123, Closes #456) -->
Closes #

---

## 📸 Screenshots / Screen Recording (if applicable / mandatory for UI related changes)

<!-- Add screenshots or GIFs to help visualize changes -->

---

## 📋 Checklist

- [ ] Dev Review
- [ ] Test cases added (Unit/ Integration / E2E)
- [ ] Manually tested the changes

---

## 👀 Notes for Reviewers

<!-- Anything reviewers should keep in mind while reviewing -->
1  .github/workflows/build-community.yaml (vendored)

@@ -62,6 +62,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_NAME: signoz-community
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
6  .github/workflows/build-enterprise.yaml (vendored)

@@ -67,9 +67,8 @@ jobs:
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -94,6 +93,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
4  .github/workflows/build-staging.yaml (vendored)

@@ -66,7 +66,8 @@ jobs:
echo 'CI=1' > frontend/.env
echo 'TUNNEL_URL="${{ secrets.NP_TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'USERPILOT_KEY="${{ secrets.NP_USERPILOT_KEY }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -91,6 +92,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
14  .github/workflows/goci.yaml (vendored)

@@ -18,6 +18,7 @@ jobs:
with:
PRIMUS_REF: main
GO_TEST_CONTEXT: ./...
GO_VERSION: 1.23
fmt:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -26,6 +27,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
lint:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -34,6 +36,16 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
deps:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
uses: signoz/primus.workflows/.github/workflows/go-deps.yaml@main
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
build:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -45,7 +57,7 @@ jobs:
- name: go-install
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: qemu-install
uses: docker/setup-qemu-action@v3
- name: aarch64-install
4  .github/workflows/gor-signoz-community.yaml (vendored)

@@ -58,7 +58,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |
@@ -122,7 +122,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"

# copy the caches from build
- name: get-sha
9  .github/workflows/gor-signoz.yaml (vendored)

@@ -33,9 +33,8 @@ jobs:
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> .env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> .env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env
echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> .env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
- name: build-frontend
run: make js-build
- name: upload-frontend-artifact
@@ -73,7 +72,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |
@@ -136,7 +135,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"

# copy the caches from build
- name: get-sha
3  .gitignore (vendored)

@@ -60,14 +60,13 @@ ee/query-service/db

e2e/node_modules/
e2e/test-results/
e2e/playwright-report/
e2e/blob-report/
e2e/playwright/.cache/
e2e/.auth

# go
vendor/
**/main/**
__debug_bin**

# git-town
.git-branches.toml
33  .golangci.yml (Normal file)

@@ -0,0 +1,33 @@
linters:
  default: standard
  enable:
    - bodyclose
    - misspell
    - nilnil
    - sloglint
    - depguard
    - iface

linters-settings:
  sloglint:
    no-mixed-args: true
    kv-only: true
    no-global: all
    context: all
    static-msg: true
    msg-style: lowercased
    key-naming-case: snake
  depguard:
    rules:
      nozap:
        deny:
          - pkg: "go.uber.org/zap"
            desc: "Do not use zap logger. Use slog instead."
  iface:
    enable:
      - identical
issues:
  exclude-dirs:
    - "pkg/query-service"
    - "ee/query-service"
    - "scripts/"
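Note, added for context and not part of the diff: a minimal sketch of a log call that these sloglint and depguard settings accept, assuming a context-aware, injected slog logger. The function name and values are hypothetical.

```go
package main

import (
	"context"
	"log/slog"
	"os"
)

// validateLicense is a hypothetical function used only to illustrate the lint
// rules: an injected (non-global) *slog.Logger, a *Context logging method, a
// static lowercase message, and snake_case key-value attributes. zap is not
// imported, which satisfies the nozap depguard rule.
func validateLicense(ctx context.Context, logger *slog.Logger, orgID string) {
	logger.InfoContext(ctx, "starting license validation", "org_id", orgID)
}

func main() {
	logger := slog.New(slog.NewJSONHandler(os.Stdout, nil))
	validateLicense(context.Background(), logger, "example-org")
}
```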
8  Makefile

@@ -76,9 +76,7 @@ go-run-enterprise: ## Runs the enterprise go backend server
go run -race \
$(GO_BUILD_CONTEXT_ENTERPRISE)/main.go \
--config ./conf/prometheus.yml \
--cluster cluster \
--use-logs-new-schema true \
--use-trace-new-schema true
--cluster cluster

.PHONY: go-test
go-test: ## Runs go unit tests
@@ -96,9 +94,7 @@ go-run-community: ## Runs the community go backend server
go run -race \
$(GO_BUILD_CONTEXT_COMMUNITY)/main.go \
--config ./conf/prometheus.yml \
--cluster cluster \
--use-logs-new-schema true \
--use-trace-new-schema true
--cluster cluster

.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community
@@ -8,7 +8,7 @@
<p align="center">All your logs, metrics, and traces in one place. Monitor your application, spot issues before they occur and troubleshoot downtime quickly with rich context. SigNoz is a cost-effective open-source alternative to Datadog and New Relic. Visit <a href="https://signoz.io" target="_blank">signoz.io</a> for the full documentation, tutorials, and guide.</p>

<p align="center">
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/query-service?label=Docker Downloads"> </a>
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/signoz.svg?label=Docker%20Downloads"> </a>
<img alt="GitHub issues" src="https://img.shields.io/github/issues/signoz/signoz"> </a>
<a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNozHQ&hashtags=opensource,signoz,observability">
<img alt="tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"> </a>
@@ -164,3 +164,54 @@ alertmanager:
  maintenance_interval: 15m
  # Retention of the notification logs.
  retention: 120h

##################### Analytics #####################
analytics:
  # Whether to enable analytics.
  enabled: false

##################### Emailing #####################
emailing:
  # Whether to enable emailing.
  enabled: false
  templates:
    # The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
    directory: /opt/signoz/conf/templates/email
  smtp:
    # The SMTP server address.
    address: localhost:25
    # The email address to use for the SMTP server.
    from:
    # The hello message to use for the SMTP server.
    hello:
    # The static headers to send with the email.
    headers: {}
    auth:
      # The username to use for the SMTP server.
      username:
      # The password to use for the SMTP server.
      password:
      # The secret to use for the SMTP server.
      secret:
      # The identity to use for the SMTP server.
      identity:
    tls:
      # Whether to enable TLS. It should be false in most cases since the authentication mechanism should use the STARTTLS extension instead.
      enabled: false
      # Whether to skip TLS verification.
      insecure_skip_verify: false
      # The path to the CA file.
      ca_file_path:
      # The path to the key file.
      key_file_path:
      # The path to the certificate file.
      cert_file_path:

##################### Sharder (experimental) #####################
sharder:
  # Specifies the sharder provider to use.
  provider: noop
  single:
    # The org id to which this instance belongs to.
    org_id: org_id
@@ -174,11 +174,9 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.82.0
image: signoz/signoz:v0.86.2
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
- --use-trace-new-schema=true
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
@@ -208,7 +206,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.41
image: signoz/signoz-otel-collector:v0.111.42
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -232,7 +230,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.41
image: signoz/signoz-schema-migrator:v0.111.42
deploy:
restart_policy:
condition: on-failure

@@ -110,11 +110,9 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.82.0
image: signoz/signoz:v0.86.2
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
- --use-trace-new-schema=true
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
@@ -143,7 +141,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.41
image: signoz/signoz-otel-collector:v0.111.42
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -167,7 +165,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.41
image: signoz/signoz-schema-migrator:v0.111.42
deploy:
restart_policy:
condition: on-failure

@@ -177,12 +177,10 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.82.0}
image: signoz/signoz:${VERSION:-v0.86.2}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
- --use-trace-new-schema=true
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
@@ -212,7 +210,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -238,7 +236,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-sync
command:
- sync
@@ -249,7 +247,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-async
command:
- async

@@ -110,12 +110,10 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.82.0}
image: signoz/signoz:${VERSION:-v0.86.2}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
- --use-trace-new-schema=true
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
@@ -144,7 +142,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +164,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +176,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-async
command:
- async
@@ -1,91 +0,0 @@
package middleware

import (
    "net/http"
    "time"

    eeTypes "github.com/SigNoz/signoz/ee/types"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "go.uber.org/zap"
)

type Pat struct {
    store   sqlstore.SQLStore
    uuid    *authtypes.UUID
    headers []string
}

func NewPat(store sqlstore.SQLStore, headers []string) *Pat {
    return &Pat{store: store, uuid: authtypes.NewUUID(), headers: headers}
}

func (p *Pat) Wrap(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        var values []string
        var patToken string
        var pat eeTypes.StorablePersonalAccessToken

        for _, header := range p.headers {
            values = append(values, r.Header.Get(header))
        }

        ctx, err := p.uuid.ContextFromRequest(r.Context(), values...)
        if err != nil {
            next.ServeHTTP(w, r)
            return
        }
        patToken, ok := authtypes.UUIDFromContext(ctx)
        if !ok {
            next.ServeHTTP(w, r)
            return
        }

        err = p.store.BunDB().NewSelect().Model(&pat).Where("token = ?", patToken).Scan(r.Context())
        if err != nil {
            next.ServeHTTP(w, r)
            return
        }

        if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
            next.ServeHTTP(w, r)
            return
        }

        // get user from db
        user := types.User{}
        err = p.store.BunDB().NewSelect().Model(&user).Where("id = ?", pat.UserID).Scan(r.Context())
        if err != nil {
            next.ServeHTTP(w, r)
            return
        }

        role, err := authtypes.NewRole(user.Role)
        if err != nil {
            next.ServeHTTP(w, r)
            return
        }

        jwt := authtypes.Claims{
            UserID: user.ID,
            Role:   role,
            Email:  user.Email,
            OrgID:  user.OrgID,
        }

        ctx = authtypes.NewContextWithClaims(ctx, jwt)

        r = r.WithContext(ctx)

        next.ServeHTTP(w, r)

        pat.LastUsed = time.Now().Unix()
        _, err = p.store.BunDB().NewUpdate().Model(&pat).Column("last_used").Where("token = ?", patToken).Where("revoked = false").Exec(r.Context())
        if err != nil {
            zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
        }

    })

}
26  ee/licensing/config.go (Normal file)

@@ -0,0 +1,26 @@
package licensing

import (
    "fmt"
    "sync"
    "time"

    "github.com/SigNoz/signoz/pkg/licensing"
)

var (
    config licensing.Config
    once   sync.Once
)

// initializes the licensing configuration
func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
    once.Do(func() {
        config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
        if err := config.Validate(); err != nil {
            panic(fmt.Errorf("invalid licensing config: %w", err))
        }
    })

    return config
}
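For reference, a minimal usage sketch of the helper above; this is an assumed call site, not part of the diff, and the poll interval and failure threshold values are illustrative only.

```go
package main

import (
	"fmt"
	"time"

	eelicensing "github.com/SigNoz/signoz/ee/licensing"
)

func main() {
	// Hypothetical values: the actual poll interval and failure threshold
	// wired up by SigNoz are not shown in this diff. Config builds and
	// validates the licensing.Config exactly once.
	cfg := eelicensing.Config(24*time.Hour, 3)
	fmt.Println(cfg.PollInterval, cfg.FailureThreshold)
}
```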
168  ee/licensing/httplicensing/api.go (Normal file)

@@ -0,0 +1,168 @@
package httplicensing

import (
    "context"
    "encoding/json"
    "net/http"
    "time"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/http/render"
    "github.com/SigNoz/signoz/pkg/licensing"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/licensetypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type licensingAPI struct {
    licensing licensing.Licensing
}

func NewLicensingAPI(licensing licensing.Licensing) licensing.API {
    return &licensingAPI{licensing: licensing}
}

func (api *licensingAPI) Activate(rw http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(rw, err)
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
        return
    }

    req := new(licensetypes.PostableLicense)
    err = json.NewDecoder(r.Body).Decode(&req)
    if err != nil {
        render.Error(rw, err)
        return
    }

    err = api.licensing.Activate(r.Context(), orgID, req.Key)
    if err != nil {
        render.Error(rw, err)
        return
    }

    render.Success(rw, http.StatusAccepted, nil)
}

func (api *licensingAPI) GetActive(rw http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(rw, err)
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
        return
    }

    license, err := api.licensing.GetActive(r.Context(), orgID)
    if err != nil {
        render.Error(rw, err)
        return
    }

    gettableLicense := licensetypes.NewGettableLicense(license.Data, license.Key)
    render.Success(rw, http.StatusOK, gettableLicense)
}

func (api *licensingAPI) Refresh(rw http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(rw, err)
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
        return
    }

    err = api.licensing.Refresh(r.Context(), orgID)
    if err != nil {
        render.Error(rw, err)
        return
    }

    render.Success(rw, http.StatusNoContent, nil)
}

func (api *licensingAPI) Checkout(rw http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(rw, err)
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
        return
    }

    req := new(licensetypes.PostableSubscription)
    if err := json.NewDecoder(r.Body).Decode(req); err != nil {
        render.Error(rw, err)
        return
    }

    gettableSubscription, err := api.licensing.Checkout(ctx, orgID, req)
    if err != nil {
        render.Error(rw, err)
        return
    }

    render.Success(rw, http.StatusCreated, gettableSubscription)
}

func (api *licensingAPI) Portal(rw http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(rw, err)
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
        return
    }

    req := new(licensetypes.PostableSubscription)
    if err := json.NewDecoder(r.Body).Decode(req); err != nil {
        render.Error(rw, err)
        return
    }

    gettableSubscription, err := api.licensing.Portal(ctx, orgID, req)
    if err != nil {
        render.Error(rw, err)
        return
    }

    render.Success(rw, http.StatusCreated, gettableSubscription)
}
213  ee/licensing/httplicensing/provider.go (Normal file)

@@ -0,0 +1,213 @@
package httplicensing

import (
    "context"
    "encoding/json"
    "time"

    "github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/licensing"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types/licensetypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/SigNoz/signoz/pkg/zeus"
    "github.com/tidwall/gjson"
)

type provider struct {
    store     licensetypes.Store
    zeus      zeus.Zeus
    config    licensing.Config
    settings  factory.ScopedProviderSettings
    orgGetter organization.Getter
    stopChan  chan struct{}
}

func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
    return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
        return New(ctx, providerSettings, config, store, zeus, orgGetter)
    })
}

func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) (licensing.Licensing, error) {
    settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing")
    licensestore := sqllicensingstore.New(sqlstore)
    return &provider{
        store:     licensestore,
        zeus:      zeus,
        config:    config,
        settings:  settings,
        orgGetter: orgGetter,
        stopChan:  make(chan struct{}),
    }, nil
}

func (provider *provider) Start(ctx context.Context) error {
    tick := time.NewTicker(provider.config.PollInterval)
    defer tick.Stop()

    err := provider.Validate(ctx)
    if err != nil {
        provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
    }

    for {
        select {
        case <-provider.stopChan:
            return nil
        case <-tick.C:
            err := provider.Validate(ctx)
            if err != nil {
                provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
            }
        }
    }
}

func (provider *provider) Stop(ctx context.Context) error {
    provider.settings.Logger().DebugContext(ctx, "license validation stopped")
    close(provider.stopChan)
    return nil
}

func (provider *provider) Validate(ctx context.Context) error {
    organizations, err := provider.orgGetter.ListByOwnedKeyRange(ctx)
    if err != nil {
        return err
    }

    for _, organization := range organizations {
        err := provider.Refresh(ctx, organization.ID)
        if err != nil {
            return err
        }
    }

    return nil
}

func (provider *provider) Activate(ctx context.Context, organizationID valuer.UUID, key string) error {
    data, err := provider.zeus.GetLicense(ctx, key)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to fetch license data with upstream server")
    }

    license, err := licensetypes.NewLicense(data, organizationID)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity")
    }

    storableLicense := licensetypes.NewStorableLicenseFromLicense(license)
    err = provider.store.Create(ctx, storableLicense)
    if err != nil {
        return err
    }

    return nil
}

func (provider *provider) GetActive(ctx context.Context, organizationID valuer.UUID) (*licensetypes.License, error) {
    storableLicenses, err := provider.store.GetAll(ctx, organizationID)
    if err != nil {
        return nil, err
    }

    activeLicense, err := licensetypes.GetActiveLicenseFromStorableLicenses(storableLicenses, organizationID)
    if err != nil {
        return nil, err
    }

    return activeLicense, nil
}

func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUID) error {
    activeLicense, err := provider.GetActive(ctx, organizationID)
    if err != nil {
        if errors.Ast(err, errors.TypeNotFound) {
            return nil
        }
        provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue())
        return err
    }

    data, err := provider.zeus.GetLicense(ctx, activeLicense.Key)
    if err != nil {
        if time.Since(activeLicense.LastValidatedAt) > time.Duration(provider.config.FailureThreshold)*provider.config.PollInterval {
            activeLicense.UpdateFeatures(licensetypes.BasicPlan)
            updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
            err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
            if err != nil {
                return err
            }

            return nil
        }
        return err
    }

    err = activeLicense.Update(data)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity from license data")
    }

    updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
    err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
    if err != nil {
        return err
    }

    return nil
}

func (provider *provider) Checkout(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) {
    activeLicense, err := provider.GetActive(ctx, organizationID)
    if err != nil {
        return nil, err
    }

    body, err := json.Marshal(postableSubscription)
    if err != nil {
        return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal checkout payload")
    }

    response, err := provider.zeus.GetCheckoutURL(ctx, activeLicense.Key, body)
    if err != nil {
        return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate checkout session")
    }

    return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}

func (provider *provider) Portal(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) {
    activeLicense, err := provider.GetActive(ctx, organizationID)
    if err != nil {
        return nil, err
    }

    body, err := json.Marshal(postableSubscription)
    if err != nil {
        return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal portal payload")
    }

    response, err := provider.zeus.GetPortalURL(ctx, activeLicense.Key, body)
    if err != nil {
        return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate portal session")
    }

    return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}

func (provider *provider) GetFeatureFlags(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.Feature, error) {
    license, err := provider.GetActive(ctx, organizationID)
    if err != nil {
        if errors.Ast(err, errors.TypeNotFound) {
            return licensetypes.BasicPlan, nil
        }
        return nil, err
    }

    return license.Features, nil
}
81  ee/licensing/licensingstore/sqllicensingstore/store.go (Normal file)

@@ -0,0 +1,81 @@
package sqllicensingstore

import (
    "context"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types/licensetypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type store struct {
    sqlstore sqlstore.SQLStore
}

func New(sqlstore sqlstore.SQLStore) licensetypes.Store {
    return &store{sqlstore}
}

func (store *store) Create(ctx context.Context, storableLicense *licensetypes.StorableLicense) error {
    _, err := store.
        sqlstore.
        BunDB().
        NewInsert().
        Model(storableLicense).
        Exec(ctx)
    if err != nil {
        return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "license with ID: %s already exists", storableLicense.ID)
    }

    return nil
}

func (store *store) Get(ctx context.Context, organizationID valuer.UUID, licenseID valuer.UUID) (*licensetypes.StorableLicense, error) {
    storableLicense := new(licensetypes.StorableLicense)
    err := store.
        sqlstore.
        BunDB().
        NewSelect().
        Model(storableLicense).
        Where("org_id = ?", organizationID).
        Where("id = ?", licenseID).
        Scan(ctx)
    if err != nil {
        return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "license with ID: %s does not exist", licenseID)
    }

    return storableLicense, nil
}

func (store *store) GetAll(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.StorableLicense, error) {
    storableLicenses := make([]*licensetypes.StorableLicense, 0)
    err := store.
        sqlstore.
        BunDB().
        NewSelect().
        Model(&storableLicenses).
        Where("org_id = ?", organizationID).
        Scan(ctx)
    if err != nil {
        return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "licenses for organizationID: %s does not exists", organizationID)
    }

    return storableLicenses, nil
}

func (store *store) Update(ctx context.Context, organizationID valuer.UUID, storableLicense *licensetypes.StorableLicense) error {
    _, err := store.
        sqlstore.
        BunDB().
        NewUpdate().
        Model(storableLicense).
        WherePK().
        Where("org_id = ?", organizationID).
        Exec(ctx)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to update license with ID: %s", storableLicense.ID)
    }

    return nil
}
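The store above satisfies licensetypes.Store. That interface lives in pkg/types/licensetypes and is not part of this diff; inferred from the four methods implemented here, it presumably looks roughly like the following sketch.

```go
package licensetypes

import (
	"context"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// StorableLicense is a placeholder: the real bun model already exists in
// pkg/types/licensetypes and its fields are elided here.
type StorableLicense struct{}

// Store: assumed shape only, inferred from the sqllicensingstore methods
// above; the real interface definition may differ.
type Store interface {
	Create(ctx context.Context, storableLicense *StorableLicense) error
	Get(ctx context.Context, organizationID valuer.UUID, licenseID valuer.UUID) (*StorableLicense, error)
	GetAll(ctx context.Context, organizationID valuer.UUID) ([]*StorableLicense, error)
	Update(ctx context.Context, organizationID valuer.UUID, storableLicense *StorableLicense) error
}
```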
@@ -1,4 +1,4 @@
FROM golang:1.22-bullseye
FROM golang:1.23-bullseye

ARG OS="linux"
ARG TARGETARCH
@@ -5,21 +5,17 @@ import (
    "net/http/httputil"
    "time"

    "github.com/SigNoz/signoz/ee/query-service/dao"
    "github.com/SigNoz/signoz/ee/licensing/httplicensing"
    "github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
    "github.com/SigNoz/signoz/ee/query-service/interfaces"
    "github.com/SigNoz/signoz/ee/query-service/license"
    "github.com/SigNoz/signoz/ee/query-service/usage"
    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/apis/fields"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    "github.com/SigNoz/signoz/pkg/modules/quickfilter"
    quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core"
    baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
    "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
    "github.com/SigNoz/signoz/pkg/query-service/app/integrations"
    "github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
    baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    rules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/signoz"
@@ -31,11 +27,8 @@ import (
type APIHandlerOptions struct {
    DataConnector interfaces.DataConnector
    PreferSpanMetrics bool
    AppDao dao.ModelDao
    RulesManager *rules.Manager
    UsageManager *usage.Manager
    FeatureFlags baseint.FeatureLookup
    LicenseManager *license.Manager
    IntegrationsController *integrations.Controller
    CloudIntegrationsController *cloudintegrations.Controller
    LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
@@ -55,23 +48,18 @@ type APIHandler struct {

// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
    quickfiltermodule := quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(signoz.SQLStore))
    quickFilter := quickfilter.NewAPI(quickfiltermodule)
    baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
        Reader: opts.DataConnector,
        PreferSpanMetrics: opts.PreferSpanMetrics,
        AppDao: opts.AppDao,
        RuleManager: opts.RulesManager,
        FeatureFlags: opts.FeatureFlags,
        IntegrationsController: opts.IntegrationsController,
        CloudIntegrationsController: opts.CloudIntegrationsController,
        LogsParsingPipelineController: opts.LogsParsingPipelineController,
        FluxInterval: opts.FluxInterval,
        AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
        FieldsAPI: fields.NewAPI(signoz.TelemetryStore),
        LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
        FieldsAPI: fields.NewAPI(signoz.TelemetryStore, signoz.Instrumentation.Logger()),
        Signoz: signoz,
        QuickFilters: quickFilter,
        QuickFilterModule: quickfiltermodule,
    })

    if err != nil {
@@ -85,98 +73,39 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
    return ah, nil
}

func (ah *APIHandler) FF() baseint.FeatureLookup {
    return ah.opts.FeatureFlags
}

func (ah *APIHandler) RM() *rules.Manager {
    return ah.opts.RulesManager
}

func (ah *APIHandler) LM() *license.Manager {
    return ah.opts.LicenseManager
}

func (ah *APIHandler) UM() *usage.Manager {
    return ah.opts.UsageManager
}

func (ah *APIHandler) AppDao() dao.ModelDao {
    return ah.opts.AppDao
}

func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
    return ah.opts.Gateway
}

func (ah *APIHandler) CheckFeature(f string) bool {
    err := ah.FF().CheckFeature(f)
    return err == nil
}

// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
    // note: add ee override methods first

    // routes available only in ee version

    router.HandleFunc("/api/v1/featureFlags",
        am.OpenAccess(ah.getFeatureFlags)).
        Methods(http.MethodGet)

    router.HandleFunc("/api/v1/loginPrecheck",
        am.OpenAccess(ah.precheckLogin)).
        Methods(http.MethodGet)
    router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)

    // paid plans specific routes
    router.HandleFunc("/api/v1/complete/saml",
        am.OpenAccess(ah.receiveSAML)).
        Methods(http.MethodPost)

    router.HandleFunc("/api/v1/complete/google",
        am.OpenAccess(ah.receiveGoogleAuth)).
        Methods(http.MethodGet)

    router.HandleFunc("/api/v1/orgs/{orgId}/domains",
        am.AdminAccess(ah.listDomainsByOrg)).
        Methods(http.MethodGet)

    router.HandleFunc("/api/v1/domains",
        am.AdminAccess(ah.postDomain)).
        Methods(http.MethodPost)

    router.HandleFunc("/api/v1/domains/{id}",
        am.AdminAccess(ah.putDomain)).
        Methods(http.MethodPut)

    router.HandleFunc("/api/v1/domains/{id}",
        am.AdminAccess(ah.deleteDomain)).
        Methods(http.MethodDelete)
    router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)

    // base overrides
    router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
    router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)

    // PAT APIs
    router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
    router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.getPATs)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.updatePAT)).Methods(http.MethodPut)
    router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.revokePAT)).Methods(http.MethodDelete)

    router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost)
    router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.LicensingAPI.Checkout)).Methods(http.MethodPost)
    router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.portalSession)).Methods(http.MethodPost)

    router.HandleFunc("/api/v1/dashboards/{uuid}/lock", am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut)
    router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut)
    router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)

    // v3
    router.HandleFunc("/api/v3/licenses", am.ViewAccess(ah.listLicensesV3)).Methods(http.MethodGet)
    router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.applyLicenseV3)).Methods(http.MethodPost)
    router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.refreshLicensesV3)).Methods(http.MethodPut)
    router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.getActiveLicenseV3)).Methods(http.MethodGet)
    router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
    router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)
    router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.LicensingAPI.GetActive)).Methods(http.MethodGet)

    // v4
    router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
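Added for context, not part of the diff: a stub sketch of the licensing-related route surface after this change, using gorilla/mux as the handler above does. The real routes wrap the ah.LicensingAPI methods in the AuthZ middleware; the stub handler, port, and registration below are hypothetical.

```go
package main

import (
	"net/http"

	"github.com/gorilla/mux"
)

// stub stands in for the real AuthZ-wrapped LicensingAPI handlers.
func stub(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) }

func main() {
	router := mux.NewRouter()
	router.HandleFunc("/api/v3/licenses", stub).Methods(http.MethodPost)       // LicensingAPI.Activate (admin)
	router.HandleFunc("/api/v3/licenses", stub).Methods(http.MethodPut)        // LicensingAPI.Refresh (admin)
	router.HandleFunc("/api/v3/licenses/active", stub).Methods(http.MethodGet) // LicensingAPI.GetActive (view)
	router.HandleFunc("/api/v1/checkout", stub).Methods(http.MethodPost)       // LicensingAPI.Checkout (admin)
	router.HandleFunc("/api/v1/portal", stub).Methods(http.MethodPost)         // LicensingAPI.Portal (admin)
	_ = http.ListenAndServe(":8080", router)
}
```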
@@ -3,191 +3,16 @@ package api
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
func parseRequest(r *http.Request, req interface{}) error {
|
||||
defer r.Body.Close()
|
||||
requestBody, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(requestBody, &req)
|
||||
return err
|
||||
}
|
||||
|
||||
// loginUser overrides base handler and considers SSO case.
func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {

	req := basemodel.LoginRequest{}
	err := parseRequest(r, &req)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	ctx := context.Background()

	if req.Email != "" && ah.CheckFeature(model.SSO) {
		var apierr basemodel.BaseApiError
		_, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
		if apierr != nil && !apierr.IsNil() {
			RespondError(w, apierr, nil)
		}
	}

	// if all looks good, call auth
	resp, err := baseauth.Login(ctx, &req, ah.opts.JWT)
	if ah.HandleError(w, err, http.StatusUnauthorized) {
		return
	}

	ah.WriteJSON(w, r, resp)
}

// registerUser registers a user and responds with a precheck
// so the front-end can decide the login method
func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {

	if !ah.CheckFeature(model.SSO) {
		ah.APIHandler.Register(w, r)
		return
	}

	ctx := context.Background()
	var req *baseauth.RegisterRequest

	defer r.Body.Close()
	requestBody, err := io.ReadAll(r.Body)
	if err != nil {
		zap.L().Error("received no input in api", zap.Error(err))
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	err = json.Unmarshal(requestBody, &req)

	if err != nil {
		zap.L().Error("received invalid user registration request", zap.Error(err))
		RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
		return
	}

	// get invite object
	invite, err := baseauth.ValidateInvite(ctx, req)
	if err != nil {
		zap.L().Error("failed to validate invite token", zap.Error(err))
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	if invite == nil {
		zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
		RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
		return
	}

	// get auth domain from email domain
	domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
	if apierr != nil {
		zap.L().Error("failed to get domain from email", zap.Error(apierr))
		RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
	}

	precheckResp := &basemodel.PrecheckResponse{
		SSO:    false,
		IsUser: false,
	}

	if domain != nil && domain.SsoEnabled {
		// sso is enabled, create user and respond precheck data
		user, apierr := baseauth.RegisterInvitedUser(ctx, req, true)
		if apierr != nil {
			RespondError(w, apierr, nil)
			return
		}

		var precheckError basemodel.BaseApiError

		precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
		if precheckError != nil {
			RespondError(w, precheckError, precheckResp)
		}

	} else {
		// no-sso, validate password
		if err := baseauth.ValidatePassword(req.Password); err != nil {
			RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
			return
		}

		_, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager, ah.Signoz.Modules.Organization, ah.QuickFilterModule)
		if !registerError.IsNil() {
			RespondError(w, apierr, nil)
			return
		}

		precheckResp.IsUser = true
	}

	ah.Respond(w, precheckResp)
}
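For illustration, a minimal stand-in showing how a caller might branch on the precheck shape that registerUser fills in above. The struct below is a local sketch limited to the two fields the handler sets (SSO, IsUser); the real basemodel.PrecheckResponse has more fields, and the branch names are assumptions, not taken from this diff.

package sketch

type precheck struct {
	SSO    bool
	IsUser bool
}

// loginMethod mirrors the decision the front-end is expected to make from the
// precheck response: prefer SSO when available, otherwise password for an
// existing user, otherwise fall back to sign-up.
func loginMethod(p precheck) string {
	switch {
	case p.SSO:
		return "sso"
	case p.IsUser:
		return "password"
	default:
		return "signup"
	}
}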
// getInvite returns the invite object details for the given invite token. We do not need to
// protect this API because invite token itself is meant to be private.
func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
	token := mux.Vars(r)["token"]
	sourceUrl := r.URL.Query().Get("ref")

	inviteObject, err := baseauth.GetInvite(r.Context(), token, ah.Signoz.Modules.Organization)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	resp := model.GettableInvitation{
		InvitationResponseObject: inviteObject,
	}

	precheck, apierr := ah.AppDao().PrecheckLogin(r.Context(), inviteObject.Email, sourceUrl)
	resp.Precheck = precheck

	if apierr != nil {
		RespondError(w, apierr, resp)
	}

	ah.WriteJSON(w, r, resp)
}

// PrecheckLogin enables browser login page to display appropriate
// login methods
func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
	ctx := context.Background()

	email := r.URL.Query().Get("email")
	sourceUrl := r.URL.Query().Get("ref")

	resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl)
	if apierr != nil {
		RespondError(w, apierr, resp)
	}

	ah.Respond(w, resp)
}

func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
	ssoError := []byte("Login failed. Please contact your system administrator")
	dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
@@ -196,84 +21,12 @@ func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string)
	http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther)
}
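For reference, a self-contained sketch of the encode-and-redirect pattern handleSsoError implements above. The handler pre-allocates a buffer and uses base64.StdEncoding.Encode (the encode call itself falls inside the elided hunk); EncodeToString is the equivalent convenience form, and the helper name below is made up for the example.

package sketch

import (
	"encoding/base64"
	"fmt"
	"net/http"
)

// redirectWithSsoError encodes the message and sends the browser back to the
// login page with an ssoerror query parameter, as handleSsoError does.
func redirectWithSsoError(w http.ResponseWriter, r *http.Request, redirectURL, msg string) {
	encoded := base64.StdEncoding.EncodeToString([]byte(msg))
	http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, encoded), http.StatusSeeOther)
}

If the encoded value may contain "+" or "/", passing it through url.QueryEscape before building the query string would be the safer choice; the handler above does not do that, so the sketch mirrors it.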
// receiveGoogleAuth completes google OAuth response and forwards a request
// to front-end to sign user in
func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) {
	redirectUri := constants.GetDefaultSiteURL()
	ctx := context.Background()

	if !ah.CheckFeature(model.SSO) {
		zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
		return
	}

	q := r.URL.Query()
	if errType := q.Get("error"); errType != "" {
		zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
		return
	}

	relayState := q.Get("state")
	zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))

	parsedState, err := url.Parse(relayState)
	if err != nil || relayState == "" {
		zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
		handleSsoError(w, r, redirectUri)
		return
	}

	// upgrade redirect url from the relay state for better accuracy
	redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")

	// fetch domain by parsing relay state.
	domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
	if err != nil {
		handleSsoError(w, r, redirectUri)
		return
	}

	// now that we have domain, use domain to fetch sso settings.
	// prepare google callback handler using parsedState -
	// which contains redirect URL (front-end endpoint)
	callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState)
	if err != nil {
		zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err))
		handleSsoError(w, r, redirectUri)
		return
	}

	identity, err := callbackHandler.HandleCallback(r)
	if err != nil {
		zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
		handleSsoError(w, r, redirectUri)
		return
	}

	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT)
	if err != nil {
		zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
		handleSsoError(w, r, redirectUri)
		return
	}

	http.Redirect(w, r, nextPage, http.StatusSeeOther)
}
// receiveSAML completes a SAML request and gets user logged in
func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
	// this is the source url that initiated the login request
	redirectUri := constants.GetDefaultSiteURL()
	ctx := context.Background()

	if !ah.CheckFeature(model.SSO) {
		zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
		return
	}

	err := r.ParseForm()
	if err != nil {
		zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
@@ -297,12 +50,25 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
	redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")

	// fetch domain by parsing relay state.
	domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
	domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState)
	if err != nil {
		handleSsoError(w, r, redirectUri)
		return
	}

	orgID, err := valuer.NewUUID(domain.OrgID)
	if err != nil {
		handleSsoError(w, r, redirectUri)
		return
	}

	_, err = ah.Signoz.Licensing.GetActive(ctx, orgID)
	if err != nil {
		zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
		return
	}

	sp, err := domain.PrepareSamlRequest(parsedState)
	if err != nil {
		zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
@@ -330,7 +96,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
		return
	}

	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
	nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
	if err != nil {
		zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
		handleSsoError(w, r, redirectUri)
@@ -11,12 +11,12 @@ import (
	"time"

	"github.com/SigNoz/signoz/ee/query-service/constants"
	eeTypes "github.com/SigNoz/signoz/ee/types"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/query-service/auth"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/google/uuid"
	"github.com/gorilla/mux"
	"go.uber.org/zap"
@@ -36,6 +36,12 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	cloudProvider := mux.Vars(r)["cloudProvider"]
	if cloudProvider != "aws" {
		RespondError(w, basemodel.BadRequest(fmt.Errorf(
@@ -56,11 +62,9 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
		SigNozAPIKey: apiKey,
	}

	license, apiErr := ah.LM().GetRepo().GetActiveLicense(r.Context())
	if apiErr != nil {
		RespondError(w, basemodel.WrapApiError(
			apiErr, "couldn't look for active license",
		), nil)
	license, err := ah.Signoz.Licensing.GetActive(r.Context(), orgID)
	if err != nil {
		render.Error(w, err)
		return
	}

@@ -116,7 +120,14 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
		return "", apiErr
	}

	allPats, err := ah.AppDao().ListPATs(ctx, orgId)
	orgIdUUID, err := valuer.NewUUID(orgId)
	if err != nil {
		return "", basemodel.InternalError(fmt.Errorf(
			"couldn't parse orgId: %w", err,
		))
	}

	allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
	if err != nil {
		return "", basemodel.InternalError(fmt.Errorf(
			"couldn't list PATs: %w", err,
@@ -133,19 +144,25 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
		zap.String("cloudProvider", cloudProvider),
	)

	newPAT := eeTypes.NewGettablePAT(
	newPAT, err := types.NewStorableAPIKey(
		integrationPATName,
		authtypes.RoleViewer.String(),
		integrationUser.ID,
		types.RoleViewer,
		0,
	)
	integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
	if err != nil {
		return "", basemodel.InternalError(fmt.Errorf(
			"couldn't create cloud integration PAT: %w", err,
		))
	}
	return integrationPAT.Token, nil

	err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
	if err != nil {
		return "", basemodel.InternalError(fmt.Errorf(
			"couldn't create cloud integration PAT: %w", err,
		))
	}
	return newPAT.Token, nil
}

func (ah *APIHandler) getOrCreateCloudIntegrationUser(
@@ -154,10 +171,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
	cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
	email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)

	// TODO(nitya): there should be orgId here
	integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
	if apiErr != nil {
		return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
	integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email)
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err))
	}

	if integrationUserResult != nil {
@@ -169,29 +185,18 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
		zap.String("cloudProvider", cloudProvider),
	)

	newUser := &types.User{
		ID: uuid.New().String(),
		Name: cloudIntegrationUser,
		Email: email,
		TimeAuditable: types.TimeAuditable{
			CreatedAt: time.Now(),
		},
		OrgID: orgId,
	}

	newUser.Role = authtypes.RoleViewer.String()

	passwordHash, err := auth.PasswordHash(uuid.NewString())
	newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId)
	if err != nil {
		return nil, basemodel.InternalError(fmt.Errorf(
			"couldn't hash random password for cloud integration user: %w", err,
			"couldn't create cloud integration user: %w", err,
		))
	}
	newUser.Password = passwordHash

	integrationUser, apiErr := ah.AppDao().CreateUser(ctx, newUser, false)
	if apiErr != nil {
		return nil, basemodel.WrapApiError(apiErr, "couldn't create cloud integration user")
	password, err := types.NewFactorPassword(uuid.NewString())

	integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password)
	if err != nil {
		return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
	}

	return integrationUser, nil
@@ -1,63 +0,0 @@
package api

import (
	"net/http"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/gorilla/mux"
)

func (ah *APIHandler) lockDashboard(w http.ResponseWriter, r *http.Request) {
	ah.lockUnlockDashboard(w, r, true)
}

func (ah *APIHandler) unlockDashboard(w http.ResponseWriter, r *http.Request) {
	ah.lockUnlockDashboard(w, r, false)
}

func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request, lock bool) {
	// Locking can only be done by the owner of the dashboard
	// or an admin

	// - Fetch the dashboard
	// - Check if the user is the owner or an admin
	// - If yes, lock/unlock the dashboard
	// - If no, return 403

	// Get the dashboard UUID from the request
	uuid := mux.Vars(r)["uuid"]
	if strings.HasPrefix(uuid, "integration") {
		render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "dashboards created by integrations cannot be modified"))
		return
	}

	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated"))
		return
	}

	dashboard, err := dashboards.GetDashboard(r.Context(), claims.OrgID, uuid)
	if err != nil {
		render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get dashboard"))
		return
	}

	if err := claims.IsAdmin(); err != nil && (dashboard.CreatedBy != claims.Email) {
		render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "You are not authorized to lock/unlock this dashboard"))
		return
	}

	// Lock/Unlock the dashboard
	err = dashboards.LockUnlockDashboard(r.Context(), claims.OrgID, uuid, lock)
	if err != nil {
		render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to lock/unlock dashboard"))
		return
	}

	ah.Respond(w, "Dashboard updated successfully")
}
@@ -1,91 +0,0 @@
package api

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"

	"github.com/SigNoz/signoz/ee/query-service/model"
	"github.com/SigNoz/signoz/ee/types"
	"github.com/google/uuid"
	"github.com/gorilla/mux"
)

func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) {
	orgId := mux.Vars(r)["orgId"]
	domains, apierr := ah.AppDao().ListDomains(context.Background(), orgId)
	if apierr != nil {
		RespondError(w, apierr, domains)
		return
	}
	ah.Respond(w, domains)
}

func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) {
	ctx := context.Background()

	req := types.GettableOrgDomain{}

	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	if err := req.ValidNew(); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	if apierr := ah.AppDao().CreateDomain(ctx, &req); apierr != nil {
		RespondError(w, apierr, nil)
		return
	}

	ah.Respond(w, &req)
}

func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) {
	ctx := context.Background()

	domainIdStr := mux.Vars(r)["id"]
	domainId, err := uuid.Parse(domainIdStr)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	req := types.GettableOrgDomain{StorableOrgDomain: types.StorableOrgDomain{ID: domainId}}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}
	req.ID = domainId
	if err := req.Valid(nil); err != nil {
		RespondError(w, model.BadRequest(err), nil)
	}

	if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil {
		RespondError(w, apierr, nil)
		return
	}

	ah.Respond(w, &req)
}

func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) {
	domainIdStr := mux.Vars(r)["id"]

	domainId, err := uuid.Parse(domainIdStr)
	if err != nil {
		RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil)
		return
	}

	apierr := ah.AppDao().DeleteDomain(context.Background(), domainId)
	if apierr != nil {
		RespondError(w, apierr, nil)
		return
	}
	ah.Respond(w, nil)
}
@@ -9,13 +9,29 @@ import (
	"time"

	"github.com/SigNoz/signoz/ee/query-service/constants"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	pkgError "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"go.uber.org/zap"
)

func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	featureSet, err := ah.FF().GetFeatureFlags()
	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(w, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(w, pkgError.Newf(pkgError.TypeInvalidInput, pkgError.CodeInvalidInput, "orgId is invalid"))
		return
	}

	featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context(), orgID)
	if err != nil {
		ah.HandleError(w, err, http.StatusInternalServerError)
		return
@@ -23,7 +39,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {

	if constants.FetchFeatures == "true" {
		zap.L().Debug("fetching license")
		license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
		license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
		if err != nil {
			zap.L().Error("failed to fetch license", zap.Error(err))
		} else if license == nil {
@@ -44,9 +60,16 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
	}

	if ah.opts.PreferSpanMetrics {
		for idx := range featureSet {
			feature := &featureSet[idx]
			if feature.Name == basemodel.UseSpanMetrics {
		for idx, feature := range featureSet {
			if feature.Name == licensetypes.UseSpanMetrics {
				featureSet[idx].Active = true
			}
		}
	}

	if constants.IsDotMetricsEnabled {
		for idx, feature := range featureSet {
			if feature.Name == licensetypes.DotMetricsEnabled {
				featureSet[idx].Active = true
			}
		}
@@ -57,7 +80,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {

// fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint
// and returns the FeatureSet.
func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
func fetchZeusFeatures(url, licenseKey string) ([]*licensetypes.Feature, error) {
	// Check if the URL is empty
	if url == "" {
		return nil, fmt.Errorf("url is empty")
@@ -116,28 +139,28 @@ func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
}

type ZeusFeaturesResponse struct {
	Status string `json:"status"`
	Data basemodel.FeatureSet `json:"data"`
	Status string `json:"status"`
	Data []*licensetypes.Feature `json:"data"`
}

// MergeFeatureSets merges two FeatureSet arrays with precedence to zeusFeatures.
func MergeFeatureSets(zeusFeatures, internalFeatures basemodel.FeatureSet) basemodel.FeatureSet {
func MergeFeatureSets(zeusFeatures, internalFeatures []*licensetypes.Feature) []*licensetypes.Feature {
	// Create a map to store the merged features
	featureMap := make(map[string]basemodel.Feature)
	featureMap := make(map[string]*licensetypes.Feature)

	// Add all features from the otherFeatures set to the map
	for _, feature := range internalFeatures {
		featureMap[feature.Name] = feature
		featureMap[feature.Name.StringValue()] = feature
	}

	// Add all features from the zeusFeatures set to the map
	// If a feature already exists (i.e., same name), the zeusFeature will overwrite it
	for _, feature := range zeusFeatures {
		featureMap[feature.Name] = feature
		featureMap[feature.Name.StringValue()] = feature
	}

	// Convert the map back to a FeatureSet slice
	var mergedFeatures basemodel.FeatureSet
	var mergedFeatures []*licensetypes.Feature
	for _, feature := range featureMap {
		mergedFeatures = append(mergedFeatures, feature)
	}
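A hedged usage sketch for the licensetypes-based MergeFeatureSets above, assuming it sits in the same package as the function; the feature names are placeholders, not real flags.

// mergeFeatureSetsSketch shows the precedence rule: on a name conflict the
// zeusFeatures copy wins, everything else is carried over unchanged.
func mergeFeatureSetsSketch() []*licensetypes.Feature {
	zeus := []*licensetypes.Feature{
		{Name: valuer.NewString("FeatureA"), Active: true},
	}
	internal := []*licensetypes.Feature{
		{Name: valuer.NewString("FeatureA"), Active: false}, // overwritten by the zeus copy
		{Name: valuer.NewString("FeatureB"), Active: true},  // kept as-is
	}
	// Result order is not deterministic because the merge goes through a map,
	// which is why the test below sorts or compares element-wise.
	return MergeFeatureSets(zeus, internal)
}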
@@ -3,78 +3,79 @@ package api
import (
	"testing"

	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/stretchr/testify/assert"
)

func TestMergeFeatureSets(t *testing.T) {
	tests := []struct {
		name string
		zeusFeatures basemodel.FeatureSet
		internalFeatures basemodel.FeatureSet
		expected basemodel.FeatureSet
		zeusFeatures []*licensetypes.Feature
		internalFeatures []*licensetypes.Feature
		expected []*licensetypes.Feature
	}{
		{
			name: "empty zeusFeatures and internalFeatures",
			zeusFeatures: basemodel.FeatureSet{},
			internalFeatures: basemodel.FeatureSet{},
			expected: basemodel.FeatureSet{},
			zeusFeatures: []*licensetypes.Feature{},
			internalFeatures: []*licensetypes.Feature{},
			expected: []*licensetypes.Feature{},
		},
		{
			name: "non-empty zeusFeatures and empty internalFeatures",
			zeusFeatures: basemodel.FeatureSet{
				{Name: "Feature1", Active: true},
				{Name: "Feature2", Active: false},
			zeusFeatures: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: true},
				{Name: valuer.NewString("Feature2"), Active: false},
			},
			internalFeatures: basemodel.FeatureSet{},
			expected: basemodel.FeatureSet{
				{Name: "Feature1", Active: true},
				{Name: "Feature2", Active: false},
			internalFeatures: []*licensetypes.Feature{},
			expected: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: true},
				{Name: valuer.NewString("Feature2"), Active: false},
			},
		},
		{
			name: "empty zeusFeatures and non-empty internalFeatures",
			zeusFeatures: basemodel.FeatureSet{},
			internalFeatures: basemodel.FeatureSet{
				{Name: "Feature1", Active: true},
				{Name: "Feature2", Active: false},
			zeusFeatures: []*licensetypes.Feature{},
			internalFeatures: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: true},
				{Name: valuer.NewString("Feature2"), Active: false},
			},
			expected: basemodel.FeatureSet{
				{Name: "Feature1", Active: true},
				{Name: "Feature2", Active: false},
			expected: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: true},
				{Name: valuer.NewString("Feature2"), Active: false},
			},
		},
		{
			name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts",
			zeusFeatures: basemodel.FeatureSet{
				{Name: "Feature1", Active: true},
				{Name: "Feature3", Active: false},
			zeusFeatures: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: true},
				{Name: valuer.NewString("Feature3"), Active: false},
			},
			internalFeatures: basemodel.FeatureSet{
				{Name: "Feature2", Active: true},
				{Name: "Feature4", Active: false},
			internalFeatures: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature2"), Active: true},
				{Name: valuer.NewString("Feature4"), Active: false},
			},
			expected: basemodel.FeatureSet{
				{Name: "Feature1", Active: true},
				{Name: "Feature2", Active: true},
				{Name: "Feature3", Active: false},
				{Name: "Feature4", Active: false},
			expected: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: true},
				{Name: valuer.NewString("Feature2"), Active: true},
				{Name: valuer.NewString("Feature3"), Active: false},
				{Name: valuer.NewString("Feature4"), Active: false},
			},
		},
		{
			name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts",
			zeusFeatures: basemodel.FeatureSet{
				{Name: "Feature1", Active: true},
				{Name: "Feature2", Active: false},
			zeusFeatures: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: true},
				{Name: valuer.NewString("Feature2"), Active: false},
			},
			internalFeatures: basemodel.FeatureSet{
				{Name: "Feature1", Active: false},
				{Name: "Feature3", Active: true},
			internalFeatures: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: false},
				{Name: valuer.NewString("Feature3"), Active: true},
			},
			expected: basemodel.FeatureSet{
				{Name: "Feature1", Active: true},
				{Name: "Feature2", Active: false},
				{Name: "Feature3", Active: true},
			expected: []*licensetypes.Feature{
				{Name: valuer.NewString("Feature1"), Active: true},
				{Name: valuer.NewString("Feature2"), Active: false},
				{Name: valuer.NewString("Feature3"), Active: true},
			},
		},
	}
@@ -5,10 +5,26 @@ import (
	"strings"

	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()
	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	validPath := false
	for _, allowedPrefix := range gateway.AllowedPrefix {
		if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
@@ -22,9 +38,9 @@ func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request
		return
	}

	license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
	license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
	if err != nil {
		RespondError(rw, err, nil)
		render.Error(rw, err)
		return
	}

@@ -6,11 +6,7 @@ import (
	"net/http"

	"github.com/SigNoz/signoz/ee/query-service/constants"
	"github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
	"github.com/SigNoz/signoz/ee/query-service/model"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/query-service/telemetry"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
)

type DayWiseBreakdown struct {
@@ -49,10 +45,6 @@ type details struct {
	BillTotal float64 `json:"billTotal"`
}

type Redirect struct {
	RedirectURL string `json:"redirectURL"`
}

type billingDetails struct {
	Status string `json:"status"`
	Data struct {
@@ -64,97 +56,6 @@ type billingDetails struct {
	} `json:"data"`
}

type ApplyLicenseRequest struct {
	LicenseKey string `json:"key"`
}

func (ah *APIHandler) listLicensesV3(w http.ResponseWriter, r *http.Request) {
	ah.listLicensesV2(w, r)
}

func (ah *APIHandler) getActiveLicenseV3(w http.ResponseWriter, r *http.Request) {
	activeLicense, err := ah.LM().GetRepo().GetActiveLicenseV3(r.Context())
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
		return
	}

	// return 404 not found if there is no active license
	if activeLicense == nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no active license found")}, nil)
		return
	}

	// TODO deprecate this when we move away from key for stripe
	activeLicense.Data["key"] = activeLicense.Key
	render.Success(w, http.StatusOK, activeLicense.Data)
}

// this function is called by zeus when inserting licenses in the query-service
func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	var licenseKey ApplyLicenseRequest
	if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	if licenseKey.LicenseKey == "" {
		RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
		return
	}

	_, err = ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
	if err != nil {
		telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, map[string]interface{}{"err": err.Error()}, claims.Email, true, false)
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusAccepted, nil)
}

func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) {
	err := ah.LM().RefreshLicense(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusNoContent, nil)
}

func getCheckoutPortalResponse(redirectURL string) *Redirect {
	return &Redirect{RedirectURL: redirectURL}
}

func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
	checkoutRequest := &model.CheckoutRequest{}
	if err := json.NewDecoder(r.Body).Decode(checkoutRequest); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	license := ah.LM().GetActiveLicense()
	if license == nil {
		RespondError(w, model.BadRequestStr("cannot proceed with checkout without license key"), nil)
		return
	}

	redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key, ah.Signoz.Zeus)
	if err != nil {
		render.Error(w, err)
		return
	}

	ah.Respond(w, getCheckoutPortalResponse(redirectUrl))
}

func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
	licenseKey := r.URL.Query().Get("licenseKey")

@@ -188,71 +89,3 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
	// TODO(srikanthccv):Fetch the current day usage and add it to the response
	ah.Respond(w, billingResponse.Data)
}

func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
	licensesV2 := []model.License{}
	for _, l := range licenses {
		planKeyFromPlanName, ok := model.MapOldPlanKeyToNewPlanName[l.PlanName]
		if !ok {
			planKeyFromPlanName = model.Basic
		}
		licenseV2 := model.License{
			Key: l.Key,
			ActivationId: "",
			PlanDetails: "",
			FeatureSet: l.Features,
			ValidationMessage: "",
			IsCurrent: l.IsCurrent,
			LicensePlan: model.LicensePlan{
				PlanKey: planKeyFromPlanName,
				ValidFrom: l.ValidFrom,
				ValidUntil: l.ValidUntil,
				Status: l.Status},
		}
		licensesV2 = append(licensesV2, licenseV2)
	}
	return licensesV2
}

func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
	licensesV3, apierr := ah.LM().GetLicensesV3(r.Context())
	if apierr != nil {
		RespondError(w, apierr, nil)
		return
	}
	licenses := convertLicenseV3ToLicenseV2(licensesV3)

	resp := model.Licenses{
		TrialStart: -1,
		TrialEnd: -1,
		OnTrial: false,
		WorkSpaceBlock: false,
		TrialConvertedToSubscription: false,
		GracePeriodEnd: -1,
		Licenses: licenses,
	}

	ah.Respond(w, resp)
}

func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
	portalRequest := &model.PortalRequest{}
	if err := json.NewDecoder(r.Body).Decode(portalRequest); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	license := ah.LM().GetActiveLicense()
	if license == nil {
		RespondError(w, model.BadRequestStr("cannot request the portal session without license key"), nil)
		return
	}

	redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key, ah.Signoz.Zeus)
	if err != nil {
		render.Error(w, err)
		return
	}

	ah.Respond(w, getCheckoutPortalResponse(redirectUrl))
}
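A small sketch of building the payload accepted by applyLicenseV3 above. Only the body shape (the "key" field from ApplyLicenseRequest) comes from this diff; the endpoint path is not shown here, so it is not assumed.

package sketch

import (
	"bytes"
	"encoding/json"
)

// applyLicensePayload mirrors ApplyLicenseRequest: the handler only reads "key".
type applyLicensePayload struct {
	LicenseKey string `json:"key"`
}

// newApplyLicenseBody builds the JSON body a client would POST to the
// license-apply handler.
func newApplyLicenseBody(key string) (*bytes.Reader, error) {
	b, err := json.Marshal(applyLicensePayload{LicenseKey: key})
	if err != nil {
		return nil, err
	}
	return bytes.NewReader(b), nil
}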
@@ -1,187 +0,0 @@
package api

import (
	"encoding/json"
	"fmt"
	"net/http"
	"slices"
	"time"

	"github.com/SigNoz/signoz/ee/query-service/model"
	eeTypes "github.com/SigNoz/signoz/ee/types"
	"github.com/SigNoz/signoz/pkg/errors"
	errorsV2 "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/gorilla/mux"
	"go.uber.org/zap"
)

func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	req := model.CreatePATRequestBody{}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	pat := eeTypes.NewGettablePAT(
		req.Name,
		req.Role,
		claims.UserID,
		req.ExpiresInDays,
	)
	err = validatePATRequest(pat)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
	var apierr basemodel.BaseApiError
	if pat, apierr = ah.AppDao().CreatePAT(r.Context(), claims.OrgID, pat); apierr != nil {
		RespondError(w, apierr, nil)
		return
	}

	ah.Respond(w, &pat)
}

func validatePATRequest(req eeTypes.GettablePAT) error {
	_, err := authtypes.NewRole(req.Role)
	if err != nil {
		return err
	}

	if req.ExpiresAt < 0 {
		return fmt.Errorf("valid expiresAt is required")
	}

	if req.Name == "" {
		return fmt.Errorf("valid name is required")
	}

	return nil
}

func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	req := eeTypes.GettablePAT{}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	idStr := mux.Vars(r)["id"]
	id, err := valuer.NewUUID(idStr)
	if err != nil {
		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
		return
	}

	//get the pat
	existingPAT, paterr := ah.AppDao().GetPATByID(r.Context(), claims.OrgID, id)
	if paterr != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
		return
	}

	// get the user
	createdByUser, usererr := ah.AppDao().GetUser(r.Context(), existingPAT.UserID)
	if usererr != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
		return
	}

	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
		return
	}

	err = validatePATRequest(req)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	req.UpdatedByUserID = claims.UserID
	req.UpdatedAt = time.Now()
	zap.L().Info("Got Update PAT request", zap.Any("pat", req))
	var apierr basemodel.BaseApiError
	if apierr = ah.AppDao().UpdatePAT(r.Context(), claims.OrgID, req, id); apierr != nil {
		RespondError(w, apierr, nil)
		return
	}

	ah.Respond(w, map[string]string{"data": "pat updated successfully"})
}

func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	pats, apierr := ah.AppDao().ListPATs(r.Context(), claims.OrgID)
	if apierr != nil {
		RespondError(w, apierr, nil)
		return
	}

	ah.Respond(w, pats)
}

func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	idStr := mux.Vars(r)["id"]
	id, err := valuer.NewUUID(idStr)
	if err != nil {
		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
		return
	}

	//get the pat
	existingPAT, paterr := ah.AppDao().GetPATByID(r.Context(), claims.OrgID, id)
	if paterr != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
		return
	}

	// get the user
	createdByUser, usererr := ah.AppDao().GetUser(r.Context(), existingPAT.UserID)
	if usererr != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
		return
	}

	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
		return
	}

	zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
	if apierr := ah.AppDao().RevokePAT(r.Context(), claims.OrgID, id, claims.UserID); apierr != nil {
		RespondError(w, apierr, nil)
		return
	}
	ah.Respond(w, map[string]string{"data": "pat revoked successfully"})
}
@@ -33,3 +33,7 @@ func NewDataConnector(
		ClickHouseReader: chReader,
	}
}

func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
	return r.appdb
}
@@ -11,16 +11,16 @@ import (
	"github.com/gorilla/handlers"
	"github.com/jmoiron/sqlx"

	eemiddleware "github.com/SigNoz/signoz/ee/http/middleware"
	"github.com/SigNoz/signoz/ee/query-service/app/api"
	"github.com/SigNoz/signoz/ee/query-service/app/db"
	"github.com/SigNoz/signoz/ee/query-service/constants"
	"github.com/SigNoz/signoz/ee/query-service/dao"
	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/ee/query-service/rules"
	"github.com/SigNoz/signoz/ee/query-service/usage"
	"github.com/SigNoz/signoz/pkg/alertmanager"
	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/http/middleware"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -30,14 +30,9 @@ import (
	"github.com/rs/cors"
	"github.com/soheilhy/cmux"

	licensepkg "github.com/SigNoz/signoz/ee/query-service/license"
	"github.com/SigNoz/signoz/ee/query-service/usage"

	"github.com/SigNoz/signoz/pkg/query-service/agentConf"
	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
	"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
	baseexplorer "github.com/SigNoz/signoz/pkg/query-service/app/explorer"
	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
	"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
@@ -92,33 +87,11 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {

// NewServer creates and initializes Server
func NewServer(serverOptions *ServerOptions) (*Server, error) {
	modelDao, err := dao.InitDao(serverOptions.SigNoz.SQLStore)
	if err != nil {
		return nil, err
	}

	if err := baseexplorer.InitWithDSN(serverOptions.SigNoz.SQLStore); err != nil {
		return nil, err
	}

	if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
		return nil, err
	}

	gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
	if err != nil {
		return nil, err
	}

	// initiate license manager
	lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Zeus)
	if err != nil {
		return nil, err
	}

	// set license manager as feature flag provider in dao
	modelDao.SetFlagProvider(lm)

	fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
	if err != nil {
		return nil, err
@@ -141,6 +114,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
		serverOptions.SigNoz.SQLStore,
		serverOptions.SigNoz.TelemetryStore,
		serverOptions.SigNoz.Prometheus,
		serverOptions.SigNoz.Modules.OrgGetter,
	)

	if err != nil {
@@ -185,17 +159,23 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
	}

	// start the usagemanager
	usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus)
	usageManager, err := usage.New(serverOptions.SigNoz.Licensing, serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus, serverOptions.SigNoz.Modules.OrgGetter)
	if err != nil {
		return nil, err
	}
	err = usageManager.Start()
	err = usageManager.Start(context.Background())
	if err != nil {
		return nil, err
	}

	telemetry.GetInstance().SetReader(reader)
	telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
	telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
	telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
	telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
	telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
	telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
	telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)

	fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
	if err != nil {
@@ -205,11 +185,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
	apiOpts := api.APIHandlerOptions{
		DataConnector: reader,
		PreferSpanMetrics: serverOptions.PreferSpanMetrics,
		AppDao: modelDao,
		RulesManager: rm,
		UsageManager: usageManager,
		FeatureFlags: lm,
		LicenseManager: lm,
		IntegrationsController: integrationsController,
		CloudIntegrationsController: cloudIntegrationsController,
		LogsParsingPipelineController: logParsingPipelineController,
@@ -250,7 +227,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
		&opAmpModel.AllAgents, agentConfMgr,
	)

	orgs, err := apiHandler.Signoz.Modules.Organization.GetAll(context.Background())
	orgs, err := apiHandler.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
	if err != nil {
		return nil, err
	}
@@ -265,18 +242,17 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}

func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {

	r := baseapp.NewRouter()

	r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
	r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
	r.Use(middleware.NewTimeout(zap.L(),
	r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
	r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
	r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
		s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
		s.serverOptions.Config.APIServer.Timeout.Default,
		s.serverOptions.Config.APIServer.Timeout.Max,
	).Wrap)
	r.Use(middleware.NewAnalytics(zap.L()).Wrap)
	r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
	r.Use(middleware.NewAnalytics().Wrap)
	r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)

	apiHandler.RegisterPrivateRoutes(r)

@@ -300,15 +276,15 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
	r := baseapp.NewRouter()
	am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger())

	r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
	r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
	r.Use(middleware.NewTimeout(zap.L(),
	r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
	r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
	r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
		s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
		s.serverOptions.Config.APIServer.Timeout.Default,
		s.serverOptions.Config.APIServer.Timeout.Max,
	).Wrap)
	r.Use(middleware.NewAnalytics(zap.L()).Wrap)
	r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
	r.Use(middleware.NewAnalytics().Wrap)
	r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)

	apiHandler.RegisterRoutes(r, am)
	apiHandler.RegisterLogsRoutes(r, am)
@@ -322,6 +298,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
	apiHandler.RegisterMessagingQueuesRoutes(r, am)
	apiHandler.RegisterThirdPartyApiRoutes(r, am)
	apiHandler.MetricExplorerRoutes(r, am)
	apiHandler.RegisterTraceFunnelsRoutes(r, am)

	c := cors.New(cors.Options{
		AllowedOrigins: []string{"*"},
@@ -442,15 +419,15 @@ func (s *Server) Start(ctx context.Context) error {
	return nil
}

func (s *Server) Stop() error {
func (s *Server) Stop(ctx context.Context) error {
	if s.httpServer != nil {
		if err := s.httpServer.Shutdown(context.Background()); err != nil {
		if err := s.httpServer.Shutdown(ctx); err != nil {
			return err
		}
	}

	if s.privateHTTP != nil {
		if err := s.privateHTTP.Shutdown(context.Background()); err != nil {
		if err := s.privateHTTP.Shutdown(ctx); err != nil {
			return err
		}
	}
@@ -458,11 +435,11 @@ func (s *Server) Stop() error {
	s.opampServer.Stop()

	if s.ruleManager != nil {
		s.ruleManager.Stop(context.Background())
		s.ruleManager.Stop(ctx)
	}

	// stop usage manager
	s.usageManager.Stop()
	s.usageManager.Stop(ctx)

	return nil
}
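Since Stop now takes a context, a caller can put a bound on shutdown time. A hedged sketch of that wiring follows; the signal handling and the ten-second budget are assumptions, not part of this diff, and "server" stands in for the *Server built by NewServer.

package sketch

import (
	"context"
	"os"
	"os/signal"
	"syscall"
	"time"
)

// waitAndStop blocks until an interrupt or SIGTERM arrives, then gives the
// HTTP servers, the rule manager and the usage manager a bounded window to
// drain before returning.
func waitAndStop(server interface{ Stop(context.Context) error }) error {
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	<-ctx.Done() // wait for a shutdown signal

	shutdownCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	return server.Stop(shutdownCtx)
}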
@@ -475,6 +452,7 @@ func makeRulesManager(
	sqlstore sqlstore.SQLStore,
	telemetryStore telemetrystore.TelemetryStore,
	prometheus prometheus.Prometheus,
	orgGetter organization.Getter,
) (*baserules.Manager, error) {
	// create manager opts
	managerOpts := &baserules.ManagerOptions{
@@ -490,6 +468,7 @@ func makeRulesManager(
		PrepareTestRuleFunc: rules.TestNotification,
		Alertmanager: alertmanager,
		SQLStore: sqlstore,
		OrgGetter: orgGetter,
	}

	// create Manager
@@ -33,3 +33,13 @@ func GetOrDefaultEnv(key string, fallback string) string {
func GetDefaultSiteURL() string {
	return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL)
}

const DotMetricsEnabled = "DOT_METRICS_ENABLED"

var IsDotMetricsEnabled = false

func init() {
	if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
		IsDotMetricsEnabled = true
	}
}
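A self-contained sketch of the same pattern the init block above uses, where an environment variable gates a package-level flag; the names below are local to the example, and only the exact value "true" enables the flag, matching the default of "false" above.

package sketch

import (
	"fmt"
	"os"
)

// dotMetricsEnabled mirrors the constants toggle: anything other than the
// literal "true" leaves the flag at its default of false.
var dotMetricsEnabled = os.Getenv("DOT_METRICS_ENABLED") == "true"

func printFlag() {
	fmt.Println("dot metrics enabled:", dotMetricsEnabled)
}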
@@ -1,10 +0,0 @@
package dao

import (
	"github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
	"github.com/SigNoz/signoz/pkg/sqlstore"
)

func InitDao(sqlStore sqlstore.SQLStore) (ModelDao, error) {
	return sqlite.InitDB(sqlStore)
}
@@ -1,44 +0,0 @@
package dao

import (
	"context"
	"net/url"

	"github.com/SigNoz/signoz/ee/types"
	basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
	baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/google/uuid"
	"github.com/uptrace/bun"
)

type ModelDao interface {
	basedao.ModelDao

	// SetFlagProvider sets the feature lookup provider
	SetFlagProvider(flags baseint.FeatureLookup)

	DB() *bun.DB

	// auth methods
	CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
	PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError)
	GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error)

	// org domain (auth domains) CRUD ops
	ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError)
	GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError)
	CreateDomain(ctx context.Context, d *types.GettableOrgDomain) basemodel.BaseApiError
	UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError
	DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
	GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)

	CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
	UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
	GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
	GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
	ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
	RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
}
@@ -1,191 +0,0 @@
package sqlite

import (
    "context"
    "fmt"
    "net/url"
    "time"

    "github.com/SigNoz/signoz/ee/query-service/constants"
    "github.com/SigNoz/signoz/ee/query-service/model"
    baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/query-service/utils"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/google/uuid"
    "go.uber.org/zap"
)

func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, basemodel.BaseApiError) {
    // get auth domain from email domain
    domain, apierr := m.GetDomainByEmail(ctx, email)
    if apierr != nil {
        zap.L().Error("failed to get domain from email", zap.Error(apierr))
        return nil, model.InternalErrorStr("failed to get domain from email")
    }
    if domain == nil {
        zap.L().Error("email domain does not match any authenticated domain", zap.String("email", email))
        return nil, model.InternalErrorStr("email domain does not match any authenticated domain")
    }

    hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
    if err != nil {
        zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
        return nil, model.InternalErrorStr("failed to generate password hash")
    }

    user := &types.User{
        ID: uuid.New().String(),
        Name: "",
        Email: email,
        Password: hash,
        TimeAuditable: types.TimeAuditable{
            CreatedAt: time.Now(),
        },
        ProfilePictureURL: "", // Currently unused
        Role: authtypes.RoleViewer.String(),
        OrgID: domain.OrgID,
    }

    user, apiErr := m.CreateUser(ctx, user, false)
    if apiErr != nil {
        zap.L().Error("CreateUser failed", zap.Error(apiErr))
        return nil, apiErr
    }

    return user, nil

}

// PrepareSsoRedirect prepares redirect page link after SSO response
// is successfully parsed (i.e. valid email is available)
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError) {

    userPayload, apierr := m.GetUserByEmail(ctx, email)
    if !apierr.IsNil() {
        zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
        return "", model.BadRequestStr("invalid user email received from the auth provider")
    }

    user := &types.User{}

    if userPayload == nil {
        newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
        user = newUser
        if apiErr != nil {
            zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
            return "", apiErr
        }
    } else {
        user = &userPayload.User
    }

    tokenStore, err := baseauth.GenerateJWTForUser(user, jwt)
    if err != nil {
        zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
        return "", model.InternalErrorStr("failed to generate token for the user")
    }

    return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
        redirectUri,
        tokenStore.AccessJwt,
        user.ID,
        tokenStore.RefreshJwt), nil
}

func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
    domain, apierr := m.GetDomainByEmail(ctx, email)
    if apierr != nil {
        return false, apierr
    }

    if domain != nil && domain.SsoEnabled {
        // sso is enabled, check if the user has admin role
        userPayload, baseapierr := m.GetUserByEmail(ctx, email)

        if baseapierr != nil || userPayload == nil {
            return false, baseapierr
        }

        if userPayload.Role != authtypes.RoleAdmin.String() {
            return false, model.BadRequest(fmt.Errorf("auth method not supported"))
        }

    }

    return true, nil
}

// PrecheckLogin is called when the login or signup page is loaded
// to check sso login is to be prompted
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) {

    // assume user is valid unless proven otherwise
    resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false}

    // check if email is a valid user
    userPayload, baseApiErr := m.GetUserByEmail(ctx, email)
    if baseApiErr != nil {
        return resp, baseApiErr
    }

    if userPayload == nil {
        resp.IsUser = false
    }

    ssoAvailable := true
    err := m.checkFeature(model.SSO)
    if err != nil {
        switch err.(type) {
        case basemodel.ErrFeatureUnavailable:
            // do nothing, just skip sso
            ssoAvailable = false
        default:
            zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
            return resp, model.BadRequestStr(err.Error())
        }
    }

    if ssoAvailable {

        resp.IsUser = true

        // find domain from email
        orgDomain, apierr := m.GetDomainByEmail(ctx, email)
        if apierr != nil {
            zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
            return resp, apierr
        }

        if orgDomain != nil && orgDomain.SsoEnabled {
            // saml is enabled for this domain, lets prepare sso url

            if sourceUrl == "" {
                sourceUrl = constants.GetDefaultSiteURL()
            }

            // parse source url that generated the login request
            var err error
            escapedUrl, _ := url.QueryUnescape(sourceUrl)
            siteUrl, err := url.Parse(escapedUrl)
            if err != nil {
                zap.L().Error("failed to parse referer", zap.Error(err))
                return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
            }

            // build Idp URL that will authenticat the user
            // the front-end will redirect user to this url
            resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)

            if err != nil {
                zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
                return resp, model.InternalError(err)
            }

            // set SSO to true, as the url is generated correctly
            resp.SSO = true
        }
    }
    return resp, nil
}
@@ -1,272 +0,0 @@
package sqlite

import (
    "context"
    "database/sql"
    "encoding/json"
    "fmt"
    "net/url"
    "strings"
    "time"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/ee/types"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    ossTypes "github.com/SigNoz/signoz/pkg/types"
    "github.com/google/uuid"
    "go.uber.org/zap"
)

// GetDomainFromSsoResponse uses relay state received from IdP to fetch
// user domain. The domain is further used to process validity of the response.
// when sending login request to IdP we send relay state as URL (site url)
// with domainId or domainName as query parameter.
func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error) {
    // derive domain id from relay state now
    var domainIdStr string
    var domainNameStr string
    var domain *types.GettableOrgDomain

    for k, v := range relayState.Query() {
        if k == "domainId" && len(v) > 0 {
            domainIdStr = strings.Replace(v[0], ":", "-", -1)
        }
        if k == "domainName" && len(v) > 0 {
            domainNameStr = v[0]
        }
    }

    if domainIdStr != "" {
        domainId, err := uuid.Parse(domainIdStr)
        if err != nil {
            zap.L().Error("failed to parse domainId from relay state", zap.Error(err))
            return nil, fmt.Errorf("failed to parse domainId from IdP response")
        }

        domain, err = m.GetDomain(ctx, domainId)
        if (err != nil) || domain == nil {
            zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
            return nil, fmt.Errorf("invalid credentials")
        }
    }

    if domainNameStr != "" {

        domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
        domain = domainFromDB
        if (err != nil) || domain == nil {
            zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
            return nil, fmt.Errorf("invalid credentials")
        }
    }
    if domain != nil {
        return domain, nil
    }

    return nil, fmt.Errorf("failed to find domain received in IdP response")
}

// GetDomainByName returns org domain for a given domain name
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, basemodel.BaseApiError) {

    stored := types.StorableOrgDomain{}
    err := m.DB().NewSelect().
        Model(&stored).
        Where("name = ?", name).
        Limit(1).
        Scan(ctx)

    if err != nil {
        if err == sql.ErrNoRows {
            return nil, model.BadRequest(fmt.Errorf("invalid domain name"))
        }
        return nil, model.InternalError(err)
    }

    domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
    if err := domain.LoadConfig(stored.Data); err != nil {
        return nil, model.InternalError(err)
    }
    return domain, nil
}

// GetDomain returns org domain for a given domain id
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError) {

    stored := types.StorableOrgDomain{}
    err := m.DB().NewSelect().
        Model(&stored).
        Where("id = ?", id).
        Limit(1).
        Scan(ctx)

    if err != nil {
        if err == sql.ErrNoRows {
            return nil, model.BadRequest(fmt.Errorf("invalid domain id"))
        }
        return nil, model.InternalError(err)
    }

    domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
    if err := domain.LoadConfig(stored.Data); err != nil {
        return nil, model.InternalError(err)
    }
    return domain, nil
}

// ListDomains gets the list of auth domains by org id
func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError) {
    domains := []types.GettableOrgDomain{}

    stored := []types.StorableOrgDomain{}
    err := m.DB().NewSelect().
        Model(&stored).
        Where("org_id = ?", orgId).
        Scan(ctx)

    if err != nil {
        if err == sql.ErrNoRows {
            return domains, nil
        }
        return nil, model.InternalError(err)
    }

    for _, s := range stored {
        domain := types.GettableOrgDomain{StorableOrgDomain: s}
        if err := domain.LoadConfig(s.Data); err != nil {
            zap.L().Error("ListDomains() failed", zap.Error(err))
        }
        domains = append(domains, domain)
    }

    return domains, nil
}

// CreateDomain creates a new auth domain
func (m *modelDao) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError {

    if domain.ID == uuid.Nil {
        domain.ID = uuid.New()
    }

    if domain.OrgID == "" || domain.Name == "" {
        return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgID, Name "))
    }

    configJson, err := json.Marshal(domain)
    if err != nil {
        zap.L().Error("failed to unmarshal domain config", zap.Error(err))
        return model.InternalError(fmt.Errorf("domain creation failed"))
    }

    storableDomain := types.StorableOrgDomain{
        ID: domain.ID,
        Name: domain.Name,
        OrgID: domain.OrgID,
        Data: string(configJson),
        TimeAuditable: ossTypes.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
    }

    _, err = m.DB().NewInsert().
        Model(&storableDomain).
        Exec(ctx)

    if err != nil {
        zap.L().Error("failed to insert domain in db", zap.Error(err))
        return model.InternalError(fmt.Errorf("domain creation failed"))
    }

    return nil
}

// UpdateDomain updates stored config params for a domain
func (m *modelDao) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError {

    if domain.ID == uuid.Nil {
        zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
        return model.InternalError(fmt.Errorf("domain update failed"))
    }

    configJson, err := json.Marshal(domain)
    if err != nil {
        zap.L().Error("domain update failed", zap.Error(err))
        return model.InternalError(fmt.Errorf("domain update failed"))
    }

    storableDomain := &types.StorableOrgDomain{
        ID: domain.ID,
        Name: domain.Name,
        OrgID: domain.OrgID,
        Data: string(configJson),
        TimeAuditable: ossTypes.TimeAuditable{UpdatedAt: time.Now()},
    }

    _, err = m.DB().NewUpdate().
        Model(storableDomain).
        Column("data", "updated_at").
        WherePK().
        Exec(ctx)

    if err != nil {
        zap.L().Error("domain update failed", zap.Error(err))
        return model.InternalError(fmt.Errorf("domain update failed"))
    }

    return nil
}

// DeleteDomain deletes an org domain
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {

    if id == uuid.Nil {
        zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
        return model.InternalError(fmt.Errorf("domain delete failed"))
    }

    storableDomain := &types.StorableOrgDomain{ID: id}
    _, err := m.DB().NewDelete().
        Model(storableDomain).
        WherePK().
        Exec(ctx)

    if err != nil {
        zap.L().Error("domain delete failed", zap.Error(err))
        return model.InternalError(fmt.Errorf("domain delete failed"))
    }

    return nil
}

func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError) {

    if email == "" {
        return nil, model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email "))
    }

    components := strings.Split(email, "@")
    if len(components) < 2 {
        return nil, model.BadRequest(fmt.Errorf("invalid email address"))
    }

    parsedDomain := components[1]

    stored := types.StorableOrgDomain{}
    err := m.DB().NewSelect().
        Model(&stored).
        Where("name = ?", parsedDomain).
        Limit(1).
        Scan(ctx)

    if err != nil {
        if err == sql.ErrNoRows {
            return nil, nil
        }
        return nil, model.InternalError(err)
    }

    domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
    if err := domain.LoadConfig(stored.Data); err != nil {
        return nil, model.InternalError(err)
    }
    return domain, nil
}
@@ -1,46 +0,0 @@
package sqlite

import (
    "fmt"

    basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
    basedsql "github.com/SigNoz/signoz/pkg/query-service/dao/sqlite"
    baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun"
)

type modelDao struct {
    *basedsql.ModelDaoSqlite
    flags baseint.FeatureLookup
}

// SetFlagProvider sets the feature lookup provider
func (m *modelDao) SetFlagProvider(flags baseint.FeatureLookup) {
    m.flags = flags
}

// CheckFeature confirms if a feature is available
func (m *modelDao) checkFeature(key string) error {
    if m.flags == nil {
        return fmt.Errorf("flag provider not set")
    }

    return m.flags.CheckFeature(key)
}

// InitDB creates and extends base model DB repository
func InitDB(sqlStore sqlstore.SQLStore) (*modelDao, error) {
    dao, err := basedsql.InitDB(sqlStore)
    if err != nil {
        return nil, err
    }
    // set package variable so dependent base methods (e.g. AuthCache) will work
    basedao.SetDB(dao)
    m := &modelDao{ModelDaoSqlite: dao}
    return m, nil
}

func (m *modelDao) DB() *bun.DB {
    return m.ModelDaoSqlite.DB()
}
@@ -1,198 +0,0 @@
package sqlite

import (
    "context"
    "fmt"
    "time"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/ee/types"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    ossTypes "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"

    "go.uber.org/zap"
)

func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
    p.StorablePersonalAccessToken.OrgID = orgID
    p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
    _, err := m.DB().NewInsert().
        Model(&p.StorablePersonalAccessToken).
        Exec(ctx)
    if err != nil {
        zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
        return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
    }

    createdByUser, _ := m.GetUser(ctx, p.UserID)
    if createdByUser == nil {
        p.CreatedByUser = types.PatUser{
            NotFound: true,
        }
    } else {
        p.CreatedByUser = types.PatUser{
            User: ossTypes.User{
                ID: createdByUser.ID,
                Name: createdByUser.Name,
                Email: createdByUser.Email,
                TimeAuditable: ossTypes.TimeAuditable{
                    CreatedAt: createdByUser.CreatedAt,
                    UpdatedAt: createdByUser.UpdatedAt,
                },
                ProfilePictureURL: createdByUser.ProfilePictureURL,
            },
            NotFound: false,
        }
    }
    return p, nil
}

func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
    _, err := m.DB().NewUpdate().
        Model(&p.StorablePersonalAccessToken).
        Column("role", "name", "updated_at", "updated_by_user_id").
        Where("id = ?", id.StringValue()).
        Where("org_id = ?", orgID).
        Where("revoked = false").
        Exec(ctx)
    if err != nil {
        zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
        return model.InternalError(fmt.Errorf("PAT update failed"))
    }
    return nil
}

func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
    pats := []types.StorablePersonalAccessToken{}

    if err := m.DB().NewSelect().
        Model(&pats).
        Where("revoked = false").
        Where("org_id = ?", orgID).
        Order("updated_at DESC").
        Scan(ctx); err != nil {
        zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
        return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
    }

    patsWithUsers := []types.GettablePAT{}
    for i := range pats {
        patWithUser := types.GettablePAT{
            StorablePersonalAccessToken: pats[i],
        }

        createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
        if createdByUser == nil {
            patWithUser.CreatedByUser = types.PatUser{
                NotFound: true,
            }
        } else {
            patWithUser.CreatedByUser = types.PatUser{
                User: ossTypes.User{
                    ID: createdByUser.ID,
                    Name: createdByUser.Name,
                    Email: createdByUser.Email,
                    TimeAuditable: ossTypes.TimeAuditable{
                        CreatedAt: createdByUser.CreatedAt,
                        UpdatedAt: createdByUser.UpdatedAt,
                    },
                    ProfilePictureURL: createdByUser.ProfilePictureURL,
                },
                NotFound: false,
            }
        }

        updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
        if updatedByUser == nil {
            patWithUser.UpdatedByUser = types.PatUser{
                NotFound: true,
            }
        } else {
            patWithUser.UpdatedByUser = types.PatUser{
                User: ossTypes.User{
                    ID: updatedByUser.ID,
                    Name: updatedByUser.Name,
                    Email: updatedByUser.Email,
                    TimeAuditable: ossTypes.TimeAuditable{
                        CreatedAt: updatedByUser.CreatedAt,
                        UpdatedAt: updatedByUser.UpdatedAt,
                    },
                    ProfilePictureURL: updatedByUser.ProfilePictureURL,
                },
                NotFound: false,
            }
        }

        patsWithUsers = append(patsWithUsers, patWithUser)
    }
    return patsWithUsers, nil
}

func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
    updatedAt := time.Now().Unix()
    _, err := m.DB().NewUpdate().
        Model(&types.StorablePersonalAccessToken{}).
        Set("revoked = ?", true).
        Set("updated_by_user_id = ?", userID).
        Set("updated_at = ?", updatedAt).
        Where("id = ?", id.StringValue()).
        Where("org_id = ?", orgID).
        Exec(ctx)
    if err != nil {
        zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
        return model.InternalError(fmt.Errorf("PAT revoke failed"))
    }
    return nil
}

func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
    pats := []types.StorablePersonalAccessToken{}

    if err := m.DB().NewSelect().
        Model(&pats).
        Where("token = ?", token).
        Where("revoked = false").
        Scan(ctx); err != nil {
        return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
    }

    if len(pats) != 1 {
        return nil, &model.ApiError{
            Typ: model.ErrorInternal,
            Err: fmt.Errorf("found zero or multiple PATs with same token, %s", token),
        }
    }

    patWithUser := types.GettablePAT{
        StorablePersonalAccessToken: pats[0],
    }

    return &patWithUser, nil
}

func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
    pats := []types.StorablePersonalAccessToken{}

    if err := m.DB().NewSelect().
        Model(&pats).
        Where("id = ?", id.StringValue()).
        Where("org_id = ?", orgID).
        Where("revoked = false").
        Scan(ctx); err != nil {
        return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
    }

    if len(pats) != 1 {
        return nil, &model.ApiError{
            Typ: model.ErrorInternal,
            Err: fmt.Errorf("found zero or multiple PATs with same token"),
        }
    }

    patWithUser := types.GettablePAT{
        StorablePersonalAccessToken: pats[0],
    }

    return &patWithUser, nil
}
@@ -1,67 +0,0 @@
package signozio

import (
    "context"
    "encoding/json"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/pkg/zeus"
    "github.com/tidwall/gjson"
)

func ValidateLicenseV3(ctx context.Context, licenseKey string, zeus zeus.Zeus) (*model.LicenseV3, error) {
    data, err := zeus.GetLicense(ctx, licenseKey)
    if err != nil {
        return nil, err
    }

    var m map[string]any
    if err = json.Unmarshal(data, &m); err != nil {
        return nil, err
    }

    license, err := model.NewLicenseV3(m)
    if err != nil {
        return nil, err
    }

    return license, nil
}

// SendUsage reports the usage of signoz to license server
func SendUsage(ctx context.Context, usage model.UsagePayload, zeus zeus.Zeus) error {
    body, err := json.Marshal(usage)
    if err != nil {
        return err
    }

    return zeus.PutMeters(ctx, usage.LicenseKey.String(), body)
}

func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
    body, err := json.Marshal(checkoutRequest)
    if err != nil {
        return "", err
    }

    response, err := zeus.GetCheckoutURL(ctx, licenseKey, body)
    if err != nil {
        return "", err
    }

    return gjson.GetBytes(response, "url").String(), nil
}

func PortalSession(ctx context.Context, portalRequest *model.PortalRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
    body, err := json.Marshal(portalRequest)
    if err != nil {
        return "", err
    }

    response, err := zeus.GetPortalURL(ctx, licenseKey, body)
    if err != nil {
        return "", err
    }

    return gjson.GetBytes(response, "url").String(), nil
}
@@ -1,248 +0,0 @@
|
||||
package license
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/mattn/go-sqlite3"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// Repo is license repo. stores license keys in a secured DB
|
||||
type Repo struct {
|
||||
db *sqlx.DB
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
// NewLicenseRepo initiates a new license repo
|
||||
func NewLicenseRepo(db *sqlx.DB, store sqlstore.SQLStore) Repo {
|
||||
return Repo{
|
||||
db: db,
|
||||
store: store,
|
||||
}
|
||||
}
|
||||
|
||||
func (r *Repo) GetLicensesV3(ctx context.Context) ([]*model.LicenseV3, error) {
|
||||
licensesData := []model.LicenseDB{}
|
||||
licenseV3Data := []*model.LicenseV3{}
|
||||
|
||||
query := "SELECT id,key,data FROM licenses_v3"
|
||||
|
||||
err := r.db.Select(&licensesData, query)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get licenses from db: %v", err)
|
||||
}
|
||||
|
||||
for _, l := range licensesData {
|
||||
var licenseData map[string]interface{}
|
||||
err := json.Unmarshal([]byte(l.Data), &licenseData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
|
||||
}
|
||||
|
||||
license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
|
||||
}
|
||||
licenseV3Data = append(licenseV3Data, license)
|
||||
}
|
||||
|
||||
return licenseV3Data, nil
|
||||
}
|
||||
|
||||
// GetActiveLicense fetches the latest active license from DB.
|
||||
// If the license is not present, expect a nil license and a nil error in the output.
|
||||
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
|
||||
activeLicenseV3, err := r.GetActiveLicenseV3(ctx)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
|
||||
}
|
||||
|
||||
if activeLicenseV3 == nil {
|
||||
return nil, nil
|
||||
}
|
||||
activeLicenseV2 := model.ConvertLicenseV3ToLicenseV2(activeLicenseV3)
|
||||
return activeLicenseV2, nil
|
||||
}
|
||||
|
||||
func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) {
|
||||
var err error
|
||||
licenses := []model.LicenseDB{}
|
||||
|
||||
query := "SELECT id,key,data FROM licenses_v3"
|
||||
|
||||
err = r.db.Select(&licenses, query)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
|
||||
}
|
||||
|
||||
var active *model.LicenseV3
|
||||
for _, l := range licenses {
|
||||
var licenseData map[string]interface{}
|
||||
err := json.Unmarshal([]byte(l.Data), &licenseData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
|
||||
}
|
||||
|
||||
license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
|
||||
}
|
||||
|
||||
if active == nil &&
|
||||
(license.ValidFrom != 0) &&
|
||||
(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
|
||||
active = license
|
||||
}
|
||||
if active != nil &&
|
||||
license.ValidFrom > active.ValidFrom &&
|
||||
(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
|
||||
active = license
|
||||
}
|
||||
}
|
||||
|
||||
return active, nil
|
||||
}
|
||||
|
||||
// InsertLicenseV3 inserts a new license v3 in db
|
||||
func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) *model.ApiError {
|
||||
|
||||
query := `INSERT INTO licenses_v3 (id, key, data) VALUES ($1, $2, $3)`
|
||||
|
||||
// licsense is the entity of zeus so putting the entire license here without defining schema
|
||||
licenseData, err := json.Marshal(l.Data)
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: basemodel.ErrorBadData, Err: err}
|
||||
}
|
||||
|
||||
_, err = r.db.ExecContext(ctx,
|
||||
query,
|
||||
l.ID,
|
||||
l.Key,
|
||||
string(licenseData),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
if sqliteErr, ok := err.(sqlite3.Error); ok {
|
||||
if sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique {
|
||||
zap.L().Error("error in inserting license data: ", zap.Error(sqliteErr))
|
||||
return &model.ApiError{Typ: model.ErrorConflict, Err: sqliteErr}
|
||||
}
|
||||
}
|
||||
zap.L().Error("error in inserting license data: ", zap.Error(err))
|
||||
return &model.ApiError{Typ: basemodel.ErrorExec, Err: err}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateLicenseV3 updates a new license v3 in db
|
||||
func (r *Repo) UpdateLicenseV3(ctx context.Context, l *model.LicenseV3) error {
|
||||
|
||||
// the key and id for the license can't change so only update the data here!
|
||||
query := `UPDATE licenses_v3 SET data=$1 WHERE id=$2;`
|
||||
|
||||
license, err := json.Marshal(l.Data)
|
||||
if err != nil {
|
||||
return fmt.Errorf("insert license failed: license marshal error")
|
||||
}
|
||||
_, err = r.db.ExecContext(ctx,
|
||||
query,
|
||||
license,
|
||||
l.ID,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("error in updating license data: ", zap.Error(err))
|
||||
return fmt.Errorf("failed to update license in db: %v", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Repo) CreateFeature(req *types.FeatureStatus) *basemodel.ApiError {
|
||||
|
||||
_, err := r.store.BunDB().NewInsert().
|
||||
Model(req).
|
||||
Exec(context.Background())
|
||||
if err != nil {
|
||||
return &basemodel.ApiError{Typ: basemodel.ErrorInternal, Err: err}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Repo) GetFeature(featureName string) (types.FeatureStatus, error) {
|
||||
var feature types.FeatureStatus
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&feature).
|
||||
Where("name = ?", featureName).
|
||||
Scan(context.Background())
|
||||
|
||||
if err != nil {
|
||||
return feature, err
|
||||
}
|
||||
if feature.Name == "" {
|
||||
return feature, basemodel.ErrFeatureUnavailable{Key: featureName}
|
||||
}
|
||||
return feature, nil
|
||||
}
|
||||
|
||||
func (r *Repo) GetAllFeatures() ([]basemodel.Feature, error) {
|
||||
|
||||
var feature []basemodel.Feature
|
||||
|
||||
err := r.db.Select(&feature,
|
||||
`SELECT * FROM feature_status;`)
|
||||
if err != nil {
|
||||
return feature, err
|
||||
}
|
||||
|
||||
return feature, nil
|
||||
}
|
||||
|
||||
func (r *Repo) UpdateFeature(req types.FeatureStatus) error {
|
||||
|
||||
_, err := r.store.BunDB().NewUpdate().
|
||||
Model(&req).
|
||||
Where("name = ?", req.Name).
|
||||
Exec(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Repo) InitFeatures(req []types.FeatureStatus) error {
|
||||
// get a feature by name, if it doesn't exist, create it. If it does exist, update it.
|
||||
for _, feature := range req {
|
||||
currentFeature, err := r.GetFeature(feature.Name)
|
||||
if err != nil && err == sql.ErrNoRows {
|
||||
err := r.CreateFeature(&feature)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
} else if err != nil {
|
||||
return err
|
||||
}
|
||||
feature.Usage = int(currentFeature.Usage)
|
||||
if feature.Usage >= feature.UsageLimit && feature.UsageLimit != -1 {
|
||||
feature.Active = false
|
||||
}
|
||||
err = r.UpdateFeature(feature)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -1,318 +0,0 @@
|
||||
package license
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"sync"
|
||||
|
||||
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
|
||||
validate "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
var LM *Manager
|
||||
|
||||
// validate and update license every 24 hours
|
||||
var validationFrequency = 24 * 60 * time.Minute
|
||||
|
||||
type Manager struct {
|
||||
repo *Repo
|
||||
zeus zeus.Zeus
|
||||
mutex sync.Mutex
|
||||
validatorRunning bool
|
||||
// end the license validation, this is important to gracefully
|
||||
// stopping validation and protect in-consistent updates
|
||||
done chan struct{}
|
||||
// terminated waits for the validate go routine to end
|
||||
terminated chan struct{}
|
||||
// last time the license was validated
|
||||
lastValidated int64
|
||||
// keep track of validation failure attempts
|
||||
failedAttempts uint64
|
||||
// keep track of active license and features
|
||||
activeLicenseV3 *model.LicenseV3
|
||||
activeFeatures basemodel.FeatureSet
|
||||
}
|
||||
|
||||
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, zeus zeus.Zeus, features ...basemodel.Feature) (*Manager, error) {
|
||||
if LM != nil {
|
||||
return LM, nil
|
||||
}
|
||||
|
||||
repo := NewLicenseRepo(db, store)
|
||||
m := &Manager{
|
||||
repo: &repo,
|
||||
zeus: zeus,
|
||||
}
|
||||
if err := m.start(features...); err != nil {
|
||||
return m, err
|
||||
}
|
||||
|
||||
LM = m
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// start loads active license in memory and initiates validator
|
||||
func (lm *Manager) start(features ...basemodel.Feature) error {
|
||||
return lm.LoadActiveLicenseV3(features...)
|
||||
}
|
||||
|
||||
func (lm *Manager) Stop() {
|
||||
close(lm.done)
|
||||
<-lm.terminated
|
||||
}
|
||||
|
||||
func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) {
|
||||
lm.mutex.Lock()
|
||||
defer lm.mutex.Unlock()
|
||||
|
||||
if l == nil {
|
||||
return
|
||||
}
|
||||
|
||||
lm.activeLicenseV3 = l
|
||||
lm.activeFeatures = append(l.Features, features...)
|
||||
// set default features
|
||||
setDefaultFeatures(lm)
|
||||
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.L().Panic("Couldn't activate features", zap.Error(err))
|
||||
}
|
||||
if !lm.validatorRunning {
|
||||
// we want to make sure only one validator runs,
|
||||
// we already have lock() so good to go
|
||||
lm.validatorRunning = true
|
||||
go lm.ValidatorV3(context.Background())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func setDefaultFeatures(lm *Manager) {
|
||||
lm.activeFeatures = append(lm.activeFeatures, baseconstants.DEFAULT_FEATURE_SET...)
|
||||
}
|
||||
|
||||
func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error {
|
||||
active, err := lm.repo.GetActiveLicenseV3(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if active != nil {
|
||||
lm.SetActiveV3(active, features...)
|
||||
} else {
|
||||
zap.L().Info("No active license found, defaulting to basic plan")
|
||||
// if no active license is found, we default to basic(free) plan with all default features
|
||||
lm.activeFeatures = model.BasicPlan
|
||||
setDefaultFeatures(lm)
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.L().Error("Couldn't initialize features", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.LicenseV3, apiError *model.ApiError) {
|
||||
|
||||
licenses, err := lm.repo.GetLicensesV3(ctx)
|
||||
if err != nil {
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
for _, l := range licenses {
|
||||
if lm.activeLicenseV3 != nil && l.Key == lm.activeLicenseV3.Key {
|
||||
l.IsCurrent = true
|
||||
}
|
||||
if l.ValidUntil == -1 {
|
||||
// for subscriptions, there is no end-date as such
|
||||
// but for showing user some validity we default one year timespan
|
||||
l.ValidUntil = l.ValidFrom + 31556926
|
||||
}
|
||||
response = append(response, l)
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Validator validates license after an epoch of time
|
||||
func (lm *Manager) ValidatorV3(ctx context.Context) {
|
||||
zap.L().Info("ValidatorV3 started!")
|
||||
defer close(lm.terminated)
|
||||
|
||||
tick := time.NewTicker(validationFrequency)
|
||||
defer tick.Stop()
|
||||
|
||||
_ = lm.ValidateV3(ctx)
|
||||
for {
|
||||
select {
|
||||
case <-lm.done:
|
||||
return
|
||||
default:
|
||||
select {
|
||||
case <-lm.done:
|
||||
return
|
||||
case <-tick.C:
|
||||
_ = lm.ValidateV3(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func (lm *Manager) RefreshLicense(ctx context.Context) error {
|
||||
license, err := validate.ValidateLicenseV3(ctx, lm.activeLicenseV3.Key, lm.zeus)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = lm.repo.UpdateLicenseV3(ctx, license)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
lm.SetActiveV3(license)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
|
||||
if lm.activeLicenseV3 == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
defer func() {
|
||||
lm.mutex.Lock()
|
||||
|
||||
lm.lastValidated = time.Now().Unix()
|
||||
if reterr != nil {
|
||||
zap.L().Error("License validation completed with error", zap.Error(reterr))
|
||||
|
||||
atomic.AddUint64(&lm.failedAttempts, 1)
|
||||
// default to basic plan if validation fails for three consecutive times
|
||||
if atomic.LoadUint64(&lm.failedAttempts) > 3 {
|
||||
zap.L().Error("License validation completed with error for three consecutive times, defaulting to basic plan", zap.String("license_id", lm.activeLicenseV3.ID), zap.Bool("license_validation", false))
|
||||
lm.activeLicenseV3 = nil
|
||||
lm.activeFeatures = model.BasicPlan
|
||||
setDefaultFeatures(lm)
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.L().Error("Couldn't initialize features", zap.Error(err))
|
||||
}
|
||||
lm.done <- struct{}{}
|
||||
lm.validatorRunning = false
|
||||
}
|
||||
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
|
||||
map[string]interface{}{"err": reterr.Error()}, "", true, false)
|
||||
} else {
|
||||
// reset the failed attempts counter
|
||||
atomic.StoreUint64(&lm.failedAttempts, 0)
|
||||
zap.L().Info("License validation completed with no errors")
|
||||
}
|
||||
|
||||
lm.mutex.Unlock()
|
||||
}()
|
||||
|
||||
err := lm.RefreshLicense(ctx)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (*model.LicenseV3, error) {
|
||||
license, err := validate.ValidateLicenseV3(ctx, licenseKey, lm.zeus)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// insert the new license to the sqlite db
|
||||
modelErr := lm.repo.InsertLicenseV3(ctx, license)
|
||||
if modelErr != nil {
|
||||
zap.L().Error("failed to activate license", zap.Error(modelErr))
|
||||
return nil, modelErr
|
||||
}
|
||||
|
||||
// license is valid, activate it
|
||||
lm.SetActiveV3(license)
|
||||
return license, nil
|
||||
}
|
||||
|
||||
func (lm *Manager) GetActiveLicense() *model.LicenseV3 {
|
||||
return lm.activeLicenseV3
|
||||
}
|
||||
|
||||
// CheckFeature will be internally used by backend routines
|
||||
// for feature gating
|
||||
func (lm *Manager) CheckFeature(featureKey string) error {
|
||||
feature, err := lm.repo.GetFeature(featureKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if feature.Active {
|
||||
return nil
|
||||
}
|
||||
return basemodel.ErrFeatureUnavailable{Key: featureKey}
|
||||
}
|
||||
|
||||
// GetFeatureFlags returns current active features
|
||||
func (lm *Manager) GetFeatureFlags() (basemodel.FeatureSet, error) {
|
||||
return lm.repo.GetAllFeatures()
|
||||
}
|
||||
|
||||
func (lm *Manager) InitFeatures(features basemodel.FeatureSet) error {
|
||||
featureStatus := make([]types.FeatureStatus, len(features))
|
||||
for i, f := range features {
|
||||
featureStatus[i] = types.FeatureStatus{
|
||||
Name: f.Name,
|
||||
Active: f.Active,
|
||||
Usage: int(f.Usage),
|
||||
UsageLimit: int(f.UsageLimit),
|
||||
Route: f.Route,
|
||||
}
|
||||
}
|
||||
return lm.repo.InitFeatures(featureStatus)
|
||||
}
|
||||
|
||||
func (lm *Manager) UpdateFeatureFlag(feature basemodel.Feature) error {
|
||||
return lm.repo.UpdateFeature(types.FeatureStatus{
|
||||
Name: feature.Name,
|
||||
Active: feature.Active,
|
||||
Usage: int(feature.Usage),
|
||||
UsageLimit: int(feature.UsageLimit),
|
||||
Route: feature.Route,
|
||||
})
|
||||
}
|
||||
|
||||
func (lm *Manager) GetFeatureFlag(key string) (basemodel.Feature, error) {
|
||||
featureStatus, err := lm.repo.GetFeature(key)
|
||||
if err != nil {
|
||||
return basemodel.Feature{}, err
|
||||
}
|
||||
return basemodel.Feature{
|
||||
Name: featureStatus.Name,
|
||||
Active: featureStatus.Active,
|
||||
Usage: int64(featureStatus.Usage),
|
||||
UsageLimit: int64(featureStatus.UsageLimit),
|
||||
Route: featureStatus.Route,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// GetRepo return the license repo
|
||||
func (lm *Manager) GetRepo() *Repo {
|
||||
return lm.repo
|
||||
}
|
||||
@@ -6,6 +6,8 @@ import (
    "os"
    "time"

    "github.com/SigNoz/signoz/ee/licensing"
    "github.com/SigNoz/signoz/ee/licensing/httplicensing"
    "github.com/SigNoz/signoz/ee/query-service/app"
    "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
    "github.com/SigNoz/signoz/ee/zeus"
@@ -13,11 +15,16 @@ import (
    "github.com/SigNoz/signoz/pkg/config"
    "github.com/SigNoz/signoz/pkg/config/envprovider"
    "github.com/SigNoz/signoz/pkg/config/fileprovider"
    "github.com/SigNoz/signoz/pkg/factory"
    pkglicensing "github.com/SigNoz/signoz/pkg/licensing"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/version"
    pkgzeus "github.com/SigNoz/signoz/pkg/zeus"

    "go.uber.org/zap"
    "go.uber.org/zap/zapcore"
@@ -85,8 +92,9 @@ func main() {
    loggerMgr := initZapLog()
    zap.ReplaceGlobals(loggerMgr)
    defer loggerMgr.Sync() // flushes buffer, if any
    ctx := context.Background()

    config, err := signoz.NewConfig(context.Background(), config.ResolverConfig{
    config, err := signoz.NewConfig(ctx, config.ResolverConfig{
        Uris: []string{"env:"},
        ProviderFactories: []config.ProviderFactory{
            envprovider.NewFactory(),
@@ -109,20 +117,6 @@ func main() {
        zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
    }

    signoz, err := signoz.New(
        context.Background(),
        config,
        zeus.Config(),
        httpzeus.NewProviderFactory(),
        signoz.NewCacheProviderFactories(),
        signoz.NewWebProviderFactories(),
        sqlStoreFactories,
        signoz.NewTelemetryStoreProviderFactories(),
    )
    if err != nil {
        zap.L().Fatal("Failed to create signoz", zap.Error(err))
    }

    jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")

    if len(jwtSecret) == 0 {
@@ -133,6 +127,26 @@ func main() {

    jwt := authtypes.NewJWT(jwtSecret, 30*time.Minute, 30*24*time.Hour)

    signoz, err := signoz.New(
        context.Background(),
        config,
        jwt,
        zeus.Config(),
        httpzeus.NewProviderFactory(),
        licensing.Config(24*time.Hour, 3),
        func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
            return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter)
        },
        signoz.NewEmailingProviderFactories(),
        signoz.NewCacheProviderFactories(),
        signoz.NewWebProviderFactories(),
        sqlStoreFactories,
        signoz.NewTelemetryStoreProviderFactories(),
    )
    if err != nil {
        zap.L().Fatal("Failed to create signoz", zap.Error(err))
    }

    serverOptions := &app.ServerOptions{
        Config: config,
        SigNoz: signoz,
@@ -151,22 +165,22 @@ func main() {
        zap.L().Fatal("Failed to create server", zap.Error(err))
    }

    if err := server.Start(context.Background()); err != nil {
    if err := server.Start(ctx); err != nil {
        zap.L().Fatal("Could not start server", zap.Error(err))
    }

    signoz.Start(context.Background())
    signoz.Start(ctx)

    if err := signoz.Wait(context.Background()); err != nil {
    if err := signoz.Wait(ctx); err != nil {
        zap.L().Fatal("Failed to start signoz", zap.Error(err))
    }

    err = server.Stop()
    err = server.Stop(ctx)
    if err != nil {
        zap.L().Fatal("Failed to stop server", zap.Error(err))
    }

    err = signoz.Stop(context.Background())
    err = signoz.Stop(ctx)
    if err != nil {
        zap.L().Fatal("Failed to stop signoz", zap.Error(err))
    }

@@ -1,12 +0,0 @@
package model

import (
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)

// GettableInvitation overrides base object and adds precheck into
// response
type GettableInvitation struct {
    *basemodel.InvitationResponseObject
    Precheck *basemodel.PrecheckResponse `json:"precheck"`
}
@@ -1,244 +0,0 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"time"
|
||||
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type License struct {
|
||||
Key string `json:"key" db:"key"`
|
||||
ActivationId string `json:"activationId" db:"activationId"`
|
||||
CreatedAt time.Time `db:"created_at"`
|
||||
|
||||
// PlanDetails contains the encrypted plan info
|
||||
PlanDetails string `json:"planDetails" db:"planDetails"`
|
||||
|
||||
// stores parsed license details
|
||||
LicensePlan
|
||||
|
||||
FeatureSet basemodel.FeatureSet
|
||||
|
||||
// populated in case license has any errors
|
||||
ValidationMessage string `db:"validationMessage"`
|
||||
|
||||
// used only for sending details to front-end
|
||||
IsCurrent bool `json:"isCurrent"`
|
||||
}
|
||||
|
||||
func (l *License) MarshalJSON() ([]byte, error) {
|
||||
|
||||
return json.Marshal(&struct {
|
||||
Key string `json:"key" db:"key"`
|
||||
ActivationId string `json:"activationId" db:"activationId"`
|
||||
ValidationMessage string `db:"validationMessage"`
|
||||
IsCurrent bool `json:"isCurrent"`
|
||||
PlanKey string `json:"planKey"`
|
||||
ValidFrom time.Time `json:"ValidFrom"`
|
||||
ValidUntil time.Time `json:"ValidUntil"`
|
||||
Status string `json:"status"`
|
||||
}{
|
||||
Key: l.Key,
|
||||
ActivationId: l.ActivationId,
|
||||
IsCurrent: l.IsCurrent,
|
||||
PlanKey: l.PlanKey,
|
||||
ValidFrom: time.Unix(l.ValidFrom, 0),
|
||||
ValidUntil: time.Unix(l.ValidUntil, 0),
|
||||
Status: l.Status,
|
||||
ValidationMessage: l.ValidationMessage,
|
||||
})
|
||||
}
|
||||
|
||||
type LicensePlan struct {
|
||||
PlanKey string `json:"planKey"`
|
||||
ValidFrom int64 `json:"validFrom"`
|
||||
ValidUntil int64 `json:"validUntil"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
type Licenses struct {
|
||||
TrialStart int64 `json:"trialStart"`
|
||||
TrialEnd int64 `json:"trialEnd"`
|
||||
OnTrial bool `json:"onTrial"`
|
||||
WorkSpaceBlock bool `json:"workSpaceBlock"`
|
||||
TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"`
|
||||
GracePeriodEnd int64 `json:"gracePeriodEnd"`
|
||||
Licenses []License `json:"licenses"`
|
||||
}
|
||||
|
||||
type SubscriptionServerResp struct {
|
||||
Status string `json:"status"`
|
||||
Data Licenses `json:"data"`
|
||||
}
|
||||
|
||||
type Plan struct {
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type LicenseDB struct {
|
||||
ID string `json:"id"`
|
||||
Key string `json:"key"`
|
||||
Data string `json:"data"`
|
||||
}
|
||||
type LicenseV3 struct {
|
||||
ID string
|
||||
Key string
|
||||
Data map[string]interface{}
|
||||
PlanName string
|
||||
Features basemodel.FeatureSet
|
||||
Status string
|
||||
IsCurrent bool
|
||||
ValidFrom int64
|
||||
ValidUntil int64
|
||||
}
|
||||
|
||||
func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) {
|
||||
var zeroValue T
|
||||
if val, ok := data[key]; ok {
|
||||
if value, ok := val.(T); ok {
|
||||
return value, nil
|
||||
}
|
||||
return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue))
|
||||
}
|
||||
return zeroValue, fmt.Errorf("%s key is missing", key)
|
||||
}
|
||||
|
||||
func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
|
||||
var features basemodel.FeatureSet
|
||||
|
||||
// extract id from data
|
||||
licenseID, err := extractKeyFromMapStringInterface[string](data, "id")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
delete(data, "id")
|
||||
|
||||
// extract key from data
|
||||
licenseKey, err := extractKeyFromMapStringInterface[string](data, "key")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
delete(data, "key")
|
||||
|
||||
// extract status from data
|
||||
status, err := extractKeyFromMapStringInterface[string](data, "status")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
planName, err := extractKeyFromMapStringInterface[string](planMap, "name")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// if license status is invalid then default it to basic
|
||||
if status == LicenseStatusInvalid {
|
||||
planName = PlanNameBasic
|
||||
}
|
||||
|
||||
featuresFromZeus := basemodel.FeatureSet{}
|
||||
if _features, ok := data["features"]; ok {
|
||||
featuresData, err := json.Marshal(_features)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to marshal features data")
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil {
|
||||
return nil, errors.Wrap(err, "failed to unmarshal features data")
|
||||
}
|
||||
}
|
||||
|
||||
switch planName {
|
||||
case PlanNameEnterprise:
|
||||
features = append(features, EnterprisePlan...)
|
||||
case PlanNameBasic:
|
||||
features = append(features, BasicPlan...)
|
||||
default:
|
||||
features = append(features, BasicPlan...)
|
||||
}
|
||||
|
||||
if len(featuresFromZeus) > 0 {
|
||||
for _, feature := range featuresFromZeus {
|
||||
exists := false
|
||||
for i, existingFeature := range features {
|
||||
if existingFeature.Name == feature.Name {
|
||||
features[i] = feature // Replace existing feature
|
||||
exists = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !exists {
|
||||
features = append(features, feature) // Append if it doesn't exist
|
||||
}
|
||||
}
|
||||
}
|
||||
data["features"] = features
|
||||
|
||||
_validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from")
|
||||
if err != nil {
|
||||
_validFrom = 0
|
||||
}
|
||||
validFrom := int64(_validFrom)
|
||||
|
||||
_validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until")
|
||||
if err != nil {
|
||||
_validUntil = 0
|
||||
}
|
||||
validUntil := int64(_validUntil)
|
||||
|
||||
return &LicenseV3{
|
||||
ID: licenseID,
|
||||
Key: licenseKey,
|
||||
Data: data,
|
||||
PlanName: planName,
|
||||
Features: features,
|
||||
ValidFrom: validFrom,
|
||||
ValidUntil: validUntil,
|
||||
Status: status,
|
||||
}, nil
|
||||
|
||||
}
|
||||
|
||||
func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) {
|
||||
licenseDataWithIdAndKey := data
|
||||
licenseDataWithIdAndKey["id"] = id
|
||||
licenseDataWithIdAndKey["key"] = key
|
||||
return NewLicenseV3(licenseDataWithIdAndKey)
|
||||
}
|
||||
|
||||
func ConvertLicenseV3ToLicenseV2(l *LicenseV3) *License {
|
||||
planKeyFromPlanName, ok := MapOldPlanKeyToNewPlanName[l.PlanName]
|
||||
if !ok {
|
||||
planKeyFromPlanName = Basic
|
||||
}
|
||||
return &License{
|
||||
Key: l.Key,
|
||||
ActivationId: "",
|
||||
PlanDetails: "",
|
||||
FeatureSet: l.Features,
|
||||
ValidationMessage: "",
|
||||
IsCurrent: l.IsCurrent,
|
||||
LicensePlan: LicensePlan{
|
||||
PlanKey: planKeyFromPlanName,
|
||||
ValidFrom: l.ValidFrom,
|
||||
ValidUntil: l.ValidUntil,
|
||||
Status: l.Status},
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
type CheckoutRequest struct {
|
||||
SuccessURL string `json:"url"`
|
||||
}
|
||||
|
||||
type PortalRequest struct {
|
||||
SuccessURL string `json:"url"`
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestNewLicenseV3(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
data []byte
|
||||
pass bool
|
||||
expected *LicenseV3
|
||||
error error
|
||||
}{
|
||||
{
|
||||
name: "Error for missing license id",
|
||||
data: []byte(`{}`),
|
||||
pass: false,
|
||||
error: errors.New("id key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Error for license id not being a valid string",
|
||||
data: []byte(`{"id": 10}`),
|
||||
pass: false,
|
||||
error: errors.New("id key is not a valid string"),
|
||||
},
|
||||
{
|
||||
name: "Error for missing license key",
|
||||
data: []byte(`{"id":"does-not-matter"}`),
|
||||
pass: false,
|
||||
error: errors.New("key key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Error for invalid string license key",
|
||||
data: []byte(`{"id":"does-not-matter","key":10}`),
|
||||
pass: false,
|
||||
error: errors.New("key key is not a valid string"),
|
||||
},
|
||||
{
|
||||
name: "Error for missing license status",
|
||||
data: []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`),
|
||||
pass: false,
|
||||
error: errors.New("status key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Error for invalid string license status",
|
||||
data: []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`),
|
||||
pass: false,
|
||||
error: errors.New("status key is not a valid string"),
|
||||
},
|
||||
{
|
||||
name: "Error for missing license plan",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`),
|
||||
pass: false,
|
||||
error: errors.New("plan key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Error for invalid json license plan",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`),
|
||||
pass: false,
|
||||
error: errors.New("plan key is not a valid map[string]interface {}"),
|
||||
},
|
||||
{
|
||||
name: "Error for invalid license plan",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`),
|
||||
pass: false,
|
||||
error: errors.New("name key is missing"),
|
||||
},
|
||||
{
|
||||
name: "Parse the entire license properly",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
},
|
||||
"category": "FREE",
|
||||
"status": "ACTIVE",
|
||||
"valid_from": float64(1730899309),
|
||||
"valid_until": float64(-1),
|
||||
},
|
||||
PlanName: PlanNameEnterprise,
|
||||
ValidFrom: 1730899309,
|
||||
ValidUntil: -1,
|
||||
Status: "ACTIVE",
|
||||
IsCurrent: false,
|
||||
Features: model.FeatureSet{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Fallback to basic plan if license status is invalid",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
},
|
||||
"category": "FREE",
|
||||
"status": "INVALID",
|
||||
"valid_from": float64(1730899309),
|
||||
"valid_until": float64(-1),
|
||||
},
|
||||
PlanName: PlanNameBasic,
|
||||
ValidFrom: 1730899309,
|
||||
ValidUntil: -1,
|
||||
Status: "INVALID",
|
||||
IsCurrent: false,
|
||||
Features: model.FeatureSet{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "fallback states for validFrom and validUntil",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
},
|
||||
"valid_from": 1234.456,
|
||||
"valid_until": 5678.567,
|
||||
"category": "FREE",
|
||||
"status": "ACTIVE",
|
||||
},
|
||||
PlanName: PlanNameEnterprise,
|
||||
ValidFrom: 1234,
|
||||
ValidUntil: 5678,
|
||||
Status: "ACTIVE",
|
||||
IsCurrent: false,
|
||||
Features: model.FeatureSet{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
var licensePayload map[string]interface{}
|
||||
err := json.Unmarshal(tc.data, &licensePayload)
|
||||
require.NoError(t, err)
|
||||
license, err := NewLicenseV3(licensePayload)
|
||||
if license != nil {
|
||||
license.Features = make(model.FeatureSet, 0)
|
||||
delete(license.Data, "features")
|
||||
}
|
||||
|
||||
if tc.pass {
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, license)
|
||||
assert.Equal(t, tc.expected, license)
|
||||
} else {
|
||||
require.Error(t, err)
|
||||
assert.EqualError(t, err, tc.error.Error())
|
||||
require.Nil(t, license)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
package model

type CreatePATRequestBody struct {
	Name          string `json:"name"`
	Role          string `json:"role"`
	ExpiresInDays int64  `json:"expiresInDays"`
}
@@ -1,131 +0,0 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
)
|
||||
|
||||
const SSO = "SSO"
|
||||
const Basic = "BASIC_PLAN"
|
||||
const Enterprise = "ENTERPRISE_PLAN"
|
||||
|
||||
var (
|
||||
PlanNameEnterprise = "ENTERPRISE"
|
||||
PlanNameBasic = "BASIC"
|
||||
)
|
||||
|
||||
var (
|
||||
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
|
||||
)
|
||||
|
||||
var (
|
||||
LicenseStatusInvalid = "INVALID"
|
||||
)
|
||||
|
||||
const Onboarding = "ONBOARDING"
|
||||
const ChatSupport = "CHAT_SUPPORT"
|
||||
const Gateway = "GATEWAY"
|
||||
const PremiumSupport = "PREMIUM_SUPPORT"
|
||||
|
||||
var BasicPlan = basemodel.FeatureSet{
|
||||
basemodel.Feature{
|
||||
Name: SSO,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: Gateway,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: PremiumSupport,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AnomalyDetection,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.TraceFunnels,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
|
||||
|
||||
var EnterprisePlan = basemodel.FeatureSet{
|
||||
basemodel.Feature{
|
||||
Name: SSO,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: Onboarding,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: ChatSupport,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: Gateway,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: PremiumSupport,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AnomalyDetection,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.TraceFunnels,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
|
||||
@@ -1,31 +0,0 @@
package sso

import (
	"net/http"
)

// SSOIdentity contains details of user received from SSO provider
type SSOIdentity struct {
	UserID            string
	Username          string
	PreferredUsername string
	Email             string
	EmailVerified     bool
	ConnectorData     []byte
}

// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth
// style redirect flow to determine user information.
type OAuthCallbackProvider interface {
	// The initial URL user would be redirect to.
	// OAuth2 implementations support various scopes but we only need profile and user as
	// the roles are still being managed in SigNoz.
	BuildAuthURL(state string) (string, error)

	// Handle the callback to the server (after login at oauth provider site)
	// and return a email identity.
	// At the moment we dont support auto signup flow (based on domain), so
	// the full identity (including name, group etc) is not required outside of the
	// connector
	HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}
@@ -14,9 +14,9 @@ import (
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/dao"
|
||||
"github.com/SigNoz/signoz/ee/query-service/license"
|
||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
)
|
||||
@@ -35,64 +35,68 @@ var (
|
||||
type Manager struct {
|
||||
clickhouseConn clickhouse.Conn
|
||||
|
||||
licenseRepo *license.Repo
|
||||
licenseService licensing.Licensing
|
||||
|
||||
scheduler *gocron.Scheduler
|
||||
|
||||
modelDao dao.ModelDao
|
||||
|
||||
zeus zeus.Zeus
|
||||
|
||||
orgGetter organization.Getter
|
||||
}
|
||||
|
||||
func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, zeus zeus.Zeus) (*Manager, error) {
|
||||
func New(licenseService licensing.Licensing, clickhouseConn clickhouse.Conn, zeus zeus.Zeus, orgGetter organization.Getter) (*Manager, error) {
|
||||
m := &Manager{
|
||||
clickhouseConn: clickhouseConn,
|
||||
licenseRepo: licenseRepo,
|
||||
licenseService: licenseService,
|
||||
scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC
|
||||
modelDao: modelDao,
|
||||
zeus: zeus,
|
||||
orgGetter: orgGetter,
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// start loads collects and exports any exported snapshot and starts the exporter
|
||||
func (lm *Manager) Start() error {
|
||||
func (lm *Manager) Start(ctx context.Context) error {
|
||||
// compares the locker and stateUnlocked if both are same lock is applied else returns error
|
||||
if !atomic.CompareAndSwapUint32(&locker, stateUnlocked, stateLocked) {
|
||||
return fmt.Errorf("usage exporter is locked")
|
||||
}
|
||||
|
||||
_, err := lm.scheduler.Do(func() { lm.UploadUsage() })
|
||||
// upload usage once when starting the service
|
||||
|
||||
_, err := lm.scheduler.Do(func() { lm.UploadUsage(ctx) })
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// upload usage once when starting the service
|
||||
lm.UploadUsage()
|
||||
|
||||
lm.UploadUsage(ctx)
|
||||
lm.scheduler.StartAsync()
|
||||
|
||||
return nil
|
||||
}
|
||||
func (lm *Manager) UploadUsage() {
|
||||
ctx := context.Background()
|
||||
// check if license is present or not
|
||||
license, err := lm.licenseRepo.GetActiveLicense(ctx)
|
||||
func (lm *Manager) UploadUsage(ctx context.Context) {
|
||||
organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to get active license", zap.Error(err))
|
||||
return
|
||||
}
|
||||
if license == nil {
|
||||
// we will not start the usage reporting if license is not present.
|
||||
zap.L().Info("no license present, skipping usage reporting")
|
||||
zap.L().Error("failed to get organizations", zap.Error(err))
|
||||
return
|
||||
}
|
||||
for _, organization := range organizations {
|
||||
// check if license is present or not
|
||||
license, err := lm.licenseService.GetActive(ctx, organization.ID)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to get active license", zap.Error(err))
|
||||
return
|
||||
}
|
||||
if license == nil {
|
||||
// we will not start the usage reporting if license is not present.
|
||||
zap.L().Info("no license present, skipping usage reporting")
|
||||
return
|
||||
}
|
||||
|
||||
usages := []model.UsageDB{}
|
||||
usages := []model.UsageDB{}
|
||||
|
||||
// get usage from clickhouse
|
||||
dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"}
|
||||
query := `
|
||||
// get usage from clickhouse
|
||||
dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"}
|
||||
query := `
|
||||
SELECT tenant, collector_id, exporter_id, timestamp, data
|
||||
FROM %s.distributed_usage as u1
|
||||
GLOBAL INNER JOIN
|
||||
@@ -107,76 +111,76 @@ func (lm *Manager) UploadUsage() {
|
||||
order by timestamp
|
||||
`
|
||||
|
||||
for _, db := range dbs {
|
||||
dbusages := []model.UsageDB{}
|
||||
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
|
||||
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
|
||||
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
|
||||
return
|
||||
for _, db := range dbs {
|
||||
dbusages := []model.UsageDB{}
|
||||
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
|
||||
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
|
||||
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
for _, u := range dbusages {
|
||||
u.Type = db
|
||||
usages = append(usages, u)
|
||||
}
|
||||
}
|
||||
for _, u := range dbusages {
|
||||
u.Type = db
|
||||
usages = append(usages, u)
|
||||
}
|
||||
}
|
||||
|
||||
if len(usages) <= 0 {
|
||||
zap.L().Info("no snapshots to upload, skipping.")
|
||||
return
|
||||
}
|
||||
|
||||
zap.L().Info("uploading usage data")
|
||||
|
||||
usagesPayload := []model.Usage{}
|
||||
for _, usage := range usages {
|
||||
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
|
||||
if err != nil {
|
||||
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
|
||||
if len(usages) <= 0 {
|
||||
zap.L().Info("no snapshots to upload, skipping.")
|
||||
return
|
||||
}
|
||||
|
||||
usageData := model.Usage{}
|
||||
err = json.Unmarshal(usageDataBytes, &usageData)
|
||||
if err != nil {
|
||||
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
|
||||
zap.L().Info("uploading usage data")
|
||||
|
||||
usagesPayload := []model.Usage{}
|
||||
for _, usage := range usages {
|
||||
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
|
||||
if err != nil {
|
||||
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
usageData := model.Usage{}
|
||||
err = json.Unmarshal(usageDataBytes, &usageData)
|
||||
if err != nil {
|
||||
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
usageData.CollectorID = usage.CollectorID
|
||||
usageData.ExporterID = usage.ExporterID
|
||||
usageData.Type = usage.Type
|
||||
usageData.Tenant = "default"
|
||||
usageData.OrgName = "default"
|
||||
usageData.TenantId = "default"
|
||||
usagesPayload = append(usagesPayload, usageData)
|
||||
}
|
||||
|
||||
key, _ := uuid.Parse(license.Key)
|
||||
payload := model.UsagePayload{
|
||||
LicenseKey: key,
|
||||
Usage: usagesPayload,
|
||||
}
|
||||
|
||||
body, errv2 := json.Marshal(payload)
|
||||
if errv2 != nil {
|
||||
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
|
||||
return
|
||||
}
|
||||
|
||||
usageData.CollectorID = usage.CollectorID
|
||||
usageData.ExporterID = usage.ExporterID
|
||||
usageData.Type = usage.Type
|
||||
usageData.Tenant = "default"
|
||||
usageData.OrgName = "default"
|
||||
usageData.TenantId = "default"
|
||||
usagesPayload = append(usagesPayload, usageData)
|
||||
}
|
||||
|
||||
key, _ := uuid.Parse(license.Key)
|
||||
payload := model.UsagePayload{
|
||||
LicenseKey: key,
|
||||
Usage: usagesPayload,
|
||||
}
|
||||
|
||||
body, errv2 := json.Marshal(payload)
|
||||
if errv2 != nil {
|
||||
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
|
||||
return
|
||||
}
|
||||
|
||||
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
|
||||
if errv2 != nil {
|
||||
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
|
||||
// not returning error here since it is captured in the failed count
|
||||
return
|
||||
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
|
||||
if errv2 != nil {
|
||||
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
|
||||
// not returning error here since it is captured in the failed count
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (lm *Manager) Stop() {
|
||||
func (lm *Manager) Stop(ctx context.Context) {
|
||||
lm.scheduler.Stop()
|
||||
|
||||
zap.L().Info("sending usage data before shutting down")
|
||||
// send usage before shutting down
|
||||
lm.UploadUsage()
|
||||
|
||||
lm.UploadUsage(ctx)
|
||||
atomic.StoreUint32(&locker, stateUnlocked)
|
||||
}
|
||||
|
||||
@@ -19,12 +19,16 @@ var (
|
||||
var (
|
||||
Org = "org"
|
||||
User = "user"
|
||||
UserNoCascade = "user_no_cascade"
|
||||
FactorPassword = "factor_password"
|
||||
CloudIntegration = "cloud_integration"
|
||||
)
|
||||
|
||||
var (
|
||||
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
||||
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
|
||||
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
|
||||
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
|
||||
)
|
||||
|
||||
@@ -264,6 +268,10 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
|
||||
fkReferences = append(fkReferences, OrgReference)
|
||||
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
|
||||
fkReferences = append(fkReferences, UserReference)
|
||||
} else if reference == UserNoCascade && !slices.Contains(fkReferences, UserReferenceNoCascade) {
|
||||
fkReferences = append(fkReferences, UserReferenceNoCascade)
|
||||
} else if reference == FactorPassword && !slices.Contains(fkReferences, FactorPasswordReference) {
|
||||
fkReferences = append(fkReferences, FactorPasswordReference)
|
||||
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
|
||||
fkReferences = append(fkReferences, CloudIntegrationReference)
|
||||
}
|
||||
|
||||
@@ -106,3 +106,7 @@ func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, for
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (dialect *dialect) ToggleForeignKeyConstraint(ctx context.Context, bun *bun.DB, enable bool) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
type GettablePAT struct {
|
||||
CreatedByUser PatUser `json:"createdByUser"`
|
||||
UpdatedByUser PatUser `json:"updatedByUser"`
|
||||
|
||||
StorablePersonalAccessToken
|
||||
}
|
||||
|
||||
type PatUser struct {
|
||||
types.User
|
||||
NotFound bool `json:"notFound"`
|
||||
}
|
||||
|
||||
func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
|
||||
return GettablePAT{
|
||||
StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
|
||||
}
|
||||
}
|
||||
|
||||
type StorablePersonalAccessToken struct {
|
||||
bun.BaseModel `bun:"table:personal_access_token"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
|
||||
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
|
||||
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
|
||||
Token string `json:"token" bun:"token,type:text,notnull,unique"`
|
||||
Name string `json:"name" bun:"name,type:text,notnull"`
|
||||
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
|
||||
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
|
||||
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
|
||||
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
|
||||
}
|
||||
|
||||
func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
|
||||
now := time.Now()
|
||||
if expiresAt != 0 {
|
||||
// convert expiresAt to unix timestamp from days
|
||||
expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
|
||||
}
|
||||
|
||||
// Generate a 32-byte random token.
|
||||
token := make([]byte, 32)
|
||||
rand.Read(token)
|
||||
// Encode the token in base64.
|
||||
encodedToken := base64.StdEncoding.EncodeToString(token)
|
||||
|
||||
return StorablePersonalAccessToken{
|
||||
Token: encodedToken,
|
||||
Name: name,
|
||||
Role: role,
|
||||
UserID: userID,
|
||||
ExpiresAt: expiresAt,
|
||||
LastUsed: 0,
|
||||
Revoked: false,
|
||||
UpdatedByUserID: "",
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
},
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
module.exports = {
|
||||
ignorePatterns: ['src/antlr-parser/*.ts'],
|
||||
env: {
|
||||
browser: true,
|
||||
es2021: true,
|
||||
@@ -110,6 +111,8 @@ module.exports = {
|
||||
// eslint rules need to remove
|
||||
'@typescript-eslint/no-shadow': 'off',
|
||||
'import/no-cycle': 'off',
|
||||
// https://typescript-eslint.io/rules/consistent-return/ check the warning for details
|
||||
'consistent-return': 'off',
|
||||
'prettier/prettier': [
|
||||
'error',
|
||||
{},
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
# Ignore artifacts:
|
||||
build
|
||||
coverage
|
||||
public/
|
||||
|
||||
# Ignore all MD files:
|
||||
**/*.md
|
||||
**/*.md
|
||||
|
||||
# Ignore all JSON files:
|
||||
**/*.json
|
||||
|
||||
frontend/docs/QuerySearch.md (new file, 154 lines)
@@ -0,0 +1,154 @@
# QuerySearch Component Documentation

## Overview
The QuerySearch component is a sophisticated query builder interface that allows users to construct complex search queries with real-time validation and autocomplete functionality.

## Dependencies
```typescript
// Core UI
import { Card, Collapse, Space, Tag, Typography } from 'antd';

// Code Editor
import {
	autocompletion,
	CompletionContext,
	CompletionResult,
	startCompletion,
} from '@codemirror/autocomplete';
import { javascript } from '@codemirror/lang-javascript';
import { ViewPlugin, ViewUpdate } from '@codemirror/view';
import { copilot } from '@uiw/codemirror-theme-copilot';
import CodeMirror, { EditorView, Extension } from '@uiw/react-codemirror';

// Custom Hooks and Utilities
import { useGetQueryKeySuggestions } from 'hooks/querySuggestions/useGetQueryKeySuggestions';
import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';
import { queryOperatorSuggestions, validateQuery } from 'utils/antlrQueryUtils';
import { getQueryContextAtCursor } from 'utils/queryContextUtils';
```

## Key Features
1. Real-time query validation
2. Context-aware autocompletion
3. Support for various query operators (=, !=, IN, LIKE, etc.)
4. Support for complex conditions with AND/OR operators
5. Support for functions (HAS, HASANY, HASALL, HASNONE)
6. Support for parentheses and nested conditions
7. Query examples for common use cases

## State Management
```typescript
const [query, setQuery] = useState<string>('');
const [valueSuggestions, setValueSuggestions] = useState<any[]>([]);
const [activeKey, setActiveKey] = useState<string>('');
const [isLoadingSuggestions, setIsLoadingSuggestions] = useState(false);
const [queryContext, setQueryContext] = useState<IQueryContext | null>(null);
const [validation, setValidation] = useState<IValidationResult>({...});
const [editingMode, setEditingMode] = useState<'key' | 'operator' | 'value' | 'conjunction' | 'function' | 'parenthesis' | 'bracketList' | null>(null);
```
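
The `IQueryContext`, `IValidationResult`, and `IDetailedError` types are referenced throughout this document but never spelled out. The sketch below is a hypothetical reconstruction, inferred only from how the states above and the error handling section use them; the real exports in `utils/antlrQueryUtils` and `utils/queryContextUtils` may differ.

```typescript
// Hypothetical shapes, inferred from usage in this document, not the actual exported types.
interface IDetailedError {
	// Assumed: a human-readable description of a single parse error.
	message: string;
}

interface IValidationResult {
	isValid: boolean;
	message: string;
	errors: IDetailedError[];
}

// Assumed: mirrors the editingMode union used by the component.
interface IQueryContext {
	tokenType:
		| 'key'
		| 'operator'
		| 'value'
		| 'conjunction'
		| 'function'
		| 'parenthesis'
		| 'bracketList';
	currentToken?: string;
}
```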

## Core Functions

### 1. Autocomplete Handler
```typescript
function myCompletions(context: CompletionContext): CompletionResult | null {
	// Handles autocomplete suggestions based on context
	// Supports different contexts: key, operator, value, function, etc.
}
```
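
A minimal, self-contained sketch of what a completion source of this kind can look like with `@codemirror/autocomplete`. The operator list below is a stand-in for `queryOperatorSuggestions`, and the context decision is hard-coded where the real component would call `getQueryContextAtCursor`; treat it as an illustration, not the component's actual implementation.

```typescript
import {
	CompletionContext,
	CompletionResult,
} from '@codemirror/autocomplete';

// Stand-in for queryOperatorSuggestions from 'utils/antlrQueryUtils'.
const OPERATOR_OPTIONS = ['=', '!=', 'IN', 'LIKE', 'EXISTS'].map((op) => ({
	label: op,
	type: 'keyword',
}));

// A simplified completion source: offer operator suggestions for the token under the cursor.
function exampleCompletions(context: CompletionContext): CompletionResult | null {
	// matchBefore returns the text immediately before the cursor that matches the regexp.
	const word = context.matchBefore(/[\w.]*/);
	if (!word || (word.from === word.to && !context.explicit)) {
		return null;
	}

	// The real component derives the editing context (key / operator / value / ...)
	// from getQueryContextAtCursor; here we always suggest operators.
	return {
		from: word.from,
		options: OPERATOR_OPTIONS,
	};
}
```

Such a source is typically wired into the editor through `autocompletion({ override: [exampleCompletions] })` in the CodeMirror extensions array.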

### 2. Value Suggestions Fetcher
```typescript
const fetchValueSuggestions = useCallback(
	async (key: string): Promise<void> => {
		// Fetches value suggestions for a given key
		// Handles loading states and error cases
	},
	[activeKey, isLoadingSuggestions],
);
```
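
The exact signature of `getValueSuggestions` is not shown in this document; the sketch below assumes it takes a key and resolves to a list of suggestion strings, and it factors the loading and error handling out of the component for clarity.

```typescript
// Assumed signature for illustration only; the real client in
// 'api/querySuggestions/getValueSuggestion' may accept more parameters
// and return a richer response object.
declare function getValueSuggestions(key: string): Promise<string[]>;

async function loadValueSuggestions(
	key: string,
	setLoading: (loading: boolean) => void,
	setSuggestions: (values: string[]) => void,
): Promise<void> {
	if (!key) return;
	setLoading(true);
	try {
		const values = await getValueSuggestions(key);
		setSuggestions(values ?? []);
	} catch {
		// Fall back to an empty list so the editor keeps working without suggestions.
		setSuggestions([]);
	} finally {
		setLoading(false);
	}
}
```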

### 3. Query Change Handler
```typescript
const handleQueryChange = useCallback(async (newQuery: string) => {
	// Updates query and validates it
	// Handles validation errors
}, []);
```

## Query Context Types
1. Key context: When editing a field name
2. Operator context: When selecting an operator
3. Value context: When entering a value
4. Conjunction context: When using AND/OR
5. Function context: When using functions
6. Parenthesis context: When using parentheses
7. Bracket list context: When using IN operator

## Example Queries
```typescript
const queryExamples = [
	{ label: 'Basic Query', query: "status = 'error'" },
	{ label: 'Multiple Conditions', query: "status = 'error' AND service = 'frontend'" },
	{ label: 'IN Operator', query: "status IN ['error', 'warning']" },
	{ label: 'Function Usage', query: "HAS(service, 'frontend')" },
	{ label: 'Numeric Comparison', query: 'duration > 1000' },
	// ... more examples
];
```
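
These examples pair naturally with `validateQuery`. A small sanity-check loop, assuming `validateQuery` returns the `IValidationResult` shape used in the error handling section below, might look like this:

```typescript
import { validateQuery } from 'utils/antlrQueryUtils';

// Validate each documented example and flag any that no longer parse.
queryExamples.forEach(({ label, query }) => {
	const result = validateQuery(query);
	if (!result.isValid) {
		console.warn(`Example "${label}" failed validation: ${result.message}`);
	}
});
```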

## Performance Optimizations
1. Uses `useCallback` for memoized functions
2. Tracks component mount state to prevent updates after unmount
3. Debounces suggestion fetching (see the sketch below)
4. Caches key suggestions
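
The component's actual debounce helper is not shown here; the following is a generic sketch of the pattern referred to in item 3:

```typescript
// Generic debounce helper: delays invoking fn until `delayMs` ms have passed
// without another call, keeping only the latest pending invocation.
function debounce<Args extends unknown[]>(
	fn: (...args: Args) => void,
	delayMs: number,
): (...args: Args) => void {
	let timer: ReturnType<typeof setTimeout> | undefined;
	return (...args: Args): void => {
		if (timer) clearTimeout(timer);
		timer = setTimeout(() => fn(...args), delayMs);
	};
}

// Example: debounce value-suggestion fetches while the user is typing.
const debouncedFetchSuggestions = debounce((key: string) => {
	// In the component this would call fetchValueSuggestions(key).
	console.log('fetching suggestions for', key);
}, 300);
```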

## Error Handling
```typescript
try {
	const validationResponse = validateQuery(newQuery);
	setValidation(validationResponse);
} catch (error) {
	setValidation({
		isValid: false,
		message: 'Failed to process query',
		errors: [error as IDetailedError],
	});
}
```

## Usage Example
```typescript
<QuerySearch />
```

## Styling
- Uses SCSS for styling
- Custom classes for different components
- Theme integration with CodeMirror

## Best Practices
1. Always validate queries before submission
2. Handle loading states appropriately
3. Provide clear error messages
4. Use appropriate operators for different data types
5. Consider performance implications of complex queries

## Common Issues and Solutions
1. Query validation errors
   - Check syntax and operator usage
   - Verify data types match operator requirements
2. Performance issues
   - Optimize suggestion fetching
   - Cache frequently used values
3. UI/UX issues
   - Ensure clear error messages
   - Provide helpful suggestions
   - Show appropriate loading states

## Future Improvements
1. Add more query examples
2. Enhance error messages
3. Improve performance for large datasets
4. Add more operator support
5. Enhance UI/UX features

@@ -1,7 +1,7 @@
|
||||
NODE_ENV="development"
|
||||
BUNDLE_ANALYSER="true"
|
||||
FRONTEND_API_ENDPOINT="http://localhost:8080/"
|
||||
INTERCOM_APP_ID="intercom-app-id"
|
||||
PYLON_APP_ID="pylon-app-id"
|
||||
APPCUES_APP_ID="appcess-app-id"
|
||||
|
||||
PLAYWRIGHT_TEST_BASE_URL="http://localhost:8080"
|
||||
CI="1"
|
||||
@@ -15,6 +15,7 @@ const config: Config.InitialOptions = {
|
||||
extensionsToTreatAsEsm: ['.ts'],
|
||||
'ts-jest': {
|
||||
useESM: true,
|
||||
isolatedModules: true,
|
||||
},
|
||||
},
|
||||
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
|
||||
@@ -30,11 +31,6 @@ const config: Config.InitialOptions = {
|
||||
testPathIgnorePatterns: ['/node_modules/', '/public/'],
|
||||
moduleDirectories: ['node_modules', 'src'],
|
||||
testEnvironment: 'jest-environment-jsdom',
|
||||
testEnvironmentOptions: {
|
||||
'jest-playwright': {
|
||||
browsers: ['chromium', 'firefox', 'webkit'],
|
||||
},
|
||||
},
|
||||
coverageThreshold: {
|
||||
global: {
|
||||
statements: 80,
|
||||
|
||||
@@ -15,10 +15,6 @@
|
||||
"jest:coverage": "jest --coverage",
|
||||
"jest:watch": "jest --watch",
|
||||
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
|
||||
"playwright": "NODE_ENV=testing playwright test --config=./playwright.config.ts",
|
||||
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
|
||||
"playwright:codegen:local": "playwright codegen http://localhost:3301",
|
||||
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
|
||||
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
|
||||
"commitlint": "commitlint --edit $1",
|
||||
"test": "jest",
|
||||
@@ -32,9 +28,12 @@
|
||||
"dependencies": {
|
||||
"@ant-design/colors": "6.0.0",
|
||||
"@ant-design/icons": "4.8.0",
|
||||
"@codemirror/autocomplete": "6.18.6",
|
||||
"@codemirror/lang-javascript": "6.2.3",
|
||||
"@dnd-kit/core": "6.1.0",
|
||||
"@dnd-kit/modifiers": "7.0.0",
|
||||
"@dnd-kit/sortable": "8.0.0",
|
||||
"@dnd-kit/utilities": "3.2.2",
|
||||
"@grafana/data": "^11.2.3",
|
||||
"@mdx-js/loader": "2.3.0",
|
||||
"@mdx-js/react": "2.3.0",
|
||||
@@ -46,6 +45,8 @@
|
||||
"@signozhq/design-tokens": "1.1.4",
|
||||
"@tanstack/react-table": "8.20.6",
|
||||
"@tanstack/react-virtual": "3.11.2",
|
||||
"@uiw/codemirror-theme-copilot": "4.23.11",
|
||||
"@uiw/react-codemirror": "4.23.10",
|
||||
"@uiw/react-md-editor": "3.23.5",
|
||||
"@visx/group": "3.3.0",
|
||||
"@visx/hierarchy": "3.12.0",
|
||||
@@ -56,6 +57,7 @@
|
||||
"antd": "5.11.0",
|
||||
"antd-table-saveas-excel": "2.2.1",
|
||||
"axios": "1.8.2",
|
||||
"antlr4": "4.13.2",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"babel-jest": "^29.6.4",
|
||||
"babel-loader": "9.1.3",
|
||||
@@ -82,6 +84,7 @@
|
||||
"history": "4.10.1",
|
||||
"html-webpack-plugin": "5.5.0",
|
||||
"http-proxy-middleware": "3.0.3",
|
||||
"http-status-codes": "2.3.0",
|
||||
"i18next": "^21.6.12",
|
||||
"i18next-browser-languagedetector": "^6.1.3",
|
||||
"i18next-http-backend": "^1.3.2",
|
||||
@@ -90,7 +93,7 @@
|
||||
"less": "^4.1.2",
|
||||
"less-loader": "^10.2.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"lucide-react": "0.427.0",
|
||||
"lucide-react": "0.498.0",
|
||||
"mini-css-extract-plugin": "2.4.5",
|
||||
"motion": "12.4.13",
|
||||
"overlayscrollbars": "^2.8.1",
|
||||
@@ -163,7 +166,6 @@
|
||||
"@commitlint/config-conventional": "^16.2.4",
|
||||
"@faker-js/faker": "9.3.0",
|
||||
"@jest/globals": "^27.5.1",
|
||||
"@playwright/test": "^1.22.0",
|
||||
"@testing-library/jest-dom": "5.16.5",
|
||||
"@testing-library/react": "13.4.0",
|
||||
"@testing-library/user-event": "14.4.3",
|
||||
@@ -257,6 +259,7 @@
|
||||
"http-proxy-middleware": "3.0.3",
|
||||
"cross-spawn": "7.0.5",
|
||||
"cookie": "^0.7.1",
|
||||
"serialize-javascript": "6.0.2"
|
||||
"serialize-javascript": "6.0.2",
|
||||
"prismjs": "1.30.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
import { PlaywrightTestConfig } from '@playwright/test';
|
||||
import dotenv from 'dotenv';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const config: PlaywrightTestConfig = {
|
||||
forbidOnly: !!process.env.CI,
|
||||
retries: process.env.CI ? 2 : 0,
|
||||
preserveOutput: 'always',
|
||||
name: 'Signoz',
|
||||
testDir: './tests',
|
||||
use: {
|
||||
trace: 'retain-on-failure',
|
||||
baseURL: process.env.PLAYWRIGHT_TEST_BASE_URL || 'http://localhost:3301',
|
||||
},
|
||||
updateSnapshots: 'all',
|
||||
fullyParallel: !!process.env.CI,
|
||||
quiet: false,
|
||||
testMatch: ['**/*.spec.ts'],
|
||||
reporter: process.env.CI ? 'github' : 'list',
|
||||
};
|
||||
|
||||
export default config;
|
||||
frontend/public/Icons/empty-funnel-icon.svg (new file, 5.9 KiB)
File diff suppressed because one or more lines are too long

frontend/public/Icons/funnel-add.svg (new file, 418 B)
@@ -0,0 +1 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="m12.192 3.18-1.167 2.33-.583 1.165M7.31 12.74a.583.583 0 0 1-.835-.24L1.808 3.179"/><path d="M7 1.167c2.9 0 5.25.783 5.25 1.75 0 .966-2.35 1.75-5.25 1.75s-5.25-.784-5.25-1.75c0-.967 2.35-1.75 5.25-1.75ZM8.75 10.5h3.5M10.5 12.25v-3.5"/></g></svg>

frontend/public/Icons/solid-info-circle.svg (new file, 439 B)
@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#a)" stroke-linecap="round" stroke-linejoin="round"><path d="M8 14.666A6.667 6.667 0 1 0 8 1.333a6.667 6.667 0 0 0 0 13.333Z" fill="#C0C1C3" stroke="#C0C1C3" stroke-width="2"/><path d="M8 11.333v-4H6.333M8 4.667h.007" stroke="#121317" stroke-width="1.333"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
|
||||
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via chat support or "
|
||||
}
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
|
||||
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via chat support or "
|
||||
}
|
||||
|
||||
@@ -69,5 +69,5 @@
|
||||
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
|
||||
"API_MONITORING": "SigNoz | API Monitoring"
|
||||
"API_MONITORING": "SigNoz | External APIs"
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ done
|
||||
# create temporary tsconfig which includes only passed files
|
||||
str="{
|
||||
\"extends\": \"./tsconfig.json\",
|
||||
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./conf/default.conf\",\"./public\",\"./tests\",\"./playwright.config.ts\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
|
||||
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./public\",\"./tests\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
|
||||
}"
|
||||
echo $str > tsconfig.tmp
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
import getOrgUser from 'api/user/getOrgUser';
|
||||
import getAll from 'api/v1/user/get';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import ROUTES from 'constants/routes';
|
||||
@@ -11,8 +11,11 @@ import { useAppContext } from 'providers/App/App';
|
||||
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { matchPath, useLocation } from 'react-router-dom';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import APIError from 'types/api/error';
|
||||
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
|
||||
import { Organization } from 'types/api/user/getOrganization';
|
||||
import { UserResponse } from 'types/api/user/getUser';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
import { routePermission } from 'utils/permission';
|
||||
|
||||
@@ -33,8 +36,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
user,
|
||||
isLoggedIn: isLoggedInState,
|
||||
isFetchingOrgPreferences,
|
||||
activeLicenseV3,
|
||||
isFetchingActiveLicenseV3,
|
||||
activeLicense,
|
||||
isFetchingActiveLicense,
|
||||
trialInfo,
|
||||
featureFlags,
|
||||
} = useAppContext();
|
||||
@@ -58,12 +61,13 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
|
||||
const [orgData, setOrgData] = useState<Organization | undefined>(undefined);
|
||||
|
||||
const { data: orgUsers, isFetching: isFetchingOrgUsers } = useQuery({
|
||||
const { data: usersData, isFetching: isFetchingUsers } = useQuery<
|
||||
SuccessResponseV2<UserResponse[]> | undefined,
|
||||
APIError
|
||||
>({
|
||||
queryFn: () => {
|
||||
if (orgData && orgData.id !== undefined) {
|
||||
return getOrgUser({
|
||||
orgId: orgData.id,
|
||||
});
|
||||
return getAll();
|
||||
}
|
||||
return undefined;
|
||||
},
|
||||
@@ -72,23 +76,23 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
});
|
||||
|
||||
const checkFirstTimeUser = useCallback((): boolean => {
|
||||
const users = orgUsers?.payload || [];
|
||||
const users = usersData?.data || [];
|
||||
|
||||
const remainingUsers = users.filter(
|
||||
const remainingUsers = (Array.isArray(users) ? users : []).filter(
|
||||
(user) => user.email !== 'admin@signoz.cloud',
|
||||
);
|
||||
|
||||
return remainingUsers.length === 1;
|
||||
}, [orgUsers?.payload]);
|
||||
}, [usersData?.data]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
isCloudUserVal &&
|
||||
!isFetchingOrgPreferences &&
|
||||
orgPreferences &&
|
||||
!isFetchingOrgUsers &&
|
||||
orgUsers &&
|
||||
orgUsers.payload
|
||||
!isFetchingUsers &&
|
||||
usersData &&
|
||||
usersData.data
|
||||
) {
|
||||
const isOnboardingComplete = orgPreferences?.find(
|
||||
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
|
||||
@@ -108,9 +112,9 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
checkFirstTimeUser,
|
||||
isCloudUserVal,
|
||||
isFetchingOrgPreferences,
|
||||
isFetchingOrgUsers,
|
||||
isFetchingUsers,
|
||||
orgPreferences,
|
||||
orgUsers,
|
||||
usersData,
|
||||
pathname,
|
||||
]);
|
||||
|
||||
@@ -141,16 +145,16 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
|
||||
if (!isFetchingActiveLicense && activeLicense) {
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
|
||||
const isTerminated = activeLicenseV3.state === LicenseState.TERMINATED;
|
||||
const isExpired = activeLicenseV3.state === LicenseState.EXPIRED;
|
||||
const isCancelled = activeLicenseV3.state === LicenseState.CANCELLED;
|
||||
const isTerminated = activeLicense.state === LicenseState.TERMINATED;
|
||||
const isExpired = activeLicense.state === LicenseState.EXPIRED;
|
||||
const isCancelled = activeLicense.state === LicenseState.CANCELLED;
|
||||
|
||||
const isWorkspaceAccessRestricted = isTerminated || isExpired || isCancelled;
|
||||
|
||||
const { platform } = activeLicenseV3;
|
||||
const { platform } = activeLicense;
|
||||
|
||||
if (
|
||||
isWorkspaceAccessRestricted &&
|
||||
@@ -160,26 +164,26 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
navigateToWorkSpaceAccessRestricted(currentRoute);
|
||||
}
|
||||
}
|
||||
}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
|
||||
}, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingActiveLicenseV3) {
|
||||
if (!isFetchingActiveLicense) {
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
const shouldBlockWorkspace = trialInfo?.workSpaceBlock;
|
||||
|
||||
if (
|
||||
shouldBlockWorkspace &&
|
||||
currentRoute &&
|
||||
activeLicenseV3?.platform === LicensePlatform.CLOUD
|
||||
activeLicense?.platform === LicensePlatform.CLOUD
|
||||
) {
|
||||
navigateToWorkSpaceBlocked(currentRoute);
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [
|
||||
isFetchingActiveLicenseV3,
|
||||
isFetchingActiveLicense,
|
||||
trialInfo?.workSpaceBlock,
|
||||
activeLicenseV3?.platform,
|
||||
activeLicense?.platform,
|
||||
mapRoutes,
|
||||
pathname,
|
||||
]);
|
||||
@@ -193,20 +197,20 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
|
||||
if (!isFetchingActiveLicense && activeLicense) {
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
const shouldSuspendWorkspace =
|
||||
activeLicenseV3.state === LicenseState.DEFAULTED;
|
||||
activeLicense.state === LicenseState.DEFAULTED;
|
||||
|
||||
if (
|
||||
shouldSuspendWorkspace &&
|
||||
currentRoute &&
|
||||
activeLicenseV3.platform === LicensePlatform.CLOUD
|
||||
activeLicense.platform === LicensePlatform.CLOUD
|
||||
) {
|
||||
navigateToWorkSpaceSuspended(currentRoute);
|
||||
}
|
||||
}
|
||||
}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
|
||||
}, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]);
|
||||
|
||||
useEffect(() => {
|
||||
if (org && org.length > 0 && org[0].id !== undefined) {
|
||||
|
||||
@@ -5,6 +5,7 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import NotFound from 'components/NotFound';
|
||||
import Spinner from 'components/Spinner';
|
||||
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import ROUTES from 'constants/routes';
|
||||
@@ -12,9 +13,9 @@ import AppLayout from 'container/AppLayout';
|
||||
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
|
||||
import { useThemeConfig } from 'hooks/useDarkMode';
|
||||
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
|
||||
import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
|
||||
import { NotificationProvider } from 'hooks/useNotifications';
|
||||
import { ResourceProvider } from 'hooks/useResourceAttribute';
|
||||
import { StatusCodes } from 'http-status-codes';
|
||||
import history from 'lib/history';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import posthog from 'posthog-js';
|
||||
@@ -22,10 +23,12 @@ import AlertRuleProvider from 'providers/Alert';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { IUser } from 'providers/App/types';
|
||||
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
|
||||
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
|
||||
import { QueryBuilderProvider } from 'providers/QueryBuilder';
|
||||
import { Suspense, useCallback, useEffect, useState } from 'react';
|
||||
import { Route, Router, Switch } from 'react-router-dom';
|
||||
import { CompatRouter } from 'react-router-dom-v5-compat';
|
||||
import { LicenseStatus } from 'types/api/licensesV3/getActive';
|
||||
import { Userpilot } from 'userpilot';
|
||||
import { extractDomain } from 'utils/app';
|
||||
|
||||
@@ -40,14 +43,13 @@ import defaultRoutes, {
|
||||
function App(): JSX.Element {
|
||||
const themeConfig = useThemeConfig();
|
||||
const {
|
||||
licenses,
|
||||
user,
|
||||
isFetchingUser,
|
||||
isFetchingLicenses,
|
||||
isFetchingFeatureFlags,
|
||||
trialInfo,
|
||||
activeLicenseV3,
|
||||
isFetchingActiveLicenseV3,
|
||||
activeLicense,
|
||||
isFetchingActiveLicense,
|
||||
activeLicenseFetchError,
|
||||
userFetchError,
|
||||
featureFlagsFetchError,
|
||||
isLoggedIn: isLoggedInState,
|
||||
@@ -65,18 +67,18 @@ function App(): JSX.Element {
|
||||
const enableAnalytics = useCallback(
|
||||
(user: IUser): void => {
|
||||
// wait for the required data to be loaded before doing init for anything!
|
||||
if (!isFetchingActiveLicenseV3 && activeLicenseV3 && org) {
|
||||
if (!isFetchingActiveLicense && activeLicense && org) {
|
||||
const orgName =
|
||||
org && Array.isArray(org) && org.length > 0 ? org[0].displayName : '';
|
||||
|
||||
const { name, email, role } = user;
|
||||
const { displayName, email, role } = user;
|
||||
|
||||
const domain = extractDomain(email);
|
||||
const hostNameParts = hostname.split('.');
|
||||
|
||||
const identifyPayload = {
|
||||
email,
|
||||
name,
|
||||
name: displayName,
|
||||
company_name: orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
@@ -102,10 +104,24 @@ function App(): JSX.Element {
|
||||
if (domain) {
|
||||
logEvent('Domain Identified', groupTraits, 'group');
|
||||
}
|
||||
if (window && window.Appcues) {
|
||||
window.Appcues.identify(email, {
|
||||
name: displayName,
|
||||
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
company_domain: domain,
|
||||
|
||||
companyName: orgName,
|
||||
email,
|
||||
paidUser: !!trialInfo?.trialConvertedToSubscription,
|
||||
});
|
||||
}
|
||||
|
||||
Userpilot.identify(email, {
|
||||
email,
|
||||
name,
|
||||
name: displayName,
|
||||
orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
@@ -117,7 +133,7 @@ function App(): JSX.Element {
|
||||
|
||||
posthog?.identify(email, {
|
||||
email,
|
||||
name,
|
||||
name: displayName,
|
||||
orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
@@ -136,24 +152,12 @@ function App(): JSX.Element {
|
||||
source: 'signoz-ui',
|
||||
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
|
||||
});
|
||||
|
||||
if (
|
||||
window.cioanalytics &&
|
||||
typeof window.cioanalytics.identify === 'function'
|
||||
) {
|
||||
window.cioanalytics.reset();
|
||||
window.cioanalytics.identify(email, {
|
||||
name: user.name,
|
||||
email,
|
||||
role: user.role,
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
[
|
||||
hostname,
|
||||
isFetchingActiveLicenseV3,
|
||||
activeLicenseV3,
|
||||
isFetchingActiveLicense,
|
||||
activeLicense,
|
||||
org,
|
||||
trialInfo?.trialConvertedToSubscription,
|
||||
],
|
||||
@@ -162,18 +166,19 @@ function App(): JSX.Element {
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
useEffect(() => {
|
||||
if (
|
||||
!isFetchingLicenses &&
|
||||
licenses &&
|
||||
!isFetchingActiveLicense &&
|
||||
(activeLicense || activeLicenseFetchError) &&
|
||||
!isFetchingUser &&
|
||||
user &&
|
||||
!!user.email
|
||||
) {
|
||||
// either the active API returns error with 404 or 501 and if it returns a terminated license means it's on basic plan
|
||||
const isOnBasicPlan =
|
||||
licenses.licenses?.some(
|
||||
(license) =>
|
||||
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
|
||||
) || licenses.licenses === null;
|
||||
|
||||
(activeLicenseFetchError &&
|
||||
[StatusCodes.NOT_FOUND, StatusCodes.NOT_IMPLEMENTED].includes(
|
||||
activeLicenseFetchError?.getHttpStatusCode(),
|
||||
)) ||
|
||||
(activeLicense?.status && activeLicense.status === LicenseStatus.INVALID);
|
||||
const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER);
|
||||
|
||||
if (isLoggedInState && user && user.id && user.email && !isIdentifiedUser) {
|
||||
@@ -188,6 +193,10 @@ function App(): JSX.Element {
|
||||
updatedRoutes = updatedRoutes.filter(
|
||||
(route) => route?.path !== ROUTES.BILLING,
|
||||
);
|
||||
|
||||
if (isEnterpriseSelfHostedUser) {
|
||||
updatedRoutes.push(LIST_LICENSES);
|
||||
}
|
||||
}
|
||||
// always add support route for cloud users
|
||||
updatedRoutes = [...updatedRoutes, SUPPORT_ROUTE];
|
||||
@@ -203,22 +212,23 @@ function App(): JSX.Element {
|
||||
}, [
|
||||
isLoggedInState,
|
||||
user,
|
||||
licenses,
|
||||
isCloudUser,
|
||||
isEnterpriseSelfHostedUser,
|
||||
isFetchingLicenses,
|
||||
isFetchingActiveLicense,
|
||||
isFetchingUser,
|
||||
activeLicense,
|
||||
activeLicenseFetchError,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (pathname === ROUTES.ONBOARDING) {
|
||||
window.Intercom('update', {
|
||||
hide_default_launcher: true,
|
||||
});
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
window.Pylon('hideChatBubble');
|
||||
} else {
|
||||
window.Intercom('update', {
|
||||
hide_default_launcher: false,
|
||||
});
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
window.Pylon('showChatBubble');
|
||||
}
|
||||
}, [pathname]);
|
||||
|
||||
@@ -230,8 +240,7 @@ function App(): JSX.Element {
|
||||
if (
|
||||
!isFetchingFeatureFlags &&
|
||||
(featureFlags || featureFlagsFetchError) &&
|
||||
licenses &&
|
||||
activeLicenseV3 &&
|
||||
activeLicense &&
|
||||
trialInfo
|
||||
) {
|
||||
let isChatSupportEnabled = false;
|
||||
@@ -254,11 +263,13 @@ function App(): JSX.Element {
|
||||
!showAddCreditCardModal &&
|
||||
(isCloudUser || isEnterpriseSelfHostedUser)
|
||||
) {
|
||||
window.Intercom('boot', {
|
||||
app_id: process.env.INTERCOM_APP_ID,
|
||||
email: user?.email || '',
|
||||
name: user?.name || '',
|
||||
});
|
||||
window.pylon = {
|
||||
chat_settings: {
|
||||
app_id: process.env.PYLON_APP_ID,
|
||||
email: user.email,
|
||||
name: user.displayName,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}, [
|
||||
@@ -269,8 +280,7 @@ function App(): JSX.Element {
|
||||
featureFlags,
|
||||
isFetchingFeatureFlags,
|
||||
featureFlagsFetchError,
|
||||
licenses,
|
||||
activeLicenseV3,
|
||||
activeLicense,
|
||||
trialInfo,
|
||||
isCloudUser,
|
||||
isEnterpriseSelfHostedUser,
|
||||
@@ -321,10 +331,6 @@ function App(): JSX.Element {
|
||||
} else {
|
||||
posthog.reset();
|
||||
Sentry.close();
|
||||
|
||||
if (window.cioanalytics && typeof window.cioanalytics.reset === 'function') {
|
||||
window.cioanalytics.reset();
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isCloudUser, isEnterpriseSelfHostedUser]);
|
||||
@@ -332,7 +338,7 @@ function App(): JSX.Element {
|
||||
// if the user is in logged in state
|
||||
if (isLoggedInState) {
|
||||
// if the setup calls are loading then return a spinner
|
||||
if (isFetchingLicenses || isFetchingUser || isFetchingFeatureFlags) {
|
||||
if (isFetchingActiveLicense || isFetchingUser || isFetchingFeatureFlags) {
|
||||
return <Spinner tip="Loading..." />;
|
||||
}
|
||||
|
||||
@@ -344,7 +350,11 @@ function App(): JSX.Element {
|
||||
}
|
||||
|
||||
// if all of the data is not set then return a spinner, this is required because there is some gap between loading states and data setting
|
||||
if ((!licenses || !user.email || !featureFlags) && !userFetchError) {
|
||||
if (
|
||||
(!activeLicense || !user.email || !featureFlags) &&
|
||||
!userFetchError &&
|
||||
!activeLicenseFetchError
|
||||
) {
|
||||
return <Spinner tip="Loading..." />;
|
||||
}
|
||||
}
|
||||
@@ -354,35 +364,38 @@ function App(): JSX.Element {
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
<CompatRouter>
|
||||
<UserpilotRouteTracker />
|
||||
<NotificationProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<Route exact path="/" component={Home} />
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
<ErrorModalProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<Route exact path="/" component={Home} />
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</ErrorModalProvider>
|
||||
</NotificationProvider>
|
||||
</CompatRouter>
|
||||
</Router>
|
||||
|
||||
@@ -47,9 +47,10 @@ export const TracesFunnels = Loadable(
|
||||
import(/* webpackChunkName: "Traces Funnels" */ 'pages/TracesModulePage'),
|
||||
);
|
||||
export const TracesFunnelDetails = Loadable(
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesFunnelDetails'
|
||||
/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesModulePage'
|
||||
),
|
||||
);
|
||||
|
||||
|
||||
frontend/src/antlr-parser/FilterQuery.g4 (new file, 183 lines)
@@ -0,0 +1,183 @@
grammar FilterQuery;
|
||||
|
||||
/*
|
||||
* Parser Rules
|
||||
*/
|
||||
|
||||
query
|
||||
: expression ( (AND | OR) expression | expression )*
|
||||
EOF
|
||||
;
|
||||
|
||||
expression
|
||||
: orExpression
|
||||
;
|
||||
|
||||
orExpression
|
||||
: andExpression ( OR andExpression )*
|
||||
;
|
||||
|
||||
andExpression
|
||||
: unaryExpression ( AND unaryExpression | unaryExpression )*
|
||||
;
|
||||
|
||||
unaryExpression
|
||||
: NOT? primary
|
||||
;
|
||||
|
||||
primary
|
||||
: LPAREN orExpression RPAREN
|
||||
| comparison
|
||||
| functionCall
|
||||
| fullText
|
||||
;
|
||||
|
||||
comparison
|
||||
: key EQUALS value
|
||||
| key (NOT_EQUALS | NEQ) value
|
||||
| key LT value
|
||||
| key LE value
|
||||
| key GT value
|
||||
| key GE value
|
||||
| key (LIKE | ILIKE) value
|
||||
| key (NOT_LIKE | NOT_ILIKE) value
|
||||
| key BETWEEN value AND value
|
||||
| key NOT_BETWEEN value AND value
|
||||
| key inClause
|
||||
| key notInClause
|
||||
| key EXISTS
|
||||
| key NOT_EXISTS
|
||||
| key REGEXP value
|
||||
| key NOT_REGEXP value
|
||||
| key CONTAINS value
|
||||
| key NOT_CONTAINS value
|
||||
;
|
||||
|
||||
inClause
|
||||
: IN LPAREN valueList RPAREN
|
||||
| IN LBRACK valueList RBRACK
|
||||
;
|
||||
|
||||
notInClause
|
||||
: NOT_IN LPAREN valueList RPAREN
|
||||
| NOT_IN LBRACK valueList RBRACK
|
||||
;
|
||||
|
||||
valueList
|
||||
: value ( COMMA value )*
|
||||
;
|
||||
|
||||
fullText
|
||||
: QUOTED_TEXT
|
||||
;
|
||||
|
||||
functionCall
|
||||
: (HAS | HASANY | HASALL | HASNONE) LPAREN functionParamList RPAREN
|
||||
;
|
||||
|
||||
functionParamList
|
||||
: functionParam ( COMMA functionParam )*
|
||||
;
|
||||
|
||||
functionParam
|
||||
: key
|
||||
| value
|
||||
| array
|
||||
;
|
||||
|
||||
array
|
||||
: LBRACK valueList RBRACK
|
||||
;
|
||||
|
||||
value
|
||||
: QUOTED_TEXT
|
||||
| NUMBER
|
||||
| BOOL
|
||||
;
|
||||
|
||||
key
|
||||
: KEY
|
||||
;
|
||||
|
||||
/*
|
||||
* Lexer Rules
|
||||
*/
|
||||
|
||||
// Common punctuation / symbols
|
||||
LPAREN : '(' ;
|
||||
RPAREN : ')' ;
|
||||
LBRACK : '[' ;
|
||||
RBRACK : ']' ;
|
||||
COMMA : ',' ;
|
||||
|
||||
EQUALS : '=' | '==' ;
|
||||
NOT_EQUALS : '!=' ;
|
||||
NEQ : '<>' ;
|
||||
LT : '<' ;
|
||||
LE : '<=' ;
|
||||
GT : '>' ;
|
||||
GE : '>=' ;
|
||||
|
||||
// Multi-keyword operators
|
||||
LIKE : [Ll][Ii][Kk][Ee] ;
|
||||
NOT_LIKE : [Nn][Oo][Tt] [ \t]+ [Ll][Ii][Kk][Ee] ;
|
||||
ILIKE : [Ii][Ll][Ii][Kk][Ee] ;
|
||||
NOT_ILIKE : [Nn][Oo][Tt] [ \t]+ [Ii][Ll][Ii][Kk][Ee] ;
|
||||
BETWEEN : [Bb][Ee][Tt][Ww][Ee][Ee][Nn] ;
|
||||
NOT_BETWEEN : [Nn][Oo][Tt] [ \t]+ [Bb][Ee][Tt][Ww][Ee][Ee][Nn] ;
|
||||
|
||||
EXISTS : [Ee][Xx][Ii][Ss][Tt][Ss]? ;
|
||||
NOT_EXISTS : [Nn][Oo][Tt] [ \t]+ [Ee][Xx][Ii][Ss][Tt][Ss]? ;
|
||||
|
||||
REGEXP : [Rr][Ee][Gg][Ee][Xx][Pp] ;
|
||||
NOT_REGEXP : [Nn][Oo][Tt] [ \t]+ [Rr][Ee][Gg][Ee][Xx][Pp] ;
|
||||
|
||||
CONTAINS : [Cc][Oo][Nn][Tt][Aa][Ii][Nn][Ss]? ;
|
||||
NOT_CONTAINS: [Nn][Oo][Tt] [ \t]+ [Cc][Oo][Nn][Tt][Aa][Ii][Nn][Ss]? ;
|
||||
|
||||
IN : [Ii][Nn] ;
|
||||
NOT_IN : [Nn][Oo][Tt] [ \t]+ [Ii][Nn] ;
|
||||
|
||||
// Boolean logic
|
||||
NOT : [Nn][Oo][Tt] ;
|
||||
AND : [Aa][Nn][Dd] ;
|
||||
OR : [Oo][Rr] ;
|
||||
|
||||
// Functions
|
||||
HAS : [Hh][Aa][Ss] ;
|
||||
HASANY : [Hh][Aa][Ss][Aa][Nn][Yy] ;
|
||||
HASALL : [Hh][Aa][Ss][Aa][Ll][Ll] ;
|
||||
HASNONE : [Hh][Aa][Ss][Nn][Oo][Nn][Ee] ;
|
||||
|
||||
// Boolean values
|
||||
BOOL
|
||||
: [Tt][Rr][Uu][Ee]
|
||||
| [Ff][Aa][Ll][Ss][Ee]
|
||||
;
|
||||
|
||||
// Numbers
|
||||
NUMBER
|
||||
: DIGIT+ ( '.' DIGIT+ )?
|
||||
;
|
||||
|
||||
// Quoted text
|
||||
QUOTED_TEXT
|
||||
: ( '"' ( ~["\\] | '\\' . )* '"' // double-quoted
|
||||
| '\'' ( ~['\\] | '\\' . )* '\'' // single-quoted
|
||||
)
|
||||
;
|
||||
|
||||
// Keys
|
||||
KEY
|
||||
: [a-zA-Z0-9_] [a-zA-Z0-9_.[\]]*
|
||||
;
|
||||
|
||||
// Whitespace
|
||||
WS
|
||||
: [ \t\r\n]+ -> skip
|
||||
;
|
||||
|
||||
// Digits fragment
|
||||
fragment DIGIT
|
||||
: [0-9]
|
||||
;
|
||||
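For orientation, a few inputs this grammar accepts. The keys and values are made up; they simply exercise the comparison, IN/EXISTS, BETWEEN, full-text and function-call rules defined above:

service.name = 'frontend' AND duration_ms > 250
status_code IN (200, 201, 204) OR http.method != 'GET'
resource.k8s.namespace EXISTS AND NOT message CONTAINS 'timeout'
has(tags, 'error') AND severity_number BETWEEN 1 AND 3
"checkout failed"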
104 frontend/src/antlr-parser/FilterQuery.interp (new file)
File diff suppressed because one or more lines are too long
49 frontend/src/antlr-parser/FilterQuery.tokens (new file)
@@ -0,0 +1,49 @@
|
||||
LPAREN=1
|
||||
RPAREN=2
|
||||
LBRACK=3
|
||||
RBRACK=4
|
||||
COMMA=5
|
||||
EQUALS=6
|
||||
NOT_EQUALS=7
|
||||
NEQ=8
|
||||
LT=9
|
||||
LE=10
|
||||
GT=11
|
||||
GE=12
|
||||
LIKE=13
|
||||
NOT_LIKE=14
|
||||
ILIKE=15
|
||||
NOT_ILIKE=16
|
||||
BETWEEN=17
|
||||
NOT_BETWEEN=18
|
||||
EXISTS=19
|
||||
NOT_EXISTS=20
|
||||
REGEXP=21
|
||||
NOT_REGEXP=22
|
||||
CONTAINS=23
|
||||
NOT_CONTAINS=24
|
||||
IN=25
|
||||
NOT_IN=26
|
||||
NOT=27
|
||||
AND=28
|
||||
OR=29
|
||||
HAS=30
|
||||
HASANY=31
|
||||
HASALL=32
|
||||
HASNONE=33
|
||||
BOOL=34
|
||||
NUMBER=35
|
||||
QUOTED_TEXT=36
|
||||
KEY=37
|
||||
WS=38
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
']'=4
|
||||
','=5
|
||||
'!='=7
|
||||
'<>'=8
|
||||
'<'=9
|
||||
'<='=10
|
||||
'>'=11
|
||||
'>='=12
|
||||
132 frontend/src/antlr-parser/FilterQueryLexer.interp (new file)
File diff suppressed because one or more lines are too long
49 frontend/src/antlr-parser/FilterQueryLexer.tokens (new file)
@@ -0,0 +1,49 @@
|
||||
LPAREN=1
|
||||
RPAREN=2
|
||||
LBRACK=3
|
||||
RBRACK=4
|
||||
COMMA=5
|
||||
EQUALS=6
|
||||
NOT_EQUALS=7
|
||||
NEQ=8
|
||||
LT=9
|
||||
LE=10
|
||||
GT=11
|
||||
GE=12
|
||||
LIKE=13
|
||||
NOT_LIKE=14
|
||||
ILIKE=15
|
||||
NOT_ILIKE=16
|
||||
BETWEEN=17
|
||||
NOT_BETWEEN=18
|
||||
EXISTS=19
|
||||
NOT_EXISTS=20
|
||||
REGEXP=21
|
||||
NOT_REGEXP=22
|
||||
CONTAINS=23
|
||||
NOT_CONTAINS=24
|
||||
IN=25
|
||||
NOT_IN=26
|
||||
NOT=27
|
||||
AND=28
|
||||
OR=29
|
||||
HAS=30
|
||||
HASANY=31
|
||||
HASALL=32
|
||||
HASNONE=33
|
||||
BOOL=34
|
||||
NUMBER=35
|
||||
QUOTED_TEXT=36
|
||||
KEY=37
|
||||
WS=38
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
']'=4
|
||||
','=5
|
||||
'!='=7
|
||||
'<>'=8
|
||||
'<'=9
|
||||
'<='=10
|
||||
'>'=11
|
||||
'>='=12
|
||||
249 frontend/src/antlr-parser/FilterQueryLexer.ts (new file)
@@ -0,0 +1,249 @@
|
||||
// Generated from src/antlr-parser/FilterQuery.g4 by ANTLR 4.13.1
|
||||
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
|
||||
import {
|
||||
ATN,
|
||||
ATNDeserializer,
|
||||
CharStream,
|
||||
DecisionState, DFA,
|
||||
Lexer,
|
||||
LexerATNSimulator,
|
||||
RuleContext,
|
||||
PredictionContextCache,
|
||||
Token
|
||||
} from "antlr4";
|
||||
export default class FilterQueryLexer extends Lexer {
|
||||
public static readonly LPAREN = 1;
|
||||
public static readonly RPAREN = 2;
|
||||
public static readonly LBRACK = 3;
|
||||
public static readonly RBRACK = 4;
|
||||
public static readonly COMMA = 5;
|
||||
public static readonly EQUALS = 6;
|
||||
public static readonly NOT_EQUALS = 7;
|
||||
public static readonly NEQ = 8;
|
||||
public static readonly LT = 9;
|
||||
public static readonly LE = 10;
|
||||
public static readonly GT = 11;
|
||||
public static readonly GE = 12;
|
||||
public static readonly LIKE = 13;
|
||||
public static readonly NOT_LIKE = 14;
|
||||
public static readonly ILIKE = 15;
|
||||
public static readonly NOT_ILIKE = 16;
|
||||
public static readonly BETWEEN = 17;
|
||||
public static readonly NOT_BETWEEN = 18;
|
||||
public static readonly EXISTS = 19;
|
||||
public static readonly NOT_EXISTS = 20;
|
||||
public static readonly REGEXP = 21;
|
||||
public static readonly NOT_REGEXP = 22;
|
||||
public static readonly CONTAINS = 23;
|
||||
public static readonly NOT_CONTAINS = 24;
|
||||
public static readonly IN = 25;
|
||||
public static readonly NOT_IN = 26;
|
||||
public static readonly NOT = 27;
|
||||
public static readonly AND = 28;
|
||||
public static readonly OR = 29;
|
||||
public static readonly HAS = 30;
|
||||
public static readonly HASANY = 31;
|
||||
public static readonly HASALL = 32;
|
||||
public static readonly HASNONE = 33;
|
||||
public static readonly BOOL = 34;
|
||||
public static readonly NUMBER = 35;
|
||||
public static readonly QUOTED_TEXT = 36;
|
||||
public static readonly KEY = 37;
|
||||
public static readonly WS = 38;
|
||||
public static readonly EOF = Token.EOF;
|
||||
|
||||
public static readonly channelNames: string[] = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ];
|
||||
public static readonly literalNames: (string | null)[] = [ null, "'('",
|
||||
"')'", "'['",
|
||||
"']'", "','",
|
||||
null, "'!='",
|
||||
"'<>'", "'<'",
|
||||
"'<='", "'>'",
|
||||
"'>='" ];
|
||||
public static readonly symbolicNames: (string | null)[] = [ null, "LPAREN",
|
||||
"RPAREN", "LBRACK",
|
||||
"RBRACK", "COMMA",
|
||||
"EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT",
|
||||
"LE", "GT",
|
||||
"GE", "LIKE",
|
||||
"NOT_LIKE",
|
||||
"ILIKE", "NOT_ILIKE",
|
||||
"BETWEEN",
|
||||
"NOT_BETWEEN",
|
||||
"EXISTS", "NOT_EXISTS",
|
||||
"REGEXP", "NOT_REGEXP",
|
||||
"CONTAINS",
|
||||
"NOT_CONTAINS",
|
||||
"IN", "NOT_IN",
|
||||
"NOT", "AND",
|
||||
"OR", "HAS",
|
||||
"HASANY", "HASALL",
|
||||
"HASNONE",
|
||||
"BOOL", "NUMBER",
|
||||
"QUOTED_TEXT",
|
||||
"KEY", "WS" ];
|
||||
public static readonly modeNames: string[] = [ "DEFAULT_MODE", ];
|
||||
|
||||
public static readonly ruleNames: string[] = [
|
||||
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
|
||||
"BETWEEN", "NOT_BETWEEN", "EXISTS", "NOT_EXISTS", "REGEXP", "NOT_REGEXP",
|
||||
"CONTAINS", "NOT_CONTAINS", "IN", "NOT_IN", "NOT", "AND", "OR", "HAS",
|
||||
"HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY",
|
||||
"WS", "DIGIT",
|
||||
];
|
||||
|
||||
|
||||
constructor(input: CharStream) {
|
||||
super(input);
|
||||
this._interp = new LexerATNSimulator(this, FilterQueryLexer._ATN, FilterQueryLexer.DecisionsToDFA, new PredictionContextCache());
|
||||
}
|
||||
|
||||
public get grammarFileName(): string { return "FilterQuery.g4"; }
|
||||
|
||||
public get literalNames(): (string | null)[] { return FilterQueryLexer.literalNames; }
|
||||
public get symbolicNames(): (string | null)[] { return FilterQueryLexer.symbolicNames; }
|
||||
public get ruleNames(): string[] { return FilterQueryLexer.ruleNames; }
|
||||
|
||||
public get serializedATN(): number[] { return FilterQueryLexer._serializedATN; }
|
||||
|
||||
public get channelNames(): string[] { return FilterQueryLexer.channelNames; }
|
||||
|
||||
public get modeNames(): string[] { return FilterQueryLexer.modeNames; }
|
||||
|
||||
public static readonly _serializedATN: number[] = [4,0,38,359,6,-1,2,0,
|
||||
7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,2,9,
|
||||
7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7,14,2,15,7,15,2,16,7,
|
||||
16,2,17,7,17,2,18,7,18,2,19,7,19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,
|
||||
2,24,7,24,2,25,7,25,2,26,7,26,2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,
|
||||
31,7,31,2,32,7,32,2,33,7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,2,38,
|
||||
7,38,1,0,1,0,1,1,1,1,1,2,1,2,1,3,1,3,1,4,1,4,1,5,1,5,1,5,3,5,93,8,5,1,6,
|
||||
1,6,1,6,1,7,1,7,1,7,1,8,1,8,1,9,1,9,1,9,1,10,1,10,1,11,1,11,1,11,1,12,1,
|
||||
12,1,12,1,12,1,12,1,13,1,13,1,13,1,13,4,13,120,8,13,11,13,12,13,121,1,13,
|
||||
1,13,1,13,1,13,1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,15,1,15,1,15,1,15,4,
|
||||
15,139,8,15,11,15,12,15,140,1,15,1,15,1,15,1,15,1,15,1,15,1,16,1,16,1,16,
|
||||
1,16,1,16,1,16,1,16,1,16,1,17,1,17,1,17,1,17,4,17,161,8,17,11,17,12,17,
|
||||
162,1,17,1,17,1,17,1,17,1,17,1,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1,
|
||||
18,3,18,179,8,18,1,19,1,19,1,19,1,19,4,19,185,8,19,11,19,12,19,186,1,19,
|
||||
1,19,1,19,1,19,1,19,1,19,3,19,195,8,19,1,20,1,20,1,20,1,20,1,20,1,20,1,
|
||||
20,1,21,1,21,1,21,1,21,4,21,208,8,21,11,21,12,21,209,1,21,1,21,1,21,1,21,
|
||||
1,21,1,21,1,21,1,22,1,22,1,22,1,22,1,22,1,22,1,22,1,22,3,22,227,8,22,1,
|
||||
23,1,23,1,23,1,23,4,23,233,8,23,11,23,12,23,234,1,23,1,23,1,23,1,23,1,23,
|
||||
1,23,1,23,1,23,3,23,245,8,23,1,24,1,24,1,24,1,25,1,25,1,25,1,25,4,25,254,
|
||||
8,25,11,25,12,25,255,1,25,1,25,1,25,1,26,1,26,1,26,1,26,1,27,1,27,1,27,
|
||||
1,27,1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,30,1,
|
||||
30,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,32,1,32,1,32,1,32,1,32,1,32,1,32,
|
||||
1,32,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,3,33,307,8,33,1,34,4,
|
||||
34,310,8,34,11,34,12,34,311,1,34,1,34,4,34,316,8,34,11,34,12,34,317,3,34,
|
||||
320,8,34,1,35,1,35,1,35,1,35,5,35,326,8,35,10,35,12,35,329,9,35,1,35,1,
|
||||
35,1,35,1,35,1,35,5,35,336,8,35,10,35,12,35,339,9,35,1,35,3,35,342,8,35,
|
||||
1,36,1,36,5,36,346,8,36,10,36,12,36,349,9,36,1,37,4,37,352,8,37,11,37,12,
|
||||
37,353,1,37,1,37,1,38,1,38,0,0,39,1,1,3,2,5,3,7,4,9,5,11,6,13,7,15,8,17,
|
||||
9,19,10,21,11,23,12,25,13,27,14,29,15,31,16,33,17,35,18,37,19,39,20,41,
|
||||
21,43,22,45,23,47,24,49,25,51,26,53,27,55,28,57,29,59,30,61,31,63,32,65,
|
||||
33,67,34,69,35,71,36,73,37,75,38,77,0,1,0,28,2,0,76,76,108,108,2,0,73,73,
|
||||
105,105,2,0,75,75,107,107,2,0,69,69,101,101,2,0,78,78,110,110,2,0,79,79,
|
||||
111,111,2,0,84,84,116,116,2,0,9,9,32,32,2,0,66,66,98,98,2,0,87,87,119,119,
|
||||
2,0,88,88,120,120,2,0,83,83,115,115,2,0,82,82,114,114,2,0,71,71,103,103,
|
||||
2,0,80,80,112,112,2,0,67,67,99,99,2,0,65,65,97,97,2,0,68,68,100,100,2,0,
|
||||
72,72,104,104,2,0,89,89,121,121,2,0,85,85,117,117,2,0,70,70,102,102,2,0,
|
||||
34,34,92,92,2,0,39,39,92,92,4,0,48,57,65,90,95,95,97,122,6,0,46,46,48,57,
|
||||
65,91,93,93,95,95,97,122,3,0,9,10,13,13,32,32,1,0,48,57,380,0,1,1,0,0,0,
|
||||
0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,
|
||||
0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,
|
||||
1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,
|
||||
0,0,37,1,0,0,0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,
|
||||
1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1,0,0,0,0,55,1,0,0,0,0,57,1,0,0,
|
||||
0,0,59,1,0,0,0,0,61,1,0,0,0,0,63,1,0,0,0,0,65,1,0,0,0,0,67,1,0,0,0,0,69,
|
||||
1,0,0,0,0,71,1,0,0,0,0,73,1,0,0,0,0,75,1,0,0,0,1,79,1,0,0,0,3,81,1,0,0,
|
||||
0,5,83,1,0,0,0,7,85,1,0,0,0,9,87,1,0,0,0,11,92,1,0,0,0,13,94,1,0,0,0,15,
|
||||
97,1,0,0,0,17,100,1,0,0,0,19,102,1,0,0,0,21,105,1,0,0,0,23,107,1,0,0,0,
|
||||
25,110,1,0,0,0,27,115,1,0,0,0,29,128,1,0,0,0,31,134,1,0,0,0,33,148,1,0,
|
||||
0,0,35,156,1,0,0,0,37,172,1,0,0,0,39,180,1,0,0,0,41,196,1,0,0,0,43,203,
|
||||
1,0,0,0,45,218,1,0,0,0,47,228,1,0,0,0,49,246,1,0,0,0,51,249,1,0,0,0,53,
|
||||
260,1,0,0,0,55,264,1,0,0,0,57,268,1,0,0,0,59,271,1,0,0,0,61,275,1,0,0,0,
|
||||
63,282,1,0,0,0,65,289,1,0,0,0,67,306,1,0,0,0,69,309,1,0,0,0,71,341,1,0,
|
||||
0,0,73,343,1,0,0,0,75,351,1,0,0,0,77,357,1,0,0,0,79,80,5,40,0,0,80,2,1,
|
||||
0,0,0,81,82,5,41,0,0,82,4,1,0,0,0,83,84,5,91,0,0,84,6,1,0,0,0,85,86,5,93,
|
||||
0,0,86,8,1,0,0,0,87,88,5,44,0,0,88,10,1,0,0,0,89,93,5,61,0,0,90,91,5,61,
|
||||
0,0,91,93,5,61,0,0,92,89,1,0,0,0,92,90,1,0,0,0,93,12,1,0,0,0,94,95,5,33,
|
||||
0,0,95,96,5,61,0,0,96,14,1,0,0,0,97,98,5,60,0,0,98,99,5,62,0,0,99,16,1,
|
||||
0,0,0,100,101,5,60,0,0,101,18,1,0,0,0,102,103,5,60,0,0,103,104,5,61,0,0,
|
||||
104,20,1,0,0,0,105,106,5,62,0,0,106,22,1,0,0,0,107,108,5,62,0,0,108,109,
|
||||
5,61,0,0,109,24,1,0,0,0,110,111,7,0,0,0,111,112,7,1,0,0,112,113,7,2,0,0,
|
||||
113,114,7,3,0,0,114,26,1,0,0,0,115,116,7,4,0,0,116,117,7,5,0,0,117,119,
|
||||
7,6,0,0,118,120,7,7,0,0,119,118,1,0,0,0,120,121,1,0,0,0,121,119,1,0,0,0,
|
||||
121,122,1,0,0,0,122,123,1,0,0,0,123,124,7,0,0,0,124,125,7,1,0,0,125,126,
|
||||
7,2,0,0,126,127,7,3,0,0,127,28,1,0,0,0,128,129,7,1,0,0,129,130,7,0,0,0,
|
||||
130,131,7,1,0,0,131,132,7,2,0,0,132,133,7,3,0,0,133,30,1,0,0,0,134,135,
|
||||
7,4,0,0,135,136,7,5,0,0,136,138,7,6,0,0,137,139,7,7,0,0,138,137,1,0,0,0,
|
||||
139,140,1,0,0,0,140,138,1,0,0,0,140,141,1,0,0,0,141,142,1,0,0,0,142,143,
|
||||
7,1,0,0,143,144,7,0,0,0,144,145,7,1,0,0,145,146,7,2,0,0,146,147,7,3,0,0,
|
||||
147,32,1,0,0,0,148,149,7,8,0,0,149,150,7,3,0,0,150,151,7,6,0,0,151,152,
|
||||
7,9,0,0,152,153,7,3,0,0,153,154,7,3,0,0,154,155,7,4,0,0,155,34,1,0,0,0,
|
||||
156,157,7,4,0,0,157,158,7,5,0,0,158,160,7,6,0,0,159,161,7,7,0,0,160,159,
|
||||
1,0,0,0,161,162,1,0,0,0,162,160,1,0,0,0,162,163,1,0,0,0,163,164,1,0,0,0,
|
||||
164,165,7,8,0,0,165,166,7,3,0,0,166,167,7,6,0,0,167,168,7,9,0,0,168,169,
|
||||
7,3,0,0,169,170,7,3,0,0,170,171,7,4,0,0,171,36,1,0,0,0,172,173,7,3,0,0,
|
||||
173,174,7,10,0,0,174,175,7,1,0,0,175,176,7,11,0,0,176,178,7,6,0,0,177,179,
|
||||
7,11,0,0,178,177,1,0,0,0,178,179,1,0,0,0,179,38,1,0,0,0,180,181,7,4,0,0,
|
||||
181,182,7,5,0,0,182,184,7,6,0,0,183,185,7,7,0,0,184,183,1,0,0,0,185,186,
|
||||
1,0,0,0,186,184,1,0,0,0,186,187,1,0,0,0,187,188,1,0,0,0,188,189,7,3,0,0,
|
||||
189,190,7,10,0,0,190,191,7,1,0,0,191,192,7,11,0,0,192,194,7,6,0,0,193,195,
|
||||
7,11,0,0,194,193,1,0,0,0,194,195,1,0,0,0,195,40,1,0,0,0,196,197,7,12,0,
|
||||
0,197,198,7,3,0,0,198,199,7,13,0,0,199,200,7,3,0,0,200,201,7,10,0,0,201,
|
||||
202,7,14,0,0,202,42,1,0,0,0,203,204,7,4,0,0,204,205,7,5,0,0,205,207,7,6,
|
||||
0,0,206,208,7,7,0,0,207,206,1,0,0,0,208,209,1,0,0,0,209,207,1,0,0,0,209,
|
||||
210,1,0,0,0,210,211,1,0,0,0,211,212,7,12,0,0,212,213,7,3,0,0,213,214,7,
|
||||
13,0,0,214,215,7,3,0,0,215,216,7,10,0,0,216,217,7,14,0,0,217,44,1,0,0,0,
|
||||
218,219,7,15,0,0,219,220,7,5,0,0,220,221,7,4,0,0,221,222,7,6,0,0,222,223,
|
||||
7,16,0,0,223,224,7,1,0,0,224,226,7,4,0,0,225,227,7,11,0,0,226,225,1,0,0,
|
||||
0,226,227,1,0,0,0,227,46,1,0,0,0,228,229,7,4,0,0,229,230,7,5,0,0,230,232,
|
||||
7,6,0,0,231,233,7,7,0,0,232,231,1,0,0,0,233,234,1,0,0,0,234,232,1,0,0,0,
|
||||
234,235,1,0,0,0,235,236,1,0,0,0,236,237,7,15,0,0,237,238,7,5,0,0,238,239,
|
||||
7,4,0,0,239,240,7,6,0,0,240,241,7,16,0,0,241,242,7,1,0,0,242,244,7,4,0,
|
||||
0,243,245,7,11,0,0,244,243,1,0,0,0,244,245,1,0,0,0,245,48,1,0,0,0,246,247,
|
||||
7,1,0,0,247,248,7,4,0,0,248,50,1,0,0,0,249,250,7,4,0,0,250,251,7,5,0,0,
|
||||
251,253,7,6,0,0,252,254,7,7,0,0,253,252,1,0,0,0,254,255,1,0,0,0,255,253,
|
||||
1,0,0,0,255,256,1,0,0,0,256,257,1,0,0,0,257,258,7,1,0,0,258,259,7,4,0,0,
|
||||
259,52,1,0,0,0,260,261,7,4,0,0,261,262,7,5,0,0,262,263,7,6,0,0,263,54,1,
|
||||
0,0,0,264,265,7,16,0,0,265,266,7,4,0,0,266,267,7,17,0,0,267,56,1,0,0,0,
|
||||
268,269,7,5,0,0,269,270,7,12,0,0,270,58,1,0,0,0,271,272,7,18,0,0,272,273,
|
||||
7,16,0,0,273,274,7,11,0,0,274,60,1,0,0,0,275,276,7,18,0,0,276,277,7,16,
|
||||
0,0,277,278,7,11,0,0,278,279,7,16,0,0,279,280,7,4,0,0,280,281,7,19,0,0,
|
||||
281,62,1,0,0,0,282,283,7,18,0,0,283,284,7,16,0,0,284,285,7,11,0,0,285,286,
|
||||
7,16,0,0,286,287,7,0,0,0,287,288,7,0,0,0,288,64,1,0,0,0,289,290,7,18,0,
|
||||
0,290,291,7,16,0,0,291,292,7,11,0,0,292,293,7,4,0,0,293,294,7,5,0,0,294,
|
||||
295,7,4,0,0,295,296,7,3,0,0,296,66,1,0,0,0,297,298,7,6,0,0,298,299,7,12,
|
||||
0,0,299,300,7,20,0,0,300,307,7,3,0,0,301,302,7,21,0,0,302,303,7,16,0,0,
|
||||
303,304,7,0,0,0,304,305,7,11,0,0,305,307,7,3,0,0,306,297,1,0,0,0,306,301,
|
||||
1,0,0,0,307,68,1,0,0,0,308,310,3,77,38,0,309,308,1,0,0,0,310,311,1,0,0,
|
||||
0,311,309,1,0,0,0,311,312,1,0,0,0,312,319,1,0,0,0,313,315,5,46,0,0,314,
|
||||
316,3,77,38,0,315,314,1,0,0,0,316,317,1,0,0,0,317,315,1,0,0,0,317,318,1,
|
||||
0,0,0,318,320,1,0,0,0,319,313,1,0,0,0,319,320,1,0,0,0,320,70,1,0,0,0,321,
|
||||
327,5,34,0,0,322,326,8,22,0,0,323,324,5,92,0,0,324,326,9,0,0,0,325,322,
|
||||
1,0,0,0,325,323,1,0,0,0,326,329,1,0,0,0,327,325,1,0,0,0,327,328,1,0,0,0,
|
||||
328,330,1,0,0,0,329,327,1,0,0,0,330,342,5,34,0,0,331,337,5,39,0,0,332,336,
|
||||
8,23,0,0,333,334,5,92,0,0,334,336,9,0,0,0,335,332,1,0,0,0,335,333,1,0,0,
|
||||
0,336,339,1,0,0,0,337,335,1,0,0,0,337,338,1,0,0,0,338,340,1,0,0,0,339,337,
|
||||
1,0,0,0,340,342,5,39,0,0,341,321,1,0,0,0,341,331,1,0,0,0,342,72,1,0,0,0,
|
||||
343,347,7,24,0,0,344,346,7,25,0,0,345,344,1,0,0,0,346,349,1,0,0,0,347,345,
|
||||
1,0,0,0,347,348,1,0,0,0,348,74,1,0,0,0,349,347,1,0,0,0,350,352,7,26,0,0,
|
||||
351,350,1,0,0,0,352,353,1,0,0,0,353,351,1,0,0,0,353,354,1,0,0,0,354,355,
|
||||
1,0,0,0,355,356,6,37,0,0,356,76,1,0,0,0,357,358,7,27,0,0,358,78,1,0,0,0,
|
||||
24,0,92,121,140,162,178,186,194,209,226,234,244,255,306,311,317,319,325,
|
||||
327,335,337,341,347,353,1,6,0,0];
|
||||
|
||||
private static __ATN: ATN;
|
||||
public static get _ATN(): ATN {
|
||||
if (!FilterQueryLexer.__ATN) {
|
||||
FilterQueryLexer.__ATN = new ATNDeserializer().deserialize(FilterQueryLexer._serializedATN);
|
||||
}
|
||||
|
||||
return FilterQueryLexer.__ATN;
|
||||
}
|
||||
|
||||
|
||||
static DecisionsToDFA = FilterQueryLexer._ATN.decisionToState.map( (ds: DecisionState, index: number) => new DFA(ds, index) );
|
||||
}
|
||||
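A minimal sketch of driving this generated lexer on its own, assuming the antlr4 npm runtime the files were generated with (4.13.x), where a CharStream can be constructed from a string; import paths are relative to the generated directory:

import { CharStream, CommonTokenStream } from 'antlr4';

import FilterQueryLexer from './FilterQueryLexer';

// Tokenize a filter expression and print each token's symbolic name and text.
const input = new CharStream("service.name = 'frontend' AND duration_ms > 250");
const lexer = new FilterQueryLexer(input);
const tokenStream = new CommonTokenStream(lexer);
tokenStream.fill();

tokenStream.tokens.forEach((token) => {
  const name = FilterQueryLexer.symbolicNames[token.type] ?? 'EOF';
  console.log(`${name}\t${token.text}`);
});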
201 frontend/src/antlr-parser/FilterQueryListener.ts (new file)
@@ -0,0 +1,201 @@
|
||||
// Generated from src/antlr-parser/FilterQuery.g4 by ANTLR 4.13.1
|
||||
|
||||
import {ParseTreeListener} from "antlr4";
|
||||
|
||||
|
||||
import { QueryContext } from "./FilterQueryParser";
|
||||
import { ExpressionContext } from "./FilterQueryParser";
|
||||
import { OrExpressionContext } from "./FilterQueryParser";
|
||||
import { AndExpressionContext } from "./FilterQueryParser";
|
||||
import { UnaryExpressionContext } from "./FilterQueryParser";
|
||||
import { PrimaryContext } from "./FilterQueryParser";
|
||||
import { ComparisonContext } from "./FilterQueryParser";
|
||||
import { InClauseContext } from "./FilterQueryParser";
|
||||
import { NotInClauseContext } from "./FilterQueryParser";
|
||||
import { ValueListContext } from "./FilterQueryParser";
|
||||
import { FullTextContext } from "./FilterQueryParser";
|
||||
import { FunctionCallContext } from "./FilterQueryParser";
|
||||
import { FunctionParamListContext } from "./FilterQueryParser";
|
||||
import { FunctionParamContext } from "./FilterQueryParser";
|
||||
import { ArrayContext } from "./FilterQueryParser";
|
||||
import { ValueContext } from "./FilterQueryParser";
|
||||
import { KeyContext } from "./FilterQueryParser";
|
||||
|
||||
|
||||
/**
|
||||
* This interface defines a complete listener for a parse tree produced by
|
||||
* `FilterQueryParser`.
|
||||
*/
|
||||
export default class FilterQueryListener extends ParseTreeListener {
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.query`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterQuery?: (ctx: QueryContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.query`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitQuery?: (ctx: QueryContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterExpression?: (ctx: ExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitExpression?: (ctx: ExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.orExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterOrExpression?: (ctx: OrExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.orExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitOrExpression?: (ctx: OrExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.andExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterAndExpression?: (ctx: AndExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.andExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitAndExpression?: (ctx: AndExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.unaryExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterUnaryExpression?: (ctx: UnaryExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.unaryExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitUnaryExpression?: (ctx: UnaryExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.primary`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterPrimary?: (ctx: PrimaryContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.primary`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitPrimary?: (ctx: PrimaryContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.comparison`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterComparison?: (ctx: ComparisonContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.comparison`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitComparison?: (ctx: ComparisonContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.inClause`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterInClause?: (ctx: InClauseContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.inClause`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitInClause?: (ctx: InClauseContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.notInClause`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterNotInClause?: (ctx: NotInClauseContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.notInClause`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitNotInClause?: (ctx: NotInClauseContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.valueList`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterValueList?: (ctx: ValueListContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.valueList`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitValueList?: (ctx: ValueListContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.fullText`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterFullText?: (ctx: FullTextContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.fullText`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitFullText?: (ctx: FullTextContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.functionCall`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterFunctionCall?: (ctx: FunctionCallContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.functionCall`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitFunctionCall?: (ctx: FunctionCallContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.functionParamList`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterFunctionParamList?: (ctx: FunctionParamListContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.functionParamList`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitFunctionParamList?: (ctx: FunctionParamListContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.functionParam`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterFunctionParam?: (ctx: FunctionParamContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.functionParam`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitFunctionParam?: (ctx: FunctionParamContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.array`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterArray?: (ctx: ArrayContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.array`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitArray?: (ctx: ArrayContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.value`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterValue?: (ctx: ValueContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.value`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitValue?: (ctx: ValueContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.key`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterKey?: (ctx: KeyContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.key`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitKey?: (ctx: KeyContext) => void;
|
||||
}
|
||||
|
||||
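A hypothetical concrete listener built on this generated base class, shown only to illustrate how the optional enter/exit callbacks above are overridden; the KeyCollector name and behaviour are not part of the diff:

import FilterQueryListener from './FilterQueryListener';
import { ComparisonContext, KeyContext } from './FilterQueryParser';

// Records every attribute key referenced in a query and the raw text of each
// comparison encountered while the tree is walked.
export default class KeyCollector extends FilterQueryListener {
  public keys: string[] = [];

  public comparisons: string[] = [];

  enterKey = (ctx: KeyContext): void => {
    this.keys.push(ctx.getText());
  };

  enterComparison = (ctx: ComparisonContext): void => {
    this.comparisons.push(ctx.getText());
  };
}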
1895 frontend/src/antlr-parser/FilterQueryParser.ts (new file)
File diff suppressed because it is too large
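The parser diff is collapsed here, but the generated pieces compose in the standard ANTLR way. A sketch of the end-to-end pipeline using the hypothetical KeyCollector listener above; since `query` is the grammar's start rule, the generated parser exposes a query() method returning the root context:

import { CharStream, CommonTokenStream, ParseTreeWalker } from 'antlr4';

import FilterQueryLexer from './FilterQueryLexer';
import FilterQueryParser from './FilterQueryParser';
import KeyCollector from './KeyCollector';

// Parse a filter expression and return every attribute key it references.
export function collectKeys(expression: string): string[] {
  const chars = new CharStream(expression);
  const lexer = new FilterQueryLexer(chars);
  const tokens = new CommonTokenStream(lexer);
  const parser = new FilterQueryParser(tokens);

  const tree = parser.query(); // root context for the `query` start rule

  const collector = new KeyCollector();
  ParseTreeWalker.DEFAULT.walk(collector, tree);
  return collector.keys;
}

// collectKeys("service.name = 'frontend' AND duration_ms > 250")
// → ['service.name', 'duration_ms']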
136 frontend/src/antlr-parser/FilterQueryVisitor.ts (new file)
@@ -0,0 +1,136 @@
|
||||
// Generated from src/antlr-parser/FilterQuery.g4 by ANTLR 4.13.1
|
||||
|
||||
import {ParseTreeVisitor} from 'antlr4';
|
||||
|
||||
|
||||
import { QueryContext } from "./FilterQueryParser";
|
||||
import { ExpressionContext } from "./FilterQueryParser";
|
||||
import { OrExpressionContext } from "./FilterQueryParser";
|
||||
import { AndExpressionContext } from "./FilterQueryParser";
|
||||
import { UnaryExpressionContext } from "./FilterQueryParser";
|
||||
import { PrimaryContext } from "./FilterQueryParser";
|
||||
import { ComparisonContext } from "./FilterQueryParser";
|
||||
import { InClauseContext } from "./FilterQueryParser";
|
||||
import { NotInClauseContext } from "./FilterQueryParser";
|
||||
import { ValueListContext } from "./FilterQueryParser";
|
||||
import { FullTextContext } from "./FilterQueryParser";
|
||||
import { FunctionCallContext } from "./FilterQueryParser";
|
||||
import { FunctionParamListContext } from "./FilterQueryParser";
|
||||
import { FunctionParamContext } from "./FilterQueryParser";
|
||||
import { ArrayContext } from "./FilterQueryParser";
|
||||
import { ValueContext } from "./FilterQueryParser";
|
||||
import { KeyContext } from "./FilterQueryParser";
|
||||
|
||||
|
||||
/**
|
||||
* This interface defines a complete generic visitor for a parse tree produced
|
||||
* by `FilterQueryParser`.
|
||||
*
|
||||
* @param <Result> The return type of the visit operation. Use `void` for
|
||||
* operations with no return type.
|
||||
*/
|
||||
export default class FilterQueryVisitor<Result> extends ParseTreeVisitor<Result> {
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.query`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitQuery?: (ctx: QueryContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitExpression?: (ctx: ExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.orExpression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitOrExpression?: (ctx: OrExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.andExpression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitAndExpression?: (ctx: AndExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.unaryExpression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitUnaryExpression?: (ctx: UnaryExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.primary`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitPrimary?: (ctx: PrimaryContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.comparison`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitComparison?: (ctx: ComparisonContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.inClause`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitInClause?: (ctx: InClauseContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.notInClause`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitNotInClause?: (ctx: NotInClauseContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.valueList`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitValueList?: (ctx: ValueListContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.fullText`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitFullText?: (ctx: FullTextContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.functionCall`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitFunctionCall?: (ctx: FunctionCallContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.functionParamList`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitFunctionParamList?: (ctx: FunctionParamListContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.functionParam`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitFunctionParam?: (ctx: FunctionParamContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.array`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitArray?: (ctx: ArrayContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.value`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitValue?: (ctx: ValueContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.key`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitKey?: (ctx: KeyContext) => Result;
|
||||
}
|
||||
|
||||
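The visitor base works the same way, but each callback returns a Result. A hypothetical sketch that tallies leaf constructs through side effects, deliberately avoiding assumptions about the runtime's default result aggregation:

import FilterQueryVisitor from './FilterQueryVisitor';
import {
  ComparisonContext,
  FullTextContext,
  FunctionCallContext,
} from './FilterQueryParser';

// Counts the leaf constructs of a query; rules without an override fall back
// to visitChildren, which keeps descending into the tree.
export default class FilterStatsVisitor extends FilterQueryVisitor<void> {
  public comparisons = 0;

  public fullTextTerms = 0;

  public functionCalls = 0;

  visitComparison = (ctx: ComparisonContext): void => {
    this.comparisons += 1;
    this.visitChildren(ctx);
  };

  visitFullText = (ctx: FullTextContext): void => {
    this.fullTextTerms += 1;
    this.visitChildren(ctx);
  };

  visitFunctionCall = (ctx: FunctionCallContext): void => {
    this.functionCalls += 1;
    this.visitChildren(ctx);
  };
}

// const stats = new FilterStatsVisitor();
// stats.visit(parser.query());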
@@ -1,26 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { APIKeyProps, CreateAPIKeyProps } from 'types/api/pat/types';
|
||||
|
||||
const createAPIKey = async (
|
||||
props: CreateAPIKeyProps,
|
||||
): Promise<SuccessResponse<APIKeyProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/pats', {
|
||||
...props,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default createAPIKey;
|
||||
@@ -1,24 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { AllAPIKeyProps } from 'types/api/pat/types';
|
||||
|
||||
const deleteAPIKey = async (
|
||||
id: string,
|
||||
): Promise<SuccessResponse<AllAPIKeyProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.delete(`/pats/${id}`);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default deleteAPIKey;
|
||||
@@ -1,24 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/alerts/get';
|
||||
|
||||
const get = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.get(`/pats/${props.id}`);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default get;
|
||||
@@ -1,6 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { AllAPIKeyProps } from 'types/api/pat/types';
|
||||
|
||||
export const getAllAPIKeys = (): Promise<AxiosResponse<AllAPIKeyProps>> =>
|
||||
axios.get(`/pats`);
|
||||
@@ -1,26 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, UpdateAPIKeyProps } from 'types/api/pat/types';
|
||||
|
||||
const updateAPIKey = async (
|
||||
props: UpdateAPIKeyProps,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.put(`/pats/${props.id}`, {
|
||||
...props.data,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default updateAPIKey;
|
||||
46 frontend/src/api/ErrorResponseHandlerV2.ts (new file)
@@ -0,0 +1,46 @@
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import APIError from 'types/api/error';

// reference - https://axios-http.com/docs/handling_errors
export function ErrorResponseHandlerV2(error: AxiosError<ErrorV2Resp>): never {
  const { response, request } = error;
  // The request was made and the server responded with a status code
  // that falls out of the range of 2xx
  if (response) {
    throw new APIError({
      httpStatusCode: response.status || 500,
      error: {
        code: response.data.error.code,
        message: response.data.error.message,
        url: response.data.error.url,
        errors: response.data.error.errors,
      },
    });
  }
  // The request was made but no response was received
  // `error.request` is an instance of XMLHttpRequest in the browser and an instance of
  // http.ClientRequest in node.js
  if (request) {
    throw new APIError({
      httpStatusCode: error.status || 500,
      error: {
        code: error.code || error.name,
        message: error.message,
        url: '',
        errors: [],
      },
    });
  }

  // Something happened in setting up the request that triggered an Error
  throw new APIError({
    httpStatusCode: error.status || 500,
    error: {
      code: error.name,
      message: error.message,
      url: '',
      errors: [],
    },
  });
}
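Because the handler always throws, callers migrate from returning an ErrorResponse to a plain try/catch, as the channel APIs below do. A condensed sketch of that calling convention; the endpoint and payload type here are illustrative, not part of the diff:

import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';

// Illustrative payload type; real callers use their endpoint's PayloadProps.
interface ExamplePayload {
  id: string;
}

const fetchExample = async (
  id: string,
): Promise<SuccessResponseV2<ExamplePayload>> => {
  try {
    const response = await axios.get<ExamplePayload>(`/example/${id}`);

    return {
      httpStatusCode: response.status,
      data: response.data,
    };
  } catch (error) {
    // Normalises the failure into an APIError and rethrows; the trailing
    // `throw` only reassures TypeScript that this branch never returns.
    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
    throw error;
  }
};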
@@ -1,24 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/SAML/deleteDomain';
|
||||
|
||||
const deleteDomain = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.delete(`/domains/${props.id}`);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default deleteDomain;
|
||||
@@ -1,24 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/SAML/listDomain';
|
||||
|
||||
const listAllDomain = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.get(`/orgs/${props.orgId}/domains`);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default listAllDomain;
|
||||
@@ -1,24 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/SAML/updateDomain';
|
||||
|
||||
const updateDomain = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.put(`/domains/${props.id}`, props);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default updateDomain;
|
||||
@@ -3,6 +3,7 @@ const apiV1 = '/api/v1/';
|
||||
export const apiV2 = '/api/v2/';
|
||||
export const apiV3 = '/api/v3/';
|
||||
export const apiV4 = '/api/v4/';
|
||||
export const apiV5 = '/api/v5/';
|
||||
export const gatewayApiV1 = '/api/gateway/v1/';
|
||||
export const gatewayApiV2 = '/api/gateway/v2/';
|
||||
export const apiAlertManager = '/api/alertmanager/';
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import {
|
||||
CheckoutRequestPayloadProps,
|
||||
CheckoutSuccessPayloadProps,
|
||||
} from 'types/api/billing/checkout';
|
||||
|
||||
const updateCreditCardApi = async (
|
||||
props: CheckoutRequestPayloadProps,
|
||||
): Promise<SuccessResponse<CheckoutSuccessPayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/checkout', {
|
||||
url: props.url,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default updateCreditCardApi;
|
||||
@@ -1,29 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import {
|
||||
CheckoutRequestPayloadProps,
|
||||
CheckoutSuccessPayloadProps,
|
||||
} from 'types/api/billing/checkout';
|
||||
|
||||
const manageCreditCardApi = async (
|
||||
props: CheckoutRequestPayloadProps,
|
||||
): Promise<SuccessResponse<CheckoutSuccessPayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/portal', {
|
||||
url: props.url,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default manageCreditCardApi;
|
||||
@@ -1,14 +1,14 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/channels/createEmail';
|
||||
|
||||
const create = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
): Promise<SuccessResponseV2<PayloadProps>> => {
|
||||
try {
|
||||
const response = await axios.post('/channels', {
|
||||
const response = await axios.post<PayloadProps>('/channels', {
|
||||
name: props.name,
|
||||
email_configs: [
|
||||
{
|
||||
@@ -21,13 +21,12 @@ const create = async (
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data.data,
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
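Since create() now throws an APIError instead of resolving with an ErrorResponse, call sites need a catch path. A hedged sketch of what a caller might look like after this change; the handler name and import paths are assumptions, not taken from the diff:

import createEmail from 'api/channels/createEmail';
import { Props } from 'types/api/channels/createEmail';
import APIError from 'types/api/error';

// Hypothetical submit handler: success and failure are now separate code paths
// rather than one response object whose `error` field had to be inspected.
async function onSubmit(props: Props): Promise<void> {
  try {
    const { data } = await createEmail(props);
    console.log('email channel created', data);
  } catch (error) {
    if (error instanceof APIError) {
      // APIError wraps the structured payload built by ErrorResponseHandlerV2;
      // surface it to the user via the app's notification mechanism.
      console.error(error);
    }
  }
}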
@@ -1,14 +1,14 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/channels/createMsTeams';
|
||||
|
||||
const create = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
): Promise<SuccessResponseV2<PayloadProps>> => {
|
||||
try {
|
||||
const response = await axios.post('/channels', {
|
||||
const response = await axios.post<PayloadProps>('/channels', {
|
||||
name: props.name,
|
||||
msteamsv2_configs: [
|
||||
{
|
||||
@@ -21,13 +21,12 @@ const create = async (
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data.data,
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/channels/createOpsgenie';
|
||||
|
||||
const create = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
): Promise<SuccessResponseV2<PayloadProps>> => {
|
||||
try {
|
||||
const response = await axios.post('/channels', {
|
||||
const response = await axios.post<PayloadProps>('/channels', {
|
||||
name: props.name,
|
||||
opsgenie_configs: [
|
||||
{
|
||||
@@ -24,13 +24,12 @@ const create = async (
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data.data,
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/channels/createPager';
|
||||
|
||||
const create = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
): Promise<SuccessResponseV2<PayloadProps>> => {
|
||||
try {
|
||||
const response = await axios.post('/channels', {
|
||||
const response = await axios.post<PayloadProps>('/channels', {
|
||||
name: props.name,
|
||||
pagerduty_configs: [
|
||||
{
|
||||
@@ -29,13 +29,12 @@ const create = async (
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data.data,
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/channels/createSlack';
|
||||
|
||||
const create = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
): Promise<SuccessResponseV2<PayloadProps>> => {
|
||||
try {
|
||||
const response = await axios.post('/channels', {
|
||||
const response = await axios.post<PayloadProps>('/channels', {
|
||||
name: props.name,
|
||||
slack_configs: [
|
||||
{
|
||||
@@ -22,13 +22,12 @@ const create = async (
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data.data,
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.