Compare commits: signoz-tai...feat/issue (282 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 6d52266248 |  |
|  | 4992dbdbce |  |
|  | a973c5c9b7 |  |
|  | cc2418c160 |  |
|  | e90fa34983 |  |
|  | ac2b11ea6d |  |
|  | f1b0ad5966 |  |
|  | 6faf323f00 |  |
|  | bad8aa7899 |  |
|  | 7e9e31a8c7 |  |
|  | 73e60f298d |  |
|  | 69c06429c2 |  |
|  | a62bf6fc60 |  |
|  | 00526e3803 |  |
|  | 6ee9b9b222 |  |
|  | 9cd5174e24 |  |
|  | 75a4c6b2a4 |  |
|  | 720e8120b0 |  |
|  | cca086d4b6 |  |
|  | 9a1b9885c9 |  |
|  | ef45bda1bf |  |
|  | 1b20083f84 |  |
|  | 3587d7fea3 |  |
|  | 56f4120342 |  |
|  | 758a993433 |  |
|  | a3071e0f30 |  |
|  | 06719a5dd9 |  |
|  | cc264c7960 |  |
|  | 7c8c62c52e |  |
|  | a5a8f7b2dd |  |
|  | d2fe84c92c |  |
|  | 892c6b5b05 |  |
|  | a8905698e6 |  |
|  | 7d0860e309 |  |
|  | b9ee748341 |  |
|  | e2f1676551 |  |
|  | 1ff328f34c |  |
|  | 1ef2cba450 |  |
|  | 7c40d87864 |  |
|  | 096842ed7e |  |
|  | 89bcce6afa |  |
|  | b26e8d854d |  |
|  | 824302be38 |  |
|  | 8d4c4dc5f2 |  |
|  | 91fae8c0f3 |  |
|  | 4bbe8c0ee7 |  |
|  | 0f7d226b9b |  |
|  | e03342e001 |  |
|  | 57f96574ff |  |
|  | 354e4b4b8f |  |
|  | d7102f69a9 |  |
|  | 040c45b144 |  |
|  | 207d7602ab |  |
|  | 018346ca18 |  |
|  | 7290ab3602 |  |
|  | 88239cec4d |  |
|  | 10ba0e6b4f |  |
|  | 88e1e42bf0 |  |
|  | a0d896557e |  |
|  | 2b28c5f2e2 |  |
|  | 6dbcc5fb9d |  |
|  | 175e9a4c5e |  |
|  | 33506cafce |  |
|  | e34e61a20d |  |
|  | da084b4686 |  |
|  | 6821efeb99 |  |
|  | c5d5c84a0e |  |
|  | 9c298e83a5 |  |
|  | 9383b6576d |  |
|  | f10f7a806f |  |
|  | 03600f4d6f |  |
|  | 9fbf111976 |  |
|  | b8dff86a56 |  |
|  | f525647b40 |  |
|  | 0a2b7ca1d8 |  |
|  | 16938c6cc0 |  |
|  | 81b8f93177 |  |
|  | 96cfb607d1 |  |
|  | f526e887cc |  |
|  | 03ab6e704b |  |
|  | 9c0134da54 |  |
|  | 175b059268 |  |
|  | dfca5b13c0 |  |
|  | ad392e81ff |  |
|  | 92ceefccee |  |
|  | 9cc4e1b56f |  |
|  | 3758ee7451 |  |
|  | 02b605d109 |  |
|  | eb86aabf3e |  |
|  | 8810693bda |  |
|  | 6334e09a60 |  |
|  | 1d379931b2 |  |
|  | 815a6d13c5 |  |
|  | 59af9d1c2f |  |
|  | 19d24da147 |  |
|  | cd1c9ddf11 |  |
|  | 7ff3286c9c |  |
|  | 27830742f9 |  |
|  | 39f07e7477 |  |
|  | 0ab50da7b0 |  |
|  | c03541cd6c |  |
|  | 727a039eb9 |  |
|  | c7db85f44c |  |
|  | 08d9a74055 |  |
|  | 503e4cdf00 |  |
|  | 224f952da7 |  |
|  | 0c28067f89 |  |
|  | 8dc749b9dd |  |
|  | 82a111e5b1 |  |
|  | e2e6c65b4d |  |
|  | f01d21cbf2 |  |
|  | 36886135d1 |  |
|  | 3648027576 |  |
|  | b80626f5e2 |  |
|  | 08579242eb |  |
|  | 6e0b50dd60 |  |
|  | 76ed58c481 |  |
|  | f4d029bd12 |  |
|  | b66af786e6 |  |
|  | 5ad68a3310 |  |
|  | 0f0693f6eb |  |
|  | 16e3c185e9 |  |
|  | 8d6671e362 |  |
|  | 5b237ee628 |  |
|  | cb08ce5e5d |  |
|  | 3fbc3dec48 |  |
|  | 5b2f897a00 |  |
|  | 73f57d8bee |  |
|  | ab17bf3558 |  |
|  | eb5a1b76b8 |  |
|  | 130ff925bd |  |
|  | 75d86cea60 |  |
|  | cf451d335c |  |
|  | e47c7cc17b |  |
|  | 629c54d3f9 |  |
|  | ed3026eeb5 |  |
|  | ccf26883c4 |  |
|  | 958924befe |  |
|  | b70c570cdc |  |
|  | 42a026469b |  |
|  | 6de0908a62 |  |
|  | fd21a4955e |  |
|  | 3dce13d29f |  |
|  | 2ce4b60c55 |  |
|  | c9888804cd |  |
|  | 413b0d9fae |  |
|  | b24095236f |  |
|  | 21d239ce68 |  |
|  | d6e4e3c5ed |  |
|  | 552b103e8b |  |
|  | 1123a9a93d |  |
|  | 8b30e3cc5c |  |
|  | b86e65d2ca |  |
|  | d5e2841083 |  |
|  | 7dad5dcd17 |  |
|  | ac0b640146 |  |
|  | e125d146b5 |  |
|  | a41ffceca4 |  |
|  | 7edb047c0c |  |
|  | 6504f2565b |  |
|  | 6b418a125b |  |
|  | 36827a1667 |  |
|  | 1118c56356 |  |
|  | bd071e3e60 |  |
|  | 36f3a2e26d |  |
|  | fee7e96176 |  |
|  | ef4e3a30fb |  |
|  | 39532d5da0 |  |
|  | 4d216bae4d |  |
|  | 21563914c7 |  |
|  | accb77f227 |  |
|  | e73e1bd078 |  |
|  | 940313d28b |  |
|  | 9815ec7d81 |  |
|  | a7cad0f1a5 |  |
|  | a624b4758d |  |
|  | ee5684b130 |  |
|  | 2f8da5957b |  |
|  | 3f6f77d0e2 |  |
|  | 5bceffbeaa |  |
|  | 9e449e2858 |  |
|  | b60588a749 |  |
|  | c322657666 |  |
|  | a1846c008a |  |
|  | a6824db622 |  |
|  | e6f69aa74c |  |
|  | a9c09f33cb |  |
|  | 9eb2196617 |  |
|  | 131759ec96 |  |
|  | 365a3e250f |  |
|  | f3a1f3cc20 |  |
|  | ae509b4ae9 |  |
|  | 43e2be0333 |  |
|  | 20a40b33ce |  |
|  | a9b07c4b47 |  |
|  | 2a5c7cc0ab |  |
|  | afb18b8142 |  |
|  | 9a580915e6 |  |
|  | 0944af3d31 |  |
|  | 9338efcefc |  |
|  | 6b9e0ce799 |  |
|  | d4c3c24849 |  |
|  | 30d935a768 |  |
|  | 073d42c416 |  |
|  | f11b9644cf |  |
|  | 87922e9577 |  |
|  | 8412727414 |  |
|  | f0a95503d9 |  |
|  | 16e0fa2eef |  |
|  | 2fa944d254 |  |
|  | b0d19035a4 |  |
|  | 054dea366e |  |
|  | aaf0b597dc |  |
|  | 19372c8194 |  |
|  | eb74adad44 |  |
|  | d5c04e1342 |  |
|  | 2b9632c8fd |  |
|  | 24920ae903 |  |
|  | 6f096632a2 |  |
|  | a42eacec4b |  |
|  | e723399f7f |  |
|  | 48936bed9b |  |
|  | ee70474cc7 |  |
|  | c3fa7144ee |  |
|  | 5dd02a5b8e |  |
|  | c0f01e4cb9 |  |
|  | fed84cb50a |  |
|  | 80545c4d07 |  |
|  | 0b1faec092 |  |
|  | ba6f31b1c3 |  |
|  | eed92978a4 |  |
|  | 41cbd316b5 |  |
|  | 8d7d33393d |  |
|  | 8d143b44b1 |  |
|  | 423aebd6eb |  |
|  | 8d630707af |  |
|  | a5b52431b7 |  |
|  | 0138d757c8 |  |
|  | 844195b84f |  |
|  | 8ff05b2e8f |  |
|  | c8c56c544e |  |
|  | 1c43655336 |  |
|  | c269c8c6b8 |  |
|  | 3142b6cc6d |  |
|  | 58e141685a |  |
|  | e17f63a50c |  |
|  | 838ef5dcc5 |  |
|  | e53d3d1269 |  |
|  | 2330420c0d |  |
|  | 65ac277074 |  |
|  | b7982ca348 |  |
|  | 2748b49a44 |  |
|  | 7345027762 |  |
|  | 68f874e433 |  |
|  | 54a82b1664 |  |
|  | 93dc585145 |  |
|  | 6a143efd2c |  |
|  | 0116eb20ab |  |
|  | 79e9d1b357 |  |
|  | b89ce82e25 |  |
|  | b43a198fd8 |  |
|  | b40ca4baf3 |  |
|  | 8df77c9221 |  |
|  | f67555576f |  |
|  | f0a4c37073 |  |
|  | 7972261237 |  |
|  | 3b4a8e5e0f |  |
|  | 5ef3b8ee3f |  |
|  | 597752a4bc |  |
|  | 07a244f569 |  |
|  | eb9385840f |  |
|  | 30b689037a |  |
|  | ba33c885d5 |  |
|  | a4ed9e4d47 |  |
|  | df5767198c |  |
|  | 81c7f3221a |  |
|  | 2cbd8733a1 |  |
|  | 71d1dfe9bd |  |
|  | 459712d25c |  |
|  | 61de2d414d |  |
|  | 0b7cd4c1a7 |  |
|  | 62c033ccf8 |  |
@@ -1,5 +1,4 @@
 services:
   clickhouse:
     image: clickhouse/clickhouse-server:24.1.2-alpine
     container_name: clickhouse
@@ -24,7 +23,6 @@ services:
       retries: 3
     depends_on:
       - zookeeper
   zookeeper:
     image: bitnami/zookeeper:3.7.1
     container_name: zookeeper
@@ -41,9 +39,8 @@ services:
       interval: 30s
       timeout: 5s
       retries: 3
   schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:0.111.29
+    image: signoz/signoz-schema-migrator:v0.111.41
     container_name: schema-migrator-sync
     command:
       - sync
@@ -55,9 +52,8 @@ services:
       clickhouse:
         condition: service_healthy
     restart: on-failure
   schema-migrator-async:
-    image: signoz/signoz-schema-migrator:0.111.29
+    image: signoz/signoz-schema-migrator:v0.111.41
     container_name: schema-migrator-async
     command:
       - async
27 .devenv/docker/postgres/compose.yaml (new file)
@@ -0,0 +1,27 @@
+services:
+
+  postgres:
+    image: postgres:15
+    container_name: postgres
+    environment:
+      POSTGRES_DB: signoz
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: password
+    healthcheck:
+      test:
+        [
+          "CMD",
+          "pg_isready",
+          "-d",
+          "signoz",
+          "-U",
+          "postgres"
+        ]
+      interval: 30s
+      timeout: 30s
+      retries: 3
+    restart: on-failure
+    ports:
+      - "127.0.0.1:5432:5432/tcp"
+    volumes:
+      - ${PWD}/fs/tmp/var/lib/postgresql/data/:/var/lib/postgresql/data/
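The new devenv Postgres compose file pairs with the `devenv-postgres` Makefile target added later in this diff. A minimal usage sketch, assuming Docker Compose v2 and the repository root as the working directory:

```bash
# Bring up the devenv postgres defined above, detached.
cd .devenv/docker/postgres
docker compose -f compose.yaml up -d

# Run the same probe the compose healthcheck uses; the port is bound to
# 127.0.0.1 only, so the check is done inside the container by name.
docker exec postgres pg_isready -d signoz -U postgres
```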
@@ -1,6 +1,7 @@
 .git
 .github
 .vscode
+.devenv
 README.md
 deploy
 sample-apps
3 .github/CODEOWNERS (vendored)
@@ -2,7 +2,7 @@
 # Owners are automatically requested for review for PRs that changes code
 # that they own.
-/frontend/ @YounixM
+/frontend/ @SigNoz/frontend @YounixM
 /frontend/src/container/MetricsApplication @srikanthccv
 /frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
 /deploy/ @SigNoz/devops
@@ -11,3 +11,4 @@
 /pkg/errors/ @grandwizard28
 /pkg/factory/ @grandwizard28
 /pkg/types/ @grandwizard28
+/pkg/sqlmigration/ @vikrantgupta25
75 .github/pull_request_template.md (vendored)
@@ -1,17 +1,74 @@
-### Summary
+## 📄 Summary
-<!-- ✍️ A clear and concise description...-->
+<!-- Describe the purpose of the PR in a few sentences. What does it fix/add/update? -->
-#### Related Issues / PR's
+---
-<!-- ✍️ Add the issues being resolved here and related PR's where applicable -->
+## ✅ Changes
-#### Screenshots
+- [ ] Feature: Brief description
+- [ ] Bug fix: Brief description
-NA
+---
-<!-- ✍️ Add screenshots of before and after changes where applicable-->
+## 🏷️ Required: Add Relevant Labels
-#### Affected Areas and Manually Tested Areas
+> ⚠️ **Manually add appropriate labels in the PR sidebar**
+Please select one or more labels (as applicable):
-<!-- ✍️ Add details of blast radius and dev testing areas where applicable-->
+ex:
+
+- `frontend`
+- `backend`
+- `devops`
+- `bug`
+- `enhancement`
+- `ui`
+- `test`
+
+---
+
+## 👥 Reviewers
+
+> Tag the relevant teams for review:
+
+- [ ] @SigNoz/frontend
+- [ ] @SigNoz/backend
+- [ ] @SigNoz/devops
+
+---
+
+## 🧪 How to Test
+
+<!-- Describe how reviewers can test this PR -->
+1. ...
+2. ...
+3. ...
+
+---
+
+## 🔍 Related Issues
+
+<!-- Reference any related issues (e.g. Fixes #123, Closes #456) -->
+Closes #
+
+---
+
+## 📸 Screenshots / Screen Recording (if applicable / mandatory for UI related changes)
+
+<!-- Add screenshots or GIFs to help visualize changes -->
+
+---
+
+## 📋 Checklist
+
+- [ ] Dev Review
+- [ ] Test cases added (Unit/ Integration / E2E)
+- [ ] Manually tested the changes
+
+---
+
+## 👀 Notes for Reviewers
+
+<!-- Anything reviewers should keep in mind while reviewing -->
42 .github/workflows/README.md (vendored, deleted)
@@ -1,42 +0,0 @@
-# Github actions
-
-## Testing the UI manually on each PR
-
-First we need to make sure the UI is ready
-* Check the `Start tunnel` step in `e2e-k8s/deploy-on-k3s-cluster` job and make sure you see `your url is: https://pull-<number>-signoz.loca.lt`
-* This job will run until the PR is merged or closed to keep the local tunneling alive
-  - github will cancel this job if the PR wasn't merged after 6h
-  - if the job was cancel, go to the action and press `Re-run all jobs`
-
-Now you can open your browser at https://pull-<number>-signoz.loca.lt and check the UI.
-
-## Environment Variables
-
-To run GitHub workflow, a few environment variables needs to add in GitHub secrets
-
-<table>
-  <tr>
-    <th> Variables </th>
-    <th> Description </th>
-    <th> Example </th>
-  </tr>
-  <tr>
-    <td> REPONAME </td>
-    <td> Provide the DockerHub user/organisation name of the image. </td>
-    <td> signoz</td>
-  </tr>
-  <tr>
-    <td> DOCKERHUB_USERNAME </td>
-    <td> Docker hub username </td>
-    <td> signoz</td>
-  </tr>
-  <tr>
-    <td> DOCKERHUB_TOKEN </td>
-    <td> Docker hub password/token with push permission </td>
-    <td> **** </td>
-  </tr>
-  <tr>
-    <td> SONAR_TOKEN </td>
-    <td> <a href="https://sonarcloud.io">SonarCloud</a> token </td>
-    <td> **** </td>
-  </tr>
82 .github/workflows/build-community.yaml (vendored, new file)
@@ -0,0 +1,82 @@
+name: build-community
+
+on:
+  push:
+    tags:
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
+
+defaults:
+  run:
+    shell: bash
+
+env:
+  PRIMUS_HOME: .primus
+  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
+
+jobs:
+  prepare:
+    runs-on: ubuntu-latest
+    outputs:
+      version: ${{ steps.build-info.outputs.version }}
+      hash: ${{ steps.build-info.outputs.hash }}
+      time: ${{ steps.build-info.outputs.time }}
+      branch: ${{ steps.build-info.outputs.branch }}
+    steps:
+      - name: self-checkout
+        uses: actions/checkout@v4
+      - id: token
+        name: github-token-gen
+        uses: actions/create-github-app-token@v1
+        with:
+          app-id: ${{ secrets.PRIMUS_APP_ID }}
+          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
+          owner: ${{ github.repository_owner }}
+      - name: primus-checkout
+        uses: actions/checkout@v4
+        with:
+          repository: signoz/primus
+          ref: main
+          path: .primus
+          token: ${{ steps.token.outputs.token }}
+      - name: build-info
+        id: build-info
+        run: |
+          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
+          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
+          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
+          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
+  js-build:
+    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
+    needs: prepare
+    secrets: inherit
+    with:
+      PRIMUS_REF: main
+      JS_SRC: frontend
+      JS_OUTPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
+      JS_OUTPUT_ARTIFACT_PATH: frontend/build
+      DOCKER_BUILD: false
+      DOCKER_MANIFEST: false
+  go-build:
+    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
+    needs: [prepare, js-build]
+    secrets: inherit
+    with:
+      PRIMUS_REF: main
+      GO_VERSION: 1.23
+      GO_NAME: signoz-community
+      GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
+      GO_INPUT_ARTIFACT_PATH: frontend/build
+      GO_BUILD_CONTEXT: ./pkg/query-service
+      GO_BUILD_FLAGS: >-
+        -tags timetzdata
+        -ldflags='-linkmode external -extldflags \"-static\" -s -w
+        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
+        -X github.com/SigNoz/signoz/pkg/version.variant=community
+        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
+        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
+        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
+      GO_CGO_ENABLED: 1
+      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
+      DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
+      DOCKER_MANIFEST: true
+      DOCKER_PROVIDERS: dockerhub
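The `GO_BUILD_FLAGS` above stamp version metadata into the binary at link time via `-ldflags -X`. A minimal, self-contained sketch of that mechanism; the file path, package variable, and version string below are illustrative, not part of the workflow:

```bash
# -X main.version=... overwrites the string variable at link time, so the
# binary can report its own version without any generated source file.
cat > /tmp/main.go <<'EOF'
package main

import "fmt"

var version = "dev" // replaced by -ldflags "-X main.version=..."

func main() { fmt.Println(version) }
EOF
go build -ldflags "-X main.version=v1.2.3" -o /tmp/demo /tmp/main.go
/tmp/demo # prints: v1.2.3
```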
117 .github/workflows/build-enterprise.yaml (vendored, new file)
@@ -0,0 +1,117 @@
+name: build-enterprise
+
+on:
+  push:
+    tags:
+      - v*
+
+defaults:
+  run:
+    shell: bash
+
+env:
+  PRIMUS_HOME: .primus
+  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
+
+jobs:
+  prepare:
+    runs-on: ubuntu-latest
+    outputs:
+      docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
+      version: ${{ steps.build-info.outputs.version }}
+      hash: ${{ steps.build-info.outputs.hash }}
+      time: ${{ steps.build-info.outputs.time }}
+      branch: ${{ steps.build-info.outputs.branch }}
+    steps:
+      - name: self-checkout
+        uses: actions/checkout@v4
+      - id: token
+        name: github-token-gen
+        uses: actions/create-github-app-token@v1
+        with:
+          app-id: ${{ secrets.PRIMUS_APP_ID }}
+          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
+          owner: ${{ github.repository_owner }}
+      - name: primus-checkout
+        uses: actions/checkout@v4
+        with:
+          repository: signoz/primus
+          ref: main
+          path: .primus
+          token: ${{ steps.token.outputs.token }}
+      - name: build-info
+        id: build-info
+        run: |
+          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
+          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
+          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
+          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
+      - name: set-docker-providers
+        id: set-docker-providers
+        run: |
+          if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
+            echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
+          else
+            echo "providers=gcp" >> $GITHUB_OUTPUT
+          fi
+      - name: create-dotenv
+        run: |
+          mkdir -p frontend
+          echo 'CI=1' > frontend/.env
+          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
+          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
+          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
+          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
+          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
+          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
+          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
+          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
+          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
+          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
+          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
+          echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> frontend/.env
+      - name: cache-dotenv
+        uses: actions/cache@v4
+        with:
+          path: frontend/.env
+          key: enterprise-dotenv-${{ github.sha }}
+  js-build:
+    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
+    needs: prepare
+    secrets: inherit
+    with:
+      PRIMUS_REF: main
+      JS_SRC: frontend
+      JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
+      JS_INPUT_ARTIFACT_PATH: frontend/.env
+      JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
+      JS_OUTPUT_ARTIFACT_PATH: frontend/build
+      DOCKER_BUILD: false
+      DOCKER_MANIFEST: false
+  go-build:
+    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
+    needs: [prepare, js-build]
+    secrets: inherit
+    with:
+      PRIMUS_REF: main
+      GO_VERSION: 1.23
+      GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
+      GO_INPUT_ARTIFACT_PATH: frontend/build
+      GO_BUILD_CONTEXT: ./ee/query-service
+      GO_BUILD_FLAGS: >-
+        -tags timetzdata
+        -ldflags='-linkmode external -extldflags \"-static\" -s -w
+        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
+        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
+        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
+        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
+        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
+        -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
+        -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
+        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
+        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
+      GO_CGO_ENABLED: 1
+      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
+      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
+      DOCKER_MANIFEST: true
+      DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}
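The `set-docker-providers` step gates DockerHub publishing on release-shaped tags. A quick local sanity check of those same two regexes (the sample refs are illustrative):

```bash
# Stable and rc tags should map to "dockerhub gcp"; anything else to "gcp".
for ref in refs/tags/v1.2.3 refs/tags/v1.2.3-rc.1 refs/tags/v1.2.3-beta; do
  if [[ ${ref} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${ref} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
    echo "${ref}: dockerhub gcp"
  else
    echo "${ref}: gcp"
  fi
done
```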
126 .github/workflows/build-staging.yaml (vendored, new file)
@@ -0,0 +1,126 @@
+name: build-staging
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+    types: [labeled]
+
+defaults:
+  run:
+    shell: bash
+
+env:
+  PRIMUS_HOME: .primus
+  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
+
+jobs:
+  prepare:
+    runs-on: ubuntu-latest
+    if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
+    outputs:
+      version: ${{ steps.build-info.outputs.version }}
+      hash: ${{ steps.build-info.outputs.hash }}
+      time: ${{ steps.build-info.outputs.time }}
+      branch: ${{ steps.build-info.outputs.branch }}
+      deployment: ${{ steps.build-info.outputs.deployment }}
+    steps:
+      - name: self-checkout
+        uses: actions/checkout@v4
+      - id: token
+        name: github-token-gen
+        uses: actions/create-github-app-token@v1
+        with:
+          app-id: ${{ secrets.PRIMUS_APP_ID }}
+          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
+          owner: ${{ github.repository_owner }}
+      - name: primus-checkout
+        uses: actions/checkout@v4
+        with:
+          repository: signoz/primus
+          ref: main
+          path: .primus
+          token: ${{ steps.token.outputs.token }}
+      - name: build-info
+        id: build-info
+        run: |
+          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
+          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
+          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
+          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
+
+          staging_label="${{ github.event.label.name }}"
+          if [[ "${staging_label}" == "staging:"* ]]; then
+            deployment=${staging_label#"staging:"}
+          elif [[ "${{ github.event.ref }}" == "refs/heads/main" ]]; then
+            deployment="staging"
+          else
+            echo "error: not able to determine deployment - please verify the PR label or the branch"
+            exit 1
+          fi
+          echo "deployment=${deployment}" >> $GITHUB_OUTPUT
+      - name: create-dotenv
+        run: |
+          mkdir -p frontend
+          echo 'CI=1' > frontend/.env
+          echo 'TUNNEL_URL="${{ secrets.NP_TUNNEL_URL }}"' >> frontend/.env
+          echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
+          echo 'USERPILOT_KEY="${{ secrets.NP_USERPILOT_KEY }}"' >> frontend/.env
+      - name: cache-dotenv
+        uses: actions/cache@v4
+        with:
+          path: frontend/.env
+          key: staging-dotenv-${{ github.sha }}
+  js-build:
+    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
+    needs: prepare
+    secrets: inherit
+    with:
+      PRIMUS_REF: main
+      JS_SRC: frontend
+      JS_INPUT_ARTIFACT_CACHE_KEY: staging-dotenv-${{ github.sha }}
+      JS_INPUT_ARTIFACT_PATH: frontend/.env
+      JS_OUTPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
+      JS_OUTPUT_ARTIFACT_PATH: frontend/build
+      DOCKER_BUILD: false
+      DOCKER_MANIFEST: false
+  go-build:
+    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
+    needs: [prepare, js-build]
+    secrets: inherit
+    with:
+      PRIMUS_REF: main
+      GO_VERSION: 1.23
+      GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
+      GO_INPUT_ARTIFACT_PATH: frontend/build
+      GO_BUILD_CONTEXT: ./ee/query-service
+      GO_BUILD_FLAGS: >-
+        -tags timetzdata
+        -ldflags='-linkmode external -extldflags \"-static\" -s -w
+        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
+        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
+        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
+        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
+        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
+        -X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
+        -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
+        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
+        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
+      GO_CGO_ENABLED: 1
+      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
+      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
+      DOCKER_MANIFEST: true
+      DOCKER_PROVIDERS: gcp
+  staging:
+    if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
+    uses: signoz/primus.workflows/.github/workflows/github-trigger.yaml@main
+    secrets: inherit
+    needs: [prepare, go-build]
+    with:
+      PRIMUS_REF: main
+      GITHUB_ENVIRONMENT: staging
+      GITHUB_SILENT: true
+      GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
+      GITHUB_EVENT_NAME: releaser
+      GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"
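The deployment name is derived from the PR label with bash prefix-stripping. An illustrative trace of that mapping (the label value is made up):

```bash
# "staging:<name>" selects a named deployment; a push to main falls back
# to the default "staging".
staging_label="staging:perf"
deployment=${staging_label#"staging:"}   # strips the literal prefix -> "perf"
echo "${deployment}"
```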
122 .github/workflows/build.yaml (vendored, deleted)
@@ -1,122 +0,0 @@
-name: build
-
-on:
-  push:
-    branches:
-      - main
-    tags:
-      - v*
-
-jobs:
-  enterprise:
-    runs-on: ubuntu-latest
-    steps:
-      - name: checkout
-        uses: actions/checkout@v4
-      - name: setup
-        uses: actions/setup-go@v5
-        with:
-          go-version: "1.22"
-      - name: setup-qemu
-        uses: docker/setup-qemu-action@v3
-      - name: setup-buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          version: latest
-      - name: docker-login
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: create-env-file
-        run: |
-          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
-          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
-          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
-          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
-          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
-          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
-          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
-          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
-          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
-          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
-          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
-      - name: github-ref-info
-        shell: bash
-        run: |
-          GH_REF=${{ github.ref }}
-          if [[ "${{ github.ref_type }}" == "tag" ]]; then
-            PREFIX="refs/tags/"
-            echo "GH_IS_TAG=true" >> $GITHUB_ENV
-            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
-          else
-            PREFIX="refs/heads/"
-            echo "GH_IS_TAG=false" >> $GITHUB_ENV
-            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
-          fi
-      - name: set-version
-        run: |
-          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
-            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
-          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
-            echo "VERSION=latest" >> $GITHUB_ENV
-          else
-            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
-          fi
-      - name: cross-compilation-tools
-        run: |
-          set -ex
-          sudo apt-get update
-          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
-      - name: publish
-        run: make docker-buildx-enterprise
-
-  community:
-    runs-on: ubuntu-latest
-    steps:
-      - name: checkout
-        uses: actions/checkout@v4
-      - name: setup-go
-        uses: actions/setup-go@v5
-        with:
-          go-version: "1.22"
-      - name: setup-qemu
-        uses: docker/setup-qemu-action@v3
-      - name: setup-buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          version: latest
-      - name: docker-login
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: github-ref-info
-        shell: bash
-        run: |
-          GH_REF=${{ github.ref }}
-          if [[ "${{ github.ref_type }}" == "tag" ]]; then
-            PREFIX="refs/tags/"
-            echo "GH_IS_TAG=true" >> $GITHUB_ENV
-            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
-          else
-            PREFIX="refs/heads/"
-            echo "GH_IS_TAG=false" >> $GITHUB_ENV
-            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
-          fi
-      - name: set-version
-        run: |
-          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
-            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
-          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
-            echo "VERSION=latest" >> $GITHUB_ENV
-          else
-            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
-          fi
-      - name: cross-compilation-tools
-        run: |
-          set -ex
-          sudo apt-get update
-          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
-      - name: publish
-        run: make docker-buildx-community
14 .github/workflows/goci.yaml (vendored)
@@ -18,6 +18,7 @@ jobs:
     with:
       PRIMUS_REF: main
       GO_TEST_CONTEXT: ./...
+      GO_VERSION: 1.23
   fmt:
     if: |
       (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -26,6 +27,7 @@ jobs:
     secrets: inherit
     with:
       PRIMUS_REF: main
+      GO_VERSION: 1.23
   lint:
     if: |
       (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -34,6 +36,16 @@ jobs:
     secrets: inherit
     with:
       PRIMUS_REF: main
+      GO_VERSION: 1.23
+  deps:
+    if: |
+      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
+      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
+    uses: signoz/primus.workflows/.github/workflows/go-deps.yaml@main
+    secrets: inherit
+    with:
+      PRIMUS_REF: main
+      GO_VERSION: 1.23
   build:
     if: |
       (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -45,7 +57,7 @@ jobs:
       - name: go-install
        uses: actions/setup-go@v5
        with:
-          go-version: "1.22"
+          go-version: "1.23"
       - name: qemu-install
         uses: docker/setup-qemu-action@v3
       - name: aarch64-install
4 .github/workflows/gor-signoz-community.yaml (vendored)
@@ -58,7 +58,7 @@ jobs:
       - name: setup-go
         uses: actions/setup-go@v5
         with:
-          go-version: "1.22"
+          go-version: "1.23"
       - name: cross-compilation-tools
         if: matrix.os == 'ubuntu-latest'
         run: |
@@ -122,7 +122,7 @@ jobs:
       - name: setup-go
         uses: actions/setup-go@v5
         with:
-          go-version: "1.22"
+          go-version: "1.23"

       # copy the caches from build
       - name: get-sha
5 .github/workflows/gor-signoz.yaml (vendored)
@@ -35,6 +35,7 @@ jobs:
           echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
           echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env
           echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env
+          echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> .env
       - name: build-frontend
         run: make js-build
       - name: upload-frontend-artifact
@@ -72,7 +73,7 @@ jobs:
       - name: setup-go
         uses: actions/setup-go@v5
         with:
-          go-version: "1.22"
+          go-version: "1.23"
       - name: cross-compilation-tools
         if: matrix.os == 'ubuntu-latest'
         run: |
@@ -135,7 +136,7 @@ jobs:
       - name: setup-go
         uses: actions/setup-go@v5
         with:
-          go-version: "1.22"
+          go-version: "1.23"

       # copy the caches from build
       - name: get-sha
53 .github/workflows/integrationci.yaml (vendored, new file)
@@ -0,0 +1,53 @@
+name: integrationci
+
+on:
+  pull_request:
+    types:
+      - labeled
+  pull_request_target:
+    types:
+      - labeled
+
+jobs:
+  test:
+    strategy:
+      fail-fast: false
+      matrix:
+        src:
+          - bootstrap
+        sqlstore-provider:
+          - postgres
+          - sqlite
+        clickhouse-version:
+          - 24.1.2-alpine
+          - 24.12-alpine
+        schema-migrator-version:
+          - v0.111.38
+        postgres-version:
+          - 15
+    if: |
+      ((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
+      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
+    runs-on: ubuntu-latest
+    steps:
+      - name: checkout
+        uses: actions/checkout@v4
+      - name: python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.13
+      - name: poetry
+        run: |
+          python -m pip install poetry==2.1.2
+          python -m poetry config virtualenvs.in-project true
+          cd tests/integration && poetry install --no-root
+      - name: run
+        run: |
+          cd tests/integration && \
+          poetry run pytest \
+            --basetemp=./tmp/ \
+            src/${{matrix.src}} \
+            --sqlstore-provider ${{matrix.sqlstore-provider}} \
+            --postgres-version ${{matrix.postgres-version}} \
+            --clickhouse-version ${{matrix.clickhouse-version}} \
+            --schema-migrator-version ${{matrix.schema-migrator-version}}
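For reference, one matrix cell of the workflow above translates into the following local invocation (assuming Python 3.13 is installed and the repo root is the working directory; the chosen matrix values are one of the combinations listed above):

```bash
# Install the pinned poetry and the test dependencies, then run one cell
# of the integration matrix against postgres 15 + clickhouse 24.1.2-alpine.
python -m pip install poetry==2.1.2
python -m poetry config virtualenvs.in-project true
cd tests/integration && poetry install --no-root
poetry run pytest \
  --basetemp=./tmp/ \
  src/bootstrap \
  --sqlstore-provider postgres \
  --postgres-version 15 \
  --clickhouse-version 24.1.2-alpine \
  --schema-migrator-version v0.111.38
```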
4 .github/workflows/prereleaser.yaml (vendored)
@@ -1,9 +1,9 @@
 name: prereleaser

 on:
-  # schedule every wednesday 9:30 AM UTC (3pm IST)
+  # schedule every wednesday 6:30 AM UTC (12:00 PM IST)
   schedule:
-    - cron: '30 9 * * 3'
+    - cron: '30 6 * * 3'

   # allow manual triggering of the workflow by a maintainer
   workflow_dispatch:
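A field-by-field reading of the new schedule, since GitHub Actions always evaluates cron in UTC (the `date` call below assumes GNU date and is purely illustrative):

```bash
# minute hour day-of-month month day-of-week
#   30    6       *          *       3        ->  06:30 UTC every Wednesday
date -u -d 'next wednesday 06:30'   # next fire time, in UTC
```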
16 .github/workflows/remove-label.yaml (vendored, deleted)
@@ -1,16 +0,0 @@
-name: remove-label
-
-on:
-  pull_request_target:
-    types: [synchronize]
-
-jobs:
-  remove:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Remove label testing-deploy from PR
-        uses: buildsville/add-remove-label@v2.0.0
-        with:
-          label: testing-deploy
-          type: remove
-          token: ${{ secrets.GITHUB_TOKEN }}
56 .github/workflows/staging-deployment.yaml (vendored, deleted)
@@ -1,56 +0,0 @@
-name: staging-deployment
-# Trigger deployment only on push to main branch
-on:
-  push:
-    branches:
-      - main
-jobs:
-  deploy:
-    name: Deploy latest main branch to staging
-    runs-on: ubuntu-latest
-    environment: staging
-    permissions:
-      contents: 'read'
-      id-token: 'write'
-    steps:
-      - id: 'auth'
-        uses: 'google-github-actions/auth@v2'
-        with:
-          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
-          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
-
-      - name: 'sdk'
-        uses: 'google-github-actions/setup-gcloud@v2'
-
-      - name: 'ssh'
-        shell: bash
-        env:
-          GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
-          GITHUB_SHA: ${{ github.sha }}
-          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
-          GCP_ZONE: ${{ secrets.GCP_ZONE }}
-          GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
-          CLOUDSDK_CORE_DISABLE_PROMPTS: 1
-        run: |
-          read -r -d '' COMMAND <<EOF || true
-          echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
-          echo "GITHUB_SHA: ${GITHUB_SHA}"
-          export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
-          export OTELCOL_TAG="main"
-          export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
-          export KAFKA_SPAN_EVAL="true"
-          docker system prune --force
-          docker pull signoz/signoz-otel-collector:main
-          docker pull signoz/signoz-schema-migrator:main
-          cd ~/signoz
-          git status
-          git add .
-          git stash push -m "stashed on $(date --iso-8601=seconds)"
-          git fetch origin
-          git checkout ${GITHUB_BRANCH}
-          git pull
-          make docker-build-enterprise-amd64
-          export VERSION="${GITHUB_SHA:0:7}-amd64"
-          docker-compose -f deploy/docker/docker-compose.testing.yaml up --build -d
-          EOF
-          gcloud beta compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
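A note on the `read -r -d '' COMMAND <<EOF || true` idiom in the deleted deploy script above: with `-d ''`, `read` consumes input until a NUL byte, so it always hits end-of-input first and returns non-zero even though `COMMAND` is fully populated; the `|| true` keeps that expected failure from aborting the step. A minimal reproduction:

```bash
# Without `|| true`, this line would fail the step under GitHub Actions'
# default `bash -e` behavior, despite COMMAND being set correctly.
read -r -d '' COMMAND <<'EOF' || true
echo "hello from the remote host"
EOF
printf '%s\n' "${COMMAND}"
```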
56 .github/workflows/testing-deployment.yaml (vendored, deleted)
@@ -1,56 +0,0 @@
-name: testing-deployment
-# Trigger deployment only on testing-deploy label on pull request
-on:
-  pull_request:
-    types: [labeled]
-jobs:
-  deploy:
-    name: Deploy PR branch to testing
-    runs-on: ubuntu-latest
-    environment: testing
-    if: ${{ github.event.label.name == 'testing-deploy' }}
-    permissions:
-      contents: 'read'
-      id-token: 'write'
-    steps:
-      - id: 'auth'
-        uses: 'google-github-actions/auth@v2'
-        with:
-          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
-          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
-
-      - name: 'sdk'
-        uses: 'google-github-actions/setup-gcloud@v2'
-
-      - name: 'ssh'
-        shell: bash
-        env:
-          GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
-          GITHUB_SHA: ${{ github.sha }}
-          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
-          GCP_ZONE: ${{ secrets.GCP_ZONE }}
-          GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
-          CLOUDSDK_CORE_DISABLE_PROMPTS: 1
-        run: |
-          read -r -d '' COMMAND <<EOF || true
-          echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
-          echo "GITHUB_SHA: ${GITHUB_SHA}"
-          export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
-          export DEV_BUILD="1"
-          export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
-          docker system prune --force
-          cd ~/signoz
-          git status
-          git add .
-          git stash push -m "stashed on $(date --iso-8601=seconds)"
-          git fetch origin
-          git checkout main
-          git pull
-          # This is added to include the scenerio when new commit in PR is force-pushed
-          git branch -D ${GITHUB_BRANCH}
-          git checkout --track origin/${GITHUB_BRANCH}
-          make docker-build-enterprise-amd64
-          export VERSION="${GITHUB_SHA:0:7}-amd64"
-          docker-compose -f deploy/docker/docker-compose.testing.yaml up --build -d
-          EOF
-          gcloud beta compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
149 .gitignore (vendored)
@@ -60,9 +60,7 @@ ee/query-service/db

 e2e/node_modules/
 e2e/test-results/
 e2e/playwright-report/
 e2e/blob-report/
 e2e/playwright/.cache/
 e2e/.auth

 # go
@@ -80,6 +78,153 @@ deploy/common/clickhouse/user_scripts/

 queries.active

 # tmp
 **/tmp/**

+# .devenv tmp files
+.devenv/**/tmp/**
+.qodo
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+poetry.toml
+
+# ruff
+.ruff_cache/
+
+# LSP config files
+pyrightconfig.json
+
+# End of https://www.toptal.com/developers/gitignore/api/python
17 .versions/alpine (new file)
@@ -0,0 +1,17 @@
+#### Auto generated by make docker-version-alpine. DO NOT EDIT! ####
+amd64=029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85
+unknown=5fea95373b9ec85974843f31446fa6a9df4492dddae4e1cb056193c34a20a5be
+arm=b4aef1a899e0271f06d948c9a8fa626ecdb2202d3a178bc14775dd559e23df8e
+unknown=a4d1e27e63a9d6353046eb25a2f0ec02945012b217f4364cd83a73fe6dfb0b15
+arm=4fdafe217d0922f3c3e2b4f64cf043f8403a4636685cd9c51fea2cbd1f419740
+unknown=7f21ac2018d95b2c51a5779c1d5ca6c327504adc3b0fdc747a6725d30b3f13c2
+arm64=ea3c5a9671f7b3f7eb47eab06f73bc6591df978b0d5955689a9e6f943aa368c0
+unknown=a8ba68c1a9e6eea8041b4b8f996c235163440808b9654a865976fdcbede0f433
+386=dea9f02e103e837849f984d5679305c758aba7fea1b95b7766218597f61a05ab
+unknown=3c6629bec05c8273a927d46b77428bf4a378dad911a0ae284887becdc149b734
+ppc64le=0880443bffa028dfbbc4094a32dd6b7ac25684e4c0a3d50da9e0acae355c5eaf
+unknown=bb48308f976b266e3ab39bbf9af84521959bd9c295d3c763690cf41f8df2a626
+riscv64=d76e6fbe348ff20c2931bb7f101e49379648e026de95dd37f96e00ce1909dcf7
+unknown=dd807544365f6dc187cbe6de0806adce2ea9de3e7124717d1d8e8b7a18b77b64
+s390x=b815fadf80495594eb6296a6af0bc647ae5f193e0044e07acec7e5b378c9ce2d
+unknown=74681be74a280a88abb53ff1e048eb1fb624b30d0066730df6d8afd02ba82e01
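These look like per-architecture manifest digests for the pinned alpine base image. Pulling by digest pins an exact image build regardless of where the tag later moves; an illustrative sketch using the amd64 entry recorded above:

```bash
# Pull and inspect the exact amd64 image by its content digest.
DIGEST=sha256:029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85
docker pull "alpine@${DIGEST}"
docker inspect --format '{{.Architecture}}' "alpine@${DIGEST}"
```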
55 Makefile
@@ -10,19 +10,20 @@ COMMIT_SHORT_SHA ?= $(shell git rev-parse --short HEAD)
 BRANCH_NAME ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
 VERSION ?= $(BRANCH_NAME)-$(COMMIT_SHORT_SHA)
 TIMESTAMP ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
-ARCHS = amd64 arm64
+ARCHS ?= amd64 arm64
 TARGET_DIR ?= $(shell pwd)/target

 ZEUS_URL ?= https://api.signoz.cloud
-GO_BUILD_LDFLAG_ZEUS_URL = -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=$(ZEUS_URL)
-LICENSE_URL ?= https://license.signoz.io/api/v1
-GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=$(LICENSE_URL)
+GO_BUILD_LDFLAG_ZEUS_URL = -X github.com/SigNoz/signoz/ee/zeus.url=$(ZEUS_URL)
+LICENSE_URL ?= https://license.signoz.io
+GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=$(LICENSE_URL)

 GO_BUILD_VERSION_LDFLAGS = -X github.com/SigNoz/signoz/pkg/version.version=$(VERSION) -X github.com/SigNoz/signoz/pkg/version.hash=$(COMMIT_SHORT_SHA) -X github.com/SigNoz/signoz/pkg/version.time=$(TIMESTAMP) -X github.com/SigNoz/signoz/pkg/version.branch=$(BRANCH_NAME)
 GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
 GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service
 GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community
 GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS))
+GO_BUILD_ARCHS_ENTERPRISE_RACE = $(addprefix go-build-enterprise-race-,$(ARCHS))
 GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service
 GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO)

@@ -55,6 +56,11 @@ devenv-clickhouse: ## Run clickhouse in devenv
	@cd .devenv/docker/clickhouse; \
	docker compose -f compose.yaml up -d

+.PHONY: devenv-postgres
+devenv-postgres: ## Run postgres in devenv
+	@cd .devenv/docker/postgres; \
+	docker compose -f compose.yaml up -d
+
 ##############################################################
 # go commands
 ##############################################################
@@ -70,9 +76,11 @@ go-run-enterprise: ## Runs the enterprise go backend server
	go run -race \
	$(GO_BUILD_CONTEXT_ENTERPRISE)/main.go \
	--config ./conf/prometheus.yml \
-	--cluster cluster \
-	--use-logs-new-schema true \
-	--use-trace-new-schema true
+	--cluster cluster
+
+.PHONY: go-test
+go-test: ## Runs go unit tests
+	@go test -race ./...

 .PHONY: go-run-community
 go-run-community: ## Runs the community go backend server
@@ -86,9 +94,7 @@ go-run-community: ## Runs the community go backend server
	go run -race \
	$(GO_BUILD_CONTEXT_COMMUNITY)/main.go \
	--config ./conf/prometheus.yml \
-	--cluster cluster \
-	--use-logs-new-schema true \
-	--use-trace-new-schema true
+	--cluster cluster

 .PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
 go-build-community: ## Builds the go backend server for community
@@ -115,6 +121,18 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
	CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
	fi

+.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
+go-build-enterprise-race: ## Builds the go backend server for enterprise with race
+go-build-enterprise-race: $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
+$(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
+	@mkdir -p $(TARGET_DIR)/$(OS)-$*
+	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
+	@if [ $* = "arm64" ]; then \
+	CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
+	else \
+	CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
+	fi
+
 ##############################################################
 # js commands
 ##############################################################
@@ -163,3 +181,20 @@ docker-buildx-enterprise: go-build-enterprise js-build
	--platform linux/arm64,linux/amd64 \
	--push \
	--tag $(DOCKER_REGISTRY_ENTERPRISE):$(VERSION) $(SRC)

+##############################################################
+# python commands
+##############################################################
+.PHONY: py-fmt
+py-fmt: ## Run black for integration tests
+	@cd tests/integration && poetry run black .
+
+.PHONY: py-lint
+py-lint: ## Run lint for integration tests
+	@cd tests/integration && poetry run isort .
+	@cd tests/integration && poetry run autoflake .
+	@cd tests/integration && poetry run pylint .
+
+.PHONY: py-test
+py-test: ## Runs integration tests
+	@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
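A few illustrative invocations of the new and changed targets. Note the `ARCHS = ...` to `ARCHS ?= ...` change: with `?=` an environment or command-line value now overrides the default, which is what lets a single-arch build restrict the per-arch fan-out produced by `$(addprefix go-build-enterprise-race-,$(ARCHS))`:

```bash
make devenv-postgres                       # start the devenv postgres compose
make go-test                               # go test -race ./...
make go-build-enterprise-race-arm64        # one arch of the race-enabled build
ARCHS=amd64 make go-build-enterprise-race  # restrict the fan-out to amd64
make py-fmt py-lint py-test                # integration-test tooling
```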
@@ -50,7 +50,7 @@ cache:
     # Time-to-live for cache entries in memory. Specify the duration in ns
     ttl: 60000000000
     # The interval at which the cache will be cleaned up
-    cleanupInterval: 1m
+    cleanup_interval: 1m
   # redis: Uses Redis as the caching backend.
   redis:
     # The hostname or IP address of the Redis server.
@@ -72,7 +72,6 @@ sqlstore:
     # The path to the SQLite database file.
     path: /var/lib/signoz/signoz.db

 ##################### APIServer #####################
 apiserver:
   timeout:
@@ -91,20 +90,36 @@ apiserver:
       - /api/v1/version
       - /

 ##################### TelemetryStore #####################
 telemetrystore:
-  # Specifies the telemetrystore provider to use.
-  provider: clickhouse
   # Maximum number of idle connections in the connection pool.
   max_idle_conns: 50
   # Maximum number of open connections to the database.
   max_open_conns: 100
   # Maximum time to wait for a connection to be established.
   dial_timeout: 5s
+  # Specifies the telemetrystore provider to use.
+  provider: clickhouse
   clickhouse:
-    # The DSN to use for ClickHouse.
-    dsn: http://localhost:9000
+    # The DSN to use for clickhouse.
+    dsn: tcp://localhost:9000
+    # The query settings for clickhouse.
+    settings:
+      max_execution_time: 0
+      max_execution_time_leaf: 0
+      timeout_before_checking_execution_speed: 0
+      max_bytes_to_read: 0
+      max_result_rows_for_ch_query: 0
+
+##################### Prometheus #####################
+prometheus:
+  active_query_tracker:
+    # Whether to enable the active query tracker.
+    enabled: true
+    # The path to use for the active query tracker.
+    path: ""
+    # The maximum number of concurrent queries.
+    max_concurrent: 20

 ##################### Alertmanager #####################
 alertmanager:
@@ -117,7 +132,7 @@ alertmanager:
   # The poll interval for periodically syncing the alertmanager with the config in the store.
   poll_interval: 1m
   # The URL under which Alertmanager is externally reachable (for example, if Alertmanager is served via a reverse proxy). Used for generating relative and absolute links back to Alertmanager itself.
-  external_url: http://localhost:9093
+  external_url: http://localhost:8080
   # The global configuration for the alertmanager. All the exahustive fields can be found in the upstream: https://github.com/prometheus/alertmanager/blob/efa05feffd644ba4accb526e98a8c6545d26a783/config/config.go#L833
   global:
     # ResolveTimeout is the time after which an alert is declared resolved if it has not been updated.
@@ -149,3 +164,9 @@ alertmanager:
       maintenance_interval: 15m
   # Retention of the notification logs.
   retention: 120h
+
+##################### Analytics #####################
+analytics:
+  # Whether to enable analytics.
+  enabled: false
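The DSN change from `http://localhost:9000` to `tcp://localhost:9000` matters because 9000 is ClickHouse's native-protocol port, so an `http://` scheme on it was misleading; the HTTP interface lives on 8123. An illustrative connectivity check for each interface (assuming `clickhouse-client` and `curl` are installed and a local server is running):

```bash
# Native protocol on 9000, matching the new tcp:// DSN.
clickhouse-client --host localhost --port 9000 --query 'SELECT 1'
# HTTP interface on 8123, with its built-in ping endpoint.
curl -s http://localhost:8123/ping
```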
@@ -174,11 +174,9 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.76.2
+    image: signoz/signoz:v0.84.1
     command:
       - --config=/root/config/prometheus.yml
-      - --use-logs-new-schema=true
-      - --use-trace-new-schema=true
     ports:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port
@@ -208,7 +206,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.34
+    image: signoz/signoz-otel-collector:v0.111.41
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -232,7 +230,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.34
+    image: signoz/signoz-schema-migrator:v0.111.41
     deploy:
       restart_policy:
         condition: on-failure
@@ -26,7 +26,7 @@ processors:
       detectors: [env, system]
     timeout: 2s
   signozspanmetrics/delta:
-    metrics_exporter: clickhousemetricswrite
+    metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
     metrics_flush_interval: 60s
     latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
     dimensions_cache_size: 100000
@@ -64,8 +64,10 @@ exporters:
     endpoint: tcp://clickhouse:9000/signoz_metrics
     resource_to_telemetry_conversion:
       enabled: true
+    disable_v2: true
   clickhousemetricswrite/prometheus:
     endpoint: tcp://clickhouse:9000/signoz_metrics
+    disable_v2: true
+  signozclickhousemetrics:
+    dsn: tcp://clickhouse:9000/signoz_metrics
   clickhouselogsexporter:
@@ -177,12 +177,10 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.76.2}
+    image: signoz/signoz:${VERSION:-v0.84.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
-      - --use-logs-new-schema=true
-      - --use-trace-new-schema=true
     ports:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port
@@ -212,7 +210,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -238,7 +236,7 @@ services:
       condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -249,7 +247,7 @@ services:
       condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
     container_name: schema-migrator-async
     command:
      - async
@@ -1,199 +0,0 @@
version: "3"
x-common: &common
  networks:
    - signoz-net
  restart: unless-stopped
  logging:
    options:
      max-size: 50m
      max-file: "3"
x-clickhouse-defaults: &clickhouse-defaults
  !!merge <<: *common
  # adding non-LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
  image: clickhouse/clickhouse-server:24.1.2-alpine
  tty: true
  labels:
    signoz.io/scrape: "true"
    signoz.io/port: "9363"
    signoz.io/path: "/metrics"
  depends_on:
    init-clickhouse:
      condition: service_completed_successfully
    zookeeper-1:
      condition: service_healthy
  healthcheck:
    test:
      - CMD
      - wget
      - --spider
      - -q
      - 0.0.0.0:8123/ping
    interval: 30s
    timeout: 5s
    retries: 3
  ulimits:
    nproc: 65535
    nofile:
      soft: 262144
      hard: 262144
x-zookeeper-defaults: &zookeeper-defaults
  !!merge <<: *common
  image: bitnami/zookeeper:3.7.1
  user: root
  labels:
    signoz.io/scrape: "true"
    signoz.io/port: "9141"
    signoz.io/path: "/metrics"
  healthcheck:
    test:
      - CMD-SHELL
      - curl -s -m 2 http://localhost:8080/commands/ruok | grep error | grep null
    interval: 30s
    timeout: 5s
    retries: 3
x-db-depend: &db-depend
  !!merge <<: *common
  depends_on:
    clickhouse:
      condition: service_healthy
    schema-migrator-sync:
      condition: service_completed_successfully
services:
  init-clickhouse:
    !!merge <<: *common
    image: clickhouse/clickhouse-server:24.1.2-alpine
    container_name: signoz-init-clickhouse
    command:
      - bash
      - -c
      - |
        version="v0.0.1"
        node_os=$$(uname -s | tr '[:upper:]' '[:lower:]')
        node_arch=$$(uname -m | sed s/aarch64/arm64/ | sed s/x86_64/amd64/)
        echo "Fetching histogram-binary for $${node_os}/$${node_arch}"
        cd /tmp
        wget -O histogram-quantile.tar.gz "https://github.com/SigNoz/signoz/releases/download/histogram-quantile%2F$${version}/histogram-quantile_$${node_os}_$${node_arch}.tar.gz"
        tar -xvzf histogram-quantile.tar.gz
        mv histogram-quantile /var/lib/clickhouse/user_scripts/histogramQuantile
    restart: on-failure
    volumes:
      - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
  zookeeper-1:
    !!merge <<: *zookeeper-defaults
    container_name: signoz-zookeeper-1
    ports:
      - "2181:2181"
      - "2888:2888"
      - "3888:3888"
    volumes:
      - zookeeper-1:/bitnami/zookeeper
    environment:
      - ZOO_SERVER_ID=1
      - ALLOW_ANONYMOUS_LOGIN=yes
      - ZOO_AUTOPURGE_INTERVAL=1
      - ZOO_ENABLE_PROMETHEUS_METRICS=yes
      - ZOO_PROMETHEUS_METRICS_PORT_NUMBER=9141
  clickhouse:
    !!merge <<: *clickhouse-defaults
    container_name: signoz-clickhouse
    ports:
      - "9000:9000"
      - "8123:8123"
      - "9181:9181"
    volumes:
      - ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
      - ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
      - ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
      - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
      - ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
      - clickhouse:/var/lib/clickhouse/
      # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
    image: signoz/signoz:${VERSION:-v0.76.2}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml
      - --gateway-url=https://api.staging.signoz.cloud
      - --use-logs-new-schema=true
      - --use-trace-new-schema=true
    ports:
      - "8080:8080" # signoz port
      # - "6060:6060" # pprof port
    volumes:
      - ../common/signoz/prometheus.yml:/root/config/prometheus.yml
      - ../common/dashboards:/root/config/dashboards
      - sqlite:/var/lib/signoz/
    environment:
      - SIGNOZ_ALERTMANAGER_PROVIDER=signoz
      - SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
      - SIGNOZ_SQLSTORE_SQLITE_PATH=/var/lib/signoz/signoz.db
      - DASHBOARDS_PATH=/root/config/dashboards
      - STORAGE=clickhouse
      - GODEBUG=netdns=go
      - TELEMETRY_ENABLED=true
      - DEPLOYMENT_TYPE=docker-standalone-amd
      - KAFKA_SPAN_EVAL=${KAFKA_SPAN_EVAL:-false}
    healthcheck:
      test:
        - CMD
        - wget
        - --spider
        - -q
        - localhost:8080/api/v1/health
      interval: 30s
      timeout: 5s
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
    container_name: signoz-otel-collector
    command:
      - --config=/etc/otel-collector-config.yaml
      - --manager-config=/etc/manager-config.yaml
      - --copy-path=/var/tmp/collector-config.yaml
      - --feature-gates=-pkg.translator.prometheus.NormalizeName
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
      - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
    environment:
      - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
      - LOW_CARDINAL_EXCEPTION_GROUPING=false
    ports:
      # - "1777:1777" # pprof extension
      - "4317:4317" # OTLP gRPC receiver
      - "4318:4318" # OTLP HTTP receiver
    depends_on:
      signoz:
        condition: service_healthy
  schema-migrator-sync:
    !!merge <<: *common
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-sync
    command:
      - sync
      - --dsn=tcp://clickhouse:9000
      - --up=
    depends_on:
      clickhouse:
        condition: service_healthy
    restart: on-failure
  schema-migrator-async:
    !!merge <<: *db-depend
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-async
    command:
      - async
      - --dsn=tcp://clickhouse:9000
      - --up=
    restart: on-failure
networks:
  signoz-net:
    name: signoz-net
volumes:
  clickhouse:
    name: signoz-clickhouse
  sqlite:
    name: signoz-sqlite
  zookeeper-1:
    name: signoz-zookeeper-1
@@ -110,12 +110,10 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.76.2}
+    image: signoz/signoz:${VERSION:-v0.84.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
-      - --use-logs-new-schema=true
-      - --use-trace-new-schema=true
     ports:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port
@@ -144,7 +142,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -166,7 +164,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -178,7 +176,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
     container_name: schema-migrator-async
     command:
       - async

@@ -26,7 +26,7 @@ processors:
       detectors: [env, system]
     timeout: 2s
   signozspanmetrics/delta:
-    metrics_exporter: clickhousemetricswrite
+    metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
     metrics_flush_interval: 60s
     latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
     dimensions_cache_size: 100000
@@ -62,10 +62,12 @@ exporters:
     use_new_schema: true
   clickhousemetricswrite:
     endpoint: tcp://clickhouse:9000/signoz_metrics
     disable_v2: true
     resource_to_telemetry_conversion:
       enabled: true
   clickhousemetricswrite/prometheus:
     endpoint: tcp://clickhouse:9000/signoz_metrics
     disable_v2: true
+  signozclickhousemetrics:
+    dsn: tcp://clickhouse:9000/signoz_metrics
   clickhouselogsexporter:

@@ -93,7 +93,7 @@ check_os() {
         ;;
     Red\ Hat*)
         desired_os=1
-        os="red hat"
+        os="rhel"
         package_manager="yum"
         ;;
     CentOS*)

docs/contributing/go/errors.md (new file, 103 lines)
@@ -0,0 +1,103 @@
# Errors

SigNoz includes its own structured [errors](/pkg/errors/errors.go) package. It's built on top of Go's `error` interface, extending it with additional context that helps produce more meaningful error messages throughout the application.

## How to use it?

To use the SigNoz structured errors package, use these functions instead of the standard library alternatives:

```go
// Instead of errors.New()
errors.New(typ, code, message)

// Instead of fmt.Errorf()
errors.Newf(typ, code, message, args...)
```

### Typ

The Typ (read as Type, defined as `typ`) is used to categorize errors across the codebase and is loosely coupled with HTTP/gRPC status codes. All predefined types can be found in [pkg/errors/type.go](/pkg/errors/type.go). For example:

- `TypeInvalidInput` - Indicates invalid input was provided
- `TypeNotFound` - Indicates a resource was not found

By design, `typ` is unexported and cannot be declared outside of the [errors](/pkg/errors/errors.go) package. This ensures that it is consistent across the codebase and is used in a meaningful way.

### Code

Codes are used to provide more granular categorization within types. For instance, a type of `TypeInvalidInput` might have codes like `CodeInvalidEmail` or `CodeInvalidPassword`.

To create new error codes, use the `errors.MustNewCode` function:

```go
var (
    CodeThingAlreadyExists = errors.MustNewCode("thing_already_exists")
    CodeThingNotFound      = errors.MustNewCode("thing_not_found")
)
```

> 💡 **Note**: Error codes must match the regex `^[a-z_]+$`, otherwise the code will panic.

## Show me some examples

### Using the error

A basic example of using the error:

```go
var (
    CodeThingAlreadyExists = errors.MustNewCode("thing_already_exists")
)

func CreateThing(id string) error {
    _, err := thing.GetFromStore(id)
    if err != nil {
        if errors.Ast(err, errors.TypeNotFound) {
            // thing was not found, create it
            return thing.Create(id)
        }

        // something else went wrong, wrap the error with more context
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeUnknown, "failed to get thing from store")
    }

    return errors.Newf(errors.TypeAlreadyExists, CodeThingAlreadyExists, "thing with id %s already exists", id)
}
```

### Changing the error

Sometimes you may want to change the error while preserving the message:

```go
func GetUserSecurely(id string) (*User, error) {
    user, err := repository.GetUser(id)
    if err != nil {
        if errors.Ast(err, errors.TypeNotFound) {
            // Convert NotFound to Forbidden for security reasons
            return nil, errors.New(errors.TypeForbidden, errors.CodeAccessDenied, "access denied to requested resource")
        }
        return nil, err
    }
    return user, nil
}
```

## Why do we need this?

In a large codebase like SigNoz, error handling is critical for maintaining reliability, debuggability, and a good user experience. We believe that it is the **responsibility of a function** to return **well-defined** errors that **accurately describe what went wrong**. With our structured error system:

- Functions can create precise errors with appropriate additional context
- Callers can make informed decisions based on the additional context
- Error context is preserved and enhanced as it moves up the call stack

The caller (which can be another function, an HTTP/gRPC handler, or something else entirely) can then choose to use this error to take appropriate actions such as:

- A function can branch into different paths based on the context
- An HTTP/gRPC handler can derive the correct status code and message from the error and send it to the client (sketched below)
- Logging systems can capture structured error information for better diagnostics
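For illustration, a minimal sketch of that handler-side mapping. The `typeToStatus` function is hypothetical (in real code, the `pkg/http/render` helpers already derive the status); only the `errors.Ast` calls mirror the package's documented usage above:

```go
package example

import (
    "net/http"

    "github.com/SigNoz/signoz/pkg/errors"
)

// typeToStatus is an illustrative helper, not the real render API:
// it derives an HTTP status code from a structured error's typ.
func typeToStatus(err error) int {
    switch {
    case errors.Ast(err, errors.TypeInvalidInput):
        return http.StatusBadRequest
    case errors.Ast(err, errors.TypeNotFound):
        return http.StatusNotFound
    case errors.Ast(err, errors.TypeForbidden):
        return http.StatusForbidden
    default:
        // unknown or internal errors fall back to 500
        return http.StatusInternalServerError
    }
}
```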
Although this may seem too verbose in places, it makes the code more maintainable and consistent. Slightly verbose code is better than clever code that doesn't provide enough context.

## What should I remember?

- Think about error handling as you write your code, not as an afterthought.
- Always use the [errors](/pkg/errors/errors.go) package instead of the standard library's `errors.New()` or `fmt.Errorf()`.
- Always assign appropriate codes to errors when creating them instead of using the "catch all" error codes defined in [pkg/errors/code.go](/pkg/errors/code.go).
- Use `errors.Wrapf()` to add context to errors while preserving the original when appropriate.
docs/contributing/go/readme.md (new file, 11 lines)
@@ -0,0 +1,11 @@
# Go

This document provides an overview of contributing to the SigNoz backend. The backend is built with Go, focusing on performance, maintainability, and developer experience. We strive for clean, idiomatic code that follows established Go practices while addressing the unique needs of an observability platform.

We adhere to three primary style guides as our foundation:

- [Effective Go](https://go.dev/doc/effective_go) - For writing idiomatic Go code
- [Code Review Comments](https://go.dev/wiki/CodeReviewComments) - For understanding common comments in code reviews
- [Google Style Guide](https://google.github.io/styleguide/go/) - Additional practices from Google

We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development. We also have a few rules that make certain areas stricter than these guides; they can be found in the area-specific files in this package.
docs/contributing/go/sql.md (new file, 94 lines)
@@ -0,0 +1,94 @@
# SQL

SigNoz utilizes a relational database to store metadata, including organization information, user data, and other settings.

## How to use it?

The database interface is defined in [SQLStore](/pkg/sqlstore/sqlstore.go). SigNoz leverages the Bun ORM to interact with the underlying database. To access the database instance, use the `BunDBCtx` function. For operations that require transactions across multiple database operations, use the `RunInTxCtx` function. This function embeds a transaction in the context, which propagates through the various functions called in the callback (a sketch of this follows the notes below).

```go
type Thing struct {
    bun.BaseModel

    ID            types.Identifiable  `bun:",embed"`
    SomeColumn    string              `bun:"some_column"`
    TimeAuditable types.TimeAuditable `bun:",embed"`
    OrgID         string              `bun:"org_id"`
}

func GetThing(ctx context.Context, id string) (*Thing, error) {
    thing := new(Thing)
    err := sqlstore.
        BunDBCtx(ctx).
        NewSelect().
        Model(thing).
        Where("id = ?", id).
        Scan(ctx)

    return thing, err
}

func CreateThing(ctx context.Context, thing *Thing) error {
    _, err := sqlstore.
        BunDBCtx(ctx).
        NewInsert().
        Model(thing).
        Exec(ctx)

    return err
}
```

> 💡 **Note**: Always use line breaks while working with SQL queries to enhance code readability.

> 💡 **Note**: Always use the `new` function to create new instances of structs.
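As a rough illustration of `RunInTxCtx`, here is a sketch of a transactional write. The exact signature lives in [SQLStore](/pkg/sqlstore/sqlstore.go); the `nil` options argument and the `newAuditEntry` helper are assumptions for the example:

```go
// Hypothetical sketch: both inserts commit or roll back together.
// RunInTxCtx is assumed to take a callback whose context carries the
// transaction, which BunDBCtx then picks up.
func CreateThingWithAudit(ctx context.Context, thing *Thing) error {
    return sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
        if _, err := sqlstore.BunDBCtx(ctx).NewInsert().Model(thing).Exec(ctx); err != nil {
            return err
        }

        // newAuditEntry is a hypothetical helper for this example.
        if _, err := sqlstore.BunDBCtx(ctx).NewInsert().Model(newAuditEntry(thing)).Exec(ctx); err != nil {
            return err
        }
        return nil
    })
}
```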
## What are hooks?

Hooks are user-defined functions that execute before and/or after specific database operations. These hooks are particularly useful for generating telemetry data such as logs, traces, and metrics, providing visibility into database interactions. Hooks are defined in the [SQLStoreHook](/pkg/sqlstore/sqlstore.go) interface; a sketch of the underlying Bun mechanism follows.
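Underneath, Bun hooks implement `bun.QueryHook`. The following logging hook is illustrative only; the real SigNoz contract is the `SQLStoreHook` interface in /pkg/sqlstore/sqlstore.go:

```go
package example

import (
    "context"
    "time"

    "github.com/uptrace/bun"
    "go.uber.org/zap"
)

// loggingHook is an illustrative bun.QueryHook that logs each query's
// duration. It tracks the start time via the context.
type loggingHook struct{}

type startTimeKey struct{}

func (loggingHook) BeforeQuery(ctx context.Context, _ *bun.QueryEvent) context.Context {
    return context.WithValue(ctx, startTimeKey{}, time.Now())
}

func (loggingHook) AfterQuery(ctx context.Context, event *bun.QueryEvent) {
    start, _ := ctx.Value(startTimeKey{}).(time.Time)
    zap.L().Debug("sql query",
        zap.String("query", event.Query),
        zap.Duration("took", time.Since(start)),
    )
}

// Registration (illustrative): db.AddQueryHook(loggingHook{})
```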
## How is the schema designed?

SigNoz implements a star schema design with the organizations table as the central entity. All other tables link to the organizations table via foreign key constraints on the `org_id` column. This design ensures that every entity within the system is either directly or indirectly associated with an organization.

```mermaid
erDiagram
    ORGANIZATIONS {
        string id PK
        timestamp created_at
        timestamp updated_at
    }
    ENTITY_A {
        string id PK
        timestamp created_at
        timestamp updated_at
        string org_id FK
    }
    ENTITY_B {
        string id PK
        timestamp created_at
        timestamp updated_at
        string org_id FK
    }

    ORGANIZATIONS ||--o{ ENTITY_A : contains
    ORGANIZATIONS ||--o{ ENTITY_B : contains
```

> 💡 **Note**: There are rare exceptions to the above star schema design. Consult with the maintainers before deviating from it.

All tables follow a consistent primary key pattern using an `id` column (referenced by the `types.Identifiable` struct) and include `created_at` and `updated_at` columns (referenced by the `types.TimeAuditable` struct) for audit purposes.

## How to write migrations?

For schema migrations, use the [SQLMigration](/pkg/sqlmigration/sqlmigration.go) interface and write the migration in the same package. When creating migrations, adhere to these guidelines (a sketch follows the list):

- Do not implement **`ON CASCADE` foreign key constraints**. Deletion operations should be handled explicitly in application logic rather than delegated to the database.
- Do not **import types from the types package** in the `sqlmigration` package. Instead, define the required types within the migration package itself. This practice ensures migration stability as the core types evolve over time.
- Do not implement **`Down` migrations**. As the codebase matures, we may introduce this capability, but for now, the `Down` function should remain empty.
- Always write **idempotent** migrations: if a migration is run multiple times, it should not cause an error.
- A migration that **depends on the underlying dialect** (sqlite, postgres, etc.) should be written as part of the [SQLDialect](/pkg/sqlstore/sqlstore.go) interface, with the implementation in the dialect-specific package of the respective database.
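A rough sketch of a migration following these guidelines. The `addThing` type, the local `thing` struct, and the exact `Up`/`Down` method set are assumptions; consult the real interface in /pkg/sqlmigration/sqlmigration.go:

```go
package sqlmigration

import (
    "context"

    "github.com/uptrace/bun"
)

type addThing struct{}

// thing is redefined locally instead of importing from the types
// package, per the guidelines above.
type thing struct {
    bun.BaseModel `bun:"table:thing"`

    ID    string `bun:"id,pk"`
    OrgID string `bun:"org_id"`
}

// Up is idempotent: IfNotExists makes re-runs a no-op.
func (m *addThing) Up(ctx context.Context, db *bun.DB) error {
    _, err := db.NewCreateTable().
        Model(new(thing)).
        IfNotExists().
        ForeignKey(`("org_id") REFERENCES "organizations" ("id")`).
        Exec(ctx)
    return err
}

// Down intentionally stays empty, per the guidelines above.
func (m *addThing) Down(ctx context.Context, db *bun.DB) error {
    return nil
}
```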
## What should I remember?

- Use `BunDBCtx` and `RunInTxCtx` to access the database instance and execute transactions respectively.
- While designing new tables, ensure the consistency of the `id`, `created_at`, and `updated_at` columns, along with an `org_id` column that has a foreign key constraint to the `organizations` table (unless the table is a transitive entity that is only indirectly associated with an organization).
- Implement deletion logic in the application rather than relying on cascading deletes in the database.
- While writing migrations, adhere to the guidelines mentioned above.

@@ -24,7 +24,7 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		var values []string
 		var patToken string
-		var pat types.StorablePersonalAccessToken
+		var pat types.StorableAPIKey

 		for _, header := range p.headers {
 			values = append(values, r.Header.Get(header))
@@ -47,7 +47,7 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {
 			return
 		}

-		if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
+		if pat.ExpiresAt.Before(time.Now()) {
 			next.ServeHTTP(w, r)
 			return
 		}
@@ -61,10 +61,10 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {
 		}

 		jwt := authtypes.Claims{
-			UserID:  user.ID,
-			GroupID: user.GroupID,
-			Email:   user.Email,
-			OrgID:   user.OrgID,
+			UserID: user.ID.String(),
+			Role:   pat.Role,
+			Email:  user.Email,
+			OrgID:  user.OrgID,
 		}

 		ctx = authtypes.NewContextWithClaims(ctx, jwt)
@@ -73,7 +73,7 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {

 		next.ServeHTTP(w, r)

-		pat.LastUsed = time.Now().Unix()
+		pat.LastUsed = time.Now()
 		_, err = p.store.BunDB().NewUpdate().Model(&pat).Column("last_used").Where("token = ?", patToken).Where("revoked = false").Exec(r.Context())
 		if err != nil {
 			zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))

ee/modules/user/impluser/handler.go (new file, 405 lines)
@@ -0,0 +1,405 @@
package impluser

import (
    "context"
    "encoding/json"
    "net/http"
    "slices"
    "time"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/http/render"
    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/modules/user/impluser"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/gorilla/mux"
)

// EnterpriseHandler embeds the base handler implementation
type Handler struct {
    user.Handler // Embed the base handler interface
    module user.Module
}

func NewHandler(module user.Module) user.Handler {
    baseHandler := impluser.NewHandler(module)
    return &Handler{
        Handler: baseHandler,
        module:  module,
    }
}

func (h *Handler) Login(w http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    var req types.PostableLoginRequest
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        render.Error(w, err)
        return
    }

    if req.RefreshToken == "" {
        // the EE handler wrapper passes the feature flag value in context
        ssoAvailable, ok := ctx.Value(types.SSOAvailable).(bool)
        if !ok {
            render.Error(w, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to retrieve SSO availability"))
            return
        }

        if ssoAvailable {
            _, err := h.module.CanUsePassword(ctx, req.Email)
            if err != nil {
                render.Error(w, err)
                return
            }
        }
    }

    user, err := h.module.GetAuthenticatedUser(ctx, req.OrgID, req.Email, req.Password, req.RefreshToken)
    if err != nil {
        render.Error(w, err)
        return
    }

    jwt, err := h.module.GetJWTForUser(ctx, user)
    if err != nil {
        render.Error(w, err)
        return
    }

    gettableLoginResponse := &types.GettableLoginResponse{
        GettableUserJwt: jwt,
        UserID:          user.ID.String(),
    }

    render.Success(w, http.StatusOK, gettableLoginResponse)
}

// Override only the methods you need with enterprise-specific implementations
func (h *Handler) LoginPrecheck(w http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    // assume user is valid unless proven otherwise and assign default values for rest of the fields
    email := r.URL.Query().Get("email")
    sourceUrl := r.URL.Query().Get("ref")
    orgID := r.URL.Query().Get("orgID")

    resp, err := h.module.LoginPrecheck(ctx, orgID, email, sourceUrl)
    if err != nil {
        render.Error(w, err)
        return
    }

    render.Success(w, http.StatusOK, resp)
}

func (h *Handler) AcceptInvite(w http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    req := new(types.PostableAcceptInvite)
    if err := json.NewDecoder(r.Body).Decode(req); err != nil {
        render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode user"))
        return
    }

    // get invite object
    invite, err := h.module.GetInviteByToken(ctx, req.InviteToken)
    if err != nil {
        render.Error(w, err)
        return
    }

    orgDomain, err := h.module.GetAuthDomainByEmail(ctx, invite.Email)
    if err != nil && !errors.Ast(err, errors.TypeNotFound) {
        render.Error(w, err)
        return
    }

    precheckResp := &types.GettableLoginPrecheck{
        SSO:    false,
        IsUser: false,
    }

    if invite.Name == "" && req.DisplayName != "" {
        invite.Name = req.DisplayName
    }

    user, err := types.NewUser(invite.Name, invite.Email, invite.Role, invite.OrgID)
    if err != nil {
        render.Error(w, err)
        return
    }

    if orgDomain != nil && orgDomain.SsoEnabled {
        // sso is enabled, create user and respond precheck data
        err = h.module.CreateUser(ctx, user)
        if err != nil {
            render.Error(w, err)
            return
        }

        // check if sso is enforced for the org
        precheckResp, err = h.module.LoginPrecheck(ctx, invite.OrgID, user.Email, req.SourceURL)
        if err != nil {
            render.Error(w, err)
            return
        }
    } else {
        password, err := types.NewFactorPassword(req.Password)
        if err != nil {
            render.Error(w, err)
            return
        }

        user, err = h.module.CreateUserWithPassword(ctx, user, password)
        if err != nil {
            render.Error(w, err)
            return
        }

        precheckResp.IsUser = true
    }

    // delete the invite
    if err := h.module.DeleteInvite(ctx, invite.OrgID, invite.ID); err != nil {
        render.Error(w, err)
        return
    }

    render.Success(w, http.StatusOK, precheckResp)
}

func (h *Handler) GetInvite(w http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    token := mux.Vars(r)["token"]
    sourceUrl := r.URL.Query().Get("ref")
    invite, err := h.module.GetInviteByToken(ctx, token)
    if err != nil {
        render.Error(w, err)
        return
    }

    // precheck the user
    precheckResp, err := h.module.LoginPrecheck(ctx, invite.OrgID, invite.Email, sourceUrl)
    if err != nil {
        render.Error(w, err)
        return
    }

    gettableInvite := &types.GettableEEInvite{
        GettableInvite: *invite,
        PreCheck:       precheckResp,
    }

    render.Success(w, http.StatusOK, gettableInvite)
}

func (h *Handler) CreateAPIKey(w http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(w, err)
        return
    }

    userID, err := valuer.NewUUID(claims.UserID)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7"))
        return
    }

    req := new(types.PostableAPIKey)
    if err := json.NewDecoder(r.Body).Decode(req); err != nil {
        render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode api key"))
        return
    }

    apiKey, err := types.NewStorableAPIKey(
        req.Name,
        userID,
        req.Role,
        req.ExpiresInDays,
    )
    if err != nil {
        render.Error(w, err)
        return
    }

    err = h.module.CreateAPIKey(ctx, apiKey)
    if err != nil {
        render.Error(w, err)
        return
    }

    // just corrected the status code, response is same,
    render.Success(w, http.StatusCreated, apiKey)
}

func (h *Handler) ListAPIKeys(w http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(w, err)
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7"))
        return
    }

    apiKeys, err := h.module.ListAPIKeys(ctx, orgID)
    if err != nil {
        render.Error(w, err)
        return
    }

    // for backward compatibility
    if len(apiKeys) == 0 {
        render.Success(w, http.StatusOK, []types.GettableAPIKey{})
        return
    }

    result := make([]*types.GettableAPIKey, len(apiKeys))
    for i, apiKey := range apiKeys {
        result[i] = types.NewGettableAPIKeyFromStorableAPIKey(apiKey)
    }

    render.Success(w, http.StatusOK, result)
}

func (h *Handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(w, err)
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7"))
        return
    }

    userID, err := valuer.NewUUID(claims.UserID)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7"))
        return
    }

    req := types.StorableAPIKey{}
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode api key"))
        return
    }

    idStr := mux.Vars(r)["id"]
    id, err := valuer.NewUUID(idStr)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
        return
    }

    // get the API Key
    existingAPIKey, err := h.module.GetAPIKey(ctx, orgID, id)
    if err != nil {
        render.Error(w, err)
        return
    }

    // get the user
    createdByUser, err := h.module.GetUserByID(ctx, orgID.String(), existingAPIKey.UserID.String())
    if err != nil {
        render.Error(w, err)
        return
    }

    if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
        return
    }

    err = h.module.UpdateAPIKey(ctx, id, &req, userID)
    if err != nil {
        render.Error(w, err)
        return
    }

    render.Success(w, http.StatusNoContent, nil)
}

func (h *Handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) {
    ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
    defer cancel()

    claims, err := authtypes.ClaimsFromContext(ctx)
    if err != nil {
        render.Error(w, err)
        return
    }

    idStr := mux.Vars(r)["id"]
    id, err := valuer.NewUUID(idStr)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7"))
        return
    }

    userID, err := valuer.NewUUID(claims.UserID)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7"))
        return
    }

    // get the API Key
    existingAPIKey, err := h.module.GetAPIKey(ctx, orgID, id)
    if err != nil {
        render.Error(w, err)
        return
    }

    // get the user
    createdByUser, err := h.module.GetUserByID(ctx, orgID.String(), existingAPIKey.UserID.String())
    if err != nil {
        render.Error(w, err)
        return
    }

    if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
        return
    }

    if err := h.module.RevokeAPIKey(ctx, id, userID); err != nil {
        render.Error(w, err)
        return
    }

    render.Success(w, http.StatusNoContent, nil)
}
ee/modules/user/impluser/module.go (new file, 250 lines)
@@ -0,0 +1,250 @@
package impluser

import (
    "context"
    "fmt"
    "net/url"
    "strings"

    "github.com/SigNoz/signoz/ee/query-service/constants"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/modules/user"
    baseimpl "github.com/SigNoz/signoz/pkg/modules/user/impluser"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "go.uber.org/zap"
)

// EnterpriseModule embeds the base module implementation
type Module struct {
    user.Module // Embed the base module implementation
    store types.UserStore
}

func NewModule(store types.UserStore) user.Module {
    baseModule := baseimpl.NewModule(store)
    return &Module{
        Module: baseModule,
        store:  store,
    }
}

func (m *Module) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, error) {
    // get auth domain from email domain
    _, err := m.GetAuthDomainByEmail(ctx, email)
    if err != nil && !errors.Ast(err, errors.TypeNotFound) {
        return nil, err
    }

    // get name from email
    parts := strings.Split(email, "@")
    if len(parts) < 2 {
        return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format")
    }
    name := parts[0]

    defaultOrgID, err := m.store.GetDefaultOrgID(ctx)
    if err != nil {
        return nil, err
    }

    user, err := types.NewUser(name, email, types.RoleViewer.String(), defaultOrgID)
    if err != nil {
        return nil, err
    }

    err = m.CreateUser(ctx, user)
    if err != nil {
        return nil, err
    }

    return user, nil
}

func (m *Module) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (string, error) {
    users, err := m.GetUsersByEmail(ctx, email)
    if err != nil {
        zap.L().Error("failed to get user with email received from auth provider", zap.String("error", err.Error()))
        return "", err
    }
    user := &types.User{}

    if len(users) == 0 {
        newUser, err := m.createUserForSAMLRequest(ctx, email)
        user = newUser
        if err != nil {
            zap.L().Error("failed to create user with email received from auth provider", zap.Error(err))
            return "", err
        }
    } else {
        user = &users[0].User
    }

    tokenStore, err := m.GetJWTForUser(ctx, user)
    if err != nil {
        zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
        return "", err
    }

    return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
        redirectUri,
        tokenStore.AccessJwt,
        user.ID,
        tokenStore.RefreshJwt), nil
}

func (m *Module) CanUsePassword(ctx context.Context, email string) (bool, error) {
    domain, err := m.GetAuthDomainByEmail(ctx, email)
    if err != nil && !errors.Ast(err, errors.TypeNotFound) {
        return false, err
    }

    if domain != nil && domain.SsoEnabled {
        // sso is enabled, check if the user has admin role
        users, err := m.GetUsersByEmail(ctx, email)
        if err != nil {
            return false, err
        }

        if len(users) == 0 {
            return false, errors.New(errors.TypeNotFound, errors.CodeNotFound, "user not found")
        }

        if users[0].Role != types.RoleAdmin.String() {
            return false, errors.New(errors.TypeForbidden, errors.CodeForbidden, "auth method not supported")
        }
    }

    return true, nil
}

func (m *Module) LoginPrecheck(ctx context.Context, orgID, email, sourceUrl string) (*types.GettableLoginPrecheck, error) {
    resp := &types.GettableLoginPrecheck{IsUser: true, CanSelfRegister: false}

    // check if email is a valid user
    users, err := m.GetUsersByEmail(ctx, email)
    if err != nil {
        return nil, err
    }

    if len(users) == 0 {
        resp.IsUser = false
    }

    // give them an option to select an org
    if orgID == "" && len(users) > 1 {
        resp.SelectOrg = true
        resp.Orgs = make([]string, len(users))
        for i, user := range users {
            resp.Orgs[i] = user.OrgID
        }
        return resp, nil
    }

    // select the user with the corresponding orgID
    if len(users) > 1 {
        found := false
        for _, tuser := range users {
            if tuser.OrgID == orgID {
                // user = tuser
                found = true
                break
            }
        }
        if !found {
            resp.IsUser = false
            return resp, nil
        }
    }

    // the EE handler wrapper passes the feature flag value in context
    ssoAvailable, ok := ctx.Value(types.SSOAvailable).(bool)
    if !ok {
        zap.L().Error("failed to retrieve ssoAvailable from context")
        return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to retrieve SSO availability")
    }

    if ssoAvailable {
        // TODO(Nitya): in multitenancy this should use orgId as well.
        orgDomain, err := m.GetAuthDomainByEmail(ctx, email)
        if err != nil && !errors.Ast(err, errors.TypeNotFound) {
            return nil, err
        }

        if orgDomain != nil && orgDomain.SsoEnabled {
            // this is to allow self registration
            resp.IsUser = true

            // saml is enabled for this domain, lets prepare sso url
            if sourceUrl == "" {
                sourceUrl = constants.GetDefaultSiteURL()
            }

            // parse source url that generated the login request
            var err error
            escapedUrl, _ := url.QueryUnescape(sourceUrl)
            siteUrl, err := url.Parse(escapedUrl)
            if err != nil {
                return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse referer")
            }

            // build IdP URL that will authenticate the user;
            // the front-end will redirect user to this url
            resp.SSOUrl, err = orgDomain.BuildSsoUrl(siteUrl)
            if err != nil {
                zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
                return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to prepare saml request for domain")
            }

            // set SSO to true, as the url is generated correctly
            resp.SSO = true
        }
    }
    return resp, nil
}

func (m *Module) GetAuthDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, error) {
    if email == "" {
        return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
    }

    components := strings.Split(email, "@")
    if len(components) < 2 {
        return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format")
    }

    domain, err := m.store.GetDomainByName(ctx, components[1])
    if err != nil {
        return nil, err
    }

    gettableDomain := &types.GettableOrgDomain{StorableOrgDomain: *domain}
    if err := gettableDomain.LoadConfig(domain.Data); err != nil {
        return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to load domain config")
    }
    return gettableDomain, nil
}

func (m *Module) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error {
    return m.store.CreateAPIKey(ctx, apiKey)
}

func (m *Module) UpdateAPIKey(ctx context.Context, id valuer.UUID, apiKey *types.StorableAPIKey, updaterID valuer.UUID) error {
    return m.store.UpdateAPIKey(ctx, id, apiKey, updaterID)
}

func (m *Module) ListAPIKeys(ctx context.Context, orgID valuer.UUID) ([]*types.StorableAPIKeyUser, error) {
    return m.store.ListAPIKeys(ctx, orgID)
}

func (m *Module) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*types.StorableAPIKeyUser, error) {
    return m.store.GetAPIKey(ctx, orgID, id)
}

func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UUID) error {
    return m.store.RevokeAPIKey(ctx, id, removedByUserID)
}
ee/modules/user/impluser/store.go (new file, 37 lines)
@@ -0,0 +1,37 @@
package impluser

import (
    "context"

    "github.com/SigNoz/signoz/pkg/errors"
    baseimpl "github.com/SigNoz/signoz/pkg/modules/user/impluser"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
)

type store struct {
    *baseimpl.Store
    sqlstore sqlstore.SQLStore
}

func NewStore(sqlstore sqlstore.SQLStore) types.UserStore {
    baseStore := baseimpl.NewStore(sqlstore).(*baseimpl.Store)
    return &store{
        Store:    baseStore,
        sqlstore: sqlstore,
    }
}

func (s *store) GetDomainByName(ctx context.Context, name string) (*types.StorableOrgDomain, error) {
    domain := new(types.StorableOrgDomain)
    err := s.sqlstore.BunDB().NewSelect().
        Model(domain).
        Where("name = ?", name).
        Limit(1).
        Scan(ctx)

    if err != nil {
        return nil, errors.Wrapf(err, errors.TypeNotFound, errors.CodeNotFound, "failed to get domain from name")
    }
    return domain, nil
}
@@ -35,6 +35,8 @@ builds:
       - -X github.com/SigNoz/signoz/pkg/version.hash={{ .ShortCommit }}
       - -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }}
       - -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
+      - -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
+      - -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
       - -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
       - -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
       - >-

@@ -18,4 +18,4 @@ COPY frontend/build/ /etc/signoz/web/
 RUN chmod 755 /root /root/signoz

 ENTRYPOINT ["./signoz"]
-CMD ["-config", "/root/config/prometheus.yml"]
+CMD ["-config", "/root/config/prometheus.yml"]
ee/query-service/Dockerfile.integration (new file, 36 lines)
@@ -0,0 +1,36 @@
FROM golang:1.23-bullseye

ARG OS="linux"
ARG TARGETARCH
ARG ZEUSURL

# This path is important for stacktraces
WORKDIR $GOPATH/src/github.com/signoz/signoz
WORKDIR /root

RUN set -eux; \
    apt-get update; \
    apt-get install -y --no-install-recommends \
        g++ \
        gcc \
        libc6-dev \
        make \
        pkg-config \
    ; \
    rm -rf /var/lib/apt/lists/*

COPY go.mod go.sum ./

RUN go mod download

COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates

COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["/root/signoz"]
ee/query-service/Dockerfile.multi-arch (new file, 22 lines)
@@ -0,0 +1,22 @@
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"

FROM alpine@sha256:${ALPINE_SHA}
LABEL maintainer="signoz"
WORKDIR /root

ARG OS="linux"
ARG ARCH

RUN apk update && \
    apk add ca-certificates && \
    rm -rf /var/cache/apk/*

COPY ./target/${OS}-${ARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]
@@ -5,6 +5,7 @@ import (

 	querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
 	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
+	"github.com/SigNoz/signoz/pkg/valuer"
 )

 type DailyProvider struct {
@@ -28,17 +29,16 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
 	}

 	dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:        dp.reader,
-		Cache:         dp.cache,
-		KeyGenerator:  queryBuilder.NewKeyGenerator(),
-		FluxInterval:  dp.fluxInterval,
-		FeatureLookup: dp.ff,
+		Reader:       dp.reader,
+		Cache:        dp.cache,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+		FluxInterval: dp.fluxInterval,
 	})

 	return dp
 }

-func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
+func (p *DailyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
 	req.Seasonality = SeasonalityDaily
-	return p.getAnomalies(ctx, req)
+	return p.getAnomalies(ctx, orgID, req)
 }

@@ -5,6 +5,7 @@ import (

 	querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
 	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
+	"github.com/SigNoz/signoz/pkg/valuer"
 )

 type HourlyProvider struct {
@@ -28,17 +29,16 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
 	}

 	hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:        hp.reader,
-		Cache:         hp.cache,
-		KeyGenerator:  queryBuilder.NewKeyGenerator(),
-		FluxInterval:  hp.fluxInterval,
-		FeatureLookup: hp.ff,
+		Reader:       hp.reader,
+		Cache:        hp.cache,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+		FluxInterval: hp.fluxInterval,
 	})

 	return hp
 }

-func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
+func (p *HourlyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
 	req.Seasonality = SeasonalityHourly
-	return p.getAnomalies(ctx, req)
+	return p.getAnomalies(ctx, orgID, req)
 }

@@ -2,8 +2,10 @@ package anomaly

 import (
 	"context"
+
+	"github.com/SigNoz/signoz/pkg/valuer"
 )

 type Provider interface {
-	GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
+	GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
 }

@@ -5,11 +5,12 @@ import (
 	"math"
 	"time"

-	"github.com/SigNoz/signoz/pkg/query-service/cache"
+	"github.com/SigNoz/signoz/pkg/cache"
 	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/postprocess"
 	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
+	"github.com/SigNoz/signoz/pkg/valuer"
 	"go.uber.org/zap"
 )

@@ -38,12 +39,6 @@ func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericPr
 	}
 }

-func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
-	return func(p T) {
-		p.GetBaseSeasonalProvider().ff = ff
-	}
-}
-
 func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
 	return func(p T) {
 		p.GetBaseSeasonalProvider().reader = reader
@@ -56,7 +51,6 @@ type BaseSeasonalProvider struct {
 	fluxInterval time.Duration
 	cache        cache.Cache
 	keyGenerator cache.KeyGenerator
-	ff           interfaces.FeatureLookup
 }

 func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomaly
@@ -66,9 +60,9 @@ func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomaly
 	return prepareAnomalyQueryParams(req.Params, req.Seasonality)
 }

-func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
+func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID, params *anomalyQueryParams) (*anomalyQueryResults, error) {
 	zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
-	currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery)
+	currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery)
 	if err != nil {
 		return nil, err
 	}
@@ -79,7 +73,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
 	}

 	zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
-	pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery)
+	pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery)
 	if err != nil {
 		return nil, err
 	}
@@ -90,7 +84,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
 	}

 	zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
-	currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery)
+	currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery)
 	if err != nil {
 		return nil, err
 	}
@@ -101,7 +95,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
 	}

 	zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
-	pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery)
+	pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery)
 	if err != nil {
 		return nil, err
 	}
@@ -112,7 +106,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
 	}

 	zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
-	past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past2SeasonQuery)
+	past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery)
 	if err != nil {
 		return nil, err
 	}
@@ -123,7 +117,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
 	}

 	zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
-	past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past3SeasonQuery)
+	past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery)
 	if err != nil {
 		return nil, err
 	}
@@ -342,9 +336,9 @@ func (p *BaseSeasonalProvider) getAnomalyScores(
 	return anomalyScoreSeries
 }

-func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
+func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
 	anomalyParams := p.getQueryParams(req)
-	anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
+	anomalyQueryResults, err := p.getResults(ctx, orgID, anomalyParams)
 	if err != nil {
 		return nil, err
 	}

@@ -5,6 +5,7 @@ import (

 	querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
 	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
+	"github.com/SigNoz/signoz/pkg/valuer"
 )

 type WeeklyProvider struct {
@@ -27,17 +28,16 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
 	}

 	wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:        wp.reader,
-		Cache:         wp.cache,
-		KeyGenerator:  queryBuilder.NewKeyGenerator(),
-		FluxInterval:  wp.fluxInterval,
-		FeatureLookup: wp.ff,
+		Reader:       wp.reader,
+		Cache:        wp.cache,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+		FluxInterval: wp.fluxInterval,
 	})

 	return wp
 }

-func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
+func (p *WeeklyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
 	req.Seasonality = SeasonalityWeekly
-	return p.getAnomalies(ctx, req)
+	return p.getAnomalies(ctx, orgID, req)
 }
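For callers the visible change is twofold: GetAnomalies now takes the org UUID, and the provider drops its feature-lookup option. A hedged usage sketch, assuming Params is the v3 query-range params type that queryRangeV4 parses (that pairing is visible in a later hunk):

package sketch

import (
	"context"

	"github.com/SigNoz/signoz/ee/query-service/anomaly"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// weeklyAnomalies shows the new call shape: the org is an explicit argument
// rather than something recovered from the request context downstream.
func weeklyAnomalies(ctx context.Context, p *anomaly.WeeklyProvider, orgID valuer.UUID, params *v3.QueryRangeParamsV3) (*anomaly.GetAnomaliesResponse, error) {
	return p.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{Params: params})
}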
@@ -1,6 +1,7 @@
 package api

 import (
 	"context"
 	"net/http"
 	"net/http/httputil"
 	"time"
@@ -9,25 +10,32 @@ import (
 	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
 	"github.com/SigNoz/signoz/ee/query-service/interfaces"
 	"github.com/SigNoz/signoz/ee/query-service/license"
 	"github.com/SigNoz/signoz/ee/query-service/model"
 	"github.com/SigNoz/signoz/ee/query-service/usage"
 	"github.com/SigNoz/signoz/pkg/alertmanager"
 	"github.com/SigNoz/signoz/pkg/apis/fields"
+	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/http/middleware"
+	"github.com/SigNoz/signoz/pkg/http/render"
+	"github.com/SigNoz/signoz/pkg/modules/quickfilter"
+	quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core"
 	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
 	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
 	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
 	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
 	"github.com/SigNoz/signoz/pkg/query-service/cache"
 	baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
 	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
 	rules "github.com/SigNoz/signoz/pkg/query-service/rules"
 	"github.com/SigNoz/signoz/pkg/signoz"
+	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	"github.com/SigNoz/signoz/pkg/version"
 	"github.com/gorilla/mux"
 	"go.uber.org/zap"
 )

 type APIHandlerOptions struct {
 	DataConnector     interfaces.DataConnector
 	SkipConfig        *basemodel.SkipConfig
 	PreferSpanMetrics bool
 	AppDao            dao.ModelDao
 	RulesManager      *rules.Manager
@@ -37,7 +45,6 @@ type APIHandlerOptions struct {
 	IntegrationsController        *integrations.Controller
 	CloudIntegrationsController   *cloudintegrations.Controller
 	LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
-	Cache                         cache.Cache
 	Gateway                       *httputil.ReverseProxy
 	GatewayUrl                    string
 	// Querier Influx Interval
@@ -54,23 +61,22 @@ type APIHandler struct {

 // NewAPIHandler returns an APIHandler
 func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {

+	quickfiltermodule := quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(signoz.SQLStore))
+	quickFilter := quickfilter.NewAPI(quickfiltermodule)
 	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
 		Reader:                        opts.DataConnector,
 		SkipConfig:                    opts.SkipConfig,
 		PreferSpanMetrics:             opts.PreferSpanMetrics,
 		AppDao:                        opts.AppDao,
 		RuleManager:                   opts.RulesManager,
 		FeatureFlags:                  opts.FeatureFlags,
 		IntegrationsController:        opts.IntegrationsController,
 		CloudIntegrationsController:   opts.CloudIntegrationsController,
 		LogsParsingPipelineController: opts.LogsParsingPipelineController,
-		Cache:                         opts.Cache,
 		FluxInterval:                  opts.FluxInterval,
-		UseLogsNewSchema:              opts.UseLogsNewSchema,
-		UseTraceNewSchema:             opts.UseTraceNewSchema,
 		AlertmanagerAPI:               alertmanager.NewAPI(signoz.Alertmanager),
 		FieldsAPI:                     fields.NewAPI(signoz.TelemetryStore),
 		Signoz:                        signoz,
+		QuickFilters:                  quickFilter,
+		QuickFilterModule:             quickfiltermodule,
 	})

 	if err != nil {
@@ -114,56 +120,36 @@ func (ah *APIHandler) CheckFeature(f string) bool {
 }

 // RegisterRoutes registers routes for this handler on the given router
-func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddleware) {
+func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
 	// note: add ee override methods first

 	// routes available only in ee version

-	router.HandleFunc("/api/v1/featureFlags",
-		am.OpenAccess(ah.getFeatureFlags)).
-		Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/featureFlags", am.OpenAccess(ah.getFeatureFlags)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/loginPrecheck", am.OpenAccess(ah.loginPrecheck)).Methods(http.MethodGet)

-	router.HandleFunc("/api/v1/loginPrecheck",
-		am.OpenAccess(ah.precheckLogin)).
-		Methods(http.MethodGet)
+	// invite
+	router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/invite/accept", am.OpenAccess(ah.acceptInvite)).Methods(http.MethodPost)

 	// paid plans specific routes
-	router.HandleFunc("/api/v1/complete/saml",
-		am.OpenAccess(ah.receiveSAML)).
-		Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/complete/google", am.OpenAccess(ah.receiveGoogleAuth)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/orgs/{orgId}/domains", am.AdminAccess(ah.listDomainsByOrg)).Methods(http.MethodGet)

-	router.HandleFunc("/api/v1/complete/google",
-		am.OpenAccess(ah.receiveGoogleAuth)).
-		Methods(http.MethodGet)
-
-	router.HandleFunc("/api/v1/orgs/{orgId}/domains",
-		am.AdminAccess(ah.listDomainsByOrg)).
-		Methods(http.MethodGet)
-
-	router.HandleFunc("/api/v1/domains",
-		am.AdminAccess(ah.postDomain)).
-		Methods(http.MethodPost)
-
-	router.HandleFunc("/api/v1/domains/{id}",
-		am.AdminAccess(ah.putDomain)).
-		Methods(http.MethodPut)
-
-	router.HandleFunc("/api/v1/domains/{id}",
-		am.AdminAccess(ah.deleteDomain)).
-		Methods(http.MethodDelete)
+	router.HandleFunc("/api/v1/domains", am.AdminAccess(ah.postDomain)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(ah.putDomain)).Methods(http.MethodPut)
+	router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(ah.deleteDomain)).Methods(http.MethodDelete)

 	// base overrides
 	router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)
-	router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
 	router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
-	router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)

 	// PAT APIs
-	router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
-	router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.getPATs)).Methods(http.MethodGet)
-	router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.updatePAT)).Methods(http.MethodPut)
-	router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.revokePAT)).Methods(http.MethodDelete)
+	router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.Signoz.Handlers.User.CreateAPIKey)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.Signoz.Handlers.User.ListAPIKeys)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.Signoz.Handlers.User.UpdateAPIKey)).Methods(http.MethodPut)
+	router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.Signoz.Handlers.User.RevokeAPIKey)).Methods(http.MethodDelete)

 	router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
@@ -188,7 +174,55 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew

 }

-func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *baseapp.AuthMiddleware) {
+// TODO(nitya): remove this once we know how to get the FF's
+func (ah *APIHandler) updateRequestContext(w http.ResponseWriter, r *http.Request) (*http.Request, error) {
+	ssoAvailable := true
+	err := ah.FF().CheckFeature(model.SSO)
+	if err != nil {
+		switch err.(type) {
+		case basemodel.ErrFeatureUnavailable:
+			// do nothing, just skip sso
+			ssoAvailable = false
+		default:
+			zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
+			return r, errors.New(errors.TypeInternal, errors.CodeInternal, "error checking SSO feature")
+		}
+	}
+	ctx := context.WithValue(r.Context(), types.SSOAvailable, ssoAvailable)
+	return r.WithContext(ctx), nil
+}
+
+func (ah *APIHandler) loginPrecheck(w http.ResponseWriter, r *http.Request) {
+	r, err := ah.updateRequestContext(w, r)
+	if err != nil {
+		render.Error(w, err)
+		return
+	}
+	ah.Signoz.Handlers.User.LoginPrecheck(w, r)
+	return
+}
+
+func (ah *APIHandler) acceptInvite(w http.ResponseWriter, r *http.Request) {
+	r, err := ah.updateRequestContext(w, r)
+	if err != nil {
+		render.Error(w, err)
+		return
+	}
+	ah.Signoz.Handlers.User.AcceptInvite(w, r)
+	return
+}
+
+func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
+	r, err := ah.updateRequestContext(w, r)
+	if err != nil {
+		render.Error(w, err)
+		return
+	}
+	ah.Signoz.Handlers.User.GetInvite(w, r)
+	return
+}
+
+func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {

 	ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am)
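The new EE handlers above share one idea: updateRequestContext decides SSO availability once, stores it on the request context under types.SSOAvailable, and then delegates to the shared user handlers. A small sketch of how a downstream consumer might read that flag back (the bool type assertion is an assumption based on how the value is set above):

package sketch

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/types"
)

// ssoAvailable reads the flag stashed by updateRequestContext; the false
// return covers both "not set" and "set to false".
func ssoAvailable(r *http.Request) bool {
	v, ok := r.Context().Value(types.SSOAvailable).(bool)
	return ok && v
}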
@@ -9,13 +9,11 @@ import (
 	"net/http"
 	"net/url"

 	"github.com/gorilla/mux"
 	"go.uber.org/zap"

 	"github.com/SigNoz/signoz/ee/query-service/constants"
 	"github.com/SigNoz/signoz/ee/query-service/model"
-	baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
-	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
+	"github.com/SigNoz/signoz/pkg/http/render"
 )

 func parseRequest(r *http.Request, req interface{}) error {
@@ -31,162 +29,13 @@ func parseRequest(r *http.Request, req interface{}) error {

 // loginUser overrides base handler and considers SSO case.
 func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
-	req := basemodel.LoginRequest{}
-	err := parseRequest(r, &req)
+	r, err := ah.updateRequestContext(w, r)
 	if err != nil {
-		RespondError(w, model.BadRequest(err), nil)
+		render.Error(w, err)
 		return
 	}
-
-	ctx := context.Background()
-
-	if req.Email != "" && ah.CheckFeature(model.SSO) {
-		var apierr basemodel.BaseApiError
-		_, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
-		if apierr != nil && !apierr.IsNil() {
-			RespondError(w, apierr, nil)
-		}
-	}
-
-	// if all looks good, call auth
-	resp, err := baseauth.Login(ctx, &req, ah.opts.JWT)
-	if ah.HandleError(w, err, http.StatusUnauthorized) {
-		return
-	}
-
-	ah.WriteJSON(w, r, resp)
-}
-
-// registerUser registers a user and responds with a precheck
-// so the front-end can decide the login method
-func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
-
-	if !ah.CheckFeature(model.SSO) {
-		ah.APIHandler.Register(w, r)
-		return
-	}
-
-	ctx := context.Background()
-	var req *baseauth.RegisterRequest
-
-	defer r.Body.Close()
-	requestBody, err := io.ReadAll(r.Body)
-	if err != nil {
-		zap.L().Error("received no input in api", zap.Error(err))
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-
-	err = json.Unmarshal(requestBody, &req)
-
-	if err != nil {
-		zap.L().Error("received invalid user registration request", zap.Error(err))
-		RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
-		return
-	}
-
-	// get invite object
-	invite, err := baseauth.ValidateInvite(ctx, req)
-	if err != nil {
-		zap.L().Error("failed to validate invite token", zap.Error(err))
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-
-	if invite == nil {
-		zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
-		RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
-		return
-	}
-
-	// get auth domain from email domain
-	domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
-	if apierr != nil {
-		zap.L().Error("failed to get domain from email", zap.Error(apierr))
-		RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
-	}
-
-	precheckResp := &basemodel.PrecheckResponse{
-		SSO:    false,
-		IsUser: false,
-	}
-
-	if domain != nil && domain.SsoEnabled {
-		// sso is enabled, create user and respond precheck data
-		user, apierr := baseauth.RegisterInvitedUser(ctx, req, true)
-		if apierr != nil {
-			RespondError(w, apierr, nil)
-			return
-		}
-
-		var precheckError basemodel.BaseApiError
-
-		precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
-		if precheckError != nil {
-			RespondError(w, precheckError, precheckResp)
-		}
-
-	} else {
-		// no-sso, validate password
-		if err := baseauth.ValidatePassword(req.Password); err != nil {
-			RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
-			return
-		}
-
-		_, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager)
-		if !registerError.IsNil() {
-			RespondError(w, apierr, nil)
-			return
-		}
-
-		precheckResp.IsUser = true
-	}
-
-	ah.Respond(w, precheckResp)
-}
-
-// getInvite returns the invite object details for the given invite token. We do not need to
-// protect this API because invite token itself is meant to be private.
-func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
-	token := mux.Vars(r)["token"]
-	sourceUrl := r.URL.Query().Get("ref")
-	ctx := context.Background()
-
-	inviteObject, err := baseauth.GetInvite(context.Background(), token)
-	if err != nil {
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-
-	resp := model.GettableInvitation{
-		InvitationResponseObject: inviteObject,
-	}
-
-	precheck, apierr := ah.AppDao().PrecheckLogin(ctx, inviteObject.Email, sourceUrl)
-	resp.Precheck = precheck
-
-	if apierr != nil {
-		RespondError(w, apierr, resp)
-	}
-
-	ah.WriteJSON(w, r, resp)
-}
-
-// PrecheckLogin enables browser login page to display appropriate
-// login methods
-func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
-	ctx := context.Background()
-
-	email := r.URL.Query().Get("email")
-	sourceUrl := r.URL.Query().Get("ref")
-
-	resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl)
-	if apierr != nil {
-		RespondError(w, apierr, resp)
-	}
-
-	ah.Respond(w, resp)
+	ah.Signoz.Handlers.User.Login(w, r)
+	return
 }

 func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
@@ -253,7 +102,7 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
 		return
 	}

-	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT)
+	nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT)
 	if err != nil {
 		zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
@@ -331,7 +180,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
+	nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
 	if err != nil {
 		zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
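loginUser shrinks to the same enrich-then-delegate shape as the other EE overrides. A generic sketch of that wrapper pattern (names are illustrative, not repo API):

package sketch

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/render"
)

// withEnrichedContext chains a context-enriching step in front of a shared
// handler, mirroring how loginUser now wraps Signoz.Handlers.User.Login.
func withEnrichedContext(
	enrich func(http.ResponseWriter, *http.Request) (*http.Request, error),
	next http.HandlerFunc,
) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		r, err := enrich(w, r)
		if err != nil {
			render.Error(w, err)
			return
		}
		next(w, r)
	}
}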
@@ -11,12 +11,12 @@ import (
 	"time"

 	"github.com/SigNoz/signoz/ee/query-service/constants"
 	"github.com/SigNoz/signoz/ee/query-service/model"
-	"github.com/SigNoz/signoz/pkg/query-service/auth"
-	baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
-	"github.com/SigNoz/signoz/pkg/query-service/dao"
+	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/http/render"
 	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
 	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/google/uuid"
 	"github.com/gorilla/mux"
 	"go.uber.org/zap"
@@ -30,6 +30,12 @@ type CloudIntegrationConnectionParamsResponse struct {
 }

 func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseWriter, r *http.Request) {
+	claims, err := authtypes.ClaimsFromContext(r.Context())
+	if err != nil {
+		render.Error(w, err)
+		return
+	}
+
 	cloudProvider := mux.Vars(r)["cloudProvider"]
 	if cloudProvider != "aws" {
 		RespondError(w, basemodel.BadRequest(fmt.Errorf(
@@ -38,15 +44,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
 		return
 	}

-	currentUser, err := auth.GetUserFromReqContext(r.Context())
-	if err != nil {
-		RespondError(w, basemodel.UnauthorizedError(fmt.Errorf(
-			"couldn't deduce current user: %w", err,
-		)), nil)
-		return
-	}
-
-	apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), currentUser.OrgID, cloudProvider)
+	apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
 	if apiErr != nil {
 		RespondError(w, basemodel.WrapApiError(
 			apiErr, "couldn't provision PAT for cloud integration:",
@@ -118,7 +116,14 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
 		return "", apiErr
 	}

-	allPats, err := ah.AppDao().ListPATs(ctx, orgId)
+	orgIdUUID, err := valuer.NewUUID(orgId)
+	if err != nil {
+		return "", basemodel.InternalError(fmt.Errorf(
+			"couldn't parse orgId: %w", err,
+		))
+	}
+
+	allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
 	if err != nil {
 		return "", basemodel.InternalError(fmt.Errorf(
 			"couldn't list PATs: %w", err,
@@ -135,36 +140,36 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
 		zap.String("cloudProvider", cloudProvider),
 	)

-	newPAT := model.PAT{
-		StorablePersonalAccessToken: types.StorablePersonalAccessToken{
-			Token:     generatePATToken(),
-			UserID:    integrationUser.ID,
-			Name:      integrationPATName,
-			Role:      baseconstants.ViewerGroup,
-			ExpiresAt: 0,
-			TimeAuditable: types.TimeAuditable{
-				CreatedAt: time.Now(),
-				UpdatedAt: time.Now(),
-			},
-		},
-	}
-	integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
+	newPAT, err := types.NewStorableAPIKey(
+		integrationPATName,
+		integrationUser.ID,
+		types.RoleViewer,
+		0,
+	)
 	if err != nil {
 		return "", basemodel.InternalError(fmt.Errorf(
 			"couldn't create cloud integration PAT: %w", err,
 		))
 	}
-	return integrationPAT.Token, nil
+
+	err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
+	if err != nil {
+		return "", basemodel.InternalError(fmt.Errorf(
+			"couldn't create cloud integration PAT: %w", err,
+		))
+	}
+	return newPAT.Token, nil
 }

 func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 	ctx context.Context, orgId string, cloudProvider string,
 ) (*types.User, *basemodel.ApiError) {
-	cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
+	cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
+	email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)

-	integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
-	if apiErr != nil {
-		return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
+	integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email)
+	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
+		return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err))
 	}

 	if integrationUserResult != nil {
@@ -176,33 +181,18 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 		zap.String("cloudProvider", cloudProvider),
 	)

-	newUser := &types.User{
-		ID:    cloudIntegrationUserId,
-		Name:  fmt.Sprintf("%s integration", cloudProvider),
-		Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
-		TimeAuditable: types.TimeAuditable{
-			CreatedAt: time.Now(),
-		},
-		OrgID: orgId,
-	}
-
-	viewerGroup, apiErr := dao.DB().GetGroupByName(ctx, baseconstants.ViewerGroup)
-	if apiErr != nil {
-		return nil, basemodel.WrapApiError(apiErr, "couldn't get viewer group for creating integration user")
-	}
-	newUser.GroupID = viewerGroup.ID
-
-	passwordHash, err := auth.PasswordHash(uuid.NewString())
+	newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId)
 	if err != nil {
 		return nil, basemodel.InternalError(fmt.Errorf(
-			"couldn't hash random password for cloud integration user: %w", err,
+			"couldn't create cloud integration user: %w", err,
 		))
 	}
-	newUser.Password = passwordHash

-	integrationUser, apiErr := ah.AppDao().CreateUser(ctx, newUser, false)
-	if apiErr != nil {
-		return nil, basemodel.WrapApiError(apiErr, "couldn't create cloud integration user")
+	password, err := types.NewFactorPassword(uuid.NewString())
+
+	integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password)
+	if err != nil {
+		return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
 	}

 	return integrationUser, nil
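The PAT provisioning rewrite replaces the hand-rolled token helper (generatePATToken, deleted further down) with types.NewStorableAPIKey, which owns token minting and timestamps. A self-contained sketch of what such a constructor centralizes (field names here are illustrative, not the repo's actual schema):

package sketch

import (
	"crypto/rand"
	"encoding/base64"
	"time"
)

// apiKey is a stand-in for types.StorableAPIKey.
type apiKey struct {
	Name      string
	Token     string
	CreatedAt time.Time
}

// newAPIKey mints the random token inside the constructor, so callers can
// no longer forget it or roll their own; that is the bug class the old
// generatePATToken plus struct-literal approach invited.
func newAPIKey(name string) (*apiKey, error) {
	buf := make([]byte, 32)
	if _, err := rand.Read(buf); err != nil {
		return nil, err
	}
	return &apiKey{
		Name:      name,
		Token:     base64.StdEncoding.EncodeToString(buf),
		CreatedAt: time.Now(),
	}, nil
}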
@@ -6,8 +6,6 @@ import (

 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/http/render"
-	"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
-	"github.com/SigNoz/signoz/pkg/query-service/auth"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	"github.com/gorilla/mux"
 )
@@ -36,26 +34,27 @@ func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request
 		return
 	}

-	claims, ok := authtypes.ClaimsFromContext(r.Context())
-	if !ok {
+	claims, err := authtypes.ClaimsFromContext(r.Context())
+	if err != nil {
 		render.Error(w, errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated"))
 		return
 	}
-	dashboard, err := dashboards.GetDashboard(r.Context(), claims.OrgID, uuid)
+
+	dashboard, err := ah.Signoz.Modules.Dashboard.Get(r.Context(), claims.OrgID, uuid)
 	if err != nil {
-		render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get dashboard"))
+		render.Error(w, err)
 		return
 	}

-	if !auth.IsAdminV2(claims) && (dashboard.CreatedBy != claims.Email) {
+	if err := claims.IsAdmin(); err != nil && (dashboard.CreatedBy != claims.Email) {
 		render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "You are not authorized to lock/unlock this dashboard"))
 		return
 	}

 	// Lock/Unlock the dashboard
-	err = dashboards.LockUnlockDashboard(r.Context(), claims.OrgID, uuid, lock)
+	err = ah.Signoz.Modules.Dashboard.LockUnlock(r.Context(), claims.OrgID, uuid, lock)
 	if err != nil {
-		render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to lock/unlock dashboard"))
+		render.Error(w, err)
 		return
 	}
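The dashboard lock check swaps a boolean helper (auth.IsAdminV2) for an error-returning claims.IsAdmin. A compact sketch of the resulting rule, admins may lock any dashboard and others only their own (stand-in types; "err == nil means admin" is the convention carried over from the hunk):

package sketch

import "errors"

// claims stands in for authtypes.Claims with just the fields the check uses.
type claims struct {
	Role  string
	Email string
}

// IsAdmin mirrors the error-returning shape from the diff.
func (c claims) IsAdmin() error {
	if c.Role != "ADMIN" {
		return errors.New("not an admin")
	}
	return nil
}

// canLockUnlock reproduces the authorization rule from lockUnlockDashboard.
func canLockUnlock(c claims, createdBy string) bool {
	if err := c.IsAdmin(); err != nil && createdBy != c.Email {
		return false
	}
	return true
}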
@@ -7,7 +7,7 @@ import (
 	"net/http"

 	"github.com/SigNoz/signoz/ee/query-service/model"
-	"github.com/SigNoz/signoz/ee/types"
+	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/google/uuid"
 	"github.com/gorilla/mux"
 )
@@ -9,6 +9,8 @@ import (
 	"github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
 	"github.com/SigNoz/signoz/ee/query-service/model"
+	"github.com/SigNoz/signoz/pkg/http/render"
+	"github.com/SigNoz/signoz/pkg/query-service/telemetry"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
 )

 type DayWiseBreakdown struct {
@@ -90,8 +92,13 @@ func (ah *APIHandler) getActiveLicenseV3(w http.ResponseWriter, r *http.Request)

 // this function is called by zeus when inserting licenses in the query-service
 func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
-	var licenseKey ApplyLicenseRequest
+	claims, err := authtypes.ClaimsFromContext(r.Context())
+	if err != nil {
+		render.Error(w, err)
+		return
+	}
+
+	var licenseKey ApplyLicenseRequest
 	if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil {
 		RespondError(w, model.BadRequest(err), nil)
 		return
@@ -102,9 +109,10 @@ func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	_, apiError := ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
-	if apiError != nil {
-		RespondError(w, apiError, nil)
+	_, err = ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
+	if err != nil {
+		telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, map[string]interface{}{"err": err.Error()}, claims.Email, true, false)
+		render.Error(w, err)
 		return
 	}

@@ -112,10 +120,9 @@
 }

 func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) {
-
-	apiError := ah.LM().RefreshLicense(r.Context())
-	if apiError != nil {
-		RespondError(w, apiError, nil)
+	err := ah.LM().RefreshLicense(r.Context())
+	if err != nil {
+		render.Error(w, err)
 		return
 	}

@@ -127,7 +134,6 @@ func getCheckoutPortalResponse(redirectURL string) *Redirect {
 }

 func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
-
 	checkoutRequest := &model.CheckoutRequest{}
 	if err := json.NewDecoder(r.Body).Decode(checkoutRequest); err != nil {
 		RespondError(w, model.BadRequest(err), nil)
@@ -140,9 +146,9 @@ func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key)
+	redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key, ah.Signoz.Zeus)
 	if err != nil {
-		RespondError(w, err, nil)
+		render.Error(w, err)
 		return
 	}

@@ -230,7 +236,6 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
 }

 func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
-
 	portalRequest := &model.PortalRequest{}
 	if err := json.NewDecoder(r.Body).Decode(portalRequest); err != nil {
 		RespondError(w, model.BadRequest(err), nil)
@@ -243,9 +248,9 @@ func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key)
+	redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key, ah.Signoz.Zeus)
 	if err != nil {
-		RespondError(w, err, nil)
+		render.Error(w, err)
 		return
 	}
@@ -1,168 +0,0 @@
-package api
-
-import (
-	"context"
-	"crypto/rand"
-	"encoding/base64"
-	"encoding/json"
-	"fmt"
-	"net/http"
-	"time"
-
-	"github.com/SigNoz/signoz/ee/query-service/model"
-	"github.com/SigNoz/signoz/pkg/query-service/auth"
-	baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
-	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
-	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/gorilla/mux"
-	"go.uber.org/zap"
-)
-
-func generatePATToken() string {
-	// Generate a 32-byte random token.
-	token := make([]byte, 32)
-	rand.Read(token)
-	// Encode the token in base64.
-	encodedToken := base64.StdEncoding.EncodeToString(token)
-	return encodedToken
-}
-
-func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
-	ctx := context.Background()
-
-	req := model.CreatePATRequestBody{}
-	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-	user, err := auth.GetUserFromReqContext(r.Context())
-	if err != nil {
-		RespondError(w, &model.ApiError{
-			Typ: model.ErrorUnauthorized,
-			Err: err,
-		}, nil)
-		return
-	}
-	pat := model.PAT{
-		StorablePersonalAccessToken: types.StorablePersonalAccessToken{
-			Name:      req.Name,
-			Role:      req.Role,
-			ExpiresAt: req.ExpiresInDays,
-		},
-	}
-	err = validatePATRequest(pat)
-	if err != nil {
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-
-	// All the PATs are associated with the user creating the PAT.
-	pat.UserID = user.ID
-	pat.CreatedAt = time.Now()
-	pat.UpdatedAt = time.Now()
-	pat.LastUsed = 0
-	pat.Token = generatePATToken()
-
-	if pat.ExpiresAt != 0 {
-		// convert expiresAt to unix timestamp from days
-		pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
-	}
-
-	zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
-	var apierr basemodel.BaseApiError
-	if pat, apierr = ah.AppDao().CreatePAT(ctx, user.OrgID, pat); apierr != nil {
-		RespondError(w, apierr, nil)
-		return
-	}
-
-	ah.Respond(w, &pat)
-}
-
-func validatePATRequest(req model.PAT) error {
-	if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
-		return fmt.Errorf("valid role is required")
-	}
-	if req.ExpiresAt < 0 {
-		return fmt.Errorf("valid expiresAt is required")
-	}
-	if req.Name == "" {
-		return fmt.Errorf("valid name is required")
-	}
-	return nil
-}
-
-func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
-	ctx := context.Background()
-
-	req := model.PAT{}
-	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-
-	user, err := auth.GetUserFromReqContext(r.Context())
-	if err != nil {
-		RespondError(w, &model.ApiError{
-			Typ: model.ErrorUnauthorized,
-			Err: err,
-		}, nil)
-		return
-	}
-
-	err = validatePATRequest(req)
-	if err != nil {
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-
-	req.UpdatedByUserID = user.ID
-	id := mux.Vars(r)["id"]
-	req.UpdatedAt = time.Now()
-	zap.L().Info("Got Update PAT request", zap.Any("pat", req))
-	var apierr basemodel.BaseApiError
-	if apierr = ah.AppDao().UpdatePAT(ctx, user.OrgID, req, id); apierr != nil {
-		RespondError(w, apierr, nil)
-		return
-	}
-
-	ah.Respond(w, map[string]string{"data": "pat updated successfully"})
-}
-
-func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
-	ctx := context.Background()
-	user, err := auth.GetUserFromReqContext(r.Context())
-	if err != nil {
-		RespondError(w, &model.ApiError{
-			Typ: model.ErrorUnauthorized,
-			Err: err,
-		}, nil)
-		return
-	}
-	zap.L().Info("Get PATs for user", zap.String("user_id", user.ID))
-	pats, apierr := ah.AppDao().ListPATs(ctx, user.OrgID)
-	if apierr != nil {
-		RespondError(w, apierr, nil)
-		return
-	}
-	ah.Respond(w, pats)
-}
-
-func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
-	ctx := context.Background()
-	id := mux.Vars(r)["id"]
-	user, err := auth.GetUserFromReqContext(r.Context())
-	if err != nil {
-		RespondError(w, &model.ApiError{
-			Typ: model.ErrorUnauthorized,
-			Err: err,
-		}, nil)
-		return
-	}
-
-	zap.L().Info("Revoke PAT with id", zap.String("id", id))
-	if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
-		RespondError(w, apierr, nil)
-		return
-	}
-	ah.Respond(w, map[string]string{"data": "pat revoked successfully"})
-}
@@ -7,14 +7,27 @@ import (
 	"net/http"

 	"github.com/SigNoz/signoz/ee/query-service/anomaly"
+	"github.com/SigNoz/signoz/pkg/http/render"
 	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
 	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"github.com/SigNoz/signoz/pkg/valuer"
 	"go.uber.org/zap"
 )

 func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
+	claims, err := authtypes.ClaimsFromContext(r.Context())
+	if err != nil {
+		render.Error(w, err)
+		return
+	}
+	orgID, err := valuer.NewUUID(claims.OrgID)
+	if err != nil {
+		render.Error(w, err)
+		return
+	}
+
 	bodyBytes, _ := io.ReadAll(r.Body)
 	r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
@@ -29,7 +42,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
 	queryRangeParams.Version = "v4"

 	// add temporality for each metric
-	temporalityErr := aH.PopulateTemporality(r.Context(), queryRangeParams)
+	temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
 	if temporalityErr != nil {
 		zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
 		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
@@ -85,34 +98,30 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
 	switch seasonality {
 	case anomaly.SeasonalityWeekly:
 		provider = anomaly.NewWeeklyProvider(
-			anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
+			anomaly.WithCache[*anomaly.WeeklyProvider](aH.Signoz.Cache),
 			anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
 			anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
-			anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
 		)
 	case anomaly.SeasonalityDaily:
 		provider = anomaly.NewDailyProvider(
-			anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
+			anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache),
 			anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
 			anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
-			anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
 		)
 	case anomaly.SeasonalityHourly:
 		provider = anomaly.NewHourlyProvider(
-			anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
+			anomaly.WithCache[*anomaly.HourlyProvider](aH.Signoz.Cache),
 			anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
 			anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
-			anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
 		)
 	default:
 		provider = anomaly.NewDailyProvider(
-			anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
+			anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache),
 			anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
 			anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
-			anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
 		)
 	}
-	anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
+	anomalies, err := provider.GetAnomalies(r.Context(), orgID, &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
 	if err != nil {
 		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
 		return
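queryRangeV4 now derives the org scope up front: claims come off the request context, and the string OrgID is validated into a typed UUID before any temporality or anomaly work happens. The two-step prologue condenses to a helper like this (a sketch assuming the claims/valuer signatures shown in the hunk):

package sketch

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// orgIDFromRequest mirrors the prologue added to queryRangeV4; both failure
// modes (no claims, malformed org ID) surface as a single error.
func orgIDFromRequest(r *http.Request) (orgID valuer.UUID, err error) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		return orgID, err
	}
	return valuer.NewUUID(claims.OrgID)
}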
@@ -1,33 +0,0 @@
-package api
-
-import (
-	"net/http"
-
-	"github.com/SigNoz/signoz/ee/query-service/app/db"
-	"github.com/SigNoz/signoz/ee/query-service/model"
-	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
-	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
-	"go.uber.org/zap"
-)
-
-func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
-
-	if !ah.CheckFeature(basemodel.SmartTraceDetail) {
-		zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
-		ah.APIHandler.SearchTraces(w, r)
-		return
-	}
-	searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
-		return
-	}
-
-	result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
-	if ah.HandleError(w, err, http.StatusBadRequest) {
-		return
-	}
-
-	ah.WriteJSON(w, r, result)
-
-}
@@ -5,38 +5,35 @@ import (

 	"github.com/ClickHouse/clickhouse-go/v2"

-	"github.com/jmoiron/sqlx"
-
 	"github.com/SigNoz/signoz/pkg/cache"
+	"github.com/SigNoz/signoz/pkg/prometheus"
 	basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
-	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/telemetrystore"
 )

 type ClickhouseReader struct {
 	conn  clickhouse.Conn
-	appdb *sqlx.DB
+	appdb sqlstore.SQLStore
 	*basechr.ClickHouseReader
 }

 func NewDataConnector(
-	localDB *sqlx.DB,
-	ch clickhouse.Conn,
-	promConfigPath string,
-	lm interfaces.FeatureLookup,
+	sqlDB sqlstore.SQLStore,
+	telemetryStore telemetrystore.TelemetryStore,
+	prometheus prometheus.Prometheus,
 	cluster string,
-	useLogsNewSchema bool,
-	useTraceNewSchema bool,
 	fluxIntervalForTraceDetail time.Duration,
 	cache cache.Cache,
 ) *ClickhouseReader {
-	chReader := basechr.NewReader(localDB, ch, promConfigPath, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
+	chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, fluxIntervalForTraceDetail, cache)
 	return &ClickhouseReader{
-		conn:             ch,
-		appdb:            localDB,
+		conn:             telemetryStore.ClickhouseDB(),
+		appdb:            sqlDB,
 		ClickHouseReader: chReader,
 	}
 }

-func (r *ClickhouseReader) Start(readerReady chan bool) {
-	r.ClickHouseReader.Start(readerReady)
+func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
+	return r.appdb
 }
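The data-connector constructor loses its sqlx handle, raw ClickHouse connection, prom config path, feature lookup, and schema flags; everything now arrives through the store abstractions assembled in pkg/signoz. A sketch of the new wiring as NewServer performs it (argument order per the hunk above):

package sketch

import (
	"time"

	"github.com/SigNoz/signoz/ee/query-service/app/db"
	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
)

// newReader shows the slimmed-down construction; compare the deleted
// variant, which also took *sqlx.DB, clickhouse.Conn, a prom config path,
// a FeatureLookup, and two schema booleans.
func newReader(
	sqlDB sqlstore.SQLStore,
	ts telemetrystore.TelemetryStore,
	prom prometheus.Prometheus,
	cluster string,
	fluxIntervalForTraceDetail time.Duration,
	c cache.Cache,
) *db.ClickhouseReader {
	return db.NewDataConnector(sqlDB, ts, prom, cluster, fluxIntervalForTraceDetail, c)
}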
@@ -2,7 +2,6 @@ package app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/http"
|
||||
@@ -16,16 +15,16 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/query-service/app/api"
|
||||
"github.com/SigNoz/signoz/ee/query-service/app/db"
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/ee/query-service/dao"
|
||||
"github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/ee/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/ee/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/web"
|
||||
"github.com/rs/cors"
|
||||
@@ -37,45 +36,29 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
|
||||
baseexplorer "github.com/SigNoz/signoz/pkg/query-service/app/explorer"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
|
||||
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
pqle "github.com/SigNoz/signoz/pkg/query-service/pqlEngine"
|
||||
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
const AppDbEngine = "sqlite"
|
||||
|
||||
type ServerOptions struct {
|
||||
Config signoz.Config
|
||||
SigNoz *signoz.SigNoz
|
||||
PromConfigPath string
|
||||
SkipTopLvlOpsPath string
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
// alert specific params
|
||||
DisableRules bool
|
||||
RuleRepoURL string
|
||||
Config signoz.Config
|
||||
SigNoz *signoz.SigNoz
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
PreferSpanMetrics bool
|
||||
CacheConfigPath string
|
||||
FluxInterval string
|
||||
FluxIntervalForTraceDetail string
|
||||
Cluster string
|
||||
GatewayUrl string
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
Jwt *authtypes.JWT
|
||||
}
|
||||
|
||||
@@ -107,88 +90,40 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
|
||||
|
||||
// NewServer creates and initializes Server
|
||||
func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
modelDao, err := dao.InitDao(serverOptions.SigNoz.SQLStore)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := baseexplorer.InitWithDSN(serverOptions.SigNoz.SQLStore); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
modelDao := sqlite.NewModelDao(serverOptions.SigNoz.SQLStore)
|
||||
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// initiate license manager
|
||||
lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore)
|
||||
lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Zeus)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// set license manager as feature flag provider in dao
|
||||
modelDao.SetFlagProvider(lm)
|
||||
readerReady := make(chan bool)
|
||||
|
||||
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var reader interfaces.DataConnector
|
||||
qb := db.NewDataConnector(
|
||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
serverOptions.SigNoz.TelemetryStore.ClickHouseDB(),
|
||||
serverOptions.PromConfigPath,
|
||||
lm,
|
||||
reader := db.NewDataConnector(
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
serverOptions.SigNoz.Prometheus,
|
||||
serverOptions.Cluster,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
serverOptions.UseTraceNewSchema,
|
||||
fluxIntervalForTraceDetail,
|
||||
serverOptions.SigNoz.Cache,
|
||||
)
|
||||
go qb.Start(readerReady)
|
||||
reader = qb
|
||||
|
||||
skipConfig := &basemodel.SkipConfig{}
|
||||
if serverOptions.SkipTopLvlOpsPath != "" {
|
||||
// read skip config
|
||||
skipConfig, err = basemodel.ReadSkipConfig(serverOptions.SkipTopLvlOpsPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
var c cache.Cache
|
||||
if serverOptions.CacheConfigPath != "" {
|
||||
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
c = cache.NewCache(cacheOpts)
|
||||
}
|
||||
|
||||
<-readerReady
|
||||
rm, err := makeRulesManager(
|
||||
serverOptions.PromConfigPath,
|
||||
serverOptions.RuleRepoURL,
|
||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
reader,
|
||||
c,
|
||||
serverOptions.DisableRules,
|
||||
lm,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
serverOptions.UseTraceNewSchema,
|
||||
serverOptions.SigNoz.Cache,
|
||||
serverOptions.SigNoz.Alertmanager,
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
serverOptions.SigNoz.Prometheus,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
@@ -233,7 +168,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
// start the usagemanager
|
||||
usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickHouseDB(), serverOptions.Config.TelemetryStore.ClickHouse.DSN)
|
||||
usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -243,7 +178,13 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
telemetry.GetInstance().SetReader(reader)
|
||||
telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
|
||||
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
|
||||
telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
|
||||
telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
|
||||
telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
|
||||
telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
|
||||
telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)
|
||||
|
||||
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
|
||||
if err != nil {
|
||||
@@ -252,7 +193,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
|
||||
apiOpts := api.APIHandlerOptions{
|
||||
DataConnector: reader,
|
||||
SkipConfig: skipConfig,
|
||||
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
|
||||
AppDao: modelDao,
|
||||
RulesManager: rm,
|
||||
@@ -262,12 +202,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
IntegrationsController: integrationsController,
|
||||
CloudIntegrationsController: cloudIntegrationsController,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
Cache: c,
|
||||
FluxInterval: fluxInterval,
|
||||
Gateway: gatewayProxy,
|
||||
GatewayUrl: serverOptions.GatewayUrl,
|
||||
UseLogsNewSchema: serverOptions.UseLogsNewSchema,
|
||||
UseTraceNewSchema: serverOptions.UseTraceNewSchema,
|
||||
JWT: serverOptions.Jwt,
|
||||
}
|
||||
|
||||
@@ -277,8 +214,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
s := &Server{
|
||||
// logger: logger,
|
||||
// tracer: tracer,
|
||||
ruleManager: rm,
|
||||
serverOptions: serverOptions,
|
||||
unavailableChannel: make(chan healthcheck.Status),
|
||||
@@ -304,9 +239,15 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
&opAmpModel.AllAgents, agentConfMgr,
|
||||
)
|
||||
|
||||
errorList := qb.PreloadMetricsMetadata(context.Background())
|
||||
for _, er := range errorList {
|
||||
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
|
||||
orgs, err := apiHandler.Signoz.Modules.Organization.GetAll(context.Background())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, org := range orgs {
|
||||
errorList := reader.PreloadMetricsMetadata(context.Background(), org.ID)
|
||||
for _, er := range errorList {
|
||||
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
|
||||
}
|
||||
}
|
||||
|
||||
return s, nil
|
||||
@@ -345,24 +286,8 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
|
||||
}
|
||||
|
||||
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
|
||||
|
||||
r := baseapp.NewRouter()
|
||||
|
||||
// add auth middleware
|
||||
getUserFromRequest := func(ctx context.Context) (*types.GettableUser, error) {
|
||||
user, err := auth.GetUserFromReqContext(ctx)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if user.User.OrgID == "" {
|
||||
return nil, basemodel.UnauthorizedError(errors.New("orgId is missing in the claims"))
|
||||
}
|
||||
|
||||
return user, nil
|
||||
}
|
||||
am := baseapp.NewAuthMiddleware(getUserFromRequest)
|
||||
am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger())
|
||||
|
||||
r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
|
||||
r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
|
||||
@@ -378,6 +303,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
apiHandler.RegisterLogsRoutes(r, am)
|
||||
apiHandler.RegisterIntegrationRoutes(r, am)
|
||||
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
|
||||
apiHandler.RegisterFieldsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV3Routes(r, am)
|
||||
apiHandler.RegisterInfraMetricsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV4Routes(r, am)
|
||||
@@ -385,6 +311,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
apiHandler.RegisterMessagingQueuesRoutes(r, am)
|
||||
apiHandler.RegisterThirdPartyApiRoutes(r, am)
|
||||
apiHandler.MetricExplorerRoutes(r, am)
|
||||
apiHandler.RegisterTraceFunnelsRoutes(r, am)
|
||||
|
||||
c := cors.New(cors.Options{
|
||||
AllowedOrigins: []string{"*"},
|
||||
@@ -439,14 +366,8 @@ func (s *Server) initListeners() error {
|
||||
}
|
||||
|
||||
// Start listening on http and private http port concurrently
|
||||
func (s *Server) Start() error {
|
||||
|
||||
// initiate rule manager first
|
||||
if !s.serverOptions.DisableRules {
|
||||
s.ruleManager.Start()
|
||||
} else {
|
||||
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
|
||||
}
|
||||
func (s *Server) Start(ctx context.Context) error {
|
||||
s.ruleManager.Start(ctx)
|
||||
|
||||
err := s.initListeners()
|
||||
if err != nil {
|
||||
@@ -527,7 +448,7 @@ func (s *Server) Stop() error {
|
||||
s.opampServer.Stop()
|
||||
|
||||
if s.ruleManager != nil {
|
||||
s.ruleManager.Stop()
|
||||
s.ruleManager.Stop(context.Background())
|
||||
}
|
||||
|
||||
// stop usage manager
|
||||
@@ -537,39 +458,25 @@ func (s *Server) Stop() error {
|
||||
}
|
||||
|
||||
func makeRulesManager(
|
||||
promConfigPath,
|
||||
ruleRepoURL string,
|
||||
db *sqlx.DB,
|
||||
ch baseint.Reader,
|
||||
cache cache.Cache,
|
||||
disableRules bool,
|
||||
fm baseint.FeatureLookup,
|
||||
useLogsNewSchema bool,
|
||||
useTraceNewSchema bool,
|
||||
alertmanager alertmanager.Alertmanager,
|
||||
sqlstore sqlstore.SQLStore,
|
||||
telemetryStore telemetrystore.TelemetryStore,
|
||||
prometheus prometheus.Prometheus,
|
||||
) (*baserules.Manager, error) {
|
||||
// create engine
|
||||
pqle, err := pqle.FromConfigPath(promConfigPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create pql engine : %v", err)
|
||||
}
|
||||
|
||||
// create manager opts
|
||||
managerOpts := &baserules.ManagerOptions{
|
||||
PqlEngine: pqle,
|
||||
RepoURL: ruleRepoURL,
|
||||
TelemetryStore: telemetryStore,
|
||||
Prometheus: prometheus,
|
||||
DBConn: db,
|
||||
Context: context.Background(),
|
||||
Logger: zap.L(),
|
||||
DisableRules: disableRules,
|
||||
FeatureFlags: fm,
|
||||
Reader: ch,
|
||||
Cache: cache,
|
||||
EvalDelay: baseconst.GetEvalDelay(),
|
||||
PrepareTaskFunc: rules.PrepareTaskFunc,
|
||||
UseLogsNewSchema: useLogsNewSchema,
|
||||
UseTraceNewSchema: useTraceNewSchema,
|
||||
PrepareTestRuleFunc: rules.TestNotification,
|
||||
Alertmanager: alertmanager,
|
||||
SQLStore: sqlstore,
|
||||
|
||||
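The hunks above move `Start`/`Stop` from bare calls to context-aware signatures. A minimal, self-contained sketch of that lifecycle pattern (the `manager` type here is illustrative, not SigNoz's actual rule manager):

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// manager mimics a component whose background work is bound to a context.
type manager struct {
	done chan struct{}
}

// Start launches background work that exits when ctx is cancelled.
func (m *manager) Start(ctx context.Context) {
	m.done = make(chan struct{})
	go func() {
		defer close(m.done)
		ticker := time.NewTicker(100 * time.Millisecond)
		defer ticker.Stop()
		for {
			select {
			case <-ctx.Done():
				return // graceful exit on cancellation
			case <-ticker.C:
				// evaluate rules, poll, etc.
			}
		}
	}()
}

// Stop waits for the background goroutine to finish, bounded by ctx.
func (m *manager) Stop(ctx context.Context) {
	select {
	case <-m.done:
	case <-ctx.Done():
		fmt.Println("stop timed out")
	}
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	m := &manager{}
	m.Start(ctx)
	cancel()
	m.Stop(context.Background())
}
```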
@@ -1,10 +0,0 @@
package dao

import (
"github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
"github.com/SigNoz/signoz/pkg/sqlstore"
)

func InitDao(sqlStore sqlstore.SQLStore) (ModelDao, error) {
return sqlite.InitDB(sqlStore)
}
@@ -4,28 +4,13 @@ import (
"context"
"net/url"

"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"github.com/uptrace/bun"
)

type ModelDao interface {
basedao.ModelDao

// SetFlagProvider sets the feature lookup provider
SetFlagProvider(flags baseint.FeatureLookup)

DB() *bun.DB

// auth methods
CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError)
GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error)

// org domain (auth domains) CRUD ops
@@ -35,12 +20,4 @@ type ModelDao interface {
UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)

CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError
}
@@ -1,204 +0,0 @@
package sqlite

import (
"context"
"fmt"
"net/url"
"strings"
"time"

"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/google/uuid"
"go.uber.org/zap"
)

func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, basemodel.BaseApiError) {
// get auth domain from email domain
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, model.InternalErrorStr("failed to get domain from email")
}
if domain == nil {
zap.L().Error("email domain does not match any authenticated domain", zap.String("email", email))
return nil, model.InternalErrorStr("email domain does not match any authenticated domain")
}

hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
return nil, model.InternalErrorStr("failed to generate password hash")
}

group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup)
if apiErr != nil {
zap.L().Error("GetGroupByName failed", zap.Error(apiErr))
return nil, apiErr
}

user := &types.User{
ID: uuid.NewString(),
Name: "",
Email: email,
Password: hash,
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},
ProfilePictureURL: "", // Currently unused
GroupID: group.ID,
OrgID: domain.OrgID,
}

user, apiErr = m.CreateUser(ctx, user, false)
if apiErr != nil {
zap.L().Error("CreateUser failed", zap.Error(apiErr))
return nil, apiErr
}

return user, nil

}

// PrepareSsoRedirect prepares redirect page link after SSO response
// is successfully parsed (i.e. valid email is available)
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError) {

userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
return "", model.BadRequestStr("invalid user email received from the auth provider")
}

user := &types.User{}

if userPayload == nil {
newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
user = newUser
if apiErr != nil {
zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
return "", apiErr
}
} else {
user = &userPayload.User
}

tokenStore, err := baseauth.GenerateJWTForUser(user, jwt)
if err != nil {
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", model.InternalErrorStr("failed to generate token for the user")
}

return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
redirectUri,
tokenStore.AccessJwt,
user.ID,
tokenStore.RefreshJwt), nil
}

func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
return false, apierr
}

if domain != nil && domain.SsoEnabled {
// sso is enabled, check if the user has admin role
userPayload, baseapierr := m.GetUserByEmail(ctx, email)

if baseapierr != nil || userPayload == nil {
return false, baseapierr
}

if userPayload.Role != baseconst.AdminGroup {
return false, model.BadRequest(fmt.Errorf("auth method not supported"))
}

}

return true, nil
}

// PrecheckLogin is called when the login or signup page is loaded
// to check whether SSO login should be prompted
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) {

// assume user is valid unless proven otherwise
resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false}

// check if email is a valid user
userPayload, baseApiErr := m.GetUserByEmail(ctx, email)
if baseApiErr != nil {
return resp, baseApiErr
}

if userPayload == nil {
resp.IsUser = false
}

ssoAvailable := true
err := m.checkFeature(model.SSO)
if err != nil {
switch err.(type) {
case basemodel.ErrFeatureUnavailable:
// do nothing, just skip sso
ssoAvailable = false
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, model.BadRequestStr(err.Error())
}
}

if ssoAvailable {

resp.IsUser = true

// find domain from email
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
var emailDomain string
emailComponents := strings.Split(email, "@")
if len(emailComponents) > 1 {
emailDomain = emailComponents[1]
}
zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
return resp, apierr
}

if orgDomain != nil && orgDomain.SsoEnabled {
// saml is enabled for this domain, let's prepare the sso url

if sourceUrl == "" {
sourceUrl = constants.GetDefaultSiteURL()
}

// parse source url that generated the login request
var err error
escapedUrl, _ := url.QueryUnescape(sourceUrl)
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
zap.L().Error("failed to parse referer", zap.Error(err))
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}

// build IdP URL that will authenticate the user;
// the front-end will redirect the user to this url
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)

if err != nil {
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return resp, model.InternalError(err)
}

// set SSO to true, as the url is generated correctly
resp.SSO = true
}
}
return resp, nil
}
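`PrepareSsoRedirect` above builds the redirect with `fmt.Sprintf`, which leaves the JWTs unescaped in the query string. A small, self-contained sketch of the same construction using `net/url` so each parameter is query-escaped (parameter names `jwt`, `usr`, `refreshjwt` come from the diff; everything else is illustrative):

```go
package main

import (
	"fmt"
	"net/url"
)

// buildSSORedirect mirrors the redirect construction above, but lets
// url.Values handle escaping of the token values.
func buildSSORedirect(redirectURI, accessJWT, userID, refreshJWT string) (string, error) {
	u, err := url.Parse(redirectURI)
	if err != nil {
		return "", err
	}
	q := u.Query()
	q.Set("jwt", accessJWT)
	q.Set("usr", userID)
	q.Set("refreshjwt", refreshJWT)
	u.RawQuery = q.Encode()
	return u.String(), nil
}

func main() {
	out, _ := buildSSORedirect("https://example.com/login", "a.b.c", "user-1", "d.e.f")
	fmt.Println(out)
}
```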
@@ -10,8 +10,8 @@ import (
"time"

"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"go.uber.org/zap"
@@ -44,7 +44,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
}

domain, err = m.GetDomain(ctx, domainId)
if (err != nil) || domain == nil {
if err != nil {
zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
@@ -54,7 +54,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url

domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
domain = domainFromDB
if (err != nil) || domain == nil {
if err != nil {
zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
@@ -70,7 +70,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, basemodel.BaseApiError) {

stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
err := m.sqlStore.BunDB().NewSelect().
Model(&stored).
Where("name = ?", name).
Limit(1).
@@ -94,7 +94,7 @@ func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.Get
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError) {

stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
err := m.sqlStore.BunDB().NewSelect().
Model(&stored).
Where("id = ?", id).
Limit(1).
@@ -119,7 +119,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]types.Getta
domains := []types.GettableOrgDomain{}

stored := []types.StorableOrgDomain{}
err := m.DB().NewSelect().
err := m.sqlStore.BunDB().NewSelect().
Model(&stored).
Where("org_id = ?", orgId).
Scan(ctx)
@@ -167,7 +167,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *types.GettableOrgDo
TimeAuditable: ossTypes.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
}

_, err = m.DB().NewInsert().
_, err = m.sqlStore.BunDB().NewInsert().
Model(&storableDomain).
Exec(ctx)

@@ -201,7 +201,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *types.GettableOrgDo
TimeAuditable: ossTypes.TimeAuditable{UpdatedAt: time.Now()},
}

_, err = m.DB().NewUpdate().
_, err = m.sqlStore.BunDB().NewUpdate().
Model(storableDomain).
Column("data", "updated_at").
WherePK().
@@ -224,7 +224,7 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.Bas
}

storableDomain := &types.StorableOrgDomain{ID: id}
_, err := m.DB().NewDelete().
_, err := m.sqlStore.BunDB().NewDelete().
Model(storableDomain).
WherePK().
Exec(ctx)
@@ -251,7 +251,7 @@ func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*types.G
parsedDomain := components[1]

stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
err := m.sqlStore.BunDB().NewSelect().
Model(&stored).
Where("name = ?", parsedDomain).
Limit(1).
@@ -1,46 +1,18 @@
package sqlite

import (
"fmt"

basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
basedsql "github.com/SigNoz/signoz/pkg/query-service/dao/sqlite"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)

type modelDao struct {
*basedsql.ModelDaoSqlite
flags baseint.FeatureLookup
}

// SetFlagProvider sets the feature lookup provider
func (m *modelDao) SetFlagProvider(flags baseint.FeatureLookup) {
m.flags = flags
}

// CheckFeature confirms if a feature is available
func (m *modelDao) checkFeature(key string) error {
if m.flags == nil {
return fmt.Errorf("flag provider not set")
}

return m.flags.CheckFeature(key)
userModule user.Module
sqlStore sqlstore.SQLStore
}

// InitDB creates and extends base model DB repository
func InitDB(sqlStore sqlstore.SQLStore) (*modelDao, error) {
dao, err := basedsql.InitDB(sqlStore)
if err != nil {
return nil, err
}
// set package variable so dependent base methods (e.g. AuthCache) will work
basedao.SetDB(dao)
m := &modelDao{ModelDaoSqlite: dao}
return m, nil
}

func (m *modelDao) DB() *bun.DB {
return m.ModelDaoSqlite.DB()
func NewModelDao(sqlStore sqlstore.SQLStore) *modelDao {
userModule := impluser.NewModule(impluser.NewStore(sqlStore))
return &modelDao{userModule: userModule, sqlStore: sqlStore}
}
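The hunk above replaces the `InitDB`/`basedao.SetDB` package-level singleton with a plain constructor that receives its dependencies. A minimal sketch of that constructor-injection shape, with placeholder types standing in for `sqlstore.SQLStore` and `user.Module`:

```go
package main

import "fmt"

// Store and Module are placeholders, not the real SigNoz interfaces.
type Store interface{ Name() string }

type Module struct{ store Store }

func NewModule(s Store) *Module { return &Module{store: s} }

// dao owns its dependencies explicitly instead of relying on a
// package-level variable populated by an Init/SetDB call.
type dao struct {
	module *Module
	store  Store
}

func NewDao(s Store) *dao {
	return &dao{module: NewModule(s), store: s}
}

type memStore struct{}

func (memStore) Name() string { return "mem" }

func main() {
	d := NewDao(memStore{})
	fmt.Println(d.store.Name())
}
```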
@@ -1,204 +0,0 @@
package sqlite

import (
"context"
"fmt"
"time"

"github.com/SigNoz/signoz/ee/query-service/model"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"go.uber.org/zap"
)

func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError) {
p.StorablePersonalAccessToken.OrgID = orgID
_, err := m.DB().NewInsert().
Model(&p.StorablePersonalAccessToken).
Returning("id").
Exec(ctx)
if err != nil {
zap.L().Error("Failed to insert PAT in db", zap.Error(err))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}

createdByUser, _ := m.GetUser(ctx, p.UserID)
if createdByUser == nil {
p.CreatedByUser = model.User{
NotFound: true,
}
} else {
p.CreatedByUser = model.User{
Id: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
CreatedAt: createdByUser.CreatedAt.Unix(),
ProfilePictureURL: createdByUser.ProfilePictureURL,
NotFound: false,
}
}
return p, nil
}

func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError {
_, err := m.DB().NewUpdate().
Model(&p.StorablePersonalAccessToken).
Column("role", "name", "updated_at", "updated_by_user_id").
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("revoked = false").
Exec(ctx)
if err != nil {
zap.L().Error("Failed to update PAT in db", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT update failed"))
}
return nil
}

func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}

if err := m.DB().NewSelect().
Model(&pats).
Where("revoked = false").
Where("org_id = ?", orgID).
Order("updated_at DESC").
Scan(ctx); err != nil {
zap.L().Error("Failed to fetch PATs", zap.Error(err))
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}

patsWithUsers := []model.PAT{}
for i := range pats {
patWithUser := model.PAT{
StorablePersonalAccessToken: pats[i],
}

createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
if createdByUser == nil {
patWithUser.CreatedByUser = model.User{
NotFound: true,
}
} else {
patWithUser.CreatedByUser = model.User{
Id: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
CreatedAt: createdByUser.CreatedAt.Unix(),
ProfilePictureURL: createdByUser.ProfilePictureURL,
NotFound: false,
}
}

updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
if updatedByUser == nil {
patWithUser.UpdatedByUser = model.User{
NotFound: true,
}
} else {
patWithUser.UpdatedByUser = model.User{
Id: updatedByUser.ID,
Name: updatedByUser.Name,
Email: updatedByUser.Email,
CreatedAt: updatedByUser.CreatedAt.Unix(),
ProfilePictureURL: updatedByUser.ProfilePictureURL,
NotFound: false,
}
}

patsWithUsers = append(patsWithUsers, patWithUser)
}
return patsWithUsers, nil
}

func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().NewUpdate().
Model(&types.StorablePersonalAccessToken{}).
Set("revoked = ?", true).
Set("updated_by_user_id = ?", userID).
Set("updated_at = ?", updatedAt).
Where("id = ?", id).
Where("org_id = ?", orgID).
Exec(ctx)
if err != nil {
zap.L().Error("Failed to revoke PAT in db", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT revoke failed"))
}
return nil
}

func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}

if err := m.DB().NewSelect().
Model(&pats).
Where("token = ?", token).
Where("revoked = false").
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}

if len(pats) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token, %s", token),
}
}

patWithUser := model.PAT{
StorablePersonalAccessToken: pats[0],
}

return &patWithUser, nil
}

func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}

if err := m.DB().NewSelect().
Model(&pats).
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("revoked = false").
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}

if len(pats) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token"),
}
}

patWithUser := model.PAT{
StorablePersonalAccessToken: pats[0],
}

return &patWithUser, nil
}

// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*types.GettableUser, basemodel.BaseApiError) {
users := []types.GettableUser{}

if err := m.DB().NewSelect().
Model(&users).
Column("u.id", "u.name", "u.email", "u.password", "u.created_at", "u.profile_picture_url", "u.org_id", "u.group_id").
Join("JOIN personal_access_tokens p ON u.id = p.user_id").
Where("p.token = ?", token).
Where("p.expires_at >= strftime('%s', 'now')").
Where("p.org_id = ?", orgID).
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
}

if len(users) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple users with same PAT token"),
}
}
return &users[0], nil
}
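`RevokePAT` above is a soft delete: it flips a `revoked` flag and stamps audit columns instead of deleting the row, so the token history survives. A sketch of the equivalent statement with plain `database/sql`; the table and column names mirror the diff but the simplified schema is an assumption:

```go
package patstore

import (
	"context"
	"database/sql"
	"time"
)

// revokePAT marks a token revoked rather than deleting it, keeping an
// audit trail of who revoked it and when (schema assumed for this sketch).
func revokePAT(ctx context.Context, db *sql.DB, orgID, id, userID string) error {
	_, err := db.ExecContext(ctx,
		`UPDATE personal_access_tokens
		    SET revoked = 1, updated_by_user_id = ?, updated_at = ?
		  WHERE id = ? AND org_id = ?`,
		userID, time.Now().Unix(), id, orgID)
	return err
}
```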
@@ -1,16 +0,0 @@
package signozio

type status string

type ValidateLicenseResponse struct {
Status status `json:"status"`
Data map[string]interface{} `json:"data"`
}

type CheckoutSessionRedirect struct {
RedirectURL string `json:"url"`
}
type CheckoutResponse struct {
Status status `json:"status"`
Data CheckoutSessionRedirect `json:"data"`
}
@@ -1,223 +1,67 @@
package signozio

import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"time"

"github.com/pkg/errors"

"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)

var C *Client

const (
POST = "POST"
APPLICATION_JSON = "application/json"
)

type Client struct {
Prefix string
GatewayUrl string
}

func New() *Client {
return &Client{
Prefix: constants.LicenseSignozIo,
GatewayUrl: constants.ZeusURL,
}
}

func init() {
C = New()
}

func ValidateLicenseV3(licenseKey string) (*model.LicenseV3, *model.ApiError) {

// Creating an HTTP client with a timeout for better control
client := &http.Client{
Timeout: 10 * time.Second,
}

req, err := http.NewRequest("GET", C.GatewayUrl+"/v2/licenses/me", nil)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to create request"))
}

// Setting the custom header
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)

response, err := client.Do(req)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to make request"))
}

body, err := io.ReadAll(response.Body)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read validation response from %v", C.GatewayUrl)))
}

defer response.Body.Close()

switch response.StatusCode {
case 200:
a := ValidateLicenseResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to unmarshal license validation response"))
}

license, err := model.NewLicenseV3(a.Data)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to generate new license v3"))
}

return license, nil
case 400:
return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return nil, model.Unauthorized(errors.Wrap(fmt.Errorf(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}

}

func NewPostRequestWithCtx(ctx context.Context, url string, contentType string, body io.Reader) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, POST, url, body)
func ValidateLicenseV3(ctx context.Context, licenseKey string, zeus zeus.Zeus) (*model.LicenseV3, error) {
data, err := zeus.GetLicense(ctx, licenseKey)
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", contentType)
return req, err

var m map[string]any
if err = json.Unmarshal(data, &m); err != nil {
return nil, err
}

license, err := model.NewLicenseV3(m)
if err != nil {
return nil, err
}

return license, nil
}

// SendUsage reports the usage of signoz to license server
func SendUsage(ctx context.Context, usage model.UsagePayload) *model.ApiError {
reqString, _ := json.Marshal(usage)
req, err := NewPostRequestWithCtx(ctx, C.Prefix+"/usage", APPLICATION_JSON, bytes.NewBuffer(reqString))
func SendUsage(ctx context.Context, usage model.UsagePayload, zeus zeus.Zeus) error {
body, err := json.Marshal(usage)
if err != nil {
return model.BadRequest(errors.Wrap(err, "unable to create http request"))
return err
}

res, err := http.DefaultClient.Do(req)
if err != nil {
return model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
}

body, err := io.ReadAll(res.Body)
if err != nil {
return model.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io"))
}

defer res.Body.Close()

switch res.StatusCode {
case 200, 201:
return nil
case 400, 401:
return model.BadRequest(errors.Wrap(errors.New(string(body)),
"bad request error received from license.signoz.io"))
default:
return model.InternalError(errors.Wrap(errors.New(string(body)),
"internal error received from license.signoz.io"))
}
return zeus.PutMeters(ctx, usage.LicenseKey.String(), body)
}

func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string) (string, *model.ApiError) {
hClient := &http.Client{}

reqString, err := json.Marshal(checkoutRequest)
func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
body, err := json.Marshal(checkoutRequest)
if err != nil {
return "", model.BadRequest(err)
return "", err
}

req, err := http.NewRequestWithContext(ctx, "POST", C.GatewayUrl+"/v2/subscriptions/me/sessions/checkout", bytes.NewBuffer(reqString))
response, err := zeus.GetCheckoutURL(ctx, licenseKey, body)
if err != nil {
return "", model.BadRequest(err)
return "", err
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)

response, err := hClient.Do(req)
if err != nil {
return "", model.BadRequest(err)
}
body, err := io.ReadAll(response.Body)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read checkout response from %v", C.GatewayUrl)))
}
defer response.Body.Close()

switch response.StatusCode {
case 201:
a := CheckoutResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, "failed to unmarshal zeus checkout response"))
}
return a.Data.RedirectURL, nil
case 400:
return "", model.BadRequest(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return "", model.Unauthorized(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return "", model.InternalError(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}
return gjson.GetBytes(response, "url").String(), nil
}

func PortalSession(ctx context.Context, checkoutRequest *model.PortalRequest, licenseKey string) (string, *model.ApiError) {
hClient := &http.Client{}

reqString, err := json.Marshal(checkoutRequest)
func PortalSession(ctx context.Context, portalRequest *model.PortalRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
body, err := json.Marshal(portalRequest)
if err != nil {
return "", model.BadRequest(err)
return "", err
}

req, err := http.NewRequestWithContext(ctx, "POST", C.GatewayUrl+"/v2/subscriptions/me/sessions/portal", bytes.NewBuffer(reqString))
response, err := zeus.GetPortalURL(ctx, licenseKey, body)
if err != nil {
return "", model.BadRequest(err)
return "", err
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)

response, err := hClient.Do(req)
if err != nil {
return "", model.BadRequest(err)
}
body, err := io.ReadAll(response.Body)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read portal response from %v", C.GatewayUrl)))
}
defer response.Body.Close()

switch response.StatusCode {
case 201:
a := CheckoutResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, "failed to unmarshal zeus portal response"))
}
return a.Data.RedirectURL, nil
case 400:
return "", model.BadRequest(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return "", model.Unauthorized(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return "", model.InternalError(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}
return gjson.GetBytes(response, "url").String(), nil
}
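The refactor above replaces hand-rolled HTTP calls with a `zeus.Zeus` gateway dependency that returns raw JSON. A minimal, self-contained sketch of that shape; the interface, struct fields, and fake implementation here are all assumptions for illustration, not SigNoz's actual zeus API:

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"
)

// Gateway stands in for the zeus.Zeus dependency: it hides the HTTP
// transport and hands back raw JSON payloads.
type Gateway interface {
	GetLicense(ctx context.Context, key string) ([]byte, error)
}

type License struct {
	ID     string `json:"id"`
	Status string `json:"status"`
}

// validateLicense mirrors the new flow: fetch through the gateway, then
// decode, returning plain errors instead of custom ApiError values.
func validateLicense(ctx context.Context, key string, gw Gateway) (*License, error) {
	data, err := gw.GetLicense(ctx, key)
	if err != nil {
		return nil, err
	}
	var l License
	if err := json.Unmarshal(data, &l); err != nil {
		return nil, err
	}
	return &l, nil
}

// fakeGateway lets the flow run (and be tested) without a network.
type fakeGateway struct{}

func (fakeGateway) GetLicense(ctx context.Context, key string) ([]byte, error) {
	return []byte(`{"id":"lic-1","status":"ACTIVE"}`), nil
}

func main() {
	l, err := validateLicense(context.Background(), "key", fakeGateway{})
	fmt.Println(l, err)
}
```

Pushing the transport behind an interface is what lets the same validation code run against the real gateway in production and a fake in tests.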
@@ -7,6 +7,5 @@ import (
// Connector defines methods for interaction
// with o11y data, for example ClickHouse.
type DataConnector interface {
Start(readerReady chan bool)
baseint.Reader
}
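`DataConnector` embeds `baseint.Reader`, so an implementation must satisfy both the lifecycle method and the full reader method set. A self-contained sketch of that interface-embedding pattern (the `Reader` and `clickhouse` types here are illustrative stand-ins):

```go
package main

import "fmt"

type Reader interface {
	Read(query string) (string, error)
}

// DataConnector embeds Reader: implementers must provide Start plus
// every method of Reader.
type DataConnector interface {
	Start(readerReady chan bool)
	Reader
}

type clickhouse struct{}

func (clickhouse) Read(q string) (string, error) { return "rows for " + q, nil }
func (clickhouse) Start(ready chan bool)         { ready <- true }

func main() {
	var dc DataConnector = clickhouse{}
	ready := make(chan bool, 1)
	dc.Start(ready)
	<-ready
	out, _ := dc.Read("SELECT 1")
	fmt.Println(out)
}
```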
@@ -6,14 +6,13 @@ import (
"time"

"github.com/jmoiron/sqlx"
"github.com/pkg/errors"

"sync"

baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/zeus"

validate "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/model"
@@ -29,6 +28,7 @@ var validationFrequency = 24 * 60 * time.Minute

type Manager struct {
repo *Repo
zeus zeus.Zeus
mutex sync.Mutex
validatorRunning bool
// end the license validation, this is important to gracefully
@@ -45,7 +45,7 @@ type Manager struct {
activeFeatures basemodel.FeatureSet
}

func StartManager(db *sqlx.DB, store sqlstore.SQLStore, features ...basemodel.Feature) (*Manager, error) {
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, zeus zeus.Zeus, features ...basemodel.Feature) (*Manager, error) {
if LM != nil {
return LM, nil
}
@@ -53,6 +53,7 @@ func StartManager(db *sqlx.DB, store sqlstore.SQLStore, features ...basemodel.Fe
repo := NewLicenseRepo(db, store)
m := &Manager{
repo: &repo,
zeus: zeus,
}
if err := m.start(features...); err != nil {
return m, err
@@ -172,17 +173,15 @@ func (lm *Manager) ValidatorV3(ctx context.Context) {
}
}

func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {

license, apiError := validate.ValidateLicenseV3(lm.activeLicenseV3.Key)
if apiError != nil {
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError
func (lm *Manager) RefreshLicense(ctx context.Context) error {
license, err := validate.ValidateLicenseV3(ctx, lm.activeLicenseV3.Key, lm.zeus)
if err != nil {
return err
}

err := lm.repo.UpdateLicenseV3(ctx, license)
err = lm.repo.UpdateLicenseV3(ctx, license)
if err != nil {
return model.BadRequest(errors.Wrap(err, "failed to update the new license"))
return err
}
lm.SetActiveV3(license)

@@ -190,7 +189,6 @@ func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {
}

func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
zap.L().Info("License validation started")
if lm.activeLicenseV3 == nil {
return nil
}
@@ -236,28 +234,17 @@ func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
return nil
}

func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseResponse *model.LicenseV3, errResponse *model.ApiError) {
defer func() {
if errResponse != nil {
claims, ok := authtypes.ClaimsFromContext(ctx)
if ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
map[string]interface{}{"err": errResponse.Err.Error()}, claims.Email, true, false)
}
}
}()

license, apiError := validate.ValidateLicenseV3(licenseKey)
if apiError != nil {
zap.L().Error("failed to get the license", zap.Error(apiError.Err))
return nil, apiError
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (*model.LicenseV3, error) {
license, err := validate.ValidateLicenseV3(ctx, licenseKey, lm.zeus)
if err != nil {
return nil, err
}

// insert the new license to the sqlite db
err := lm.repo.InsertLicenseV3(ctx, license)
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, err
modelErr := lm.repo.InsertLicenseV3(ctx, license)
if modelErr != nil {
zap.L().Error("failed to activate license", zap.Error(modelErr))
return nil, modelErr
}

// license is valid, activate it
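`StartManager` above keeps a process-wide singleton (`if LM != nil { return LM, nil }`) while now also accepting the zeus dependency. A sketch of that guarded-singleton shape using `sync.Once`, which makes the first-call-wins behavior race-safe; the types and field here are illustrative:

```go
package main

import (
	"fmt"
	"sync"
)

type Manager struct{ gateway string }

var (
	once sync.Once
	lm   *Manager
)

// StartManager returns the process-wide manager, creating it on first
// call; later calls return the existing instance and ignore their
// arguments, mirroring the LM guard above.
func StartManager(gateway string) *Manager {
	once.Do(func() { lm = &Manager{gateway: gateway} })
	return lm
}

func main() {
	a := StartManager("zeus")
	b := StartManager("ignored")
	fmt.Println(a == b, a.gateway)
}
```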
@@ -6,24 +6,27 @@ import (
"os"
"time"

eeuserimpl "github.com/SigNoz/signoz/ee/modules/user/impluser"
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/modules/user"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"

prommodel "github.com/prometheus/common/model"

"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)

// Deprecated: Please use the logger from pkg/instrumentation.
func initZapLog() *zap.Logger {
config := zap.NewProductionConfig()
config.EncoderConfig.TimeKey = "timestamp"
@@ -32,10 +35,6 @@ func initZapLog() *zap.Logger {
return logger
}

func init() {
prommodel.NameValidationScheme = prommodel.UTF8Validation
}

func main() {
var promConfigPath, skipTopLvlOpsPath string

@@ -57,21 +56,32 @@ func main() {
var gatewayUrl string
var useLicensesV3 bool

// Deprecated
flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
// Deprecated
flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces")
// Deprecated
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
// Deprecated
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
// Deprecated
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
// Deprecated
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
// Deprecated
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
// Deprecated
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
// Deprecated
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
// Deprecated
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)")
flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
// Deprecated
flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
flag.Parse()

@@ -89,6 +99,7 @@ func main() {
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
})
if err != nil {
zap.L().Fatal("Failed to create config", zap.Error(err))
@@ -104,13 +115,21 @@ func main() {
signoz, err := signoz.New(
context.Background(),
config,
zeus.Config(),
httpzeus.NewProviderFactory(),
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
func(sqlstore sqlstore.SQLStore) user.Module {
return eeuserimpl.NewModule(eeuserimpl.NewStore(sqlstore))
},
func(userModule user.Module) user.Handler {
return eeuserimpl.NewHandler(userModule)
},
)
if err != nil {
zap.L().Fatal("Failed to create signoz struct", zap.Error(err))
zap.L().Fatal("Failed to create signoz", zap.Error(err))
}

jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
@@ -127,19 +146,12 @@ func main() {
Config: config,
SigNoz: signoz,
HTTPHostPort: baseconst.HTTPHostPort,
PromConfigPath: promConfigPath,
SkipTopLvlOpsPath: skipTopLvlOpsPath,
PreferSpanMetrics: preferSpanMetrics,
PrivateHostPort: baseconst.PrivateHostPort,
DisableRules: disableRules,
RuleRepoURL: ruleRepoURL,
CacheConfigPath: cacheConfigPath,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
GatewayUrl: gatewayUrl,
UseLogsNewSchema: useLogsNewSchema,
UseTraceNewSchema: useTraceNewSchema,
Jwt: jwt,
}

@@ -148,14 +160,10 @@ func main() {
zap.L().Fatal("Failed to create server", zap.Error(err))
}

if err := server.Start(); err != nil {
if err := server.Start(context.Background()); err != nil {
zap.L().Fatal("Could not start server", zap.Error(err))
}

if err := auth.InitAuthCache(context.Background()); err != nil {
zap.L().Fatal("Failed to initialize auth cache", zap.Error(err))
}

signoz.Start(context.Background())

if err := signoz.Wait(context.Background()); err != nil {
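`signoz.New` above now takes constructor functions such as `func(sqlstore.SQLStore) user.Module`, so the EE binary can swap in its own implementations without the core package importing them. A minimal sketch of that factory-function injection pattern; all names here are illustrative:

```go
package main

import "fmt"

type Store struct{ dsn string }

type UserModule interface{ Who() string }

type ossUsers struct{}

func (ossUsers) Who() string { return "oss" }

type eeUsers struct{}

func (eeUsers) Who() string { return "ee" }

// New wires the application from factory functions: the caller decides
// which implementation is constructed, and New never imports either one.
func New(store Store, userFactory func(Store) UserModule) UserModule {
	return userFactory(store)
}

func main() {
	m := New(Store{dsn: ":memory:"}, func(Store) UserModule { return eeUsers{} })
	fmt.Println(m.Who())
}
```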
@@ -1,12 +0,0 @@
package model

import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)

// GettableInvitation overrides base object and adds precheck into
// response
type GettableInvitation struct {
*basemodel.InvitationResponseObject
Precheck *basemodel.PrecheckResponse `json:"precheck"`
}
@@ -157,8 +157,6 @@ func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
}

switch planName {
case PlanNameTeams:
features = append(features, ProPlan...)
case PlanNameEnterprise:
features = append(features, EnterprisePlan...)
case PlanNameBasic:
@@ -74,21 +74,21 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "Parse the entire license properly",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
"name": "ENTERPRISE",
},
"category": "FREE",
"status": "ACTIVE",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameTeams,
PlanName: PlanNameEnterprise,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "ACTIVE",
@@ -98,14 +98,14 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "Fallback to basic plan if license status is invalid",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
"name": "ENTERPRISE",
},
"category": "FREE",
"status": "INVALID",
@@ -122,21 +122,21 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "fallback states for validFrom and validUntil",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
"name": "ENTERPRISE",
},
"valid_from": 1234.456,
"valid_until": 5678.567,
"category": "FREE",
"status": "ACTIVE",
},
PlanName: PlanNameTeams,
PlanName: PlanNameEnterprise,
ValidFrom: 1234,
ValidUntil: 5678,
Status: "ACTIVE",
@@ -1,25 +0,0 @@
package model

import "github.com/SigNoz/signoz/pkg/types"

type User struct {
Id string `json:"id" db:"id"`
Name string `json:"name" db:"name"`
Email string `json:"email" db:"email"`
CreatedAt int64 `json:"createdAt" db:"created_at"`
ProfilePictureURL string `json:"profilePictureURL" db:"profile_picture_url"`
NotFound bool `json:"notFound"`
}

type CreatePATRequestBody struct {
Name string `json:"name"`
Role string `json:"role"`
ExpiresInDays int64 `json:"expiresInDays"`
}

type PAT struct {
CreatedByUser User `json:"createdByUser"`
UpdatedByUser User `json:"updatedByUser"`

types.StorablePersonalAccessToken
}
@@ -1,30 +1,26 @@
package model

import (
"github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)

const SSO = "SSO"
const Basic = "BASIC_PLAN"
const Pro = "PRO_PLAN"
const Enterprise = "ENTERPRISE_PLAN"

var (
PlanNameEnterprise = "ENTERPRISE"
PlanNameTeams = "TEAMS"
PlanNameBasic = "BASIC"
)

var (
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameTeams: Pro, PlanNameEnterprise: Enterprise}
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
)

var (
LicenseStatusInvalid = "INVALID"
)

const DisableUpsell = "DISABLE_UPSELL"
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
@@ -38,90 +34,6 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: DisableUpsell,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
@@ -150,135 +62,6 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
}

var ProPlan = basemodel.FeatureSet{
basemodel.Feature{
Name: SSO,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
}

var EnterprisePlan = basemodel.FeatureSet{
@@ -289,83 +72,6 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
@@ -408,11 +114,4 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
@@ -12,9 +12,11 @@ import (
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -52,8 +54,8 @@ type AnomalyRule struct {

func NewAnomalyRule(
id string,
p *baserules.PostableRule,
featureFlags interfaces.FeatureLookup,
orgID valuer.UUID,
p *ruletypes.PostableRule,
reader interfaces.Reader,
cache cache.Cache,
opts ...baserules.RuleOption,
@@ -61,12 +63,12 @@ func NewAnomalyRule(

zap.L().Info("creating new AnomalyRule", zap.String("id", id), zap.Any("opts", opts))

if p.RuleCondition.CompareOp == baserules.ValueIsBelow {
if p.RuleCondition.CompareOp == ruletypes.ValueIsBelow {
target := -1 * *p.RuleCondition.Target
p.RuleCondition.Target = &target
}

baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
if err != nil {
return nil, err
}
@@ -89,10 +91,9 @@ func NewAnomalyRule(
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))

querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FeatureLookup: featureFlags,
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}

t.querierV2 = querierV2.NewQuerier(querierOptsV2)
@@ -102,27 +103,24 @@ func NewAnomalyRule(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
)
}
return &t, nil
}

func (r *AnomalyRule) Type() baserules.RuleType {
func (r *AnomalyRule) Type() ruletypes.RuleType {
return RuleTypeAnomaly
}

@@ -162,18 +160,18 @@ func (r *AnomalyRule) GetSelectedQuery() string {
return r.Condition().GetSelectedQueryName()
}

func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (baserules.Vector, error) {
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {

params, err := r.prepareQueryRange(ts)
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, params)
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}

anomalies, err := r.provider.GetAnomalies(ctx, &anomaly.GetAnomaliesRequest{
anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
@@ -189,7 +187,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (baser
}
}

var resultVector baserules.Vector
var resultVector ruletypes.Vector

scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
zap.L().Info("anomaly scores", zap.String("scores", string(scoresJSON)))
@@ -208,7 +206,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
prevState := r.State()

valueFormatter := formatter.FromUnit(r.Unit())
res, err := r.buildAndRunQuery(ctx, ts)
res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)

if err != nil {
return nil, err
@@ -218,7 +216,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
defer r.mtx.Unlock()

resultFPs := map[uint64]struct{}{}
var alerts = make(map[uint64]*baserules.Alert, len(res))
var alerts = make(map[uint64]*ruletypes.Alert, len(res))

for _, smpl := range res {
l := make(map[string]string, len(smpl.Metric))
@@ -230,7 +228,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))

tmplData := baserules.AlertTemplateData(l, value, threshold)
tmplData := ruletypes.AlertTemplateData(l, value, threshold)
// Inject some convenience variables that are easier to remember for users
// who are not used to Go's templating system.
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
@@ -238,7 +236,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
// utility function to apply go template on labels and annotations
expand := func(text string) string {

tmpl := baserules.NewTemplateExpander(
tmpl := ruletypes.NewTemplateExpander(
ctx,
defs+text,
"__alert_"+r.Name(),
@@ -283,7 +281,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
return nil, err
}

alerts[h] = &baserules.Alert{
alerts[h] = &ruletypes.Alert{
Labels: lbs,
QueryResultLables: resultLabels,
Annotations: annotations,
@@ -324,7 +322,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given
// retention time so it is reported as resolved to the AlertManager.
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > baserules.ResolvedRetention) {
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
delete(r.Active, fp)
}
if a.State != model.StateInactive {
@@ -380,10 +378,10 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro

func (r *AnomalyRule) String() string {

ar := baserules.PostableRule{
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: baserules.Duration(r.EvalWindow()),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),

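The net effect of the hunks above is a narrower constructor: the FeatureLookup parameter is gone, the rule moves to the ruletypes package, and the org ID is threaded through explicitly. A minimal calling sketch under the new signature; the ruleId, orgID, postableRule, reader, cache, and evalDelay values are placeholders supplied by the caller, not part of this change:

    // Hypothetical call site for the new constructor:
    // id, orgID, rule, reader, cache, then variadic options.
    rule, err := NewAnomalyRule(
        ruleId,       // string
        orgID,        // valuer.UUID, newly required
        postableRule, // *ruletypes.PostableRule (previously *baserules.PostableRule)
        reader,       // interfaces.Reader
        cache,        // cache.Cache, now from pkg/cache
        baserules.WithEvalDelay(evalDelay),
    )
    if err != nil {
        return nil, err
    }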
@@ -8,6 +8,8 @@ import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"go.uber.org/zap"
)
@@ -18,15 +20,13 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
var task baserules.Task

ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
if opts.Rule.RuleType == baserules.RuleTypeThreshold {
if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
// create a threshold rule
tr, err := baserules.NewThresholdRule(
ruleId,
opts.OrgID,
opts.Rule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
opts.UseTraceNewSchema,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
baserules.WithSQLStore(opts.SQLStore),
)
@@ -38,17 +38,18 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)

// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else if opts.Rule.RuleType == baserules.RuleTypeProm {
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {

// create promql rule
pr, err := baserules.NewPromRule(
ruleId,
opts.OrgID,
opts.Rule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.PqlEngine,
opts.ManagerOpts.Prometheus,
baserules.WithSQLStore(opts.SQLStore),
)

@@ -59,14 +60,14 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)

// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else if opts.Rule.RuleType == baserules.RuleTypeAnomaly {
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
ar, err := NewAnomalyRule(
ruleId,
opts.OrgID,
opts.Rule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
@@ -79,10 +80,10 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)

// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, baserules.RuleTypeProm, baserules.RuleTypeThreshold)
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
}

return task, nil
@@ -107,12 +108,12 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
}

// append name to indicate this is test alert
parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, baserules.TestAlertPostFix)
parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, ruletypes.TestAlertPostFix)

var rule baserules.Rule
var err error

if parsedRule.RuleType == baserules.RuleTypeThreshold {
if parsedRule.RuleType == ruletypes.RuleTypeThreshold {

// add special labels for test alerts
parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
@@ -122,45 +123,44 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
// create a threshold rule
rule, err = baserules.NewThresholdRule(
alertname,
opts.OrgID,
parsedRule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
opts.UseTraceNewSchema,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
baserules.WithSQLStore(opts.SQLStore),
)

if err != nil {
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err))
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}

} else if parsedRule.RuleType == baserules.RuleTypeProm {
} else if parsedRule.RuleType == ruletypes.RuleTypeProm {

// create promql rule
rule, err = baserules.NewPromRule(
alertname,
opts.OrgID,
parsedRule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.PqlEngine,
opts.ManagerOpts.Prometheus,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
baserules.WithSQLStore(opts.SQLStore),
)

if err != nil {
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
} else if parsedRule.RuleType == baserules.RuleTypeAnomaly {
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
rule, err = NewAnomalyRule(
alertname,
opts.OrgID,
parsedRule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithSendAlways(),
@@ -168,7 +168,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
baserules.WithSQLStore(opts.SQLStore),
)
if err != nil {
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", rule.Name()), zap.Error(err))
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
} else {
@@ -194,9 +194,9 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap

// newTask returns an appropriate group for
// rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, ruleDB baserules.RuleDB) baserules.Task {
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
}
return baserules.NewPromRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
return baserules.NewPromRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
}

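Taken together, these hunks change what PrepareTaskFunc expects on its options struct: OrgID and MaintenanceStore replace the old RuleDB plumbing. A sketch of the call shape, using only fields referenced in the hunks above; every value is hypothetical, and the literal field names are assumed to match the opts accessors shown:

    task, err := PrepareTaskFunc(baserules.PrepareTaskOptions{
        TaskName:         taskName,         // rule id is recovered via baserules.RuleIdFromTaskName
        OrgID:            orgID,            // valuer.UUID, now passed into every rule constructor
        Rule:             postableRule,     // *ruletypes.PostableRule
        Reader:           reader,
        Cache:            cache,
        ManagerOpts:      managerOpts,
        NotifyFunc:       notify,
        MaintenanceStore: maintenanceStore, // ruletypes.MaintenanceStore, replaces RuleDB
        SQLStore:         sqlStore,
        // FF, Logger, UseLogsNewSchema, UseTraceNewSchema as before, elided here.
    })
    if err != nil {
        return nil, err
    }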
@@ -1,31 +0,0 @@
package sso

import (
"net/http"
)

// SSOIdentity contains details of the user received from the SSO provider
type SSOIdentity struct {
UserID string
Username string
PreferredUsername string
Email string
EmailVerified bool
ConnectorData []byte
}

// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth
// style redirect flow to determine user information.
type OAuthCallbackProvider interface {
// The initial URL the user would be redirected to.
// OAuth2 implementations support various scopes, but we only need profile and user, as
// the roles are still being managed in SigNoz.
BuildAuthURL(state string) (string, error)

// Handle the callback to the server (after login at the OAuth provider's site)
// and return an email identity.
// At the moment we don't support an auto signup flow (based on domain), so
// the full identity (including name, group, etc.) is not required outside of the
// connector
HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}
@@ -4,7 +4,6 @@ import (
"context"
"encoding/json"
"fmt"
"regexp"
"strings"
"sync/atomic"
"time"
@@ -16,10 +15,10 @@ import (
"go.uber.org/zap"

"github.com/SigNoz/signoz/ee/query-service/dao"
licenseserver "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
"github.com/SigNoz/signoz/pkg/zeus"
)

const (
@@ -42,26 +41,16 @@ type Manager struct {

modelDao dao.ModelDao

tenantID string
zeus zeus.Zeus
}

func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, chUrl string) (*Manager, error) {
hostNameRegex := regexp.MustCompile(`tcp://(?P<hostname>.*):`)
hostNameRegexMatches := hostNameRegex.FindStringSubmatch(chUrl)

tenantID := ""
if len(hostNameRegexMatches) == 2 {
tenantID = hostNameRegexMatches[1]
tenantID = strings.TrimSuffix(tenantID, "-clickhouse")
}

func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, zeus zeus.Zeus) (*Manager, error) {
m := &Manager{
// repository: repo,
clickhouseConn: clickhouseConn,
licenseRepo: licenseRepo,
scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every day at 00:00 UTC
modelDao: modelDao,
tenantID: tenantID,
zeus: zeus,
}
return m, nil
}
@@ -138,15 +127,6 @@ func (lm *Manager) UploadUsage() {

zap.L().Info("uploading usage data")

orgName := ""
orgNames, orgError := lm.modelDao.GetOrgs(ctx)
if orgError != nil {
zap.L().Error("failed to get org data: %v", zap.Error(orgError))
}
if len(orgNames) == 1 {
orgName = orgNames[0].Name
}

usagesPayload := []model.Usage{}
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
@@ -166,8 +146,8 @@ func (lm *Manager) UploadUsage() {
usageData.ExporterID = usage.ExporterID
usageData.Type = usage.Type
usageData.Tenant = "default"
usageData.OrgName = orgName
usageData.TenantId = lm.tenantID
usageData.OrgName = "default"
usageData.TenantId = "default"
usagesPayload = append(usagesPayload, usageData)
}

@@ -176,24 +156,18 @@ func (lm *Manager) UploadUsage() {
LicenseKey: key,
Usage: usagesPayload,
}
lm.UploadUsageWithExponentalBackOff(ctx, payload)
}

func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload model.UsagePayload) {
for i := 1; i <= MaxRetries; i++ {
apiErr := licenseserver.SendUsage(ctx, payload)
if apiErr != nil && i == MaxRetries {
zap.L().Error("retries stopped : %v", zap.Error(apiErr))
// not returning error here since it is captured in the failed count
return
} else if apiErr != nil {
// sleeping for exponential backoff
sleepDuration := RetryInterval * time.Duration(i)
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)
} else {
break
}
body, errv2 := json.Marshal(payload)
if errv2 != nil {
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
return
}

errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
if errv2 != nil {
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
// not returning error here since it is captured in the failed count
return
}
}

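Note that the hand-rolled retry loop is deleted rather than rewritten: the zeus HTTP client introduced below is built with client.WithRetryCount(3), so retries now live one layer down. The new upload path reduces to marshal-and-put; a condensed sketch of the flow from the hunk above:

    // Condensed from the new UploadUsageWithExponentalBackOff body; payload is
    // the model.UsagePayload{LicenseKey: key, Usage: usagesPayload} built above.
    body, err := json.Marshal(payload)
    if err != nil {
        zap.L().Error("error while marshalling usage payload: %v", zap.Error(err))
        return
    }
    // Errors are logged, not returned: failures are already captured in the failed count.
    if err := lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body); err != nil {
        zap.L().Error("failed to upload usage: %v", zap.Error(err))
    }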
@@ -2,15 +2,39 @@ package postgressqlstore

import (
"context"
"fmt"
"reflect"
"slices"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/uptrace/bun"
)

type dialect struct {
}
var (
Identity = "id"
Integer = "bigint"
Text = "text"
)

func (dialect *dialect) MigrateIntToTimestamp(ctx context.Context, bun bun.IDB, table string, column string) error {
var (
Org = "org"
User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
)

var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
)

type dialect struct{}

func (dialect *dialect) IntToTimestamp(ctx context.Context, bun bun.IDB, table string, column string) error {
columnType, err := dialect.GetColumnType(ctx, bun, table, column)
if err != nil {
return err
@@ -57,7 +81,15 @@ func (dialect *dialect) MigrateIntToTimestamp(ctx context.Context, bun bun.IDB,
return nil
}

func (dialect *dialect) MigrateIntToBoolean(ctx context.Context, bun bun.IDB, table string, column string) error {
func (dialect *dialect) IntToBoolean(ctx context.Context, bun bun.IDB, table string, column string) error {
columnExists, err := dialect.ColumnExists(ctx, bun, table, column)
if err != nil {
return err
}
if !columnExists {
return nil
}

columnType, err := dialect.GetColumnType(ctx, bun, table, column)
if err != nil {
return err
@@ -130,6 +162,26 @@ func (dialect *dialect) ColumnExists(ctx context.Context, bun bun.IDB, table str
return count > 0, nil
}

func (dialect *dialect) AddColumn(ctx context.Context, bun bun.IDB, table string, column string, columnExpr string) error {
exists, err := dialect.ColumnExists(ctx, bun, table, column)
if err != nil {
return err
}
if !exists {
_, err = bun.
NewAddColumn().
Table(table).
ColumnExpr(column + " " + columnExpr).
Exec(ctx)
if err != nil {
return err
}

}

return nil
}

func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) {
oldColumnExists, err := dialect.ColumnExists(ctx, bun, table, oldColumnName)
if err != nil {
@@ -141,10 +193,14 @@ func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table str
return false, err
}

if !oldColumnExists && newColumnExists {
if newColumnExists {
return true, nil
}

if !oldColumnExists {
return false, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "old column: %s doesn't exist", oldColumnName)
}

_, err = bun.
ExecContext(ctx, "ALTER TABLE "+table+" RENAME COLUMN "+oldColumnName+" TO "+newColumnName)
if err != nil {
@@ -153,6 +209,26 @@ func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table str
return true, nil
}

func (dialect *dialect) DropColumn(ctx context.Context, bun bun.IDB, table string, column string) error {
exists, err := dialect.ColumnExists(ctx, bun, table, column)
if err != nil {
return err
}
if exists {
_, err = bun.
NewDropColumn().
Table(table).
Column(column).
Exec(ctx)
if err != nil {
return err
}

}

return nil
}

func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table interface{}) (bool, error) {

count := 0
@@ -174,7 +250,10 @@ func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table inte
return true, nil
}

func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, references []string, cb func(context.Context) error) error {
if len(references) == 0 {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
exists, err := dialect.TableExists(ctx, bun, newModel)
if err != nil {
return err
@@ -183,12 +262,31 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
return nil
}

_, err = bun.
var fkReferences []string
for _, reference := range references {
if reference == Org && !slices.Contains(fkReferences, OrgReference) {
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
} else if reference == UserNoCascade && !slices.Contains(fkReferences, UserReferenceNoCascade) {
fkReferences = append(fkReferences, UserReferenceNoCascade)
} else if reference == FactorPassword && !slices.Contains(fkReferences, FactorPasswordReference) {
fkReferences = append(fkReferences, FactorPasswordReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
}
}

createTable := bun.
NewCreateTable().
IfNotExists().
Model(newModel).
Exec(ctx)
Model(newModel)

for _, fk := range fkReferences {
createTable = createTable.ForeignKey(fk)
}

_, err = createTable.Exec(ctx)
if err != nil {
return err
}
@@ -209,3 +307,146 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I

return nil
}

func (dialect *dialect) AddNotNullDefaultToColumn(ctx context.Context, bun bun.IDB, table string, column, columnType, defaultValue string) error {
query := fmt.Sprintf("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s, ALTER COLUMN %s SET NOT NULL", table, column, defaultValue, column)
if _, err := bun.ExecContext(ctx, query); err != nil {
return err
}
return nil
}

func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
if reference == "" {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name

columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
if err != nil {
return err
}
if columnType == Text {
return nil
}

fkReference := ""
if reference == Org {
fkReference = OrgReference
} else if reference == User {
fkReference = UserReference
}

_, err = bun.
NewCreateTable().
IfNotExists().
Model(newModel).
ForeignKey(fkReference).
Exec(ctx)

if err != nil {
return err
}

err = cb(ctx)
if err != nil {
return err
}

_, err = bun.
NewDropTable().
IfExists().
Model(oldModel).
Exec(ctx)
if err != nil {
return err
}

_, err = bun.
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
if err != nil {
return err
}

return nil
}

func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
if reference == "" {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name

identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
if err != nil {
return err
}
if identityExists {
return nil
}

fkReference := ""
if reference == Org {
fkReference = OrgReference
} else if reference == User {
fkReference = UserReference
}

_, err = bun.
NewCreateTable().
IfNotExists().
Model(newModel).
ForeignKey(fkReference).
Exec(ctx)

if err != nil {
return err
}

err = cb(ctx)
if err != nil {
return err
}

_, err = bun.
NewDropTable().
IfExists().
Model(oldModel).
Exec(ctx)
if err != nil {
return err
}

_, err = bun.
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
if err != nil {
return err
}

return nil
}

func (dialect *dialect) DropColumnWithForeignKeyConstraint(ctx context.Context, bunIDB bun.IDB, model interface{}, column string) error {
existingTable := bunIDB.Dialect().Tables().Get(reflect.TypeOf(model))
columnExists, err := dialect.ColumnExists(ctx, bunIDB, existingTable.Name, column)
if err != nil {
return err
}

if !columnExists {
return nil
}

_, err = bunIDB.
NewDropColumn().
Model(model).
Column(column).
Exec(ctx)
if err != nil {
return err
}

return nil
}

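Every new dialect helper is idempotent: each checks ColumnExists or TableExists before acting, so migrations can call them unconditionally on every start-up. A sketch of the intended usage from inside the same package (the dialect type is unexported; table and column names here are invented for illustration):

    func exampleMigration(ctx context.Context, db bun.IDB, d *dialect) error {
        // No-op when the column already exists.
        if err := d.AddColumn(ctx, db, "rule", "org_id", "TEXT"); err != nil {
            return err
        }
        // Returns true when the new name exists (renamed now or previously);
        // errors if neither the old nor the new column exists.
        if _, err := d.RenameColumn(ctx, db, "rule", "updated_by", "updated_by_user_id"); err != nil {
            return err
        }
        // Likewise guarded by an existence check, so safe to repeat.
        return d.DropColumn(ctx, db, "rule", "legacy_flag")
    }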
@@ -4,8 +4,10 @@ import (
"context"
"database/sql"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/jackc/pgx/v5/pgconn"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/jackc/pgx/v5/stdlib"
"github.com/jmoiron/sqlx"
@@ -87,3 +89,24 @@ func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
func (provider *provider) RunInTxCtx(ctx context.Context, opts *sql.TxOptions, cb func(ctx context.Context) error) error {
return provider.bundb.RunInTxCtx(ctx, opts, cb)
}

func (provider *provider) WrapNotFoundErrf(err error, code errors.Code, format string, args ...any) error {
if err == sql.ErrNoRows {
return errors.Wrapf(err, errors.TypeNotFound, code, format, args...)
}

return err
}

func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error {
var pgErr *pgconn.PgError
if errors.As(err, &pgErr) && pgErr.Code == "23505" {
return errors.Wrapf(err, errors.TypeAlreadyExists, code, format, args...)
}

return err
}

func (dialect *dialect) ToggleForeignKeyConstraint(ctx context.Context, bun *bun.DB, enable bool) error {
return nil
}

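The two wrappers translate driver-level failures into the typed errors package at the store boundary: sql.ErrNoRows becomes TypeNotFound, and Postgres unique-violation code 23505 becomes TypeAlreadyExists, while every other error passes through untouched. A hedged sketch of the intended call pattern, assuming the sqlstore interface exposes these methods to store code (the query, store, user, and id values are illustrative):

    err := db.NewSelect().Model(user).Where("id = ?", id).Scan(ctx)
    if err != nil {
        // Becomes a TypeNotFound error when the driver reported sql.ErrNoRows;
        // any other error is returned unchanged.
        return store.WrapNotFoundErrf(err, errors.CodeNotFound, "user with id %s not found", id)
    }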
ee/zeus/config.go (new file, 42 lines)
@@ -0,0 +1,42 @@
package zeus

import (
"fmt"
neturl "net/url"
"sync"

"github.com/SigNoz/signoz/pkg/zeus"
)

// This will be set via ldflags at build time.
var (
url string = "<unset>"
deprecatedURL string = "<unset>"
)

var (
config zeus.Config
once sync.Once
)

// initializes the Zeus configuration
func Config() zeus.Config {
once.Do(func() {
parsedURL, err := neturl.Parse(url)
if err != nil {
panic(fmt.Errorf("invalid zeus URL: %w", err))
}

deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
if err != nil {
panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
}

config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
if err := config.Validate(); err != nil {
panic(fmt.Errorf("invalid zeus config: %w", err))
}
})

return config
}
ee/zeus/httpzeus/provider.go (new file, 189 lines)
@@ -0,0 +1,189 @@
package httpzeus

import (
"bytes"
"context"
"io"
"net/http"
"net/url"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)

type Provider struct {
settings factory.ScopedProviderSettings
config zeus.Config
httpClient *client.Client
}

func NewProviderFactory() factory.ProviderFactory[zeus.Zeus, zeus.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
return New(ctx, providerSettings, config)
})
}

func New(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/zeus/httpzeus")

httpClient, err := client.New(
settings.Logger(),
providerSettings.TracerProvider,
providerSettings.MeterProvider,
client.WithRequestResponseLog(true),
client.WithRetryCount(3),
)
if err != nil {
return nil, err
}

return &Provider{
settings: settings,
config: config,
httpClient: httpClient,
}, nil
}

func (provider *Provider) GetLicense(ctx context.Context, key string) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/licenses/me"),
http.MethodGet,
key,
nil,
)
if err != nil {
return nil, err
}

return []byte(gjson.GetBytes(response, "data").String()), nil
}

func (provider *Provider) GetCheckoutURL(ctx context.Context, key string, body []byte) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/subscriptions/me/sessions/checkout"),
http.MethodPost,
key,
body,
)
if err != nil {
return nil, err
}

return []byte(gjson.GetBytes(response, "data").String()), nil
}

func (provider *Provider) GetPortalURL(ctx context.Context, key string, body []byte) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/subscriptions/me/sessions/portal"),
http.MethodPost,
key,
body,
)
if err != nil {
return nil, err
}

return []byte(gjson.GetBytes(response, "data").String()), nil
}

func (provider *Provider) GetDeployment(ctx context.Context, key string) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/deployments/me"),
http.MethodGet,
key,
nil,
)
if err != nil {
return nil, err
}

return []byte(gjson.GetBytes(response, "data").String()), nil
}

func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte) error {
_, err := provider.do(
ctx,
provider.config.DeprecatedURL.JoinPath("/api/v1/usage"),
http.MethodPost,
key,
data,
)

return err
}

func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/profiles/me"),
http.MethodPut,
key,
body,
)

return err
}

func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
http.MethodPut,
key,
body,
)

return err
}

func (provider *Provider) do(ctx context.Context, url *url.URL, method string, key string, requestBody []byte) ([]byte, error) {
request, err := http.NewRequestWithContext(ctx, method, url.String(), bytes.NewBuffer(requestBody))
if err != nil {
return nil, err
}
request.Header.Set("X-Signoz-Cloud-Api-Key", key)
request.Header.Set("Content-Type", "application/json")

response, err := provider.httpClient.Do(request)
if err != nil {
return nil, err
}

defer func() {
_ = response.Body.Close()
}()

body, err := io.ReadAll(response.Body)
if err != nil {
return nil, err
}

if response.StatusCode/100 == 2 {
return body, nil
}

return nil, provider.errFromStatusCode(response.StatusCode)
}

// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int) error {
switch statusCode {
case http.StatusBadRequest:
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
case http.StatusUnauthorized:
return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
case http.StatusForbidden:
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
case http.StatusNotFound:
return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
}

return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
}
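All endpoint methods funnel through do(), which sets the X-Signoz-Cloud-Api-Key header, treats any 2xx as success, and maps 4xx codes onto the typed errors package; responses are then unwrapped from their "data" envelope with gjson. A minimal usage sketch (construction of providerSettings and zeusConfig is elided; the license key is a placeholder):

    z, err := httpzeus.New(ctx, providerSettings, zeusConfig)
    if err != nil {
        return err
    }
    // GET <config.URL>/v2/licenses/me with the key in X-Signoz-Cloud-Api-Key;
    // returns the "data" field of the JSON response body.
    license, err := z.GetLicense(ctx, licenseKey)
    if err != nil {
        return err
    }
    _ = license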
@@ -110,6 +110,8 @@ module.exports = {
// eslint rules that need to be removed
'@typescript-eslint/no-shadow': 'off',
'import/no-cycle': 'off',
// https://typescript-eslint.io/rules/consistent-return/ check the warning for details
'consistent-return': 'off',
'prettier/prettier': [
'error',
{},

frontend/.gitignore (vendored, 1 line changed)
@@ -1,3 +1,4 @@

# Sentry Config File
.env.sentry-build-plugin
.qodo

@@ -1,6 +1,10 @@
# Ignore artifacts:
build
coverage
public/

# Ignore all MD files:
**/*.md
**/*.md

# Ignore all JSON files:
**/*.json

@@ -3,5 +3,4 @@ BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/"
INTERCOM_APP_ID="intercom-app-id"

PLAYWRIGHT_TEST_BASE_URL="http://localhost:8080"
CI="1"
@@ -30,11 +30,6 @@ const config: Config.InitialOptions = {
testPathIgnorePatterns: ['/node_modules/', '/public/'],
moduleDirectories: ['node_modules', 'src'],
testEnvironment: 'jest-environment-jsdom',
testEnvironmentOptions: {
'jest-playwright': {
browsers: ['chromium', 'firefox', 'webkit'],
},
},
coverageThreshold: {
global: {
statements: 80,

@@ -15,10 +15,6 @@
"jest:coverage": "jest --coverage",
"jest:watch": "jest --watch",
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
"playwright": "NODE_ENV=testing playwright test --config=./playwright.config.ts",
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
"playwright:codegen:local": "playwright codegen http://localhost:3301",
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
"commitlint": "commitlint --edit $1",
"test": "jest",
@@ -35,6 +31,7 @@
"@dnd-kit/core": "6.1.0",
"@dnd-kit/modifiers": "7.0.0",
"@dnd-kit/sortable": "8.0.0",
"@dnd-kit/utilities": "3.2.2",
"@grafana/data": "^11.2.3",
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",
@@ -55,7 +52,7 @@
"ansi-to-html": "0.7.2",
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"axios": "1.7.7",
"axios": "1.8.2",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",
@@ -82,6 +79,7 @@
"history": "4.10.1",
"html-webpack-plugin": "5.5.0",
"http-proxy-middleware": "3.0.3",
"http-status-codes": "2.3.0",
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
"i18next-http-backend": "^1.3.2",
@@ -90,7 +88,7 @@
"less": "^4.1.2",
"less-loader": "^10.2.0",
"lodash-es": "^4.17.21",
"lucide-react": "0.427.0",
"lucide-react": "0.498.0",
"mini-css-extract-plugin": "2.4.5",
"motion": "12.4.13",
"overlayscrollbars": "^2.8.1",
@@ -132,6 +130,7 @@
"tsconfig-paths-webpack-plugin": "^3.5.1",
"typescript": "^4.0.5",
"uplot": "1.6.31",
"userpilot": "1.3.9",
"uuid": "^8.3.2",
"web-vitals": "^0.2.4",
"webpack": "5.94.0",
@@ -162,7 +161,6 @@
"@commitlint/config-conventional": "^16.2.4",
"@faker-js/faker": "9.3.0",
"@jest/globals": "^27.5.1",
"@playwright/test": "^1.22.0",
"@testing-library/jest-dom": "5.16.5",
"@testing-library/react": "13.4.0",
"@testing-library/user-event": "14.4.3",
@@ -198,7 +196,7 @@
"autoprefixer": "10.4.19",
"babel-plugin-styled-components": "^1.12.0",
"compression-webpack-plugin": "9.0.0",
"copy-webpack-plugin": "^8.1.0",
"copy-webpack-plugin": "^11.0.0",
"critters-webpack-plugin": "^3.0.1",
"eslint": "^7.32.0",
"eslint-config-airbnb": "^19.0.4",
@@ -255,6 +253,8 @@
"body-parser": "1.20.3",
"http-proxy-middleware": "3.0.3",
"cross-spawn": "7.0.5",
"cookie": "^0.7.1"
"cookie": "^0.7.1",
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0"
}
}

@@ -1,23 +0,0 @@
import { PlaywrightTestConfig } from '@playwright/test';
import dotenv from 'dotenv';

dotenv.config();

const config: PlaywrightTestConfig = {
forbidOnly: !!process.env.CI,
retries: process.env.CI ? 2 : 0,
preserveOutput: 'always',
name: 'Signoz',
testDir: './tests',
use: {
trace: 'retain-on-failure',
baseURL: process.env.PLAYWRIGHT_TEST_BASE_URL || 'http://localhost:3301',
},
updateSnapshots: 'all',
fullyParallel: !!process.env.CI,
quiet: false,
testMatch: ['**/*.spec.ts'],
reporter: process.env.CI ? 'github' : 'list',
};

export default config;
frontend/public/Icons/empty-funnel-icon.svg (new image, 5.9 KiB)
frontend/public/Icons/funnel-add.svg (new image, 418 B)
frontend/public/Icons/solid-info-circle.svg (new image, 439 B)
frontend/public/Logos/amazon-msk.svg (new image, 2.7 KiB)
frontend/public/Logos/api-gateway.svg (new image, 3.1 KiB)
(one further modified image, 1.1 KiB before and after; its filename was not preserved in this view)
frontend/public/Logos/celery.svg (new image, 1.1 KiB)
frontend/public/Logos/clickhouse.svg (new image, 1.3 KiB)
frontend/public/Logos/confluent-kafka.svg (new image, 1.9 KiB)
39
frontend/public/Logos/datadog.svg
Normal file
@@ -0,0 +1,39 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 23.0.4, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 800.5 907.77" style="enable-background:new 0 0 800.5 907.77;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<path class="st0" d="M303.36,238.61c31.36-21.37,71.76-12.97,65-6.53c-12.89,12.28,4.26,8.65,6.11,31.31
c1.36,16.69-4.09,25.88-8.78,31.11c-9.79,1.28-21.69,3.67-36.02,8.33c-8.48,2.76-15.85,5.82-22.31,8.9
c-1.7-1.11-3.55-2.47-5.74-4.36C279.5,288.19,280.24,254.37,303.36,238.61 M490.68,370.72c5.69-4.41,31.55-12.72,55.49-15.55
c12.57-1.48,30.49-2.34,34.31-0.2c7.59,4.19,7.59,17.16,2.39,29.14c-7.57,17.4-18.27,36.63-30.39,38.21
c-19.77,2.61-38.46-8.09-59.8-24.03C485.06,392.56,480.38,378.68,490.68,370.72 M526.75,201.27c29.19,13.58,25.37,39.42,26.18,54.6
c0.22,4.36,0.15,7.3-0.22,9.32c-4.04-2.19-10.43-3.8-20.56-3.35c-2.96,0.12-5.84,0.47-8.63,0.91c-10.77-5.77-17.21-17.06-23.1-29.06
c-0.54-1.11-0.96-2.1-1.36-3.06c-0.17-0.44-0.35-0.91-0.52-1.31c-0.07-0.22-0.12-0.39-0.2-0.59c-3.23-10.25-1.06-12.3,0.3-15.46
c1.41-3.23,6.68-5.89-1.11-8.58c-0.67-0.25-1.5-0.39-2.44-0.57C500.25,197.72,515.7,196.17,526.75,201.27 M367.62,510.22
c-31.45-20.19-63.99-49.15-78.22-65.18c-2.39-1.8-2-9.79-2-9.79c12.84,9.98,66.11,48.04,122.44,65.42
c19.87,6.14,50.36,8.46,76.81-6.53c20.21-11.46,44.54-31.43,59.06-52.01l2.66,4.61c-0.1,3.06-6.78,17.97-10.18,23.96
c6.14,3.53,10.72,4.49,17.55,6.36l46.64-7.27c16.74-27.04,28.74-70.65,15.95-112.16c-7.3-23.81-45.36-71.22-48.09-73.83
c-9.56-9.19,1.6-44.69-17.35-83.42C532.86,159.41,480.67,116.69,458,98.1c6.68,4.88,47.82,21.47,67,44.62
c1.8-2.39,2.54-14.82,4.19-17.97c-16.47-21.57-17.75-59.95-17.75-70.21c0-18.81-9.56-40.13-9.56-40.13s16.47,13.04,20.73,35.5
c5.03,26.6,15.75,47.55,29.93,65.28c26.84,33.43,51.08,50.58,63.33,38.23C630.53,138.58,601,72.2,563.28,35.15
C519.25-8.09,507.74-2.52,481.91,6.7c-20.61,7.35-31.75,65.87-85.47,64.71c-9.1-1.06-32.54-1.63-44.13-1.53
c6.04-8.43,11.22-14.94,11.22-14.94s-18.02,7.25-33.38,16.44l-1.18-1.77c5.18-10.92,10.75-17.82,10.75-17.82s-14.4,8.65-27.54,19.01
c2.39-13.02,11.44-21.27,11.44-21.27s-18.19,3.28-41.36,28.77c-26.33,7.2-32.66,11.93-53.64,21.22
c-34.12-7.44-50.21-19.45-65.55-41.56c-11.68-16.89-32.47-19.45-53.71-10.72c-30.97,12.8-70.14,30.33-70.14,30.33
s12.77-0.52,26.08,0.05c-18.22,6.9-35.72,16.39-35.72,16.39s8.53-0.3,19.06-0.12c-7.27,6.04-11.29,8.92-18.22,13.51
c-16.66,12.1-30.17,26.08-30.17,26.08s11.31-5.15,21.47-8.04c-7.1,16.27-21.18,28.25-18.59,48.17
c2.49,18.19,24.82,55.66,53.64,78.66c2.49,2,41.86,38.43,71.56,23.47c29.68-14.94,41.39-28.25,46.27-48.66
c5.74-23.44,2.47-41.17-9.79-92.05c-4.04-16.79-14.57-51.37-19.65-67.91l1.13-0.81c9.71,20.49,34.56,74.5,44.57,110.78
c15.63,56.57,10.75,85.27,3.6,95.79c-21.57,31.73-76.84,35.92-101.98,18.34c-3.85,60.91,9.76,87.73,14.37,101.24
c-2.29,15.53,7.77,44.37,7.77,44.37s1.13-13.11,5.74-20.02c1.23,15.41,9,33.72,9,33.72s-0.47-11.31,3.06-21.08
c4.98,8.43,8.63,10.43,13.34,16.76c4.71,16.47,14.15,28.5,14.15,28.5s-1.53-8.83-0.69-18.02c23.05,22.14,27.02,54.45,29.31,79.28
c6.46,68.26-107.63,122.54-129.74,165.24c-16.76,25.29-26.8,65.3,1.58,88.89c68.6,56.97,42.25,72.65,76.59,97.69
c47.11,34.34,106.05,18.96,126.11-8.97c27.93-38.92,20.76-75.63,10.38-109.97c-8.11-26.85-30.15-71.46-57.41-88.72
c-27.86-17.65-54.95-20.95-77.9-18.59l2.12-2.44c33.01-6.56,67.52-2.96,92.49,13.14c28.35,18.22,54.28,49.47,67.84,97.37
c15.38-2.19,17.55-3.18,31.63-5.18l-31.7-246.76L367.62,510.22z M385.94,819.52l-3.65-34.22l71.29-108.74l80.93,23.64l69.59-116.23
L687.52,639l63.38-132.92l22.53,242.07L385.94,819.52z M774.27,456.51l-254.72,46.17c-6.31,8.13-21.91,22.41-29.41,26.13
c-32.17,16.2-53.91,11.51-72.7,6.63c-12.08-3.06-19.08-4.78-29.11-9.29l-62.17,8.53l37.74,314.87l436.35-78.66L774.27,456.51z"/>
</svg>
After Width: | Height: | Size: 3.8 KiB |
18
frontend/public/Logos/elasticache.svg
Normal file
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="80px" height="80px" viewBox="0 0 80 80" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<!-- Generator: Sketch 64 (93537) - https://sketch.com -->
<title>Icon-Architecture/64/Arch_Amazon-ElastiCache_64</title>
<desc>Created with Sketch.</desc>
<defs>
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
<stop stop-color="#2E27AD" offset="0%"></stop>
<stop stop-color="#527FFF" offset="100%"></stop>
</linearGradient>
</defs>
<g id="Icon-Architecture/64/Arch_Amazon-ElastiCache_64" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="Icon-Architecture-BG/64/Database" fill="url(#linearGradient-1)">
<rect id="Rectangle" x="0" y="0" width="80" height="80"></rect>
</g>
<path d="M51,61.5554864 L51,55.7029342 C48.466,57.3400887 43.904,58.1131616 39.556,58.1131616 C34.816,58.1131616 31.121,57.2860836 29,55.8679498 L29,61.5554864 C29,63.2486461 32.948,64.9998113 39.556,64.9998113 C46.3,64.9998113 51,63.1846401 51,61.5554864 L51,61.5554864 Z M39.556,49.2203227 C34.816,49.2203227 31.121,48.3942447 29,46.976111 L29,52.6866497 C29.031,54.3738088 32.973,56.1129729 39.556,56.1129729 C46.279,56.1129729 50.969,54.3088027 51,52.6826493 L51,46.8100953 C48.466,48.4482498 43.904,49.2203227 39.556,49.2203227 L39.556,49.2203227 Z M51,43.7908105 L51,37.0291726 C48.466,38.666327 43.904,39.4393999 39.556,39.4393999 C34.816,39.4393999 31.121,38.613322 29,37.1951882 L29,43.7948108 C29.031,45.48197 32.973,47.220134 39.556,47.220134 C46.279,47.220134 50.969,45.4159638 51,43.7908105 L51,43.7908105 Z M28.997,33.9928861 C28.997,33.9958864 28.998,33.9988867 28.998,34.001887 L29,34.001887 L29,34.012888 C29.031,35.7000472 32.973,37.4392112 39.556,37.4392112 C46.898,37.4392112 50.969,35.4170205 51,34.0098877 L51,34.001887 L51.002,34.001887 C51.002,33.9988867 51.003,33.9958864 51.003,33.9928861 C51.003,32.5847533 46.927,30.546561 39.556,30.546561 C32.946,30.546561 28.997,32.2987263 28.997,33.9928861 L28.997,33.9928861 Z M53,34.0178885 L53,43.7738088 L53.003,43.7738088 C53.003,43.7828097 53,43.7898104 53,43.7988112 L53,52.6666478 L53.003,52.6666478 C53.003,52.6756486 53,52.6826493 53,52.6916502 L53,61.5554864 C53,65.2968393 46.031,67 39.556,67 C31.929,67 27,64.8627984 27,61.5554864 L27,52.6976507 C27,52.6866497 26.997,52.6776488 26.997,52.6666478 L27,52.6666478 L27,43.8048118 C27,43.7948108 26.997,43.7848099 26.997,43.7738088 L27,43.7738088 L27,34.023889 C27,34.012888 26.997,34.0038872 26.997,33.9928861 C26.997,30.684574 31.927,28.5463723 39.556,28.5463723 C46.032,28.5463723 53.003,30.2505331 53.003,33.9928861 C53.003,34.001887 53,34.0088876 53,34.0178885 L53,34.0178885 Z M67,21.1206718 C67.553,21.1206718 68,20.6726295 68,20.1205774 L68,15.0000943 C68,14.4470422 67.553,14 67,14 L13,14 C12.447,14 12,14.4470422 12,15.0000943 L12,20.1205774 C12,20.6726295 12.447,21.1206718 13,21.1206718 C14.221,21.1206718 15.214,22.1077649 15.214,23.3208793 C15.214,24.5339938 14.221,25.5210869 13,25.5210869 C12.447,25.5210869 12,25.9691292 12,26.5211812 L12,47.0031135 C12,47.5551656 12.447,48.0032078 13,48.0032078 L23,48.0032078 L23,46.0030192 L18,46.0030192 L18,43.0027361 L23,43.0027361 L23,41.0025474 L17,41.0025474 C16.447,41.0025474 16,41.4495896 16,42.0026418 L16,46.0030192 L14,46.0030192 L14,27.4012643 C15.843,26.9522219 17.214,25.2930654 17.214,23.3208793 C17.214,21.3476932 15.843,19.6885367 14,19.2394943 L14,16.0001887 L66,16.0001887 L66,19.2394943 C64.157,19.6885367 62.786,21.3476932 62.786,23.3208793 C62.786,25.2930654 64.157,26.9522219 66,27.4012643 L66,46.0030192 L64,46.0030192 L64,42.0026418 C64,41.4495896 63.553,41.0025474 63,41.0025474 L57,41.0025474 L57,43.0027361 L62,43.0027361 L62,46.0030192 L57,46.0030192 L57,48.0032078 L67,48.0032078 C67.553,48.0032078 68,47.5551656 68,47.0031135 L68,26.5211812 C68,25.9691292 67.553,25.5210869 67,25.5210869 C65.779,25.5210869 64.786,24.5339938 64.786,23.3208793 C64.786,22.1077649 65.779,21.1206718 67,21.1206718 L67,21.1206718 Z M28,28.0013209 L28,20.0005661 C28,19.4475139 27.553,19.0004717 27,19.0004717 L21,19.0004717 C20.447,19.0004717 20,19.4475139 20,20.0005661 L20,37.00217 C20,37.5542221 20.447,38.0022644 21,38.0022644 L24,38.0022644 L24,36.0020757 L22,36.0020757 L22,21.0006604 L26,21.0006604 L26,28.0013209 L28,28.0013209 Z M58,36.0020757 L57,36.0020757 L57,38.0022644 L59,38.0022644 C59.553,38.0022644 60,37.5542221 60,37.00217 L60,20.0005661 C60,19.4475139 59.553,19.0004717 59,19.0004717 L53,19.0004717 C52.447,19.0004717 52,19.4475139 52,20.0005661 L52,28.0013209 L54,28.0013209 L54,21.0006604 L58,21.0006604 L58,36.0020757 Z M50,27.0012265 L50,20.0005661 C50,19.4475139 49.553,19.0004717 49,19.0004717 L42,19.0004717 C41.447,19.0004717 41,19.4475139 41,20.0005661 L41,26.0011322 L43,26.0011322 L43,21.0006604 L48,21.0006604 L48,27.0012265 L50,27.0012265 Z M37,26.0011322 L37,21.0006604 L32,21.0006604 L32,27.0012265 L30,27.0012265 L30,20.0005661 C30,19.4475139 30.447,19.0004717 31,19.0004717 L38,19.0004717 C38.553,19.0004717 39,19.4475139 39,20.0005661 L39,26.0011322 L37,26.0011322 Z" id="Amazon-ElastiCache_Icon_64_Squid" fill="#FFFFFF"></path>
</g>
</svg>
After Width: | Height: | Size: 5.2 KiB |
70
frontend/public/Logos/grafana.svg
Normal file
@@ -0,0 +1,70 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 21.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 142.5 145.6" style="enable-background:new 0 0 142.5 145.6;" xml:space="preserve">
<style type="text/css">
.st0{fill:#565656;}
.st1{fill:url(#SVGID_1_);}
</style>
<g>
<path class="st0" d="M28.7,131.5c-0.3,7.9-6.6,14.1-14.4,14.1C6.1,145.6,0,139,0,130.9s6.6-14.7,14.7-14.7c3.6,0,7.2,1.6,10.2,4.4
l-2.3,2.9c-2.3-2-5.1-3.4-7.9-3.4c-5.9,0-10.8,4.8-10.8,10.8c0,6.1,4.6,10.8,10.4,10.8c5.2,0,9.3-3.8,10.2-8.8H12.6v-3.5h16.1
V131.5z"/>
<path class="st0" d="M42.3,129.5h-2.2c-2.4,0-4.4,2-4.4,4.4v11.4h-3.9v-19.6H35v1.6c1.1-1.1,2.7-1.6,4.6-1.6h4.2L42.3,129.5z"/>
<path class="st0" d="M63.7,145.3h-3.4v-2.5c-2.6,2.5-6.6,3.7-10.7,1.9c-3-1.3-5.3-4.1-5.9-7.4c-1.2-6.3,3.7-11.9,9.9-11.9
c2.6,0,5,1.1,6.7,2.8v-2.5h3.4V145.3z M59.7,137c0.9-4-2.1-7.6-6-7.6c-3.4,0-6.1,2.8-6.1,6.1c0,3.8,3.3,6.7,7.2,6.1
C57.1,141.2,59.1,139.3,59.7,137z"/>
<path class="st0" d="M71.5,124.7v1.1h6.2v3.4h-6.2v16.1h-3.8v-20.5c0-4.3,3.1-6.8,7-6.8h4.7l-1.6,3.7h-3.1
C72.9,121.6,71.5,123,71.5,124.7z"/>
<path class="st0" d="M98.5,145.3h-3.3v-2.5c-2.6,2.5-6.6,3.7-10.7,1.9c-3-1.3-5.3-4.1-5.9-7.4c-1.2-6.3,3.7-11.9,9.9-11.9
c2.6,0,5,1.1,6.7,2.8v-2.5h3.4v19.6H98.5z M94.5,137c0.9-4-2.1-7.6-6-7.6c-3.4,0-6.1,2.8-6.1,6.1c0,3.8,3.3,6.7,7.2,6.1
C92,141.2,93.9,139.3,94.5,137z"/>
<path class="st0" d="M119.4,133.8v11.5h-3.9v-11.6c0-2.4-2-4.4-4.4-4.4c-2.5,0-4.4,2-4.4,4.4v11.6h-3.9v-19.6h3.2v1.7
c1.4-1.3,3.3-2,5.2-2C115.8,125.5,119.4,129.2,119.4,133.8z"/>
<path class="st0" d="M142.4,145.3h-3.3v-2.5c-2.6,2.5-6.6,3.7-10.7,1.9c-3-1.3-5.3-4.1-5.9-7.4c-1.2-6.3,3.7-11.9,9.9-11.9
c2.6,0,5,1.1,6.7,2.8v-2.5h3.4v19.6H142.4z M138.4,137c0.9-4-2.1-7.6-6-7.6c-3.4,0-6.1,2.8-6.1,6.1c0,3.8,3.3,6.7,7.2,6.1
C135.9,141.2,137.8,139.3,138.4,137z"/>
</g>
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="71.25" y1="10.4893" x2="71.25" y2="113.3415" gradientTransform="matrix(1 0 0 -1 0 148.6)">
<stop offset="0" style="stop-color:#FCEE1F"/>
<stop offset="1" style="stop-color:#F15B2A"/>
</linearGradient>
<path class="st1" d="M122.9,49.9c-0.2-1.9-0.5-4.1-1.1-6.5c-0.6-2.4-1.6-5-2.9-7.8c-1.4-2.7-3.1-5.6-5.4-8.3
c-0.9-1.1-1.9-2.1-2.9-3.2c1.6-6.3-1.9-11.8-1.9-11.8c-6.1-0.4-9.9,1.9-11.3,2.9c-0.2-0.1-0.5-0.2-0.7-0.3c-1-0.4-2.1-0.8-3.2-1.2
c-1.1-0.3-2.2-0.7-3.3-0.9c-1.1-0.3-2.3-0.5-3.5-0.7c-0.2,0-0.4-0.1-0.6-0.1C83.5,3.6,75.9,0,75.9,0c-8.7,5.6-10.4,13.1-10.4,13.1
s0,0.2-0.1,0.4c-0.5,0.1-0.9,0.3-1.4,0.4c-0.6,0.2-1.3,0.4-1.9,0.7c-0.6,0.3-1.3,0.5-1.9,0.8c-1.3,0.6-2.5,1.2-3.8,1.9
c-1.2,0.7-2.4,1.4-3.5,2.2c-0.2-0.1-0.3-0.2-0.3-0.2c-11.7-4.5-22.1,0.9-22.1,0.9c-0.9,12.5,4.7,20.3,5.8,21.7
c-0.3,0.8-0.5,1.5-0.8,2.3c-0.9,2.8-1.5,5.7-1.9,8.7c-0.1,0.4-0.1,0.9-0.2,1.3c-10.8,5.3-14,16.3-14,16.3c9,10.4,19.6,11,19.6,11
l0,0c1.3,2.4,2.9,4.7,4.6,6.8c0.7,0.9,1.5,1.7,2.3,2.6c-3.3,9.4,0.5,17.3,0.5,17.3c10.1,0.4,16.7-4.4,18.1-5.5c1,0.3,2,0.6,3,0.9
c3.1,0.8,6.3,1.3,9.4,1.4c0.8,0,1.6,0,2.4,0h0.4H80h0.5H81l0,0c4.7,6.8,13.1,7.7,13.1,7.7c5.9-6.3,6.3-12.4,6.3-13.8l0,0
c0,0,0,0,0-0.1s0-0.2,0-0.2l0,0c0-0.1,0-0.2,0-0.3c1.2-0.9,2.4-1.8,3.6-2.8c2.4-2.1,4.4-4.6,6.2-7.2c0.2-0.2,0.3-0.5,0.5-0.7
c6.7,0.4,11.4-4.2,11.4-4.2c-1.1-7-5.1-10.4-5.9-11l0,0c0,0,0,0-0.1-0.1l-0.1-0.1l0,0l-0.1-0.1c0-0.4,0.1-0.8,0.1-1.3
c0.1-0.8,0.1-1.5,0.1-2.3v-0.6v-0.3v-0.1c0-0.2,0-0.1,0-0.2v-0.5v-0.6c0-0.2,0-0.4,0-0.6s0-0.4-0.1-0.6l-0.1-0.6l-0.1-0.6
c-0.1-0.8-0.3-1.5-0.4-2.3c-0.7-3-1.9-5.9-3.4-8.4c-1.6-2.6-3.5-4.8-5.7-6.8c-2.2-1.9-4.6-3.5-7.2-4.6c-2.6-1.2-5.2-1.9-7.9-2.2
c-1.3-0.2-2.7-0.2-4-0.2h-0.5h-0.1h-0.2h-0.2h-0.5c-0.2,0-0.4,0-0.5,0c-0.7,0.1-1.4,0.2-2,0.3c-2.7,0.5-5.2,1.5-7.4,2.8
c-2.2,1.3-4.1,3-5.7,4.9s-2.8,3.9-3.6,6.1c-0.8,2.1-1.3,4.4-1.4,6.5c0,0.5,0,1.1,0,1.6c0,0.1,0,0.3,0,0.4v0.4c0,0.3,0,0.5,0.1,0.8
c0.1,1.1,0.3,2.1,0.6,3.1c0.6,2,1.5,3.8,2.7,5.4s2.5,2.8,4,3.8s3,1.7,4.6,2.2c1.6,0.5,3.1,0.7,4.5,0.6c0.2,0,0.4,0,0.5,0
c0.1,0,0.2,0,0.3,0s0.2,0,0.3,0c0.2,0,0.3,0,0.5,0h0.1h0.1c0.1,0,0.2,0,0.3,0c0.2,0,0.4-0.1,0.5-0.1c0.2,0,0.3-0.1,0.5-0.1
c0.3-0.1,0.7-0.2,1-0.3c0.6-0.2,1.2-0.5,1.8-0.7c0.6-0.3,1.1-0.6,1.5-0.9c0.1-0.1,0.3-0.2,0.4-0.3c0.5-0.4,0.6-1.1,0.2-1.6
c-0.4-0.4-1-0.5-1.5-0.3C88,74,87.9,74,87.7,74.1c-0.4,0.2-0.9,0.4-1.3,0.5c-0.5,0.1-1,0.3-1.5,0.4c-0.3,0-0.5,0.1-0.8,0.1
c-0.1,0-0.3,0-0.4,0c-0.1,0-0.3,0-0.4,0s-0.3,0-0.4,0c-0.2,0-0.3,0-0.5,0c0,0-0.1,0,0,0h-0.1h-0.1c-0.1,0-0.1,0-0.2,0
s-0.3,0-0.4-0.1c-1.1-0.2-2.3-0.5-3.4-1c-1.1-0.5-2.2-1.2-3.1-2.1c-1-0.9-1.8-1.9-2.5-3.1c-0.7-1.2-1.1-2.5-1.3-3.8
c-0.1-0.7-0.2-1.4-0.1-2.1c0-0.2,0-0.4,0-0.6c0,0.1,0,0,0,0v-0.1v-0.1c0-0.1,0-0.2,0-0.3c0-0.4,0.1-0.7,0.2-1.1c0.5-3,2-5.9,4.3-8.1
c0.6-0.6,1.2-1.1,1.9-1.5c0.7-0.5,1.4-0.9,2.1-1.2c0.7-0.3,1.5-0.6,2.3-0.8s1.6-0.4,2.4-0.4c0.4,0,0.8-0.1,1.2-0.1
c0.1,0,0.2,0,0.3,0h0.3h0.2c0.1,0,0,0,0,0h0.1h0.3c0.9,0.1,1.8,0.2,2.6,0.4c1.7,0.4,3.4,1,5,1.9c3.2,1.8,5.9,4.5,7.5,7.8
c0.8,1.6,1.4,3.4,1.7,5.3c0.1,0.5,0.1,0.9,0.2,1.4v0.3V66c0,0.1,0,0.2,0,0.3c0,0.1,0,0.2,0,0.3v0.3v0.3c0,0.2,0,0.6,0,0.8
c0,0.5-0.1,1-0.1,1.5c-0.1,0.5-0.1,1-0.2,1.5s-0.2,1-0.3,1.5c-0.2,1-0.6,1.9-0.9,2.9c-0.7,1.9-1.7,3.7-2.9,5.3
c-2.4,3.3-5.7,6-9.4,7.7c-1.9,0.8-3.8,1.5-5.8,1.8c-1,0.2-2,0.3-3,0.3H81h-0.2h-0.3H80h-0.3c0.1,0,0,0,0,0h-0.1
c-0.5,0-1.1,0-1.6-0.1c-2.2-0.2-4.3-0.6-6.4-1.2c-2.1-0.6-4.1-1.4-6-2.4c-3.8-2-7.2-4.9-9.9-8.2c-1.3-1.7-2.5-3.5-3.5-5.4
s-1.7-3.9-2.3-5.9c-0.6-2-0.9-4.1-1-6.2v-0.4v-0.1v-0.1v-0.2V60v-0.1v-0.1v-0.2v-0.5V59l0,0v-0.2c0-0.3,0-0.5,0-0.8
c0-1,0.1-2.1,0.3-3.2c0.1-1.1,0.3-2.1,0.5-3.2c0.2-1.1,0.5-2.1,0.8-3.2c0.6-2.1,1.3-4.1,2.2-6c1.8-3.8,4.1-7.2,6.8-9.9
c0.7-0.7,1.4-1.3,2.2-1.9c0.3-0.3,1-0.9,1.8-1.4c0.8-0.5,1.6-1,2.5-1.4c0.4-0.2,0.8-0.4,1.3-0.6c0.2-0.1,0.4-0.2,0.7-0.3
c0.2-0.1,0.4-0.2,0.7-0.3c0.9-0.4,1.8-0.7,2.7-1c0.2-0.1,0.5-0.1,0.7-0.2c0.2-0.1,0.5-0.1,0.7-0.2c0.5-0.1,0.9-0.2,1.4-0.4
c0.2-0.1,0.5-0.1,0.7-0.2c0.2,0,0.5-0.1,0.7-0.1c0.2,0,0.5-0.1,0.7-0.1l0.4-0.1l0.4-0.1c0.2,0,0.5-0.1,0.7-0.1
c0.3,0,0.5-0.1,0.8-0.1c0.2,0,0.6-0.1,0.8-0.1c0.2,0,0.3,0,0.5-0.1h0.3h0.2h0.2c0.3,0,0.5,0,0.8-0.1h0.4c0,0,0.1,0,0,0h0.1h0.2
c0.2,0,0.5,0,0.7,0c0.9,0,1.8,0,2.7,0c1.8,0.1,3.6,0.3,5.3,0.6c3.4,0.6,6.7,1.7,9.6,3.2c2.9,1.4,5.6,3.2,7.8,5.1
c0.1,0.1,0.3,0.2,0.4,0.4c0.1,0.1,0.3,0.2,0.4,0.4c0.3,0.2,0.5,0.5,0.8,0.7c0.3,0.2,0.5,0.5,0.8,0.7c0.2,0.3,0.5,0.5,0.7,0.8
c1,1,1.9,2.1,2.7,3.1c1.6,2.1,2.9,4.2,3.9,6.2c0.1,0.1,0.1,0.2,0.2,0.4c0.1,0.1,0.1,0.2,0.2,0.4s0.2,0.5,0.4,0.7
c0.1,0.2,0.2,0.5,0.3,0.7c0.1,0.2,0.2,0.5,0.3,0.7c0.4,0.9,0.7,1.8,1,2.7c0.5,1.4,0.8,2.6,1.1,3.6c0.1,0.4,0.5,0.7,0.9,0.7
c0.5,0,0.8-0.4,0.8-0.9C123,52.7,123,51.4,122.9,49.9z"/>
</svg>
After Width: | Height: | Size: 6.6 KiB |