Compare commits
243 Commits
v0.41.0
...
alert-link
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
403e41d460 | ||
|
|
4a8101b4be | ||
|
|
67779d6c2c | ||
|
|
f927969c7d | ||
|
|
ba0f63ad1e | ||
|
|
0760917a4b | ||
|
|
6aded04b7f | ||
|
|
70bc72b52f | ||
|
|
63effdc2ee | ||
|
|
b849705710 | ||
|
|
c913c8bf20 | ||
|
|
1328f05d78 | ||
|
|
1db1f76a72 | ||
|
|
932d892d9e | ||
|
|
3538815331 | ||
|
|
956a4d081d | ||
|
|
10b543dff1 | ||
|
|
96162d7949 | ||
|
|
3085093130 | ||
|
|
83d0ddeec0 | ||
|
|
ab444af8e6 | ||
|
|
749fba67cb | ||
|
|
fe96a78ee8 | ||
|
|
f77089da55 | ||
|
|
1d1d85efa3 | ||
|
|
c1c5c4dfa8 | ||
|
|
9eab315f76 | ||
|
|
1d86e5eb50 | ||
|
|
2f7495c6e4 | ||
|
|
1369fe1912 | ||
|
|
76b1e40cbc | ||
|
|
52e4c2d8ff | ||
|
|
7e79900973 | ||
|
|
f818a86720 | ||
|
|
0cf8817f3f | ||
|
|
0d043bf380 | ||
|
|
3b599ea41a | ||
|
|
da74619a46 | ||
|
|
93babc3019 | ||
|
|
f8b8080853 | ||
|
|
3c2bc06e6a | ||
|
|
a0d866c2ff | ||
|
|
52c8584e63 | ||
|
|
9a908c3f76 | ||
|
|
4887a1d8dd | ||
|
|
f2b0387a1b | ||
|
|
cbb9fd51f8 | ||
|
|
10e44ce440 | ||
|
|
6827d66ae9 | ||
|
|
611ec3e08d | ||
|
|
4ab350e721 | ||
|
|
631c12259f | ||
|
|
de497bf5b6 | ||
|
|
12be6ce020 | ||
|
|
2dbe598b2c | ||
|
|
cf64da2631 | ||
|
|
9ff0e34038 | ||
|
|
d313f44556 | ||
|
|
5a778dcb18 | ||
|
|
7e31b4ca01 | ||
|
|
3efd9801a1 | ||
|
|
0cbaa17d9f | ||
|
|
30bfad527f | ||
|
|
9f1c45bc32 | ||
|
|
51becf7cfb | ||
|
|
7460e650af | ||
|
|
a544723bb8 | ||
|
|
eb6f038db5 | ||
|
|
47dcd994f0 | ||
|
|
211fe4fdd5 | ||
|
|
e2992b42c1 | ||
|
|
3957d91a9b | ||
|
|
967aa16f21 | ||
|
|
08b1a87cb5 | ||
|
|
03ddcdd20e | ||
|
|
1aec7f3ca6 | ||
|
|
241edcb88a | ||
|
|
27d12871af | ||
|
|
e78e1d4b63 | ||
|
|
64bf580323 | ||
|
|
152aa4b518 | ||
|
|
b3d5831574 | ||
|
|
b85b9f42ed | ||
|
|
5c1c09c790 | ||
|
|
33960b05fd | ||
|
|
191d9b0648 | ||
|
|
7d81bc3417 | ||
|
|
506916661d | ||
|
|
5326f2d23b | ||
|
|
dfaa344dce | ||
|
|
882b540a0b | ||
|
|
1c4b579c3d | ||
|
|
706f25cc5d | ||
|
|
e6e0a59f5f | ||
|
|
b2c170c752 | ||
|
|
ee421af95c | ||
|
|
453be9074d | ||
|
|
3272444e13 | ||
|
|
71b3e6d522 | ||
|
|
6cf7cc9f4f | ||
|
|
5ec2f17d09 | ||
|
|
a45fb8ec0c | ||
|
|
bd148bbd5a | ||
|
|
1306e99ca8 | ||
|
|
1a8f063b4b | ||
|
|
c7668b9a78 | ||
|
|
5e3dba2587 | ||
|
|
374f30e0cd | ||
|
|
38d2833931 | ||
|
|
731eacbbca | ||
|
|
a63bb139bf | ||
|
|
a140bef0e6 | ||
|
|
48e5436167 | ||
|
|
0fc664a387 | ||
|
|
5817d50652 | ||
|
|
bb318cf52a | ||
|
|
ec0185da61 | ||
|
|
fc2bdb610f | ||
|
|
a9464de62d | ||
|
|
57bfdedfe1 | ||
|
|
7bdc9c0cb0 | ||
|
|
0d5934d56b | ||
|
|
3a5a61aff9 | ||
|
|
a54b7baa7d | ||
|
|
cd63dd972d | ||
|
|
389058b9b4 | ||
|
|
27e412d1ee | ||
|
|
03dccb0101 | ||
|
|
25b74b48a5 | ||
|
|
6815a96d29 | ||
|
|
e9bb05cc5d | ||
|
|
31c0b94ae6 | ||
|
|
59c242961f | ||
|
|
872ed9e963 | ||
|
|
d6cd155988 | ||
|
|
7f4a61ffb1 | ||
|
|
7737d513a7 | ||
|
|
2bd666efae | ||
|
|
d98265f345 | ||
|
|
b480ff1e48 | ||
|
|
af353b9340 | ||
|
|
96e7505922 | ||
|
|
8f6f2f0018 | ||
|
|
1f25d386df | ||
|
|
2d7a3733da | ||
|
|
ff2a3bc4b0 | ||
|
|
33383a4503 | ||
|
|
f05b94c01e | ||
|
|
fd632f9952 | ||
|
|
fd84d7b492 | ||
|
|
e4808e585a | ||
|
|
5cfeb56f9c | ||
|
|
b947f823d7 | ||
|
|
1520c1c57d | ||
|
|
f8477981d8 | ||
|
|
9b1d596816 | ||
|
|
6a4aa9a956 | ||
|
|
a7b0ef55ad | ||
|
|
87534b6fb6 | ||
|
|
c76cef47ba | ||
|
|
3276dfa03e | ||
|
|
1a14cc305c | ||
|
|
0c7e63d735 | ||
|
|
eb74cb4c5e | ||
|
|
a47d3289d0 | ||
|
|
8ad827130e | ||
|
|
93bdfd3d83 | ||
|
|
22d8889a07 | ||
|
|
7c93944d40 | ||
|
|
ec9dbb6853 | ||
|
|
7a7d814288 | ||
|
|
3babce3ecf | ||
|
|
1610b95b84 | ||
|
|
8c02f8ec31 | ||
|
|
5e0e9da6c4 | ||
|
|
51abe71421 | ||
|
|
00d74bfebb | ||
|
|
39e0ef68ca | ||
|
|
cff20f88cd | ||
|
|
a34c59762b | ||
|
|
397da5857f | ||
|
|
43ceb052d8 | ||
|
|
6eced60bf5 | ||
|
|
7c2f5352d2 | ||
|
|
e6e377beff | ||
|
|
6da9de6591 | ||
|
|
7549aee656 | ||
|
|
da4a6266c5 | ||
|
|
6ac938f2a6 | ||
|
|
990fc83269 | ||
|
|
5d5ff47d5e | ||
|
|
9f30bba9a8 | ||
|
|
6014bb76b6 | ||
|
|
e25b54f86a | ||
|
|
5959963b9d | ||
|
|
4fbb71484d | ||
|
|
f8e8132b58 | ||
|
|
a1dd170641 | ||
|
|
fe2ddf9d60 | ||
|
|
dfc99a7756 | ||
|
|
c2556facc2 | ||
|
|
31b1d58a70 | ||
|
|
0ac9f6f663 | ||
|
|
a30b75a2a8 | ||
|
|
dbd4363ff8 | ||
|
|
ad1b01f225 | ||
|
|
e1679790f7 | ||
|
|
ae594061e9 | ||
|
|
9e02147d4c | ||
|
|
2b3d1c8ee5 | ||
|
|
4c91dbcff0 | ||
|
|
83f68f13db | ||
|
|
994814864c | ||
|
|
f24135f5b0 | ||
|
|
5745727031 | ||
|
|
ae0d685b29 | ||
|
|
f34a49e19c | ||
|
|
9e557a0ebe | ||
|
|
0df3c26f04 | ||
|
|
0df86454ce | ||
|
|
63f0ae1c7c | ||
|
|
d9f232683d | ||
|
|
ad9d77d33f | ||
|
|
5a8479f4e9 | ||
|
|
f4e94c0ad1 | ||
|
|
6f3183823f | ||
|
|
01bb39da6a | ||
|
|
43f9830e8d | ||
|
|
4c2174958f | ||
|
|
07747e73d6 | ||
|
|
60946b5e9d | ||
|
|
0365fa5421 | ||
|
|
2a7ad596a1 | ||
|
|
6c455ab5ce | ||
|
|
7c062163a1 | ||
|
|
d6a256247c | ||
|
|
0e2c699518 | ||
|
|
cf22039562 | ||
|
|
2a62982885 | ||
|
|
1e1624ed4c | ||
|
|
d0feff00a7 | ||
|
|
5c2a9e8362 | ||
|
|
aad840da59 |
31
.github/workflows/jest-coverage-changes.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
name: Jest Coverage - changed files
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: develop
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: "refs/heads/develop"
|
||||
token: ${{ secrets.GITHUB_TOKEN }} # Provide the GitHub token for authentication
|
||||
|
||||
- name: Fetch branch
|
||||
run: git fetch origin ${{ github.event.pull_request.head.ref }}
|
||||
|
||||
- run: |
|
||||
git checkout ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: lts/*
|
||||
|
||||
- name: Install dependencies
|
||||
run: cd frontend && npm install -g yarn && yarn
|
||||
|
||||
- name: npm run test:changedsince
|
||||
run: cd frontend && npm run i18n:generate-hash && npm run test:changedsince
|
||||
70
.github/workflows/staging-deployment.yaml
vendored
@@ -9,34 +9,46 @@ jobs:
|
||||
name: Deploy latest develop branch to staging
|
||||
runs-on: ubuntu-latest
|
||||
environment: staging
|
||||
permissions:
|
||||
contents: 'read'
|
||||
id-token: 'write'
|
||||
steps:
|
||||
- name: Executing remote ssh commands using ssh key
|
||||
uses: appleboy/ssh-action@v1.0.3
|
||||
env:
|
||||
GITHUB_BRANCH: develop
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
- id: 'auth'
|
||||
uses: 'google-github-actions/auth@v2'
|
||||
with:
|
||||
host: ${{ secrets.HOST_DNS }}
|
||||
username: ${{ secrets.USERNAME }}
|
||||
key: ${{ secrets.SSH_KEY }}
|
||||
envs: GITHUB_BRANCH,GITHUB_SHA
|
||||
command_timeout: 60m
|
||||
script: |
|
||||
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
|
||||
echo "GITHUB_SHA: ${GITHUB_SHA}"
|
||||
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
|
||||
export OTELCOL_TAG="main"
|
||||
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
|
||||
docker system prune --force
|
||||
docker pull signoz/signoz-otel-collector:main
|
||||
docker pull signoz/signoz-schema-migrator:main
|
||||
cd ~/signoz
|
||||
git status
|
||||
git add .
|
||||
git stash push -m "stashed on $(date --iso-8601=seconds)"
|
||||
git fetch origin
|
||||
git checkout ${GITHUB_BRANCH}
|
||||
git pull
|
||||
make build-ee-query-service-amd64
|
||||
make build-frontend-amd64
|
||||
make run-signoz
|
||||
workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
|
||||
service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
|
||||
|
||||
- name: 'sdk'
|
||||
uses: 'google-github-actions/setup-gcloud@v2'
|
||||
|
||||
- name: 'ssh'
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
|
||||
GCP_ZONE: ${{ secrets.GCP_ZONE }}
|
||||
GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
|
||||
run: |
|
||||
read -r -d '' COMMAND <<EOF || true
|
||||
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
|
||||
echo "GITHUB_SHA: ${GITHUB_SHA}"
|
||||
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
|
||||
export OTELCOL_TAG="main"
|
||||
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
|
||||
docker system prune --force
|
||||
docker pull signoz/signoz-otel-collector:main
|
||||
docker pull signoz/signoz-schema-migrator:main
|
||||
cd ~/signoz
|
||||
git status
|
||||
git add .
|
||||
git stash push -m "stashed on $(date --iso-8601=seconds)"
|
||||
git fetch origin
|
||||
git checkout ${GITHUB_BRANCH}
|
||||
git pull
|
||||
make build-ee-query-service-amd64
|
||||
make build-frontend-amd64
|
||||
make run-signoz
|
||||
EOF
|
||||
gcloud compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
|
||||
|
||||
68
.github/workflows/testing-deployment.yaml
vendored
@@ -9,35 +9,47 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
environment: testing
|
||||
if: ${{ github.event.label.name == 'testing-deploy' }}
|
||||
permissions:
|
||||
contents: 'read'
|
||||
id-token: 'write'
|
||||
steps:
|
||||
- name: Executing remote ssh commands using ssh key
|
||||
uses: appleboy/ssh-action@v1.0.3
|
||||
- id: 'auth'
|
||||
uses: 'google-github-actions/auth@v2'
|
||||
with:
|
||||
workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
|
||||
service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
|
||||
|
||||
- name: 'sdk'
|
||||
uses: 'google-github-actions/setup-gcloud@v2'
|
||||
|
||||
- name: 'ssh'
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
with:
|
||||
host: ${{ secrets.HOST_DNS }}
|
||||
username: ${{ secrets.USERNAME }}
|
||||
key: ${{ secrets.SSH_KEY }}
|
||||
envs: GITHUB_BRANCH,GITHUB_SHA
|
||||
command_timeout: 60m
|
||||
script: |
|
||||
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
|
||||
echo "GITHUB_SHA: ${GITHUB_SHA}"
|
||||
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
|
||||
export DEV_BUILD="1"
|
||||
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
|
||||
docker system prune --force
|
||||
cd ~/signoz
|
||||
git status
|
||||
git add .
|
||||
git stash push -m "stashed on $(date --iso-8601=seconds)"
|
||||
git fetch origin
|
||||
git checkout develop
|
||||
git pull
|
||||
# This is added to include the scenerio when new commit in PR is force-pushed
|
||||
git branch -D ${GITHUB_BRANCH}
|
||||
git checkout --track origin/${GITHUB_BRANCH}
|
||||
make build-ee-query-service-amd64
|
||||
make build-frontend-amd64
|
||||
make run-signoz
|
||||
GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
|
||||
GCP_ZONE: ${{ secrets.GCP_ZONE }}
|
||||
GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
|
||||
run: |
|
||||
read -r -d '' COMMAND <<EOF || true
|
||||
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
|
||||
echo "GITHUB_SHA: ${GITHUB_SHA}"
|
||||
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
|
||||
export DEV_BUILD="1"
|
||||
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
|
||||
docker system prune --force
|
||||
cd ~/signoz
|
||||
git status
|
||||
git add .
|
||||
git stash push -m "stashed on $(date --iso-8601=seconds)"
|
||||
git fetch origin
|
||||
git checkout develop
|
||||
git pull
|
||||
# This is added to include the scenerio when new commit in PR is force-pushed
|
||||
git branch -D ${GITHUB_BRANCH}
|
||||
git checkout --track origin/${GITHUB_BRANCH}
|
||||
make build-ee-query-service-amd64
|
||||
make build-frontend-amd64
|
||||
make run-signoz
|
||||
EOF
|
||||
gcloud compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
|
||||
|
||||
1
.gitignore
vendored
@@ -47,6 +47,7 @@ ee/query-service/signoz.db
|
||||
ee/query-service/tests/test-deploy/data/
|
||||
|
||||
# local data
|
||||
*.backup
|
||||
*.db
|
||||
/deploy/docker/clickhouse-setup/data/
|
||||
/deploy/docker-swarm/clickhouse-setup/data/
|
||||
|
||||
@@ -22,7 +22,7 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8123/ping"
|
||||
"0.0.0.0:8123/ping"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
@@ -146,7 +146,7 @@ services:
|
||||
condition: on-failure
|
||||
|
||||
query-service:
|
||||
image: signoz/query-service:0.41.0
|
||||
image: signoz/query-service:0.46.0
|
||||
command:
|
||||
[
|
||||
"-config=/root/config/prometheus.yml",
|
||||
@@ -186,7 +186,7 @@ services:
|
||||
<<: *db-depend
|
||||
|
||||
frontend:
|
||||
image: signoz/frontend:0.41.0
|
||||
image: signoz/frontend:0.46.0
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
@@ -199,7 +199,7 @@ services:
|
||||
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
||||
|
||||
otel-collector:
|
||||
image: signoz/signoz-otel-collector:0.88.15
|
||||
image: signoz/signoz-otel-collector:0.88.24
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-config.yaml",
|
||||
@@ -237,7 +237,7 @@ services:
|
||||
- query-service
|
||||
|
||||
otel-collector-migrator:
|
||||
image: signoz/signoz-schema-migrator:0.88.15
|
||||
image: signoz/signoz-schema-migrator:0.88.24
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
@@ -111,18 +111,18 @@ processors:
|
||||
|
||||
exporters:
|
||||
clickhousetraces:
|
||||
datasource: tcp://clickhouse:9000/?database=signoz_traces
|
||||
datasource: tcp://clickhouse:9000/signoz_traces
|
||||
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
|
||||
low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
|
||||
clickhousemetricswrite:
|
||||
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
|
||||
endpoint: tcp://clickhouse:9000/signoz_metrics
|
||||
resource_to_telemetry_conversion:
|
||||
enabled: true
|
||||
clickhousemetricswrite/prometheus:
|
||||
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
|
||||
endpoint: tcp://clickhouse:9000/signoz_metrics
|
||||
# logging: {}
|
||||
clickhouselogsexporter:
|
||||
dsn: tcp://clickhouse:9000/
|
||||
dsn: tcp://clickhouse:9000/signoz_logs
|
||||
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
|
||||
timeout: 10s
|
||||
extensions:
|
||||
|
||||
@@ -22,4 +22,4 @@ rule_files:
|
||||
scrape_configs: []
|
||||
|
||||
remote_read:
|
||||
- url: tcp://clickhouse:9000/?database=signoz_metrics
|
||||
- url: tcp://clickhouse:9000/signoz_metrics
|
||||
|
||||
@@ -46,7 +46,7 @@ services:
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8123/ping"
|
||||
"0.0.0.0:8123/ping"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
@@ -66,7 +66,7 @@ services:
|
||||
- --storage.path=/data
|
||||
|
||||
otel-collector-migrator:
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
|
||||
container_name: otel-migrator
|
||||
command:
|
||||
- "--dsn=tcp://clickhouse:9000"
|
||||
@@ -81,7 +81,7 @@ services:
|
||||
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
||||
otel-collector:
|
||||
container_name: signoz-otel-collector
|
||||
image: signoz/signoz-otel-collector:0.88.15
|
||||
image: signoz/signoz-otel-collector:0.88.24
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-config.yaml",
|
||||
|
||||
@@ -21,7 +21,7 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8123/ping"
|
||||
"0.0.0.0:8123/ping"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
@@ -164,7 +164,7 @@ services:
|
||||
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
||||
|
||||
query-service:
|
||||
image: signoz/query-service:${DOCKER_TAG:-0.41.0}
|
||||
image: signoz/query-service:${DOCKER_TAG:-0.46.0}
|
||||
container_name: signoz-query-service
|
||||
command:
|
||||
[
|
||||
@@ -203,7 +203,7 @@ services:
|
||||
<<: *db-depend
|
||||
|
||||
frontend:
|
||||
image: signoz/frontend:${DOCKER_TAG:-0.41.0}
|
||||
image: signoz/frontend:${DOCKER_TAG:-0.46.0}
|
||||
container_name: signoz-frontend
|
||||
restart: on-failure
|
||||
depends_on:
|
||||
@@ -215,7 +215,7 @@ services:
|
||||
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
||||
|
||||
otel-collector-migrator:
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
|
||||
container_name: otel-migrator
|
||||
command:
|
||||
- "--dsn=tcp://clickhouse:9000"
|
||||
@@ -229,7 +229,7 @@ services:
|
||||
|
||||
|
||||
otel-collector:
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.15}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
[
|
||||
|
||||
64
deploy/docker/clickhouse-setup/keeper_config.xml
Normal file
@@ -0,0 +1,64 @@
|
||||
<clickhouse>
|
||||
<logger>
|
||||
<!-- Possible levels [1]:
|
||||
|
||||
- none (turns off logging)
|
||||
- fatal
|
||||
- critical
|
||||
- error
|
||||
- warning
|
||||
- notice
|
||||
- information
|
||||
- debug
|
||||
- trace
|
||||
|
||||
[1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114
|
||||
-->
|
||||
<level>information</level>
|
||||
<log>/var/log/clickhouse-keeper/clickhouse-keeper.log</log>
|
||||
<errorlog>/var/log/clickhouse-keeper/clickhouse-keeper.err.log</errorlog>
|
||||
<!-- Rotation policy
|
||||
See https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/FileChannel.h#L54-L85
|
||||
-->
|
||||
<size>1000M</size>
|
||||
<count>10</count>
|
||||
<!-- <console>1</console> --> <!-- Default behavior is autodetection (log to console if not daemon mode and is tty) -->
|
||||
</logger>
|
||||
|
||||
<listen_host>0.0.0.0</listen_host>
|
||||
<max_connections>4096</max_connections>
|
||||
|
||||
<keeper_server>
|
||||
<tcp_port>9181</tcp_port>
|
||||
|
||||
<!-- Must be unique among all keeper serves -->
|
||||
<server_id>1</server_id>
|
||||
|
||||
<log_storage_path>/var/lib/clickhouse/coordination/logs</log_storage_path>
|
||||
<snapshot_storage_path>/var/lib/clickhouse/coordination/snapshots</snapshot_storage_path>
|
||||
|
||||
<coordination_settings>
|
||||
<operation_timeout_ms>10000</operation_timeout_ms>
|
||||
<min_session_timeout_ms>10000</min_session_timeout_ms>
|
||||
<session_timeout_ms>100000</session_timeout_ms>
|
||||
<raft_logs_level>information</raft_logs_level>
|
||||
<compress_logs>false</compress_logs>
|
||||
<!-- All settings listed in https://github.com/ClickHouse/ClickHouse/blob/master/src/Coordination/CoordinationSettings.h -->
|
||||
</coordination_settings>
|
||||
|
||||
<!-- enable sanity hostname checks for cluster configuration (e.g. if localhost is used with remote endpoints) -->
|
||||
<hostname_checks_enabled>true</hostname_checks_enabled>
|
||||
<raft_configuration>
|
||||
<server>
|
||||
<id>1</id>
|
||||
|
||||
<!-- Internal port and hostname -->
|
||||
<hostname>clickhouses-keeper-1</hostname>
|
||||
<port>9234</port>
|
||||
</server>
|
||||
|
||||
<!-- Add more servers here -->
|
||||
|
||||
</raft_configuration>
|
||||
</keeper_server>
|
||||
</clickhouse>
|
||||
@@ -122,21 +122,20 @@ extensions:
|
||||
|
||||
exporters:
|
||||
clickhousetraces:
|
||||
datasource: tcp://clickhouse:9000/?database=signoz_traces
|
||||
datasource: tcp://clickhouse:9000/signoz_traces
|
||||
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
|
||||
low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
|
||||
clickhousemetricswrite:
|
||||
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
|
||||
endpoint: tcp://clickhouse:9000/signoz_metrics
|
||||
resource_to_telemetry_conversion:
|
||||
enabled: true
|
||||
clickhousemetricswrite/prometheus:
|
||||
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
|
||||
# logging: {}
|
||||
|
||||
endpoint: tcp://clickhouse:9000/signoz_metrics
|
||||
clickhouselogsexporter:
|
||||
dsn: tcp://clickhouse:9000/
|
||||
dsn: tcp://clickhouse:9000/signoz_logs
|
||||
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
|
||||
timeout: 10s
|
||||
# logging: {}
|
||||
|
||||
service:
|
||||
telemetry:
|
||||
|
||||
@@ -22,4 +22,4 @@ rule_files:
|
||||
scrape_configs: []
|
||||
|
||||
remote_read:
|
||||
- url: tcp://clickhouse:9000/?database=signoz_metrics
|
||||
- url: tcp://clickhouse:9000/signoz_metrics
|
||||
|
||||
@@ -152,7 +152,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
|
||||
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/metrics/query_range", am.ViewAccess(ah.queryRangeMetricsV2)).Methods(http.MethodPost)
|
||||
|
||||
// PAT APIs
|
||||
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
|
||||
|
||||
@@ -74,7 +74,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
defer r.Body.Close()
|
||||
requestBody, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
zap.S().Errorf("received no input in api\n", err)
|
||||
zap.L().Error("received no input in api", zap.Error(err))
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
@@ -82,7 +82,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
err = json.Unmarshal(requestBody, &req)
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("received invalid user registration request", zap.Error(err))
|
||||
zap.L().Error("received invalid user registration request", zap.Error(err))
|
||||
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
|
||||
return
|
||||
}
|
||||
@@ -90,13 +90,13 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
// get invite object
|
||||
invite, err := baseauth.ValidateInvite(ctx, req)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to validate invite token", err)
|
||||
zap.L().Error("failed to validate invite token", zap.Error(err))
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
if invite == nil {
|
||||
zap.S().Errorf("failed to validate invite token: it is either empty or invalid", err)
|
||||
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
|
||||
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
|
||||
return
|
||||
}
|
||||
@@ -104,7 +104,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
// get auth domain from email domain
|
||||
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
|
||||
if apierr != nil {
|
||||
zap.S().Errorf("failed to get domain from email", apierr)
|
||||
zap.L().Error("failed to get domain from email", zap.Error(apierr))
|
||||
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
|
||||
}
|
||||
|
||||
@@ -205,24 +205,24 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
|
||||
ctx := context.Background()
|
||||
|
||||
if !ah.CheckFeature(model.SSO) {
|
||||
zap.S().Errorf("[receiveGoogleAuth] sso requested but feature unavailable %s in org domain %s", model.SSO)
|
||||
zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
q := r.URL.Query()
|
||||
if errType := q.Get("error"); errType != "" {
|
||||
zap.S().Errorf("[receiveGoogleAuth] failed to login with google auth", q.Get("error_description"))
|
||||
zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
relayState := q.Get("state")
|
||||
zap.S().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
|
||||
zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
|
||||
|
||||
parsedState, err := url.Parse(relayState)
|
||||
if err != nil || relayState == "" {
|
||||
zap.S().Errorf("[receiveGoogleAuth] failed to process response - invalid response from IDP", err, r)
|
||||
zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
@@ -244,14 +244,14 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
|
||||
|
||||
identity, err := callbackHandler.HandleCallback(r)
|
||||
if err != nil {
|
||||
zap.S().Errorf("[receiveGoogleAuth] failed to process HandleCallback ", domain.String(), zap.Error(err))
|
||||
zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email)
|
||||
if err != nil {
|
||||
zap.S().Errorf("[receiveGoogleAuth] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
|
||||
zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
@@ -266,14 +266,14 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := context.Background()
|
||||
|
||||
if !ah.CheckFeature(model.SSO) {
|
||||
zap.S().Errorf("[receiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
|
||||
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
|
||||
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
@@ -281,11 +281,11 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
|
||||
// the relay state is sent when a login request is submitted to
|
||||
// Idp.
|
||||
relayState := r.FormValue("RelayState")
|
||||
zap.S().Debug("[receiveML] relay state", zap.String("relayState", relayState))
|
||||
zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState))
|
||||
|
||||
parsedState, err := url.Parse(relayState)
|
||||
if err != nil || relayState == "" {
|
||||
zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
|
||||
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
@@ -302,34 +302,34 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
sp, err := domain.PrepareSamlRequest(parsedState)
|
||||
if err != nil {
|
||||
zap.S().Errorf("[receiveSAML] failed to prepare saml request for domain (%s): %v", domain.String(), err)
|
||||
zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
|
||||
if err != nil {
|
||||
zap.S().Errorf("[receiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domain.String(), err)
|
||||
zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
if assertionInfo.WarningInfo.InvalidTime {
|
||||
zap.S().Errorf("[receiveSAML] expired saml response for organization (%s): %v", domain.String(), err)
|
||||
zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
email := assertionInfo.NameID
|
||||
if email == "" {
|
||||
zap.S().Errorf("[receiveSAML] invalid email in the SSO response (%s)", domain.String())
|
||||
zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String()))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email)
|
||||
if err != nil {
|
||||
zap.S().Errorf("[receiveSAML] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
|
||||
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
|
||||
"go.signoz.io/signoz/pkg/query-service/auth"
|
||||
"go.signoz.io/signoz/pkg/query-service/common"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) lockDashboard(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
@@ -191,7 +191,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
|
||||
url := fmt.Sprintf("%s/trial?licenseKey=%s", constants.LicenseSignozIo, currentActiveLicenseKey)
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
zap.S().Error("Error while creating request for trial details", err)
|
||||
zap.L().Error("Error while creating request for trial details", zap.Error(err))
|
||||
// If there is an error in fetching trial details, we will still return the license details
|
||||
// to avoid blocking the UI
|
||||
ah.Respond(w, resp)
|
||||
@@ -200,7 +200,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
|
||||
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
|
||||
trialResp, err := hClient.Do(req)
|
||||
if err != nil {
|
||||
zap.S().Error("Error while fetching trial details", err)
|
||||
zap.L().Error("Error while fetching trial details", zap.Error(err))
|
||||
// If there is an error in fetching trial details, we will still return the license details
|
||||
// to avoid incorrectly blocking the UI
|
||||
ah.Respond(w, resp)
|
||||
@@ -211,7 +211,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
|
||||
trialRespBody, err := io.ReadAll(trialResp.Body)
|
||||
|
||||
if err != nil || trialResp.StatusCode != http.StatusOK {
|
||||
zap.S().Error("Error while fetching trial details", err)
|
||||
zap.L().Error("Error while fetching trial details", zap.Error(err))
|
||||
// If there is an error in fetching trial details, we will still return the license details
|
||||
// to avoid incorrectly blocking the UI
|
||||
ah.Respond(w, resp)
|
||||
@@ -222,7 +222,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
|
||||
var trialRespData model.SubscriptionServerResp
|
||||
|
||||
if err := json.Unmarshal(trialRespBody, &trialRespData); err != nil {
|
||||
zap.S().Error("Error while decoding trial details", err)
|
||||
zap.L().Error("Error while decoding trial details", zap.Error(err))
|
||||
// If there is an error in fetching trial details, we will still return the license details
|
||||
// to avoid incorrectly blocking the UI
|
||||
ah.Respond(w, resp)
|
||||
|
||||
@@ -1,236 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"sync"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/app/metrics"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/parser"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
basemodel "go.signoz.io/signoz/pkg/query-service/model"
|
||||
querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
|
||||
if !ah.CheckFeature(basemodel.CustomMetricsFunction) {
|
||||
zap.S().Info("CustomMetricsFunction feature is not enabled in this plan")
|
||||
ah.APIHandler.QueryRangeMetricsV2(w, r)
|
||||
return
|
||||
}
|
||||
metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)
|
||||
|
||||
if apiErrorObj != nil {
|
||||
zap.S().Errorf(apiErrorObj.Err.Error())
|
||||
RespondError(w, apiErrorObj, nil)
|
||||
return
|
||||
}
|
||||
|
||||
// prometheus instant query needs same timestamp
|
||||
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
|
||||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.PROM {
|
||||
metricsQueryRangeParams.Start = metricsQueryRangeParams.End
|
||||
}
|
||||
|
||||
// round up the end to nearest multiple
|
||||
if metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER {
|
||||
end := (metricsQueryRangeParams.End) / 1000
|
||||
step := metricsQueryRangeParams.Step
|
||||
metricsQueryRangeParams.End = (end / step * step) * 1000
|
||||
}
|
||||
|
||||
type channelResult struct {
|
||||
Series []*basemodel.Series
|
||||
TableName string
|
||||
Err error
|
||||
Name string
|
||||
Query string
|
||||
}
|
||||
|
||||
execClickHouseQueries := func(queries map[string]string) ([]*basemodel.Series, []string, error, map[string]string) {
|
||||
var seriesList []*basemodel.Series
|
||||
var tableName []string
|
||||
ch := make(chan channelResult, len(queries))
|
||||
var wg sync.WaitGroup
|
||||
|
||||
for name, query := range queries {
|
||||
wg.Add(1)
|
||||
go func(name, query string) {
|
||||
defer wg.Done()
|
||||
seriesList, tableName, err := ah.opts.DataConnector.GetMetricResultEE(r.Context(), query)
|
||||
for _, series := range seriesList {
|
||||
series.QueryName = name
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query}
|
||||
return
|
||||
}
|
||||
ch <- channelResult{Series: seriesList, TableName: tableName}
|
||||
}(name, query)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
close(ch)
|
||||
|
||||
var errs []error
|
||||
errQuriesByName := make(map[string]string)
|
||||
// read values from the channel
|
||||
for r := range ch {
|
||||
if r.Err != nil {
|
||||
errs = append(errs, r.Err)
|
||||
errQuriesByName[r.Name] = r.Query
|
||||
continue
|
||||
}
|
||||
seriesList = append(seriesList, r.Series...)
|
||||
tableName = append(tableName, r.TableName)
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
|
||||
}
|
||||
return seriesList, tableName, nil, nil
|
||||
}
|
||||
|
||||
execPromQueries := func(metricsQueryRangeParams *basemodel.QueryRangeParamsV2) ([]*basemodel.Series, error, map[string]string) {
|
||||
var seriesList []*basemodel.Series
|
||||
ch := make(chan channelResult, len(metricsQueryRangeParams.CompositeMetricQuery.PromQueries))
|
||||
var wg sync.WaitGroup
|
||||
|
||||
for name, query := range metricsQueryRangeParams.CompositeMetricQuery.PromQueries {
|
||||
if query.Disabled {
|
||||
continue
|
||||
}
|
||||
wg.Add(1)
|
||||
go func(name string, query *basemodel.PromQuery) {
|
||||
var seriesList []*basemodel.Series
|
||||
defer wg.Done()
|
||||
tmpl := template.New("promql-query")
|
||||
tmpl, tmplErr := tmpl.Parse(query.Query)
|
||||
if tmplErr != nil {
|
||||
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
|
||||
return
|
||||
}
|
||||
var queryBuf bytes.Buffer
|
||||
tmplErr = tmpl.Execute(&queryBuf, metricsQueryRangeParams.Variables)
|
||||
if tmplErr != nil {
|
||||
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
|
||||
return
|
||||
}
|
||||
query.Query = queryBuf.String()
|
||||
queryModel := basemodel.QueryRangeParams{
|
||||
Start: time.UnixMilli(metricsQueryRangeParams.Start),
|
||||
End: time.UnixMilli(metricsQueryRangeParams.End),
|
||||
Step: time.Duration(metricsQueryRangeParams.Step * int64(time.Second)),
|
||||
Query: query.Query,
|
||||
}
|
||||
promResult, _, err := ah.opts.DataConnector.GetQueryRangeResult(r.Context(), &queryModel)
|
||||
if err != nil {
|
||||
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query.Query}
|
||||
return
|
||||
}
|
||||
matrix, _ := promResult.Matrix()
|
||||
for _, v := range matrix {
|
||||
var s basemodel.Series
|
||||
s.QueryName = name
|
||||
s.Labels = v.Metric.Copy().Map()
|
||||
for _, p := range v.Floats {
|
||||
s.Points = append(s.Points, basemodel.MetricPoint{Timestamp: p.T, Value: p.F})
|
||||
}
|
||||
seriesList = append(seriesList, &s)
|
||||
}
|
||||
ch <- channelResult{Series: seriesList}
|
||||
}(name, query)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
close(ch)
|
||||
|
||||
var errs []error
|
||||
errQuriesByName := make(map[string]string)
|
||||
// read values from the channel
|
||||
for r := range ch {
|
||||
if r.Err != nil {
|
||||
errs = append(errs, r.Err)
|
||||
errQuriesByName[r.Name] = r.Query
|
||||
continue
|
||||
}
|
||||
seriesList = append(seriesList, r.Series...)
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
|
||||
}
|
||||
return seriesList, nil, nil
|
||||
}
|
||||
|
||||
var seriesList []*basemodel.Series
|
||||
var tableName []string
|
||||
var err error
|
||||
var errQuriesByName map[string]string
|
||||
switch metricsQueryRangeParams.CompositeMetricQuery.QueryType {
|
||||
case basemodel.QUERY_BUILDER:
|
||||
runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME)
|
||||
if runQueries.Err != nil {
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: runQueries.Err}, nil)
|
||||
return
|
||||
}
|
||||
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(runQueries.Queries)
|
||||
|
||||
case basemodel.CLICKHOUSE:
|
||||
queries := make(map[string]string)
|
||||
|
||||
for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries {
|
||||
if chQuery.Disabled {
|
||||
continue
|
||||
}
|
||||
tmpl := template.New("clickhouse-query")
|
||||
tmpl, err := tmpl.Parse(chQuery.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
var query bytes.Buffer
|
||||
|
||||
// replace go template variables
|
||||
querytemplate.AssignReservedVars(metricsQueryRangeParams)
|
||||
|
||||
err = tmpl.Execute(&query, metricsQueryRangeParams.Variables)
|
||||
if err != nil {
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
queries[name] = query.String()
|
||||
}
|
||||
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(queries)
|
||||
case basemodel.PROM:
|
||||
seriesList, err, errQuriesByName = execPromQueries(metricsQueryRangeParams)
|
||||
default:
|
||||
err = fmt.Errorf("invalid query type")
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, errQuriesByName)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
apiErrObj := &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
|
||||
len(seriesList) > 1 &&
|
||||
(metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER ||
|
||||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.CLICKHOUSE) {
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
type ResponseFormat struct {
|
||||
ResultType string `json:"resultType"`
|
||||
Result []*basemodel.Series `json:"result"`
|
||||
TableName []string `json:"tableName"`
|
||||
}
|
||||
resp := ResponseFormat{ResultType: "matrix", Result: seriesList, TableName: tableName}
|
||||
ah.Respond(w, resp)
|
||||
}
|
||||
@@ -43,8 +43,8 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
pat := model.PAT{
|
||||
Name: req.Name,
|
||||
Role: req.Role,
|
||||
Name: req.Name,
|
||||
Role: req.Role,
|
||||
ExpiresAt: req.ExpiresInDays,
|
||||
}
|
||||
err = validatePATRequest(pat)
|
||||
@@ -65,7 +65,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
|
||||
pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
|
||||
}
|
||||
|
||||
zap.S().Debugf("Got Create PAT request: %+v", pat)
|
||||
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
|
||||
var apierr basemodel.BaseApiError
|
||||
if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
@@ -115,7 +115,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
|
||||
req.UpdatedByUserID = user.Id
|
||||
id := mux.Vars(r)["id"]
|
||||
req.UpdatedAt = time.Now().Unix()
|
||||
zap.S().Debugf("Got Update PAT request: %+v", req)
|
||||
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
|
||||
var apierr basemodel.BaseApiError
|
||||
if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
@@ -135,7 +135,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
|
||||
}, nil)
|
||||
return
|
||||
}
|
||||
zap.S().Infof("Get PATs for user: %+v", user.Id)
|
||||
zap.L().Info("Get PATs for user", zap.String("user_id", user.Id))
|
||||
pats, apierr := ah.AppDao().ListPATs(ctx)
|
||||
if apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
@@ -156,7 +156,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
zap.S().Debugf("Revoke PAT with id: %+v", id)
|
||||
zap.L().Info("Revoke PAT with id", zap.String("id", id))
|
||||
if apierr := ah.AppDao().RevokePAT(ctx, id, user.Id); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
|
||||
@@ -2,10 +2,8 @@ package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"go.signoz.io/signoz/ee/query-service/app/db"
|
||||
"go.signoz.io/signoz/ee/query-service/constants"
|
||||
"go.signoz.io/signoz/ee/query-service/model"
|
||||
baseapp "go.signoz.io/signoz/pkg/query-service/app"
|
||||
basemodel "go.signoz.io/signoz/pkg/query-service/model"
|
||||
@@ -15,21 +13,17 @@ import (
|
||||
func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
if !ah.CheckFeature(basemodel.SmartTraceDetail) {
|
||||
zap.S().Info("SmartTraceDetail feature is not enabled in this plan")
|
||||
zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
|
||||
ah.APIHandler.SearchTraces(w, r)
|
||||
return
|
||||
}
|
||||
traceId, spanId, levelUpInt, levelDownInt, err := baseapp.ParseSearchTracesParams(r)
|
||||
searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
|
||||
return
|
||||
}
|
||||
spanLimit, err := strconv.Atoi(constants.SpanLimitStr)
|
||||
if err != nil {
|
||||
zap.S().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable: ", err)
|
||||
return
|
||||
}
|
||||
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm)
|
||||
|
||||
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
|
||||
if ah.HandleError(w, err, http.StatusBadRequest) {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ import (
|
||||
func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {
|
||||
|
||||
defer utils.Elapsed("GetMetricResult")()
|
||||
zap.S().Infof("Executing metric result query: %s", query)
|
||||
zap.L().Info("Executing metric result query: ", zap.String("query", query))
|
||||
|
||||
var hash string
|
||||
// If getSubTreeSpans function is used in the clickhouse query
|
||||
@@ -38,9 +38,8 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
|
||||
}
|
||||
|
||||
rows, err := r.conn.Query(ctx, query)
|
||||
zap.S().Debug(query)
|
||||
if err != nil {
|
||||
zap.S().Debug("Error in processing query: ", err)
|
||||
zap.L().Error("Error in processing query", zap.Error(err))
|
||||
return nil, "", fmt.Errorf("error in processing query")
|
||||
}
|
||||
|
||||
@@ -117,7 +116,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
|
||||
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
|
||||
}
|
||||
default:
|
||||
zap.S().Errorf("invalid var found in metric builder query result", v, colName)
|
||||
zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName))
|
||||
}
|
||||
}
|
||||
sort.Strings(groupBy)
|
||||
@@ -140,7 +139,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
|
||||
}
|
||||
// err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash)
|
||||
// if err != nil {
|
||||
// zap.S().Error("Error in dropping temporary table: ", err)
|
||||
// zap.L().Error("Error in dropping temporary table: ", err)
|
||||
// return nil, err
|
||||
// }
|
||||
if hash == "" {
|
||||
@@ -152,7 +151,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
|
||||
|
||||
func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) {
|
||||
|
||||
zap.S().Debugf("Executing getSubTreeSpans function")
|
||||
zap.L().Debug("Executing getSubTreeSpans function")
|
||||
|
||||
// str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;`
|
||||
|
||||
@@ -162,28 +161,28 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
|
||||
|
||||
err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash)
|
||||
if err != nil {
|
||||
zap.S().Error("Error in dropping temporary table: ", err)
|
||||
zap.L().Error("Error in dropping temporary table", zap.Error(err))
|
||||
return query, hash, err
|
||||
}
|
||||
|
||||
// Create temporary table to store the getSubTreeSpans() results
|
||||
zap.S().Debugf("Creating temporary table getSubTreeSpans%s", hash)
|
||||
zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash))
|
||||
err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)")
|
||||
if err != nil {
|
||||
zap.S().Error("Error in creating temporary table: ", err)
|
||||
zap.L().Error("Error in creating temporary table", zap.Error(err))
|
||||
return query, hash, err
|
||||
}
|
||||
|
||||
var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse
|
||||
getSpansSubQuery := subtreeInput
|
||||
// Execute the subTree query
|
||||
zap.S().Debugf("Executing subTree query: %s", getSpansSubQuery)
|
||||
zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery))
|
||||
err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery)
|
||||
|
||||
// zap.S().Info(getSpansSubQuery)
|
||||
// zap.L().Info(getSpansSubQuery)
|
||||
|
||||
if err != nil {
|
||||
zap.S().Debug("Error in processing sql query: ", err)
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return query, hash, fmt.Errorf("Error in processing sql query")
|
||||
}
|
||||
|
||||
@@ -196,16 +195,16 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
|
||||
if len(getSpansSubQueryDBResponses) == 0 {
|
||||
return query, hash, fmt.Errorf("No spans found for the given query")
|
||||
}
|
||||
zap.S().Debugf("Executing query to fetch all the spans from the same TraceID: %s", modelQuery)
|
||||
zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery))
|
||||
err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)
|
||||
|
||||
if err != nil {
|
||||
zap.S().Debug("Error in processing sql query: ", err)
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return query, hash, fmt.Errorf("Error in processing sql query")
|
||||
}
|
||||
|
||||
// Process model to fetch the spans
|
||||
zap.S().Debugf("Processing model to fetch the spans")
|
||||
zap.L().Debug("Processing model to fetch the spans")
|
||||
searchSpanResponses := []basemodel.SearchSpanResponseItem{}
|
||||
for _, item := range searchScanResponses {
|
||||
var jsonItem basemodel.SearchSpanResponseItem
|
||||
@@ -218,17 +217,17 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
|
||||
}
|
||||
// Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash
|
||||
// Use map to store pointer to the spans to avoid duplicates and save memory
|
||||
zap.S().Debugf("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans%s", hash)
|
||||
zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
|
||||
|
||||
treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses)
|
||||
if err != nil {
|
||||
zap.S().Error("Error in getSubTreeAlgorithm function: ", err)
|
||||
zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err))
|
||||
return query, hash, err
|
||||
}
|
||||
zap.S().Debugf("Preparing batch to store subtree spans in temporary table getSubTreeSpans%s", hash)
|
||||
zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
|
||||
statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash))
|
||||
if err != nil {
|
||||
zap.S().Error("Error in preparing batch statement: ", err)
|
||||
zap.L().Error("Error in preparing batch statement", zap.Error(err))
|
||||
return query, hash, err
|
||||
}
|
||||
for _, span := range treeSearchResponse {
|
||||
@@ -251,14 +250,14 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
|
||||
span.Events,
|
||||
)
|
||||
if err != nil {
|
||||
zap.S().Debug("Error in processing sql query: ", err)
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return query, hash, err
|
||||
}
|
||||
}
|
||||
zap.S().Debugf("Inserting the subtree spans in temporary table getSubTreeSpans%s", hash)
|
||||
zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
|
||||
err = statement.Send()
|
||||
if err != nil {
|
||||
zap.S().Error("Error in sending statement: ", err)
|
||||
zap.L().Error("Error in sending statement", zap.Error(err))
|
||||
return query, hash, err
|
||||
}
|
||||
return query, hash, nil
|
||||
@@ -323,7 +322,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
|
||||
spans = append(spans, span)
|
||||
}
|
||||
|
||||
zap.S().Debug("Building Tree")
|
||||
zap.L().Debug("Building Tree")
|
||||
roots, err := buildSpanTrees(&spans)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -333,7 +332,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
|
||||
// For each root, get the subtree spans
|
||||
for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses {
|
||||
targetSpan := &model.SpanForTraceDetails{}
|
||||
// zap.S().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
|
||||
// zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
|
||||
// Search target span object in the tree
|
||||
for _, root := range roots {
|
||||
targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID)
|
||||
@@ -341,7 +340,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
zap.S().Error("Error during BreadthFirstSearch(): ", err)
|
||||
zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,11 @@ import (
|
||||
func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) {
|
||||
var spans []*model.SpanForTraceDetails
|
||||
|
||||
// if targetSpanId is null or not present then randomly select a span as targetSpanId
|
||||
if (targetSpanId == "" || targetSpanId == "null") && len(payload) > 0 {
|
||||
targetSpanId = payload[0].SpanID
|
||||
}
|
||||
|
||||
// Build a slice of spans from the payload
|
||||
for _, spanItem := range payload {
|
||||
var parentID string
|
||||
@@ -49,7 +54,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
zap.S().Error("Error during BreadthFirstSearch(): ", err)
|
||||
zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
@@ -115,6 +120,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
|
||||
searchSpansResult := []basemodel.SearchSpansResult{{
|
||||
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
|
||||
Events: make([][]interface{}, len(resultSpansSet)),
|
||||
IsSubTree: true,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -186,7 +192,7 @@ func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTra
|
||||
|
||||
// If the parent span is not found, add current span to list of roots
|
||||
if parent == nil {
|
||||
// zap.S().Debug("Parent Span not found parent_id: ", span.ParentID)
|
||||
// zap.L().Debug("Parent Span not found parent_id: ", span.ParentID)
|
||||
roots = append(roots, span)
|
||||
span.ParentID = ""
|
||||
continue
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"net/http"
|
||||
_ "net/http/pprof" // http profiler
|
||||
"os"
|
||||
"regexp"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/handlers"
|
||||
@@ -134,7 +135,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
var reader interfaces.DataConnector
|
||||
storage := os.Getenv("STORAGE")
|
||||
if storage == "clickhouse" {
|
||||
zap.S().Info("Using ClickHouse as datastore ...")
|
||||
zap.L().Info("Using ClickHouse as datastore ...")
|
||||
qb := db.NewDataConnector(
|
||||
localDB,
|
||||
serverOptions.PromConfigPath,
|
||||
@@ -328,7 +329,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
|
||||
r.Use(loggingMiddleware)
|
||||
|
||||
apiHandler.RegisterRoutes(r, am)
|
||||
apiHandler.RegisterMetricsRoutes(r, am)
|
||||
apiHandler.RegisterLogsRoutes(r, am)
|
||||
apiHandler.RegisterIntegrationRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV3Routes(r, am)
|
||||
@@ -393,13 +393,14 @@ func (lrw *loggingResponseWriter) Flush() {
|
||||
lrw.ResponseWriter.(http.Flusher).Flush()
|
||||
}
|
||||
|
||||
func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface{}, bool) {
|
||||
pathToExtractBodyFrom := "/api/v3/query_range"
|
||||
func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}, bool) {
|
||||
pathToExtractBodyFromV3 := "/api/v3/query_range"
|
||||
pathToExtractBodyFromV4 := "/api/v4/query_range"
|
||||
|
||||
data := map[string]interface{}{}
|
||||
var postData *v3.QueryRangeParamsV3
|
||||
|
||||
if path == pathToExtractBodyFrom && (r.Method == "POST") {
|
||||
if (r.Method == "POST") && ((path == pathToExtractBodyFromV3) || (path == pathToExtractBodyFromV4)) {
|
||||
if r.Body != nil {
|
||||
bodyBytes, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
@@ -417,32 +418,68 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface
|
||||
return nil, false
|
||||
}
|
||||
|
||||
referrer := r.Header.Get("Referer")
|
||||
|
||||
dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer)
|
||||
if err != nil {
|
||||
zap.L().Error("error while matching the referrer", zap.Error(err))
|
||||
}
|
||||
alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer)
|
||||
if err != nil {
|
||||
zap.L().Error("error while matching the alert: ", zap.Error(err))
|
||||
}
|
||||
logsExplorerMatched, err := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer)
|
||||
if err != nil {
|
||||
zap.L().Error("error while matching the logs explorer: ", zap.Error(err))
|
||||
}
|
||||
traceExplorerMatched, err := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer)
|
||||
if err != nil {
|
||||
zap.L().Error("error while matching the trace explorer: ", zap.Error(err))
|
||||
}
|
||||
|
||||
signozMetricsUsed := false
|
||||
signozLogsUsed := false
|
||||
dataSources := []string{}
|
||||
signozTracesUsed := false
|
||||
if postData != nil {
|
||||
|
||||
if postData.CompositeQuery != nil {
|
||||
data["queryType"] = postData.CompositeQuery.QueryType
|
||||
data["panelType"] = postData.CompositeQuery.PanelType
|
||||
|
||||
signozLogsUsed, signozMetricsUsed, _ = telemetry.GetInstance().CheckSigNozSignals(postData)
|
||||
signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData)
|
||||
}
|
||||
}
|
||||
|
||||
if signozMetricsUsed || signozLogsUsed {
|
||||
if signozMetricsUsed || signozLogsUsed || signozTracesUsed {
|
||||
if signozMetricsUsed {
|
||||
dataSources = append(dataSources, "metrics")
|
||||
telemetry.GetInstance().AddActiveMetricsUser()
|
||||
}
|
||||
if signozLogsUsed {
|
||||
dataSources = append(dataSources, "logs")
|
||||
telemetry.GetInstance().AddActiveLogsUser()
|
||||
}
|
||||
data["dataSources"] = dataSources
|
||||
if signozTracesUsed {
|
||||
telemetry.GetInstance().AddActiveTracesUser()
|
||||
}
|
||||
data["metricsUsed"] = signozMetricsUsed
|
||||
data["logsUsed"] = signozLogsUsed
|
||||
data["tracesUsed"] = signozTracesUsed
|
||||
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
|
||||
if err == nil {
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, userEmail, true)
|
||||
// switch case to set data["screen"] based on the referrer
|
||||
switch {
|
||||
case dashboardMatched:
|
||||
data["screen"] = "panel"
|
||||
case alertMatched:
|
||||
data["screen"] = "alert"
|
||||
case logsExplorerMatched:
|
||||
data["screen"] = "logs-explorer"
|
||||
case traceExplorerMatched:
|
||||
data["screen"] = "traces-explorer"
|
||||
default:
|
||||
data["screen"] = "unknown"
|
||||
return data, true
|
||||
}
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail, true, false)
|
||||
}
|
||||
}
|
||||
return data, true
|
||||
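The renamed `extractQueryRangeData` helper now also classifies the request's `Referer` header with regular expressions to record which screen issued the query. A small sketch of that matching logic; the regex patterns are taken from the hunk above, while the `classifyScreen` wrapper and the sample URLs are illustrative:

```go
package main

import (
	"fmt"
	"regexp"
)

// classifyScreen mirrors the referrer matching used above to label
// which UI screen issued a query_range request.
func classifyScreen(referrer string) string {
	switch {
	case regexp.MustCompile(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`).MatchString(referrer):
		return "panel"
	case regexp.MustCompile(`/alerts/(new|edit)(?:\?.*)?$`).MatchString(referrer):
		return "alert"
	case regexp.MustCompile(`/logs/logs-explorer(?:\?.*)?$`).MatchString(referrer):
		return "logs-explorer"
	case regexp.MustCompile(`/traces-explorer(?:\?.*)?$`).MatchString(referrer):
		return "traces-explorer"
	default:
		return "unknown"
	}
}

func main() {
	fmt.Println(classifyScreen("https://signoz.example.com/alerts/new?ruleType=threshold")) // alert
	fmt.Println(classifyScreen("https://signoz.example.com/services"))                      // unknown
}
```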
@@ -469,7 +506,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
|
||||
route := mux.CurrentRoute(r)
|
||||
path, _ := route.GetPathTemplate()
|
||||
|
||||
queryRangeV3data, metadataExists := extractQueryRangeV3Data(path, r)
|
||||
queryRangeData, metadataExists := extractQueryRangeData(path, r)
|
||||
getActiveLogs(path, r)
|
||||
|
||||
lrw := NewLoggingResponseWriter(w)
|
||||
@@ -477,7 +514,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
|
||||
|
||||
data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
|
||||
if metadataExists {
|
||||
for key, value := range queryRangeV3data {
|
||||
for key, value := range queryRangeData {
|
||||
data[key] = value
|
||||
}
|
||||
}
|
||||
@@ -485,7 +522,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
|
||||
if _, ok := telemetry.EnabledPaths()[path]; ok {
|
||||
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
|
||||
if err == nil {
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail)
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail, true, false)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -522,7 +559,7 @@ func (s *Server) initListeners() error {
|
||||
return err
|
||||
}
|
||||
|
||||
zap.S().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
|
||||
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
|
||||
|
||||
// listen on private port to support internal services
|
||||
privateHostPort := s.serverOptions.PrivateHostPort
|
||||
@@ -535,7 +572,7 @@ func (s *Server) initListeners() error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
zap.S().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
|
||||
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -547,7 +584,7 @@ func (s *Server) Start() error {
|
||||
if !s.serverOptions.DisableRules {
|
||||
s.ruleManager.Start()
|
||||
} else {
|
||||
zap.S().Info("msg: Rules disabled as rules.disable is set to TRUE")
|
||||
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
|
||||
}
|
||||
|
||||
err := s.initListeners()
|
||||
@@ -561,23 +598,23 @@ func (s *Server) Start() error {
|
||||
}
|
||||
|
||||
go func() {
|
||||
zap.S().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
|
||||
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
|
||||
|
||||
switch err := s.httpServer.Serve(s.httpConn); err {
|
||||
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
|
||||
// normal exit, nothing to do
|
||||
default:
|
||||
zap.S().Error("Could not start HTTP server", zap.Error(err))
|
||||
zap.L().Error("Could not start HTTP server", zap.Error(err))
|
||||
}
|
||||
s.unavailableChannel <- healthcheck.Unavailable
|
||||
}()
|
||||
|
||||
go func() {
|
||||
zap.S().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
|
||||
zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
|
||||
|
||||
err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
|
||||
if err != nil {
|
||||
zap.S().Error("Could not start pprof server", zap.Error(err))
|
||||
zap.L().Error("Could not start pprof server", zap.Error(err))
|
||||
}
|
||||
}()
|
||||
|
||||
@@ -587,14 +624,14 @@ func (s *Server) Start() error {
|
||||
}
|
||||
|
||||
go func() {
|
||||
zap.S().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
|
||||
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
|
||||
|
||||
switch err := s.privateHTTP.Serve(s.privateConn); err {
|
||||
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
|
||||
// normal exit, nothing to do
|
||||
zap.S().Info("private http server closed")
|
||||
zap.L().Info("private http server closed")
|
||||
default:
|
||||
zap.S().Error("Could not start private HTTP server", zap.Error(err))
|
||||
zap.L().Error("Could not start private HTTP server", zap.Error(err))
|
||||
}
|
||||
|
||||
s.unavailableChannel <- healthcheck.Unavailable
|
||||
@@ -602,10 +639,10 @@ func (s *Server) Start() error {
|
||||
}()
|
||||
|
||||
go func() {
|
||||
zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
|
||||
zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
|
||||
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
|
||||
if err != nil {
|
||||
zap.S().Info("opamp ws server failed to start", err)
|
||||
zap.L().Error("opamp ws server failed to start", zap.Error(err))
|
||||
s.unavailableChannel <- healthcheck.Unavailable
|
||||
}
|
||||
}()
|
||||
@@ -673,6 +710,7 @@ func makeRulesManager(
|
||||
Logger: nil,
|
||||
DisableRules: disableRules,
|
||||
FeatureFlags: fm,
|
||||
Reader: ch,
|
||||
}
|
||||
|
||||
// create Manager
|
||||
@@ -681,7 +719,7 @@ func makeRulesManager(
|
||||
return nil, fmt.Errorf("rule manager error: %v", err)
|
||||
}
|
||||
|
||||
zap.S().Info("rules manager is ready")
|
||||
zap.L().Info("rules manager is ready")
|
||||
|
||||
return manager, nil
|
||||
}
|
||||
|
||||
@@ -17,25 +17,25 @@ import (
|
||||
func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel.UserPayload, error) {
|
||||
patToken := r.Header.Get("SIGNOZ-API-KEY")
|
||||
if len(patToken) > 0 {
|
||||
zap.S().Debugf("Received a non-zero length PAT token")
|
||||
zap.L().Debug("Received a non-zero length PAT token")
|
||||
ctx := context.Background()
|
||||
dao := apiHandler.AppDao()
|
||||
|
||||
pat, err := dao.GetPAT(ctx, patToken)
|
||||
if err == nil && pat != nil {
|
||||
zap.S().Debugf("Found valid PAT: %+v", pat)
|
||||
zap.L().Debug("Found valid PAT: ", zap.Any("pat", pat))
|
||||
if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
|
||||
zap.S().Debugf("PAT has expired: %+v", pat)
|
||||
zap.L().Info("PAT has expired: ", zap.Any("pat", pat))
|
||||
return nil, fmt.Errorf("PAT has expired")
|
||||
}
|
||||
group, apiErr := dao.GetGroupByName(ctx, pat.Role)
|
||||
if apiErr != nil {
|
||||
zap.S().Debugf("Error while getting group for PAT: %+v", apiErr)
|
||||
zap.L().Error("Error while getting group for PAT: ", zap.Any("apiErr", apiErr))
|
||||
return nil, apiErr
|
||||
}
|
||||
user, err := dao.GetUser(ctx, pat.UserID)
|
||||
if err != nil {
|
||||
zap.S().Debugf("Error while getting user for PAT: %+v", err)
|
||||
zap.L().Error("Error while getting user for PAT: ", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
telemetry.GetInstance().SetPatTokenUser()
|
||||
@@ -48,7 +48,7 @@ func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel
|
||||
}, nil
|
||||
}
|
||||
if err != nil {
|
||||
zap.S().Debugf("Error while getting user for PAT: %+v", err)
|
||||
zap.L().Error("Error while getting user for PAT: ", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
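In the PAT handling above, a token with `ExpiresAt == 0` never expires; otherwise the stored Unix timestamp is compared against the current time. A tiny sketch of that expiry rule, with the PAT struct trimmed to the single field the check uses:

```go
package main

import (
	"fmt"
	"time"
)

// pat is trimmed to the expiry field used by the check above.
type pat struct {
	ExpiresAt int64 // Unix seconds; 0 means the token never expires
}

func expired(p pat) bool {
	return p.ExpiresAt != 0 && p.ExpiresAt < time.Now().Unix()
}

func main() {
	fmt.Println(expired(pat{ExpiresAt: 0}))                                 // false: never expires
	fmt.Println(expired(pat{ExpiresAt: time.Now().Add(-time.Hour).Unix()})) // true: past timestamp
}
```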
@@ -11,7 +11,8 @@ const (
|
||||
var LicenseSignozIo = "https://license.signoz.io/api/v1"
|
||||
var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
|
||||
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
|
||||
var SpanLimitStr = GetOrDefaultEnv("SPAN_LIMIT", "5000")
|
||||
var SpanRenderLimitStr = GetOrDefaultEnv("SPAN_RENDER_LIMIT", "2500")
|
||||
var MaxSpansInTraceStr = GetOrDefaultEnv("MAX_SPANS_IN_TRACE", "250000")
|
||||
|
||||
func GetOrDefaultEnv(key string, fallback string) string {
|
||||
v := os.Getenv(key)
|
||||
|
||||
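The constants hunk above adds env-configurable limits (`SPAN_LIMIT`, `SPAN_RENDER_LIMIT`, `MAX_SPANS_IN_TRACE`) read through `GetOrDefaultEnv`, whose body is cut off by the diff. A plausible sketch of the fallback it implements; only the signature and the `os.Getenv` call appear in the hunk, the rest is an assumption:

```go
package main

import (
	"fmt"
	"os"
)

// GetOrDefaultEnv returns the value of key, or fallback when the variable is
// unset or empty. Body reconstructed for illustration from the visible lines.
func GetOrDefaultEnv(key string, fallback string) string {
	v := os.Getenv(key)
	if len(v) == 0 {
		return fallback
	}
	return v
}

func main() {
	// Mirrors how the span limits above would pick up their defaults.
	fmt.Println("span limit:", GetOrDefaultEnv("SPAN_LIMIT", "5000"))
	fmt.Println("render limit:", GetOrDefaultEnv("SPAN_RENDER_LIMIT", "2500"))
}
```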
@@ -22,19 +22,19 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
|
||||
domain, apierr := m.GetDomainByEmail(ctx, email)
|
||||
|
||||
if apierr != nil {
|
||||
zap.S().Errorf("failed to get domain from email", apierr)
|
||||
zap.L().Error("failed to get domain from email", zap.Error(apierr))
|
||||
return nil, model.InternalErrorStr("failed to get domain from email")
|
||||
}
|
||||
|
||||
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
|
||||
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
|
||||
return nil, model.InternalErrorStr("failed to generate password hash")
|
||||
}
|
||||
|
||||
group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup)
|
||||
if apiErr != nil {
|
||||
zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err)
|
||||
zap.L().Error("GetGroupByName failed", zap.Error(apiErr))
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
@@ -51,7 +51,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
|
||||
|
||||
user, apiErr = m.CreateUser(ctx, user, false)
|
||||
if apiErr != nil {
|
||||
zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err)
|
||||
zap.L().Error("CreateUser failed", zap.Error(apiErr))
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
@@ -65,7 +65,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
|
||||
|
||||
userPayload, apierr := m.GetUserByEmail(ctx, email)
|
||||
if !apierr.IsNil() {
|
||||
zap.S().Errorf(" failed to get user with email received from auth provider", apierr.Error())
|
||||
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
|
||||
return "", model.BadRequestStr("invalid user email received from the auth provider")
|
||||
}
|
||||
|
||||
@@ -75,7 +75,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
|
||||
newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
|
||||
user = newUser
|
||||
if apiErr != nil {
|
||||
zap.S().Errorf("failed to create user with email received from auth provider: %v", apierr.Error())
|
||||
zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
|
||||
return "", apiErr
|
||||
}
|
||||
} else {
|
||||
@@ -84,7 +84,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
|
||||
|
||||
tokenStore, err := baseauth.GenerateJWTForUser(user)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to generate token for SSO login user", err)
|
||||
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
|
||||
return "", model.InternalErrorStr("failed to generate token for the user")
|
||||
}
|
||||
|
||||
@@ -143,8 +143,8 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
|
||||
// do nothing, just skip sso
|
||||
ssoAvailable = false
|
||||
default:
|
||||
zap.S().Errorf("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
|
||||
return resp, model.BadRequest(err)
|
||||
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
|
||||
return resp, model.BadRequestStr(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -160,7 +160,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
|
||||
if len(emailComponents) > 0 {
|
||||
emailDomain = emailComponents[1]
|
||||
}
|
||||
zap.S().Errorf("failed to get org domain from email", zap.String("emailDomain", emailDomain), apierr.ToError())
|
||||
zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
|
||||
return resp, apierr
|
||||
}
|
||||
|
||||
@@ -176,7 +176,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
|
||||
escapedUrl, _ := url.QueryUnescape(sourceUrl)
|
||||
siteUrl, err := url.Parse(escapedUrl)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to parse referer", err)
|
||||
zap.L().Error("failed to parse referer", zap.Error(err))
|
||||
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
|
||||
}
|
||||
|
||||
@@ -185,7 +185,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
|
||||
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), err)
|
||||
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
|
||||
return resp, model.InternalError(err)
|
||||
}
|
||||
|
||||
|
||||
@@ -48,13 +48,13 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
|
||||
if domainIdStr != "" {
|
||||
domainId, err := uuid.Parse(domainIdStr)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to parse domainId from relay state", err)
|
||||
zap.L().Error("failed to parse domainId from relay state", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse domainId from IdP response")
|
||||
}
|
||||
|
||||
domain, err = m.GetDomain(ctx, domainId)
|
||||
if (err != nil) || domain == nil {
|
||||
zap.S().Errorf("failed to find domain from domainId received in IdP response", err.Error())
|
||||
zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
|
||||
return nil, fmt.Errorf("invalid credentials")
|
||||
}
|
||||
}
|
||||
@@ -64,7 +64,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
|
||||
domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
|
||||
domain = domainFromDB
|
||||
if (err != nil) || domain == nil {
|
||||
zap.S().Errorf("failed to find domain from domainName received in IdP response", err.Error())
|
||||
zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
|
||||
return nil, fmt.Errorf("invalid credentials")
|
||||
}
|
||||
}
|
||||
@@ -132,7 +132,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo
|
||||
for _, s := range stored {
|
||||
domain := model.OrgDomain{Id: s.Id, Name: s.Name, OrgId: s.OrgId}
|
||||
if err := domain.LoadConfig(s.Data); err != nil {
|
||||
zap.S().Errorf("ListDomains() failed", zap.Error(err))
|
||||
zap.L().Error("ListDomains() failed", zap.Error(err))
|
||||
}
|
||||
domains = append(domains, domain)
|
||||
}
|
||||
@@ -153,7 +153,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
|
||||
|
||||
configJson, err := json.Marshal(domain)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to unmarshal domain config", zap.Error(err))
|
||||
zap.L().Error("failed to unmarshal domain config", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain creation failed"))
|
||||
}
|
||||
|
||||
@@ -167,7 +167,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
|
||||
time.Now().Unix())
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to insert domain in db", zap.Error(err))
|
||||
zap.L().Error("failed to insert domain in db", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain creation failed"))
|
||||
}
|
||||
|
||||
@@ -178,13 +178,13 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
|
||||
func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
|
||||
|
||||
if domain.Id == uuid.Nil {
|
||||
zap.S().Errorf("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
|
||||
zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
|
||||
return model.InternalError(fmt.Errorf("domain update failed"))
|
||||
}
|
||||
|
||||
configJson, err := json.Marshal(domain)
|
||||
if err != nil {
|
||||
zap.S().Errorf("domain update failed", zap.Error(err))
|
||||
zap.L().Error("domain update failed", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain update failed"))
|
||||
}
|
||||
|
||||
@@ -195,7 +195,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba
|
||||
domain.Id)
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("domain update failed", zap.Error(err))
|
||||
zap.L().Error("domain update failed", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain update failed"))
|
||||
}
|
||||
|
||||
@@ -206,7 +206,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba
|
||||
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
|
||||
|
||||
if id == uuid.Nil {
|
||||
zap.S().Errorf("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
|
||||
zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
|
||||
return model.InternalError(fmt.Errorf("domain delete failed"))
|
||||
}
|
||||
|
||||
@@ -215,7 +215,7 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.Bas
|
||||
id)
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("domain delete failed", zap.Error(err))
|
||||
zap.L().Error("domain delete failed", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("domain delete failed"))
|
||||
}
|
||||
|
||||
|
||||
@@ -26,12 +26,12 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basem
|
||||
p.Revoked,
|
||||
)
|
||||
if err != nil {
|
||||
zap.S().Errorf("Failed to insert PAT in db, err: %v", zap.Error(err))
|
||||
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
|
||||
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
|
||||
}
|
||||
id, err := result.LastInsertId()
|
||||
if err != nil {
|
||||
zap.S().Errorf("Failed to get last inserted id, err: %v", zap.Error(err))
|
||||
zap.L().Error("Failed to get last inserted id, err: %v", zap.Error(err))
|
||||
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
|
||||
}
|
||||
p.Id = strconv.Itoa(int(id))
|
||||
@@ -62,7 +62,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemo
|
||||
p.UpdatedByUserID,
|
||||
id)
|
||||
if err != nil {
|
||||
zap.S().Errorf("Failed to update PAT in db, err: %v", zap.Error(err))
|
||||
zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("PAT update failed"))
|
||||
}
|
||||
return nil
|
||||
@@ -74,7 +74,7 @@ func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed
|
||||
lastUsed,
|
||||
token)
|
||||
if err != nil {
|
||||
zap.S().Errorf("Failed to update PAT last used in db, err: %v", zap.Error(err))
|
||||
zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("PAT last used update failed"))
|
||||
}
|
||||
return nil
|
||||
@@ -84,7 +84,7 @@ func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApi
|
||||
pats := []model.PAT{}
|
||||
|
||||
if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil {
|
||||
zap.S().Errorf("Failed to fetch PATs err: %v", zap.Error(err))
|
||||
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
|
||||
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
|
||||
}
|
||||
for i := range pats {
|
||||
@@ -129,7 +129,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) base
|
||||
"UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3",
|
||||
userID, updatedAt, id)
|
||||
if err != nil {
|
||||
zap.S().Errorf("Failed to revoke PAT in db, err: %v", zap.Error(err))
|
||||
zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("PAT revoke failed"))
|
||||
}
|
||||
return nil
|
||||
|
||||
@@ -47,13 +47,13 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError)
|
||||
httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString))
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to connect to license.signoz.io", err)
|
||||
zap.L().Error("failed to connect to license.signoz.io", zap.Error(err))
|
||||
return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
|
||||
}
|
||||
|
||||
httpBody, err := io.ReadAll(httpResponse.Body)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to read activation response from license.signoz.io", err)
|
||||
zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err))
|
||||
return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
|
||||
}
|
||||
|
||||
@@ -63,7 +63,7 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError)
|
||||
result := ActivationResult{}
|
||||
err = json.Unmarshal(httpBody, &result)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to marshal activation response from license.signoz.io", err)
|
||||
zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err))
|
||||
return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
|
||||
}
|
||||
|
||||
|
||||
@@ -97,7 +97,7 @@ func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error {
|
||||
l.ValidationMessage)
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("error in inserting license data: ", zap.Error(err))
|
||||
zap.L().Error("error in inserting license data: ", zap.Error(err))
|
||||
return fmt.Errorf("failed to insert license in db: %v", err)
|
||||
}
|
||||
|
||||
@@ -121,7 +121,7 @@ func (r *Repo) UpdatePlanDetails(ctx context.Context,
|
||||
_, err := r.db.ExecContext(ctx, query, planDetails, time.Now(), key)
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("error in updating license: ", zap.Error(err))
|
||||
zap.L().Error("error in updating license: ", zap.Error(err))
|
||||
return fmt.Errorf("failed to update license in db: %v", err)
|
||||
}
|
||||
|
||||
|
||||
@@ -100,7 +100,7 @@ func (lm *Manager) SetActive(l *model.License) {
|
||||
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.S().Panicf("Couldn't activate features: %v", err)
|
||||
zap.L().Panic("Couldn't activate features", zap.Error(err))
|
||||
}
|
||||
if !lm.validatorRunning {
|
||||
// we want to make sure only one validator runs,
|
||||
@@ -125,13 +125,13 @@ func (lm *Manager) LoadActiveLicense() error {
|
||||
if active != nil {
|
||||
lm.SetActive(active)
|
||||
} else {
|
||||
zap.S().Info("No active license found, defaulting to basic plan")
|
||||
zap.L().Info("No active license found, defaulting to basic plan")
|
||||
// if no active license is found, we default to basic(free) plan with all default features
|
||||
lm.activeFeatures = model.BasicPlan
|
||||
setDefaultFeatures(lm)
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.S().Error("Couldn't initialize features: ", err)
|
||||
zap.L().Error("Couldn't initialize features", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
}
|
||||
@@ -191,7 +191,7 @@ func (lm *Manager) Validator(ctx context.Context) {
|
||||
|
||||
// Validate validates the current active license
|
||||
func (lm *Manager) Validate(ctx context.Context) (reterr error) {
|
||||
zap.S().Info("License validation started")
|
||||
zap.L().Info("License validation started")
|
||||
if lm.activeLicense == nil {
|
||||
return nil
|
||||
}
|
||||
@@ -201,12 +201,12 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
|
||||
|
||||
lm.lastValidated = time.Now().Unix()
|
||||
if reterr != nil {
|
||||
zap.S().Errorf("License validation completed with error", reterr)
|
||||
zap.L().Error("License validation completed with error", zap.Error(reterr))
|
||||
atomic.AddUint64(&lm.failedAttempts, 1)
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
|
||||
map[string]interface{}{"err": reterr.Error()}, "")
|
||||
map[string]interface{}{"err": reterr.Error()}, "", true, false)
|
||||
} else {
|
||||
zap.S().Info("License validation completed with no errors")
|
||||
zap.L().Info("License validation completed with no errors")
|
||||
}
|
||||
|
||||
lm.mutex.Unlock()
|
||||
@@ -214,7 +214,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
|
||||
|
||||
response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
|
||||
if apiError != nil {
|
||||
zap.S().Errorf("failed to validate license", apiError)
|
||||
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
|
||||
return apiError.Err
|
||||
}
|
||||
|
||||
@@ -235,7 +235,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
|
||||
}
|
||||
|
||||
if err := l.ParsePlan(); err != nil {
|
||||
zap.S().Errorf("failed to parse updated license", zap.Error(err))
|
||||
zap.L().Error("failed to parse updated license", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -245,7 +245,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
|
||||
if err != nil {
|
||||
// unexpected db write issue but we can let the user continue
|
||||
// and wait for update to work in next cycle.
|
||||
zap.S().Errorf("failed to validate license", zap.Error(err))
|
||||
zap.L().Error("failed to validate license", zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -263,14 +263,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
|
||||
userEmail, err := auth.GetEmailFromJwt(ctx)
|
||||
if err == nil {
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
|
||||
map[string]interface{}{"err": errResponse.Err.Error()}, userEmail)
|
||||
map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
response, apiError := validate.ActivateLicense(key, "")
|
||||
if apiError != nil {
|
||||
zap.S().Errorf("failed to activate license", zap.Error(apiError.Err))
|
||||
zap.L().Error("failed to activate license", zap.Error(apiError.Err))
|
||||
return nil, apiError
|
||||
}
|
||||
|
||||
@@ -284,14 +284,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
|
||||
err := l.ParsePlan()
|
||||
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to activate license", zap.Error(err))
|
||||
zap.L().Error("failed to activate license", zap.Error(err))
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
// store the license before activating it
|
||||
err = lm.repo.InsertLicense(ctx, l)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to activate license", zap.Error(err))
|
||||
zap.L().Error("failed to activate license", zap.Error(err))
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
|
||||
@@ -16,8 +16,10 @@ import (
|
||||
"go.signoz.io/signoz/pkg/query-service/auth"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
|
||||
"go.signoz.io/signoz/pkg/query-service/migrate"
|
||||
"go.signoz.io/signoz/pkg/query-service/version"
|
||||
"google.golang.org/grpc"
|
||||
"google.golang.org/grpc/credentials/insecure"
|
||||
|
||||
zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
|
||||
zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"
|
||||
@@ -27,18 +29,19 @@ import (
|
||||
)
|
||||
|
||||
func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
|
||||
config := zap.NewDevelopmentConfig()
|
||||
config := zap.NewProductionConfig()
|
||||
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
|
||||
defer stop()
|
||||
|
||||
config.EncoderConfig.EncodeDuration = zapcore.StringDurationEncoder
|
||||
otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
|
||||
consoleEncoder := zapcore.NewConsoleEncoder(config.EncoderConfig)
|
||||
defaultLogLevel := zapcore.DebugLevel
|
||||
config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
|
||||
config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder
|
||||
config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
|
||||
config.EncoderConfig.TimeKey = "timestamp"
|
||||
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
|
||||
|
||||
otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
|
||||
consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig)
|
||||
defaultLogLevel := zapcore.InfoLevel
|
||||
|
||||
res := resource.NewWithAttributes(
|
||||
semconv.SchemaURL,
|
||||
semconv.ServiceNameKey.String("query-service"),
|
||||
@@ -48,14 +51,15 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
|
||||
zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
|
||||
)
|
||||
|
||||
if enableQueryServiceLogOTLPExport == true {
|
||||
conn, err := grpc.DialContext(ctx, constants.OTLPTarget, grpc.WithBlock(), grpc.WithInsecure(), grpc.WithTimeout(time.Second*30))
|
||||
if enableQueryServiceLogOTLPExport {
|
||||
ctx, _ := context.WithTimeout(ctx, time.Second*30)
|
||||
conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials()))
|
||||
if err != nil {
|
||||
log.Println("failed to connect to otlp collector to export query service logs with error:", err)
|
||||
log.Fatalf("failed to establish connection: %v", err)
|
||||
} else {
|
||||
logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize)
|
||||
if err != nil {
|
||||
logExportBatchSizeInt = 1000
|
||||
logExportBatchSizeInt = 512
|
||||
}
|
||||
ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{
|
||||
BatchSize: logExportBatchSizeInt,
|
||||
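`initZapLog` switches from `zap.NewDevelopmentConfig()` with a colored console encoder to `zap.NewProductionConfig()` with JSON output, ISO8601 timestamps, and an Info default level. A reduced sketch of that encoder setup; the OTLP export branch and the `zapotlpsync` tee from the hunk are deliberately omitted:

```go
package main

import (
	"os"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func initZapLog() *zap.Logger {
	config := zap.NewProductionConfig()
	config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder
	config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
	config.EncoderConfig.TimeKey = "timestamp"
	config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder

	// JSON console output at Info level, matching the new defaults above.
	consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig)
	core := zapcore.NewTee(
		zapcore.NewCore(consoleEncoder, os.Stdout, zapcore.InfoLevel),
	)
	return zap.New(core)
}

func main() {
	logger := initZapLog()
	defer logger.Sync()
	zap.ReplaceGlobals(logger)
	zap.L().Info("logger initialized")
}
```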
@@ -113,7 +117,6 @@ func main() {
|
||||
zap.ReplaceGlobals(loggerMgr)
|
||||
defer loggerMgr.Sync() // flushes buffer, if any
|
||||
|
||||
logger := loggerMgr.Sugar()
|
||||
version.PrintVersion()
|
||||
|
||||
serverOptions := &app.ServerOptions{
|
||||
@@ -137,22 +140,28 @@ func main() {
|
||||
auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET")
|
||||
|
||||
if len(auth.JwtSecret) == 0 {
|
||||
zap.S().Warn("No JWT secret key is specified.")
|
||||
zap.L().Warn("No JWT secret key is specified.")
|
||||
} else {
|
||||
zap.S().Info("No JWT secret key set successfully.")
|
||||
zap.L().Info("JWT secret key set successfully.")
|
||||
}
|
||||
|
||||
if err := migrate.Migrate(constants.RELATIONAL_DATASOURCE_PATH); err != nil {
|
||||
zap.L().Error("Failed to migrate", zap.Error(err))
|
||||
} else {
|
||||
zap.L().Info("Migration successful")
|
||||
}
|
||||
|
||||
server, err := app.NewServer(serverOptions)
|
||||
if err != nil {
|
||||
logger.Fatal("Failed to create server", zap.Error(err))
|
||||
zap.L().Fatal("Failed to create server", zap.Error(err))
|
||||
}
|
||||
|
||||
if err := server.Start(); err != nil {
|
||||
logger.Fatal("Could not start servers", zap.Error(err))
|
||||
zap.L().Fatal("Could not start server", zap.Error(err))
|
||||
}
|
||||
|
||||
if err := auth.InitAuthCache(context.Background()); err != nil {
|
||||
logger.Fatal("Failed to initialize auth cache", zap.Error(err))
|
||||
zap.L().Fatal("Failed to initialize auth cache", zap.Error(err))
|
||||
}
|
||||
|
||||
signalsChannel := make(chan os.Signal, 1)
|
||||
@@ -161,9 +170,9 @@ func main() {
|
||||
for {
|
||||
select {
|
||||
case status := <-server.HealthCheckStatus():
|
||||
logger.Info("Received HealthCheck status: ", zap.Int("status", int(status)))
|
||||
zap.L().Info("Received HealthCheck status: ", zap.Int("status", int(status)))
|
||||
case <-signalsChannel:
|
||||
logger.Fatal("Received OS Interrupt Signal ... ")
|
||||
zap.L().Fatal("Received OS Interrupt Signal ... ")
|
||||
server.Stop()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,8 +9,8 @@ import (
|
||||
"github.com/google/uuid"
|
||||
"github.com/pkg/errors"
|
||||
saml2 "github.com/russellhaering/gosaml2"
|
||||
"go.signoz.io/signoz/ee/query-service/sso/saml"
|
||||
"go.signoz.io/signoz/ee/query-service/sso"
|
||||
"go.signoz.io/signoz/ee/query-service/sso/saml"
|
||||
basemodel "go.signoz.io/signoz/pkg/query-service/model"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
@@ -24,16 +24,16 @@ const (
|
||||
|
||||
// OrgDomain identify org owned web domains for auth and other purposes
|
||||
type OrgDomain struct {
|
||||
Id uuid.UUID `json:"id"`
|
||||
Name string `json:"name"`
|
||||
OrgId string `json:"orgId"`
|
||||
SsoEnabled bool `json:"ssoEnabled"`
|
||||
SsoType SSOType `json:"ssoType"`
|
||||
Id uuid.UUID `json:"id"`
|
||||
Name string `json:"name"`
|
||||
OrgId string `json:"orgId"`
|
||||
SsoEnabled bool `json:"ssoEnabled"`
|
||||
SsoType SSOType `json:"ssoType"`
|
||||
|
||||
SamlConfig *SamlConfig `json:"samlConfig"`
|
||||
SamlConfig *SamlConfig `json:"samlConfig"`
|
||||
GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"`
|
||||
|
||||
Org *basemodel.Organization
|
||||
Org *basemodel.Organization
|
||||
}
|
||||
|
||||
func (od *OrgDomain) String() string {
|
||||
@@ -100,8 +100,8 @@ func (od *OrgDomain) GetSAMLCert() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
|
||||
// requesting OAuth and also used in processing response from google
|
||||
// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
|
||||
// requesting OAuth and also used in processing response from google
|
||||
func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {
|
||||
if od.GoogleAuthConfig == nil {
|
||||
return nil, fmt.Errorf("Google auth is not setup correctly for this domain")
|
||||
@@ -137,38 +137,36 @@ func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServicePro
|
||||
}
|
||||
|
||||
func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
|
||||
|
||||
|
||||
fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1)
|
||||
|
||||
|
||||
// build redirect url from window.location sent by frontend
|
||||
redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path)
|
||||
|
||||
// prepare state that gets relayed back when the auth provider
|
||||
// calls back our url. here we pass the app url (where signoz runs)
|
||||
// and the domain Id. The domain Id helps in identifying sso config
|
||||
// when the call back occurs and the app url is useful in redirecting user
|
||||
// back to the right path.
|
||||
// when the call back occurs and the app url is useful in redirecting user
|
||||
// back to the right path.
|
||||
// why do we need to pass app url? the callback typically is handled by backend
|
||||
// and sometimes backend might right at a different port or is unaware of frontend
|
||||
// endpoint (unless SITE_URL param is set). hence, we receive this build sso request
|
||||
// along with frontend window.location and use it to relay the information through
|
||||
// auth provider to the backend (HandleCallback or HandleSSO method).
|
||||
// along with frontend window.location and use it to relay the information through
|
||||
// auth provider to the backend (HandleCallback or HandleSSO method).
|
||||
relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId)
|
||||
|
||||
|
||||
switch (od.SsoType) {
|
||||
switch od.SsoType {
|
||||
case SAML:
|
||||
|
||||
sp, err := od.PrepareSamlRequest(siteUrl)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
|
||||
return sp.BuildAuthURL(relayState)
|
||||
|
||||
|
||||
case GoogleAuth:
|
||||
|
||||
|
||||
googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl)
|
||||
if err != nil {
|
||||
return "", err
|
||||
@@ -176,9 +174,8 @@ func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
|
||||
return googleProvider.BuildAuthURL(relayState)
|
||||
|
||||
default:
|
||||
zap.S().Errorf("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
|
||||
return "", fmt.Errorf("unsupported SSO config for the domain")
|
||||
zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
|
||||
return "", fmt.Errorf("unsupported SSO config for the domain")
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
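`BuildSsoUrl` above embeds the frontend's window.location and the org domain id (dashes swapped for colons) into the relay state so the IdP callback can be routed back to the right SigNoz URL and SSO config. A small sketch of that relay-state construction; the sample URL and UUID are illustrative:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

func buildRelayState(siteUrl *url.URL, domainId string) string {
	// Dashes are replaced with colons, as in the diff, before the id is
	// embedded in the query string relayed through the auth provider.
	fmtDomainId := strings.Replace(domainId, "-", ":", -1)

	// Redirect target rebuilt from the frontend's window.location.
	redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path)

	return fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId)
}

func main() {
	u, _ := url.Parse("https://signoz.example.com/login")
	fmt.Println(buildRelayState(u, "9f0c2a4e-1111-2222-3333-444455556666"))
}
```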
@@ -52,14 +52,14 @@ var BasicPlan = basemodel.FeatureSet{
|
||||
Name: basemodel.QueryBuilderPanels,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: 20,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.QueryBuilderAlerts,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: 10,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
|
||||
@@ -102,6 +102,6 @@ func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (*
|
||||
IDPCertificateStore: certStore,
|
||||
SPKeyStore: randomKeyStore,
|
||||
}
|
||||
zap.S().Debugf("SAML request:", sp)
|
||||
zap.L().Debug("SAML request", zap.Any("sp", sp))
|
||||
return sp, nil
|
||||
}
|
||||
|
||||
@@ -91,12 +91,12 @@ func (lm *Manager) UploadUsage() {
|
||||
// check if license is present or not
|
||||
license, err := lm.licenseRepo.GetActiveLicense(ctx)
|
||||
if err != nil {
|
||||
zap.S().Errorf("failed to get active license: %v", zap.Error(err))
|
||||
zap.L().Error("failed to get active license", zap.Error(err))
|
||||
return
|
||||
}
|
||||
if license == nil {
|
||||
// we will not start the usage reporting if license is not present.
|
||||
zap.S().Info("no license present, skipping usage reporting")
|
||||
zap.L().Info("no license present, skipping usage reporting")
|
||||
return
|
||||
}
|
||||
|
||||
@@ -123,7 +123,7 @@ func (lm *Manager) UploadUsage() {
|
||||
dbusages := []model.UsageDB{}
|
||||
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
|
||||
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
|
||||
zap.S().Errorf("failed to get usage from clickhouse: %v", zap.Error(err))
|
||||
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
for _, u := range dbusages {
|
||||
@@ -133,16 +133,16 @@ func (lm *Manager) UploadUsage() {
|
||||
}
|
||||
|
||||
if len(usages) <= 0 {
|
||||
zap.S().Info("no snapshots to upload, skipping.")
|
||||
zap.L().Info("no snapshots to upload, skipping.")
|
||||
return
|
||||
}
|
||||
|
||||
zap.S().Info("uploading usage data")
|
||||
zap.L().Info("uploading usage data")
|
||||
|
||||
orgName := ""
|
||||
orgNames, orgError := lm.modelDao.GetOrgs(ctx)
|
||||
if orgError != nil {
|
||||
zap.S().Errorf("failed to get org data: %v", zap.Error(orgError))
|
||||
zap.L().Error("failed to get org data: %v", zap.Error(orgError))
|
||||
}
|
||||
if len(orgNames) == 1 {
|
||||
orgName = orgNames[0].Name
|
||||
@@ -152,14 +152,14 @@ func (lm *Manager) UploadUsage() {
|
||||
for _, usage := range usages {
|
||||
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
|
||||
if err != nil {
|
||||
zap.S().Errorf("error while decrypting usage data: %v", zap.Error(err))
|
||||
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
usageData := model.Usage{}
|
||||
err = json.Unmarshal(usageDataBytes, &usageData)
|
||||
if err != nil {
|
||||
zap.S().Errorf("error while unmarshalling usage data: %v", zap.Error(err))
|
||||
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
@@ -184,13 +184,13 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
|
||||
for i := 1; i <= MaxRetries; i++ {
|
||||
apiErr := licenseserver.SendUsage(ctx, payload)
|
||||
if apiErr != nil && i == MaxRetries {
|
||||
zap.S().Errorf("retries stopped : %v", zap.Error(apiErr))
|
||||
zap.L().Error("retries stopped : %v", zap.Error(apiErr))
|
||||
// not returning error here since it is captured in the failed count
|
||||
return
|
||||
} else if apiErr != nil {
|
||||
// sleeping for exponential backoff
|
||||
sleepDuration := RetryInterval * time.Duration(i)
|
||||
zap.S().Errorf("failed to upload snapshot retrying after %v secs : %v", sleepDuration.Seconds(), zap.Error(apiErr.Err))
|
||||
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
|
||||
time.Sleep(sleepDuration)
|
||||
} else {
|
||||
break
|
||||
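`UploadUsageWithExponentalBackOff` above retries the snapshot upload up to `MaxRetries` times, sleeping `RetryInterval * attempt` between failures (linear growth, despite the name). A stripped-down sketch of that retry shape; `MaxRetries` and the loop structure come from the hunk, while the `RetryInterval` value and the `send` stand-in for `licenseserver.SendUsage` are assumptions:

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

const MaxRetries = 3

// RetryInterval is illustrative; its real value is not shown in the hunk.
var RetryInterval = 200 * time.Millisecond

// send is a stand-in for licenseserver.SendUsage.
func send(attempt int) error {
	if attempt < 3 {
		return errors.New("upstream unavailable")
	}
	return nil
}

func uploadWithBackoff() {
	for i := 1; i <= MaxRetries; i++ {
		err := send(i)
		if err != nil && i == MaxRetries {
			fmt.Println("retries stopped:", err)
			return // the caller records this via a failed-upload count
		} else if err != nil {
			sleep := RetryInterval * time.Duration(i)
			fmt.Printf("upload failed, retrying after %v: %v\n", sleep, err)
			time.Sleep(sleep)
		} else {
			break
		}
	}
	fmt.Println("snapshot uploaded")
}

func main() {
	uploadWithBackoff()
}
```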
@@ -201,7 +201,7 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
|
||||
func (lm *Manager) Stop() {
|
||||
lm.scheduler.Stop()
|
||||
|
||||
zap.S().Debug("sending usage data before shutting down")
|
||||
zap.L().Info("sending usage data before shutting down")
|
||||
// send usage before shutting down
|
||||
lm.UploadUsage()
|
||||
|
||||
|
||||
frontend/.gitignore (new file, vendored, +3)
@@ -0,0 +1,3 @@
|
||||
|
||||
# Sentry Config File
|
||||
.env.sentry-build-plugin
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM nginx:1.25.2-alpine
|
||||
FROM nginx:1.26-alpine
|
||||
|
||||
# Add Maintainer Info
|
||||
LABEL maintainer="signoz"
|
||||
|
||||
@@ -4,6 +4,7 @@ const config: Config.InitialOptions = {
|
||||
clearMocks: true,
|
||||
coverageDirectory: 'coverage',
|
||||
coverageReporters: ['text', 'cobertura', 'html', 'json-summary'],
|
||||
collectCoverageFrom: ['src/**/*.{ts,tsx}'],
|
||||
moduleFileExtensions: ['ts', 'tsx', 'js', 'json'],
|
||||
modulePathIgnorePatterns: ['dist'],
|
||||
moduleNameMapper: {
|
||||
@@ -20,8 +21,6 @@ const config: Config.InitialOptions = {
|
||||
transform: {
|
||||
'^.+\\.(ts|tsx)?$': 'ts-jest',
|
||||
'^.+\\.(js|jsx)$': 'babel-jest',
|
||||
'^.+\\.(css|scss|sass|less)$': 'jest-preview/transforms/css',
|
||||
'^(?!.*\\.(js|jsx|mjs|cjs|ts|tsx|css|json)$)': 'jest-preview/transforms/file',
|
||||
},
|
||||
transformIgnorePatterns: [
|
||||
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color)/)',
|
||||
@@ -35,6 +34,14 @@ const config: Config.InitialOptions = {
|
||||
browsers: ['chromium', 'firefox', 'webkit'],
|
||||
},
|
||||
},
|
||||
coverageThreshold: {
|
||||
global: {
|
||||
statements: 80,
|
||||
branches: 65,
|
||||
functions: 80,
|
||||
lines: 80,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export default config;
|
||||
|
||||
@@ -13,15 +13,15 @@
|
||||
"jest": "jest",
|
||||
"jest:coverage": "jest --coverage",
|
||||
"jest:watch": "jest --watch",
|
||||
"jest-preview": "jest-preview",
|
||||
"test:debug": "npm-run-all -p test jest-preview",
|
||||
"postinstall": "is-ci || yarn husky:configure",
|
||||
"playwright": "npm run i18n:generate-hash && NODE_ENV=testing playwright test --config=./playwright.config.ts",
|
||||
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
|
||||
"playwright:codegen:local": "playwright codegen http://localhost:3301",
|
||||
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
|
||||
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
|
||||
"commitlint": "commitlint --edit $1"
|
||||
"commitlint": "commitlint --edit $1",
|
||||
"test": "jest --coverage",
|
||||
"test:changedsince": "jest --changedSince=develop --coverage --silent"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.15.0"
|
||||
@@ -41,14 +41,17 @@
|
||||
"@radix-ui/react-tabs": "1.0.4",
|
||||
"@radix-ui/react-tooltip": "1.0.7",
|
||||
"@sentry/react": "7.102.1",
|
||||
"@sentry/webpack-plugin": "2.14.2",
|
||||
"@sentry/webpack-plugin": "2.16.0",
|
||||
"@signozhq/design-tokens": "0.0.8",
|
||||
"@uiw/react-md-editor": "3.23.5",
|
||||
"@visx/group": "3.3.0",
|
||||
"@visx/shape": "3.5.0",
|
||||
"@visx/tooltip": "3.3.0",
|
||||
"@xstate/react": "^3.0.0",
|
||||
"ansi-to-html": "0.7.2",
|
||||
"antd": "5.11.0",
|
||||
"antd-table-saveas-excel": "2.2.1",
|
||||
"axios": "1.6.2",
|
||||
"axios": "1.6.4",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"babel-jest": "^29.6.4",
|
||||
"babel-loader": "9.1.3",
|
||||
@@ -82,7 +85,7 @@
|
||||
"less": "^4.1.2",
|
||||
"less-loader": "^10.2.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"lucide-react": "0.321.0",
|
||||
"lucide-react": "0.379.0",
|
||||
"mini-css-extract-plugin": "2.4.5",
|
||||
"papaparse": "5.4.1",
|
||||
"react": "18.2.0",
|
||||
@@ -121,6 +124,7 @@
|
||||
"web-vitals": "^0.2.4",
|
||||
"webpack": "5.88.2",
|
||||
"webpack-dev-server": "^4.15.1",
|
||||
"webpack-retry-chunk-load-plugin": "3.1.1",
|
||||
"xstate": "^4.31.0"
|
||||
},
|
||||
"browserslist": {
|
||||
@@ -177,7 +181,7 @@
|
||||
"@types/webpack-dev-server": "^4.7.2",
|
||||
"@typescript-eslint/eslint-plugin": "^4.33.0",
|
||||
"@typescript-eslint/parser": "^4.33.0",
|
||||
"autoprefixer": "^9.0.0",
|
||||
"autoprefixer": "10.4.19",
|
||||
"babel-plugin-styled-components": "^1.12.0",
|
||||
"compression-webpack-plugin": "9.0.0",
|
||||
"copy-webpack-plugin": "^8.1.0",
|
||||
@@ -200,12 +204,12 @@
|
||||
"husky": "^7.0.4",
|
||||
"is-ci": "^3.0.1",
|
||||
"jest-playwright-preset": "^1.7.2",
|
||||
"jest-preview": "0.3.1",
|
||||
"jest-styled-components": "^7.0.8",
|
||||
"lint-staged": "^12.5.0",
|
||||
"msw": "1.3.2",
|
||||
"npm-run-all": "latest",
|
||||
"portfinder-sync": "^0.0.2",
|
||||
"postcss": "8.4.38",
|
||||
"prettier": "2.2.1",
|
||||
"raw-loader": "4.0.2",
|
||||
"react-hooks-testing-library": "0.6.0",
frontend/public/Icons/cable-car.svg (new file, +1)
@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#prefix__clip0_2022_1972)" stroke="#fff" stroke-width="1.333" stroke-linecap="round" stroke-linejoin="round"><path d="M6.667 2h.006M9.333 1.333h.007M1.333 6l13.334-3.333M8 8V4.333M11.333 8H4.667a2 2 0 00-2 2v2.667a2 2 0 002 2h6.666a2 2 0 002-2V10a2 2 0 00-2-2zM6 8v3.333M10 8v3.333M2.667 11.334h10.666"/></g><defs><clipPath id="prefix__clip0_2022_1972"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>
frontend/public/Icons/configure.svg (new file, +1)
@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.333" stroke-linecap="round"><path d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z" stroke-linejoin="round"/><path d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3M10.75 10.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"/></g></svg>
frontend/public/Icons/dashboard_emoji.svg (new file, +1)
@@ -0,0 +1 @@
<svg width="32" height="32" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M13.72 12.839l-9.054.92s.05.649.236.798c.178.142 5.617.066 11.048.088 5.433.023 10.82.125 10.944.072.249-.107.249-.992.249-.992l-13.424-.886zM16.55 7.787l-12.623-.32s.275.61.637.813c.523.29 3.71.889 11.518.918 7.808.028 10.635-.4 11.317-.678.58-.238 1.215-1.576 1.215-1.576l-12.064.843z" fill="#8A1E0C"/><path d="M21.95 8.658v1.335l2.176-.087V8.542l-2.176.116z" fill="#8A1E0C"/><path d="M21.948 9.566h2.177v16.797l-2.206.294.029-17.09z" fill="#EB2901"/><path d="M21.355 26.19c-.111.193-.111 2.297-.007 2.444.105.147 3.242.104 3.326 0 .085-.104.063-2.38 0-2.464-.062-.085-3.235-.125-3.32.02z" fill="#474C4F"/><path d="M8.462 9.85V8.488l2.042.125v1.22l-2.042.017z" fill="#8A1E0C"/><path d="M8.462 9.55l-.038 17.051 2.08-.207V9.566l-2.042-.015z" fill="#EB2901"/><path d="M7.804 25.919c-.073.073-.147 2.36-.02 2.464.125.104 3.14.129 3.244.024.105-.104.085-2.304.023-2.43-.063-.127-3.142-.163-3.247-.058z" fill="#474C4F"/><path d="M14.788 8.107v4.876l2.393-.33V8.108h-2.393z" fill="#EB2901"/><path d="M27.067 11.978c-.115-.16-.482-.138-.482-.138l-1.137-.013c.002-.398-.01-.913-.078-.996-.116-.137-4.542-.09-4.702.047-.091.078-.11.527-.107.898-2.738-.027-5.99-.058-8.83-.076 0-.384-.012-.849-.078-.915-.116-.116-4.22-.185-4.38-.07-.113.083-.136.647-.138.97-1.384.002-2.275.013-2.34.04-.322.137-.137 2.042-.137 2.042l22.476.16c.002.002.049-1.787-.067-1.95z" fill="#EB2901"/><path d="M3.93 6.942s-.646-.34-1.377-1.573c-.509-.858-.595-1.658-.387-1.778.21-.12 2.154 1.08 5.745 1.616a60.81 60.81 0 008.173.644c2.884.027 5.717-.135 8.397-.644 3.62-.689 4.906-1.436 5.264-1.316.36.12-.109 1.227-.369 1.78-.178.376-.944 1.77-1.515 1.87-.411.072-19.953-.09-19.953-.09l-3.977-.509z" fill="#474C4F"/><path d="M3.31 5.724c-.108.137-.057.457.212 1.06.107.237.415.782.529.917 0 0 2.982.756 11.977.7 8.995-.055 12.108-.62 12.108-.62s.911-1.277.745-1.32c-.096-.024-4.847.98-12.909.898C7.911 7.277 3.311 5.724 3.311 5.724z" fill="#EB2901"/></svg>
frontend/public/Icons/dashboards.svg (new file, +1, 5.2 KiB, no text preview)
frontend/public/Icons/group.svg (new file, +1)
@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.333" stroke-linecap="round" stroke-linejoin="round"><path d="M2 4.667V3.333C2 2.6 2.6 2 3.333 2h1.334M11.333 2h1.334C13.4 2 14 2.6 14 3.333v1.334M14 11.334v1.333C14 13.4 13.4 14 12.667 14h-1.334M4.667 14H3.333C2.6 14 2 13.4 2 12.667v-1.333M8.667 4.667H5.333a.667.667 0 00-.666.666v2c0 .368.298.667.666.667h3.334a.667.667 0 00.666-.667v-2a.667.667 0 00-.666-.667zM10.667 8H7.333a.667.667 0 00-.666.667v2c0 .368.298.666.666.666h3.334a.667.667 0 00.666-.666v-2A.667.667 0 0010.667 8z"/></g></svg>
frontend/public/Icons/landscape.svg (new file, +1, 6.1 KiB, no text preview)
@@ -1 +0,0 @@
<svg width="24" height="24" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M12 2c1 2.538 2.5 2.962 3.5 3.808.942.78 1.481 1.845 1.5 2.961 0 1.122-.527 2.198-1.464 2.992C14.598 12.554 13.326 13 12 13s-2.598-.446-3.536-1.24C7.527 10.968 7 9.892 7 8.77c0-.255 0-.508.1-.762.085.25.236.48.443.673.207.193.463.342.75.437a2.334 2.334 0 001.767-.128c.263-.135.485-.32.65-.539.166-.22.269-.468.301-.727a1.452 1.452 0 00-.11-.765 1.699 1.699 0 00-.501-.644C8 4.115 11 2 12 2zM17 16l-5 6-5-6h10z" stroke="#fff" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/></svg>
frontend/public/Icons/tools.svg (new file, +1)
@@ -0,0 +1 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M1.305 13.063c.74.739 1.637.482 2.156-.109.53-.604.813-.956.813-.956.66-.973 3.392-4.227 5.724-6.568a2.638 2.638 0 002.74-.434 2.648 2.648 0 00.922-2.041.155.155 0 00-.23-.132l-1.607.927a1.64 1.64 0 01-1.076-1.864l1.6-.923a.153.153 0 00.077-.134.153.153 0 00-.077-.133 2.65 2.65 0 00-3.66 3.563C6.11 6.826 2.966 9.604 2.15 10.223c0 0-.492.356-.962.84-.464.476-.636 1.245.117 1.999zm.542-1.137a.592.592 0 111.184 0 .592.592 0 01-1.184 0z" fill="#82AEC0"/><path d="M8.334 4.61l.353-.35a2.63 2.63 0 01-.212-2.039c.073-.12.189-.249.262-.171-.03.946.245 1.931.902 2.611.327.338.752.582 1.207.696.224.057.458.082.69.069.137-.008.519-.149.596-.044v.004a2.656 2.656 0 01-2.135.043 38.176 38.176 0 00-1.903 2.05c.262-.495 1.034-1.408 1.241-1.757a.412.412 0 00-.036-.464c-.207-.255-.633-.493-.965-.649z" fill="#2F7889"/><path d="M5.186 8.529c.06-.062.004-.167-.08-.148-.158.035-.386.125-.657.345-.531.43-1.934 1.595-2.107 1.825-.173.23.522-.003.767-.047.2-.036 1.602-1.48 2.077-1.975zM10.048 1.104c-.296.212-.563.465-.84.701-.072.061-.177.122-.25.065-.08-.064-.03-.191.03-.274C9.512.874 10.493.358 11.442.563c-.5.161-.95.223-1.395.541z" fill="#B9E4EA"/><path d="M12.408 3.583a2.1 2.1 0 01-.371.19c-.112.031-.43-.092-.522-.166l1.183-.772c.043-.028.087-.056.137-.072a.546.546 0 01.185-.014c.087.004.51-.01.56.064.05.075-.126.149-.183.183-.33.197-.66.391-.99.587zM7.867 7.687L6.624 6.254c-.45.423-.895.835-1.321 1.225l.362-.078a.482.482 0 01.439.13l.58.65c.122.122.142.334.096.5l-.065.308c.367-.423.755-.862 1.152-1.302z" fill="#2F7889"/><g><path d="M13.378 12.86l-.744.643a.686.686 0 01-.968-.072L2.84 2.779l1.135-.853 9.459 9.976a.668.668 0 01-.057.957z" fill="#A06841"/><path d="M3.648 3.752l2.1 2.535c.328-.493.494-1.084.629-1.83l-2.028-2.14a1.838 1.838 0 00-.414.48 2.17 2.17 0 00-.287.955z" fill="#7D5133"/><path d="M7.81.438C5.885.416 5.17.588 4.098 1.515l-.966.835c-.35.302-.815.566-.742 1.089.027.19.086.384.05.573-.034.179-.242.268-.39.166-.139-.096-.292-.214-.463-.234a.588.588 0 00-.45.14l-.747.664s-.107.434.729 1.38c.835.946 1.373.878 1.373.878l.702-.618a.53.53 0 00.176-.412c-.003-.184-.11-.326-.174-.49-.013-.031-.083-.143.04-.244.109-.094.333-.062.46-.027.129.034.25.088.38.122.25.065.369-.051.543-.201L6.013 3.93c.619-.536-.325-1.474-.325-1.474C5.244 1.953 7.941.687 7.941.687c.198-.069.138-.246-.13-.249z" fill="#82AEC0"/><path d="M4.076 5.338a.504.504 0 00.14.016v-.02c-.011-.12-.077-.23-.144-.33A7.18 7.18 0 002.545 3.33a1.683 1.683 0 00-.154-.111.726.726 0 00-.002.22c.027.19.086.384.05.573-.038.196-.242.25-.399.177a3.27 3.27 0 011.011 1.027c.035.056.07.115.11.168a.2.2 0 01.075-.14c.109-.095.333-.063.46-.029.13.034.25.088.38.123zM1.778 5.573c.585.613.914 1.247.734 1.42-.179.17-.799-.186-1.384-.797C.542 5.584.21 4.92.388 4.748c.18-.171.804.213 1.39.825z" fill="#2F7889"/><path d="M4.057 2.41c.465-.198.88-.623 1.422-1.09A2.53 2.53 0 016.03.964c.076-.035.048-.149-.036-.148-.278.005-.527.09-.772.196-.342.149-.644.374-.935.608-.2.16-.67.555-.965.805-.055.047-.012.12.06.12.208.002.325.014.674-.135zM1.124 4.352c-.196.221.055.281.496.646.311.257.642.018.645-.223.003-.216-.052-.333-.366-.53-.315-.199-.597-.093-.775.107z" fill="#B9E4EA"/></g></svg>
|
||||
|
After Width: | Height: | Size: 3.2 KiB |
234
frontend/public/Images/blankDashboardTemplatePreview.svg
Normal file
|
After Width: | Height: | Size: 204 KiB |
9
frontend/public/Images/redisTemplatePreview.svg
Normal file
|
After Width: | Height: | Size: 1.7 MiB |
@@ -37,11 +37,16 @@
"text_condition1": "Send a notification when",
"text_condition2": "the threshold",
"text_condition3": "during the last",
"option_1min": "1 min",
"option_5min": "5 mins",
"option_10min": "10 mins",
"option_15min": "15 mins",
"option_30min": "30 mins",
"option_60min": "60 mins",
"option_4hours": "4 hours",
"option_3hours": "3 hours",
"option_6hours": "6 hours",
"option_12hours": "12 hours",
"option_24hours": "24 hours",
"field_threshold": "Alert Threshold",
"option_allthetimes": "all the times",
@@ -112,6 +117,7 @@
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
"field_unit": "Threshold unit",
"text_alert_on_absent": "Send a notification if data is missing for",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query"
}
@@ -15,6 +15,7 @@
"button_test_channel": "Test",
"button_return": "Back",
"field_channel_name": "Name",
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"field_slack_recipient": "Recipient",
@@ -1,6 +1,6 @@
{
"create_dashboard": "Create Dashboard",
"import_json": "Import JSON",
"import_json": "Import Dashboard JSON",
"import_grafana_json": "Import Grafana JSON",
"copy_to_clipboard": "Copy To ClipBoard",
"download_json": "Download JSON",
@@ -9,13 +9,14 @@
"upload_json_file": "Upload JSON file",
"paste_json_below": "Paste JSON below",
"error_upload_json": "Invalid JSON",
"load_json": "Load JSON",
"import_and_next": "Import and Next",
"import_dashboard_by_pasting": "Import dashboard by pasting JSON or importing JSON file",
"error_loading_json": "Error loading JSON file",
"empty_json_not_allowed": "Empty JSON is not allowed",
"new_dashboard_title": "Sample Title",
"layout_saved_successfully": "Layout saved successfully",
"add_panel": "Add Panel",
"add_row": "Add Row",
"save_layout": "Save Layout",
"variable_updated_successfully": "Variable updated successfully",
"error_while_updating_variable": "Error while updating variable",
@@ -14,6 +14,5 @@
"delete_domain_message": "Are you sure you want to delete this domain?",
"delete_domain": "Delete Domain",
"add_domain": "Add Domains",
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly",
"invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually"
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly"
}
@@ -37,11 +37,16 @@
"text_condition1": "Send a notification when",
"text_condition2": "the threshold",
"text_condition3": "during the last",
"option_1min": "1 min",
"option_5min": "5 mins",
"option_10min": "10 mins",
"option_15min": "15 mins",
"option_30min": "30 mins",
"option_60min": "60 mins",
"option_3hours": "3 hours",
"option_4hours": "4 hours",
"option_6hours": "6 hours",
"option_12hours": "12 hours",
"option_24hours": "24 hours",
"field_threshold": "Alert Threshold",
"option_allthetimes": "all the times",
@@ -112,6 +117,7 @@
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
"field_unit": "Threshold unit",
"text_alert_on_absent": "Send a notification if data is missing for",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query"
}
frontend/public/locales/en/billings.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
"days_remaining": "days remaining in your billing period.",
"billing": "Billing",
"manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.",
"enterprise_cloud": "Enterprise Cloud",
"enterprise": "Enterprise",
"card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.",
"upgrade_plan": "Upgrade Plan",
"manage_billing": "Manage Billing",
"upgrade_now_text": "Upgrade now to have uninterrupted access",
"billing_start_info": "Your billing will start only after the trial period",
"checkout_plans": "Check out features in paid plans",
"here": "here"
}
@@ -15,6 +15,7 @@
"button_test_channel": "Test",
"button_return": "Back",
"field_channel_name": "Name",
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"field_slack_recipient": "Recipient",
@@ -1,6 +1,6 @@
{
"create_dashboard": "Create Dashboard",
"import_json": "Import JSON",
"import_json": "Import Dashboard JSON",
"import_grafana_json": "Import Grafana JSON",
"copy_to_clipboard": "Copy To ClipBoard",
"download_json": "Download JSON",
@@ -9,13 +9,14 @@
"upload_json_file": "Upload JSON file",
"paste_json_below": "Paste JSON below",
"error_upload_json": "Invalid JSON",
"load_json": "Load JSON",
"import_and_next": "Import and Next",
"import_dashboard_by_pasting": "Import dashboard by pasting JSON or importing JSON file",
"error_loading_json": "Error loading JSON file",
"empty_json_not_allowed": "Empty JSON is not allowed",
"new_dashboard_title": "Sample Title",
"layout_saved_successfully": "Layout saved successfully",
"add_panel": "Add Panel",
"add_row": "Add Row",
"save_layout": "Save Layout",
"full_view": "Full Screen View",
"variable_updated_successfully": "Variable updated successfully",
@@ -14,6 +14,5 @@
"delete_domain_message": "Are you sure you want to delete this domain?",
"delete_domain": "Delete Domain",
"add_domain": "Add Domains",
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly",
"invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually"
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly"
}
@@ -48,5 +48,5 @@
"TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views",
"DEFAULT": "Open source Observability Platform | SigNoz",
"SHORTCUTS": "SigNoz | Shortcuts",
"INTEGRATIONS_INSTALLED": "SigNoz | Integrations"
"INTEGRATIONS": "SigNoz | Integrations"
}
@@ -9,13 +9,14 @@ import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
import useAnalytics from 'hooks/analytics/useAnalytics';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import useGetFeatureFlag from 'hooks/useGetFeatureFlag';
import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history';
import { identity, pickBy } from 'lodash-es';
import { identity, pick, pickBy } from 'lodash-es';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useEffect, useState } from 'react';
@@ -46,12 +47,14 @@ function App(): JSX.Element {

const dispatch = useDispatch<Dispatch<AppActions>>();

const { trackPageView } = useAnalytics();
const { trackPageView, trackEvent } = useAnalytics();

const { hostname, pathname } = window.location;

const isCloudUserVal = isCloudUser();

const isDarkMode = useIsDarkMode();

const featureResponse = useGetFeatureFlag((allFlags) => {
const isOnboardingEnabled =
allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
@@ -147,7 +150,11 @@ function App(): JSX.Element {
}
}

if (isOnBasicPlan || (isLoggedInState && role && role !== 'ADMIN')) {
if (
isOnBasicPlan ||
(isLoggedInState && role && role !== 'ADMIN') ||
!(isCloudUserVal || isEECloudUser())
) {
const newRoutes = routes.filter((route) => route?.path !== ROUTES.BILLING);
setRoutes(newRoutes);
}
@@ -170,6 +177,25 @@ function App(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [pathname]);

useEffect(() => {
try {
const isThemeAnalyticsSent = getLocalStorageApi(
LOCALSTORAGE.THEME_ANALYTICS,
);
if (!isThemeAnalyticsSent) {
trackEvent('Theme Analytics', {
theme: isDarkMode ? THEME_MODE.DARK : THEME_MODE.LIGHT,
user: pick(user, ['email', 'userId', 'name']),
org,
});
setLocalStorageApi(LOCALSTORAGE.THEME_ANALYTICS, 'true');
}
} catch {
console.error('Failed to parse local storage theme analytics event');
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

return (
<ConfigProvider theme={themeConfig}>
<Router history={history}>
@@ -7,7 +7,7 @@ export const ServicesTablePage = Loadable(
export const ServiceMetricsPage = Loadable(
() =>
import(
/* webpackChunkName: "ServiceMetricsPage" */ 'pages/MetricsApplication'
/* webpackChunkName: "ServiceMetricsPage" */ 'pages/MetricsApplication/MetricsApplication'
),
);
@@ -197,11 +197,3 @@ export const InstalledIntegrations = Loadable(
/* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage'
),
);

export const IntegrationsMarketPlace = Loadable(
// eslint-disable-next-line sonarjs/no-identical-functions
() =>
import(
/* webpackChunkName: "IntegrationsMarketPlace" */ 'pages/IntegrationsModulePage'
),
);
@@ -15,7 +15,6 @@ import {
ErrorDetails,
IngestionSettings,
InstalledIntegrations,
IntegrationsMarketPlace,
LicensePage,
ListAllALertsPage,
LiveLogs,
@@ -338,18 +337,11 @@ const routes: AppRoutes[] = [
key: 'SHORTCUTS',
},
{
path: ROUTES.INTEGRATIONS_INSTALLED,
path: ROUTES.INTEGRATIONS,
exact: true,
component: InstalledIntegrations,
isPrivate: true,
key: 'INTEGRATIONS_INSTALLED',
},
{
path: ROUTES.INTEGRATIONS_MARKETPLACE,
exact: true,
component: IntegrationsMarketPlace,
isPrivate: true,
key: 'INTEGRATIONS_MARKETPLACE',
key: 'INTEGRATIONS',
},
];
@@ -30,7 +30,8 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
statusCode,
payload: null,
error: errorMessage,
message: null,
message: (response.data as any)?.status,
body: JSON.stringify((response.data as any).data),
};
}
@@ -8,7 +8,7 @@ const listAllDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(`orgs/${props.orgId}/domains`);
const response = await axios.get(`/orgs/${props.orgId}/domains`);

return {
statusCode: 200,
@@ -13,6 +13,7 @@ export interface UsageResponsePayloadProps {
billTotal: number;
};
discount: number;
subscriptionStatus?: string;
}

const getUsage = async (
@@ -12,7 +12,7 @@ const create = async (
name: props.name,
email_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
to: props.to,
html: props.html,
headers: props.headers,

@@ -12,7 +12,7 @@ const create = async (
name: props.name,
msteams_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
webhook_url: props.webhook_url,
title: props.title,
text: props.text,

@@ -12,7 +12,7 @@ const create = async (
name: props.name,
pagerduty_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
routing_key: props.routing_key,
client: props.client,
client_url: props.client_url,

@@ -12,7 +12,7 @@ const create = async (
name: props.name,
slack_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
api_url: props.api_url,
channel: props.channel,
title: props.title,

@@ -30,7 +30,7 @@ const create = async (
name: props.name,
webhook_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
url: props.api_url,
http_config: httpConfig,
},

@@ -12,7 +12,7 @@ const editEmail = async (
name: props.name,
email_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
to: props.to,
html: props.html,
headers: props.headers,

@@ -12,7 +12,7 @@ const editMsTeams = async (
name: props.name,
msteams_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
webhook_url: props.webhook_url,
title: props.title,
text: props.text,

@@ -12,7 +12,7 @@ const editOpsgenie = async (
name: props.name,
opsgenie_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
api_key: props.api_key,
description: props.description,
priority: props.priority,

@@ -12,7 +12,7 @@ const editPager = async (
name: props.name,
pagerduty_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
routing_key: props.routing_key,
client: props.client,
client_url: props.client_url,

@@ -12,7 +12,7 @@ const editSlack = async (
name: props.name,
slack_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
api_url: props.api_url,
channel: props.channel,
title: props.title,

@@ -29,7 +29,7 @@ const editWebhook = async (
name: props.name,
webhook_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
url: props.api_url,
http_config: httpConfig,
},
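Across the channel create/edit calls above, the hard-coded send_resolved: true is replaced by the user-controlled props.send_resolved, which the new "field_send_resolved" locale key surfaces as a "Send resolved alerts" toggle. A minimal sketch of the resulting request body, assuming a Slack channel; the values below are illustrative only, the field names come from the diff:

// Illustrative only: values are made up, field names are from the diff above.
const body = {
	name: 'slack-alerts',
	slack_configs: [
		{
			send_resolved: false, // previously always true; now follows the form toggle
			api_url: 'https://hooks.slack.com/services/XXX',
			channel: '#alerts',
			title: 'SigNoz alert',
		},
	],
};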
frontend/src/api/common/logEvent.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { EventSuccessPayloadProps } from 'types/api/events/types';

const logEvent = async (
eventName: string,
attributes: Record<string, unknown>,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/event', {
eventName,
attributes,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default logEvent;
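The new logEvent helper is a thin wrapper around the shared axios instance that POSTs to /event and normalizes the response. A minimal usage sketch, assuming a hypothetical call site; the event name and attribute keys are illustrative, not taken from the diff:

import logEvent from 'api/common/logEvent';

// Hypothetical call site: event name and attributes are illustrative only.
async function reportDashboardCreated(): Promise<void> {
	const result = await logEvent('Dashboard Created', { source: 'template' });
	if (result.statusCode === 200) {
		console.log('event recorded:', result.payload);
	}
}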
@@ -1,26 +1,20 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/dashboard/update';

const updateDashboard = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/dashboards/${props.uuid}`, {
...props.data,
});
const response = await axios.put(`/dashboards/${props.uuid}`, {
...props.data,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};

export default updateDashboard;
@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiV4Instance } from 'api';
import { AxiosResponse } from 'axios';
import { MetricMetaProps } from 'types/api/metrics/getApDex';

@@ -6,4 +6,6 @@ export const getMetricMeta = (
metricName: string,
servicename: string,
): Promise<AxiosResponse<MetricMetaProps>> =>
axios.get(`/metric_meta?metricName=${metricName}&serviceName=${servicename}`);
ApiV4Instance.get(
`/metric/metric_metadata?metricName=${metricName}&serviceName=${servicename}`,
);
@@ -1,27 +0,0 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
MetricNameProps,
MetricNamesPayloadProps,
} from 'types/api/metrics/getMetricName';

export const getMetricName = async (
props: MetricNameProps,
): Promise<SuccessResponse<MetricNamesPayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(
`/metrics/autocomplete/list?match=${props || ''}`,
);

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
@@ -1,6 +1,7 @@
import { ApiV2Instance as axios } from 'api';
import { ApiV3Instance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import createQueryParams from 'lib/createQueryParams';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
TagKeyProps,
@@ -8,15 +9,19 @@ import {
TagValueProps,
TagValuesPayloadProps,
} from 'types/api/metrics/getResourceAttributes';
import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder';

export const getResourceAttributesTagKeys = async (
props: TagKeyProps,
): Promise<SuccessResponse<TagKeysPayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(
`/metrics/autocomplete/tagKey?metricName=${props.metricName}${
props.match ? `&match=${props.match}` : ''
}`,
`/autocomplete/attribute_keys?${createQueryParams({
aggregateOperator: MetricAggregateOperator.RATE,
searchText: props.match,
dataSource: DataSource.METRICS,
aggregateAttribute: props.metricName,
})}`,
);

return {
@@ -35,7 +40,13 @@ export const getResourceAttributesTagValues = async (
): Promise<SuccessResponse<TagValuesPayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(
`/metrics/autocomplete/tagValue?metricName=${props.metricName}&tagKey=${props.tagKey}`,
`/autocomplete/attribute_values?${createQueryParams({
aggregateOperator: MetricAggregateOperator.RATE,
dataSource: DataSource.METRICS,
aggregateAttribute: props.metricName,
attributeKey: props.tagKey,
searchText: '',
})}`,
);

return {
frontend/src/api/plannedDowntime/createDowntimeSchedule.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';

import { Recurrence } from './getAllDowntimeSchedules';

export interface DowntimeSchedulePayload {
name: string;
description?: string;
alertIds: string[];
schedule: {
timezone?: string;
startTime?: string;
endTime?: string;
recurrence?: Recurrence;
};
}

export interface PayloadProps {
status: string;
data: string;
}

const createDowntimeSchedule = async (
props: DowntimeSchedulePayload,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/downtime_schedules', {
...props,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default createDowntimeSchedule;
frontend/src/api/plannedDowntime/deleteDowntimeSchedule.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import axios from 'api';
import { useMutation, UseMutationResult } from 'react-query';

export interface DeleteDowntimeScheduleProps {
id?: number;
}

export interface DeleteSchedulePayloadProps {
status: string;
data: string;
}

export const useDeleteDowntimeSchedule = (
props: DeleteDowntimeScheduleProps,
): UseMutationResult<DeleteSchedulePayloadProps, Error, number> =>
useMutation({
mutationKey: [props.id],
mutationFn: () => axios.delete(`/downtime_schedules/${props.id}`),
});
frontend/src/api/plannedDowntime/getAllDowntimeSchedules.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import axios from 'api';
import { AxiosError, AxiosResponse } from 'axios';
import { Option } from 'container/PlannedDowntime/DropdownWithSubMenu/DropdownWithSubMenu';
import { useQuery, UseQueryResult } from 'react-query';

export type Recurrence = {
startTime?: string | null;
endTime?: string | null;
duration?: number | string | null;
repeatType?: string | Option | null;
repeatOn?: string[] | null;
};

type Schedule = {
timezone: string | null;
startTime: string | null;
endTime: string | null;
recurrence: Recurrence | null;
};

export interface DowntimeSchedules {
id: number;
name: string | null;
description: string | null;
schedule: Schedule | null;
alertIds: string[] | null;
createdAt: string | null;
createdBy: string | null;
updatedAt: string | null;
updatedBy: string | null;
}
export type PayloadProps = { data: DowntimeSchedules[] };

export const getAllDowntimeSchedules = async (
props?: GetAllDowntimeSchedulesPayloadProps,
): Promise<AxiosResponse<PayloadProps>> =>
axios.get('/downtime_schedules', { params: props });

export interface GetAllDowntimeSchedulesPayloadProps {
active?: boolean;
recurrence?: boolean;
}

export const useGetAllDowntimeSchedules = (
props?: GetAllDowntimeSchedulesPayloadProps,
): UseQueryResult<AxiosResponse<PayloadProps>, AxiosError> =>
useQuery<AxiosResponse<PayloadProps>, AxiosError>({
queryKey: ['getAllDowntimeSchedules', props],
queryFn: () => getAllDowntimeSchedules(props),
});
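getAllDowntimeSchedules wraps GET /downtime_schedules and useGetAllDowntimeSchedules exposes it as a react-query hook keyed on the filter props. A minimal consumption sketch, assuming a hypothetical component that lists only active schedules; the rendering details are illustrative:

import { useGetAllDowntimeSchedules } from 'api/plannedDowntime/getAllDowntimeSchedules';

// Hypothetical component: markup is illustrative only.
function ActiveDowntimeList(): JSX.Element {
	const { data, isLoading } = useGetAllDowntimeSchedules({ active: true });
	if (isLoading) return <div>Loading...</div>;
	// data is the AxiosResponse; data.data.data is the DowntimeSchedules[] array
	return (
		<ul>
			{data?.data.data.map((schedule) => (
				<li key={schedule.id}>{schedule.name}</li>
			))}
		</ul>
	);
}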
frontend/src/api/plannedDowntime/updateDowntimeSchedule.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';

import { DowntimeSchedulePayload } from './createDowntimeSchedule';

export interface DowntimeScheduleUpdatePayload {
data: DowntimeSchedulePayload;
id?: number;
}

export interface PayloadProps {
status: string;
data: string;
}

const updateDowntimeSchedule = async (
props: DowntimeScheduleUpdatePayload,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/downtime_schedules/${props.id}`, {
...props.data,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default updateDowntimeSchedule;
@@ -24,7 +24,7 @@ export const getAggregateAttribute = async ({
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await ApiV3Instance.get(
`autocomplete/aggregate_attributes?${createQueryParams({
`/autocomplete/aggregate_attributes?${createQueryParams({
aggregateOperator,
searchText,
dataSource,

@@ -25,7 +25,7 @@ export const getAggregateKeys = async ({
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await ApiV3Instance.get(
`autocomplete/attribute_keys?${createQueryParams({
`/autocomplete/attribute_keys?${createQueryParams({
aggregateOperator,
searchText,
dataSource,
@@ -2,4 +2,4 @@ import axios from 'api';
import { DeleteViewPayloadProps } from 'types/api/saveViews/types';

export const deleteView = (uuid: string): Promise<DeleteViewPayloadProps> =>
axios.delete(`explorer/views/${uuid}`);
axios.delete(`/explorer/views/${uuid}`);

@@ -6,4 +6,4 @@ import { DataSource } from 'types/common/queryBuilder';
export const getAllViews = (
sourcepage: DataSource,
): Promise<AxiosResponse<AllViewsProps>> =>
axios.get(`explorer/views?sourcePage=${sourcepage}`);
axios.get(`/explorer/views?sourcePage=${sourcepage}`);

@@ -8,7 +8,7 @@ export const saveView = ({
viewName,
extraData,
}: SaveViewProps): Promise<AxiosResponse<SaveViewPayloadProps>> =>
axios.post('explorer/views', {
axios.post('/explorer/views', {
name: viewName,
sourcePage,
compositeQuery,

@@ -11,7 +11,7 @@ export const updateView = ({
sourcePage,
viewKey,
}: UpdateViewProps): Promise<UpdateViewPayloadProps> =>
axios.put(`explorer/views/${viewKey}`, {
axios.put(`/explorer/views/${viewKey}`, {
name: viewName,
compositeQuery,
extraData,
frontend/src/assets/CustomIcons/ApacheIcon.tsx (new file, 176 lines)
@@ -0,0 +1,176 @@
[Default-exported ApacheIcon(): JSX.Element component returning a 14x14 inline SVG of the Apache logo (path and linearGradient definitions).]
frontend/src/assets/CustomIcons/DockerIcon.tsx (new file, 28 lines)
@@ -0,0 +1,28 @@
[Default-exported DockerIcon(): JSX.Element component returning a 14x14 inline SVG of the Docker logo (single path, fill #2396ED, with a clipPath).]
frontend/src/assets/CustomIcons/ElasticSearchIcon.tsx (new file, 36 lines)
@@ -0,0 +1,36 @@
[Default-exported ElasticSearchIcon(): JSX.Element component returning a 16x16 inline SVG of the Elasticsearch logo (six colored paths).]
frontend/src/assets/CustomIcons/GrafanaIcon.tsx (new file, 18 lines)

frontend/src/assets/CustomIcons/HerokuIcon.tsx (new file, 27 lines)
@@ -0,0 +1,27 @@
[HerokuIcon(): JSX.Element component, exported as default, returning a 16x16 inline SVG of the Heroku logo (single path, fill #6762A6, with a clipPath).]
frontend/src/assets/CustomIcons/JuiceBoxIcon.tsx (new file, 82 lines)
@@ -0,0 +1,82 @@
[JuiceBoxIcon(): JSX.Element component, exported as default, returning a 16x16 inline SVG illustration (multiple colored paths).]
frontend/src/assets/CustomIcons/KubernetesIcon.tsx (new file, 22 lines)

frontend/src/assets/CustomIcons/MagicBallIcon.tsx (new file, 38 lines)
@@ -0,0 +1,38 @@
[MagicBallIcon(): JSX.Element component, exported as default, returning a 16x16 inline SVG illustration (multiple paths).]
frontend/src/assets/CustomIcons/MongoDBIcon.tsx (new file, 68 lines)
@@ -0,0 +1,68 @@
[Default-exported MongoDBIcon(): JSX.Element component returning a 14x14 inline SVG of the MongoDB leaf logo (three paths with linearGradient definitions).]