Compare commits
248 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7554bce11c | ||
|
|
9a6fcb6b1d | ||
|
|
8ef4c0bcdd | ||
|
|
ded2c98167 | ||
|
|
22d4c53a43 | ||
|
|
7a4156a3b7 | ||
|
|
8844144c01 | ||
|
|
f8ec850670 | ||
|
|
ee76cf6294 | ||
|
|
2bf534b56f | ||
|
|
c37d6c3785 | ||
|
|
17c61a61ec | ||
|
|
ec7c99dd26 | ||
|
|
03220fcf11 | ||
|
|
233589b867 | ||
|
|
0946bcd5fc | ||
|
|
1fc0461c0f | ||
|
|
86b725757c | ||
|
|
b6004ce157 | ||
|
|
87c244ccfa | ||
|
|
557ebcec79 | ||
|
|
6363c71442 | ||
|
|
74c4a36e26 | ||
|
|
abb8b2b122 | ||
|
|
c35f2ec0aa | ||
|
|
3b22698e35 | ||
|
|
900752b6e2 | ||
|
|
f47c23032c | ||
|
|
7ad489ebb4 | ||
|
|
3d03ad52b1 | ||
|
|
37349786f1 | ||
|
|
c6ac8df707 | ||
|
|
ae3d4fece8 | ||
|
|
d63ae429bd | ||
|
|
171cea14e2 | ||
|
|
3d5dc05c08 | ||
|
|
d14bd7386e | ||
|
|
afd893b8b5 | ||
|
|
10141a207b | ||
|
|
daebea701d | ||
|
|
7fb29ad2ee | ||
|
|
e61b0a4d67 | ||
|
|
ff392ba883 | ||
|
|
9aa1fb366a | ||
|
|
49b456cc7f | ||
|
|
ed65c92fc7 | ||
|
|
558352e43b | ||
|
|
dd1e845095 | ||
|
|
db4101e795 | ||
|
|
b339f0509b | ||
|
|
668f0c6e2b | ||
|
|
d4024d1af9 | ||
|
|
355a297795 | ||
|
|
aada9060da | ||
|
|
5bf64053b7 | ||
|
|
f1fff4ca0c | ||
|
|
562621a117 | ||
|
|
68ab022836 | ||
|
|
2cdafa0564 | ||
|
|
fafa6f9960 | ||
|
|
54738432af | ||
|
|
23e6902921 | ||
|
|
348fa0ba5e | ||
|
|
1a3e46cecd | ||
|
|
4397c53494 | ||
|
|
fabdf87ed1 | ||
|
|
5f89e84eaf | ||
|
|
7493193e66 | ||
|
|
378fdb522e | ||
|
|
bc4a4edc7f | ||
|
|
5c83d133a2 | ||
|
|
5469cd34fa | ||
|
|
a2c03243cb | ||
|
|
4753868298 | ||
|
|
203eef8cde | ||
|
|
b915f9ef7b | ||
|
|
2c5c972801 | ||
|
|
872759f579 | ||
|
|
e86e045a28 | ||
|
|
7b85ece796 | ||
|
|
127ccdacb4 | ||
|
|
cac637ac88 | ||
|
|
b9409820cc | ||
|
|
e6fa1383f3 | ||
|
|
59deac01bd | ||
|
|
55f49c38c7 | ||
|
|
7e220a9f61 | ||
|
|
ace2d8a3b3 | ||
|
|
c4ce057d7a | ||
|
|
ef0e63c35b | ||
|
|
765153caa8 | ||
|
|
6d7081a4bd | ||
|
|
f1818235dc | ||
|
|
c321a1741f | ||
|
|
a235beae36 | ||
|
|
98a2ef4080 | ||
|
|
5a2a987a9b | ||
|
|
7822b4efee | ||
|
|
8f1451e154 | ||
|
|
07833b9859 | ||
|
|
0de40a889d | ||
|
|
b3a6deb71b | ||
|
|
b5af238374 | ||
|
|
49afc2549f | ||
|
|
0ed6594e48 | ||
|
|
50142321f7 | ||
|
|
29df1188c7 | ||
|
|
f3817d7335 | ||
|
|
fea8a71f51 | ||
|
|
5e89211f53 | ||
|
|
0beffb50ca | ||
|
|
6efa1011aa | ||
|
|
c68b611ad9 | ||
|
|
69828548b1 | ||
|
|
22d0aa951c | ||
|
|
7f9ba6c43a | ||
|
|
7a177e18e4 | ||
|
|
433f930956 | ||
|
|
206e8b8dc3 | ||
|
|
225b2248c8 | ||
|
|
783a54a8ee | ||
|
|
216499051d | ||
|
|
6b77165d09 | ||
|
|
d26022efb1 | ||
|
|
60c0836d3e | ||
|
|
7f162e5381 | ||
|
|
98745fc307 | ||
|
|
08d496e314 | ||
|
|
538261aa99 | ||
|
|
10ffbf7d81 | ||
|
|
8e20ca8405 | ||
|
|
7818f918a8 | ||
|
|
5bfcc1db70 | ||
|
|
39c6410bbe | ||
|
|
149fdebfaa | ||
|
|
915738e1f7 | ||
|
|
340f14be3d | ||
|
|
a6e6c171c3 | ||
|
|
1d1ddbef40 | ||
|
|
d7f5e5d6ac | ||
|
|
c656803162 | ||
|
|
e746bae8db | ||
|
|
54899930b0 | ||
|
|
ac40e08474 | ||
|
|
c51a15f1e8 | ||
|
|
5346cdd283 | ||
|
|
cc62d2cf71 | ||
|
|
44416df5dc | ||
|
|
a1a5e8bf9b | ||
|
|
652bd52ea7 | ||
|
|
ed200e50c8 | ||
|
|
f0f93c64d2 | ||
|
|
c0d10f0d88 | ||
|
|
e3e0787459 | ||
|
|
c72729f8bc | ||
|
|
2b3934b845 | ||
|
|
2e85bd0264 | ||
|
|
e3d26d3f10 | ||
|
|
5042c56b4c | ||
|
|
84c81b054c | ||
|
|
532ebdc856 | ||
|
|
f18b073810 | ||
|
|
857e505323 | ||
|
|
1295e179b2 | ||
|
|
193dca3a2b | ||
|
|
360029f350 | ||
|
|
eb2a955323 | ||
|
|
1d00ac9ded | ||
|
|
d4b95b4848 | ||
|
|
0750231b4b | ||
|
|
c1664dde6a | ||
|
|
197ccca30f | ||
|
|
76ba364317 | ||
|
|
e97609ce23 | ||
|
|
720edb162e | ||
|
|
dba4e00b02 | ||
|
|
8363dadd8d | ||
|
|
b1c1a95e29 | ||
|
|
5540692500 | ||
|
|
2722538e82 | ||
|
|
b8aba4f935 | ||
|
|
ea89433dc0 | ||
|
|
22bbfaf495 | ||
|
|
bc400c2bcf | ||
|
|
b7c50cc76d | ||
|
|
193b04ff0f | ||
|
|
10a3a6d3e5 | ||
|
|
78da014b52 | ||
|
|
20e71ec08a | ||
|
|
cda37b99b4 | ||
|
|
709bfda0cc | ||
|
|
20687d5184 | ||
|
|
0f998a4845 | ||
|
|
3464b2a59c | ||
|
|
64d4532a6b | ||
|
|
1eabacbaf4 | ||
|
|
9b8f7a091c | ||
|
|
56402b0d40 | ||
|
|
bd18eee662 | ||
|
|
522bdf04ef | ||
|
|
314edaf1df | ||
|
|
c05c939ee1 | ||
|
|
555bb79866 | ||
|
|
2ee7817685 | ||
|
|
803cfd1aa3 | ||
|
|
1c98d4f55c | ||
|
|
4460b46e47 | ||
|
|
a6237d8640 | ||
|
|
3f4cd130ed | ||
|
|
cf05345ccd | ||
|
|
9e305cb672 | ||
|
|
0d82a93f18 | ||
|
|
96b94a619e | ||
|
|
37fc00b55f | ||
|
|
b782bd8909 | ||
|
|
bcacb1d2b0 | ||
|
|
7f05ce3d05 | ||
|
|
5bdb0e84d1 | ||
|
|
82936f73a3 | ||
|
|
9aa8148269 | ||
|
|
86c6c43f95 | ||
|
|
3792f137fa | ||
|
|
389385324f | ||
|
|
9ad17c2d60 | ||
|
|
52a222e87a | ||
|
|
8433d81dc0 | ||
|
|
148889e198 | ||
|
|
a649ced337 | ||
|
|
3ba8ee1d26 | ||
|
|
316cbe484b | ||
|
|
ef74ef3526 | ||
|
|
bd6745dd66 | ||
|
|
754ba93df9 | ||
|
|
bb7ea8e8fb | ||
|
|
4a467435e9 | ||
|
|
014d4a2e7c | ||
|
|
84c4668b67 | ||
|
|
0cf56d8247 | ||
|
|
99f863f444 | ||
|
|
f3e077ce52 | ||
|
|
5af5cb0cf0 | ||
|
|
04a9de1e32 | ||
|
|
7415de4751 | ||
|
|
e5bb125a55 | ||
|
|
540568d29f | ||
|
|
a67d064418 | ||
|
|
1770e6a157 | ||
|
|
ef1bc0beec |
6
.github/workflows/build.yaml
vendored
6
.github/workflows/build.yaml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Install dependencies
|
||||
run: cd frontend && yarn install
|
||||
- name: Run ESLint
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Run tests
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -45,7 +45,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Build EE query-service image
|
||||
shell: bash
|
||||
run: |
|
||||
|
||||
8
.github/workflows/codeql.yaml
vendored
8
.github/workflows/codeql.yaml
vendored
@@ -39,11 +39,11 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@@ -54,7 +54,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
@@ -68,4 +68,4 @@ jobs:
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
uses: github/codeql-action/analyze@v2
|
||||
|
||||
@@ -12,11 +12,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Codebase
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: signoz/gh-bot
|
||||
- name: Use Node v16
|
||||
uses: actions/setup-node@v2
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 16
|
||||
- name: Setup Cache & Install Dependencies
|
||||
|
||||
10
.github/workflows/e2e-k3s.yaml
vendored
10
.github/workflows/e2e-k3s.yaml
vendored
@@ -13,7 +13,7 @@ jobs:
|
||||
DOCKER_TAG: pull-${{ github.event.number }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Build query-service image
|
||||
env:
|
||||
@@ -37,7 +37,7 @@ jobs:
|
||||
kubectl create ns sample-application
|
||||
|
||||
# apply hotrod k8s manifest file
|
||||
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/main/sample-apps/hotrod/hotrod.yaml
|
||||
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/develop/sample-apps/hotrod/hotrod.yaml
|
||||
|
||||
# wait for all deployments in sample-application namespace to be READY
|
||||
kubectl -n sample-application get deploy --output name | xargs -r -n1 -t kubectl -n sample-application rollout status --timeout=300s
|
||||
@@ -69,12 +69,14 @@ jobs:
|
||||
--restart='OnFailure' -i --rm --command -- curl -X POST -F \
|
||||
'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm
|
||||
|
||||
- name: Get short commit SHA and display tunnel URL
|
||||
- name: Get short commit SHA, display tunnel URL and IP Address of the worker node
|
||||
id: get-subdomain
|
||||
run: |
|
||||
subdomain="pr-$(git rev-parse --short HEAD)"
|
||||
echo "URL for tunnelling: https://$subdomain.loca.lt"
|
||||
echo "::set-output name=subdomain::$subdomain"
|
||||
echo "subdomain=$subdomain" >> $GITHUB_OUTPUT
|
||||
worker_ip="$(curl -4 -s ipconfig.io/ip)"
|
||||
echo "Worker node IP address: $worker_ip"
|
||||
|
||||
- name: Start tunnel
|
||||
env:
|
||||
|
||||
4
.github/workflows/playwright.yaml
vendored
4
.github/workflows/playwright.yaml
vendored
@@ -9,8 +9,8 @@ jobs:
|
||||
timeout-minutes: 60
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "16.x"
|
||||
- name: Install dependencies
|
||||
|
||||
2
.github/workflows/pr_verify_linked_issue.yml
vendored
2
.github/workflows/pr_verify_linked_issue.yml
vendored
@@ -14,6 +14,6 @@ jobs:
|
||||
name: Ensure Pull Request has a linked issue.
|
||||
steps:
|
||||
- name: Verify Linked Issue
|
||||
uses: srikanthccv/verify-linked-issue-action@v0.70
|
||||
uses: srikanthccv/verify-linked-issue-action@v0.71
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
28
.github/workflows/push.yaml
vendored
28
.github/workflows/push.yaml
vendored
@@ -14,19 +14,19 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: latest
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- uses: benjlevesque/short-sha@v1.2
|
||||
- uses: benjlevesque/short-sha@v2.2
|
||||
id: short-sha
|
||||
- name: Get branch name
|
||||
id: branch-name
|
||||
@@ -49,19 +49,19 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: latest
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- uses: benjlevesque/short-sha@v1.2
|
||||
- uses: benjlevesque/short-sha@v2.2
|
||||
id: short-sha
|
||||
- name: Get branch name
|
||||
id: branch-name
|
||||
@@ -84,7 +84,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Install dependencies
|
||||
working-directory: frontend
|
||||
run: yarn install
|
||||
@@ -97,15 +97,15 @@ jobs:
|
||||
run: npm run lint
|
||||
continue-on-error: true
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: latest
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- uses: benjlevesque/short-sha@v1.2
|
||||
- uses: benjlevesque/short-sha@v2.2
|
||||
id: short-sha
|
||||
- name: Get branch name
|
||||
id: branch-name
|
||||
|
||||
6
.github/workflows/release-drafter.yml
vendored
6
.github/workflows/release-drafter.yml
vendored
@@ -12,6 +12,12 @@ on:
|
||||
|
||||
jobs:
|
||||
update_release_draft:
|
||||
permissions:
|
||||
# write permission is required to create a github release
|
||||
contents: write
|
||||
# write permission is required for autolabeler
|
||||
# otherwise, read permission is required at least
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# (Optional) GitHub Enterprise requires GHE_HOST variable set
|
||||
|
||||
12
.github/workflows/remove-label.yaml
vendored
12
.github/workflows/remove-label.yaml
vendored
@@ -8,9 +8,15 @@ jobs:
|
||||
remove:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Remove label
|
||||
uses: buildsville/add-remove-label@v1
|
||||
- name: Remove label ok-to-test from PR
|
||||
uses: buildsville/add-remove-label@v2.0.0
|
||||
with:
|
||||
label: ok-to-test,testing-deploy
|
||||
label: ok-to-test
|
||||
type: remove
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Remove label testing-deploy from PR
|
||||
uses: buildsville/add-remove-label@v2.0.0
|
||||
with:
|
||||
label: testing-deploy
|
||||
type: remove
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
4
.github/workflows/sonar.yml
vendored
4
.github/workflows/sonar.yml
vendored
@@ -3,7 +3,7 @@ on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- v*
|
||||
- develop
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
defaults:
|
||||
@@ -14,7 +14,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Sonar analysis
|
||||
|
||||
@@ -338,7 +338,7 @@ to make SigNoz UI available at [localhost:3301](http://localhost:3301)
|
||||
**5.1.1 To install the HotROD sample app:**
|
||||
|
||||
```bash
|
||||
curl -sL https://github.com/SigNoz/signoz/raw/main/sample-apps/hotrod/hotrod-install.sh \
|
||||
curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-install.sh \
|
||||
| HELM_RELEASE=my-release SIGNOZ_NAMESPACE=platform bash
|
||||
```
|
||||
|
||||
@@ -361,7 +361,7 @@ kubectl -n sample-application run strzal --image=djbingham/curl \
|
||||
**5.1.4 To delete the HotROD sample app:**
|
||||
|
||||
```bash
|
||||
curl -sL https://github.com/SigNoz/signoz/raw/main/sample-apps/hotrod/hotrod-delete.sh \
|
||||
curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-delete.sh \
|
||||
| HOTROD_NAMESPACE=sample-application bash
|
||||
```
|
||||
|
||||
|
||||
15
Makefile
15
Makefile
@@ -54,7 +54,7 @@ build-push-frontend:
|
||||
@echo "--> Building and pushing frontend docker image"
|
||||
@echo "------------------"
|
||||
@cd $(FRONTEND_DIRECTORY) && \
|
||||
docker buildx build --file Dockerfile --progress plane --push --platform linux/arm64,linux/amd64 \
|
||||
docker buildx build --file Dockerfile --progress plain --push --platform linux/arm64,linux/amd64 \
|
||||
--tag $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) .
|
||||
|
||||
# Steps to build and push docker image of query service
|
||||
@@ -73,7 +73,7 @@ build-push-query-service:
|
||||
@echo "------------------"
|
||||
@echo "--> Building and pushing query-service docker image"
|
||||
@echo "------------------"
|
||||
@docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plane \
|
||||
@docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plain \
|
||||
--push --platform linux/arm64,linux/amd64 --build-arg LD_FLAGS="$(LD_FLAGS)" \
|
||||
--tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
|
||||
|
||||
@@ -98,7 +98,7 @@ build-push-ee-query-service:
|
||||
@echo "--> Building and pushing query-service docker image"
|
||||
@echo "------------------"
|
||||
@docker buildx build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
|
||||
--progress plane --push --platform linux/arm64,linux/amd64 \
|
||||
--progress plain --push --platform linux/arm64,linux/amd64 \
|
||||
--build-arg LD_FLAGS="$(LD_FLAGS)" --tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
|
||||
|
||||
dev-setup:
|
||||
@@ -136,9 +136,18 @@ clear-swarm-data:
|
||||
@docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \
|
||||
sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*"
|
||||
|
||||
clear-standalone-ch:
|
||||
@docker run --rm -v "$(PWD)/$(STANDALONE_DIRECTORY)/data:/pwd" busybox \
|
||||
sh -c "cd /pwd && rm -rf clickhouse*/* zookeeper-*/*"
|
||||
|
||||
clear-swarm-ch:
|
||||
@docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \
|
||||
sh -c "cd /pwd && rm -rf clickhouse*/* zookeeper-*/*"
|
||||
|
||||
test:
|
||||
go test ./pkg/query-service/app/metrics/...
|
||||
go test ./pkg/query-service/cache/...
|
||||
go test ./pkg/query-service/app/...
|
||||
go test ./pkg/query-service/app/querier/...
|
||||
go test ./pkg/query-service/converter/...
|
||||
go test ./pkg/query-service/formatter/...
|
||||
|
||||
164
README.de-de.md
164
README.de-de.md
@@ -1,40 +1,75 @@
|
||||
<p align="center">
|
||||
<img src="https://res.cloudinary.com/dcv3epinx/image/upload/v1618904450/signoz-images/LogoGithub_sigfbu.svg" alt="SigNoz-logo" width="240" />
|
||||
|
||||
|
||||
<p align="center">Überwache deine Anwendungen und behebe Probleme in deinen bereitgestellten Anwendungen. SigNoz ist eine Open Source Alternative zu DataDog, New Relic, etc.</p>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/frontend?label=Downloads"> </a>
|
||||
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/query-service?label=Downloads"> </a>
|
||||
<img alt="GitHub issues" src="https://img.shields.io/github/issues/signoz/signoz"> </a>
|
||||
<a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNozHQ&hashtags=opensource,signoz,observability">
|
||||
<img alt="tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"> </a>
|
||||
<a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNozHQ&hashtags=opensource,signoz,observability">
|
||||
<img alt="tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"> </a>
|
||||
</p>
|
||||
|
||||
|
||||
|
||||
|
||||
<h3 align="center">
|
||||
<a href="https://signoz.io/docs"><b>Dokumentation</b></a> •
|
||||
<a href="https://github.com/SigNoz/signoz/blob/develop/README.md"><b>Readme auf Englisch </b></a> •
|
||||
<a href="https://github.com/SigNoz/signoz/blob/develop/README.zh-cn.md"><b>ReadMe auf Chinesisch</b></a> •
|
||||
<a href="https://github.com/SigNoz/signoz/blob/develop/README.pt-br.md"><b>ReadMe auf Portugiesisch</b></a> •
|
||||
<a href="https://signoz.io/slack"><b>Slack Community</b></a> •
|
||||
<a href="https://twitter.com/SigNozHQ"><b>Twitter</b></a>
|
||||
<a href="https://twitter.com/SigNozHq"><b>Twitter</b></a>
|
||||
</h3>
|
||||
|
||||
##
|
||||
##
|
||||
|
||||
SigNoz hilft Entwicklern, Anwendungen zu überwachen und Probleme in ihren bereitgestellten Anwendungen zu beheben. SigNoz benutzt verteilte Einzelschritt-Fehlersuchen, um Einblick in deinen Software-Stack zu bekommen.
|
||||
SigNoz hilft Entwicklern, Anwendungen zu überwachen und Probleme in ihren bereitgestellten Anwendungen zu beheben. Mit SigNoz können Sie Folgendes tun:
|
||||
|
||||
👉 Du kannst Werte wie die P99-Latenz und die Fehler Häufigkeit von deinen Services, externen API Aufrufen und einzelnen Endpunkten sehen.
|
||||
👉 Visualisieren Sie Metriken, Traces und Logs in einer einzigen Oberfläche.
|
||||
|
||||
👉 Du kannst die Ursache des Problems finden, indem du zu dem Einzelschritt gehst, der das Problem verursacht und dir detaillierte Flamegraphs von einzelnen Abfragefehlersuchen anzeigen lassen.
|
||||
👉 Sie können Metriken wie die p99-Latenz, Fehlerquoten für Ihre Dienste, externe API-Aufrufe und individuelle Endpunkte anzeigen.
|
||||
|
||||
👉 Erstelle Aggregate auf Basis von Fehlersuche Daten, um geschäftsrelevante Metriken zu erhalten.
|
||||
👉 Sie können die Ursache des Problems ermitteln, indem Sie zu den genauen Traces gehen, die das Problem verursachen, und detaillierte Flammenbilder einzelner Anfragetraces anzeigen.
|
||||
|
||||
👉 Führen Sie Aggregationen auf Trace-Daten durch, um geschäftsrelevante Metriken zu erhalten.
|
||||
|
||||
👉 Filtern und Abfragen von Logs, Erstellen von Dashboards und Benachrichtigungen basierend auf Attributen in den Logs.
|
||||
|
||||
👉 Automatische Aufzeichnung von Ausnahmen in Python, Java, Ruby und Javascript.
|
||||
|
||||
👉 Einfache Einrichtung von Benachrichtigungen mit dem selbst erstellbaren Abfrage-Builder.
|
||||
|
||||
##
|
||||
### Anwendung Metriken
|
||||
|
||||

|
||||
|
||||
|
||||
### Verteiltes Tracing
|
||||
<img width="2068" alt="distributed_tracing_2 2" src="https://user-images.githubusercontent.com/83692067/226536447-bae58321-6a22-4ed3-af80-e3e964cb3489.png">
|
||||
|
||||
<img width="2068" alt="distributed_tracing_1" src="https://user-images.githubusercontent.com/83692067/226536462-939745b6-4f9d-45a6-8016-814837e7f7b4.png">
|
||||
|
||||
### Log Verwaltung
|
||||
|
||||
<img width="2068" alt="logs_management" src="https://user-images.githubusercontent.com/83692067/226536482-b8a5c4af-b69c-43d5-969c-338bd5eaf1a5.png">
|
||||
|
||||
### Infrastruktur Überwachung
|
||||
|
||||
<img width="2068" alt="infrastructure_monitoring" src="https://user-images.githubusercontent.com/83692067/226536496-f38c4dbf-e03c-4158-8be0-32d4a61158c7.png">
|
||||
|
||||
### Exceptions Monitoring
|
||||
|
||||

|
||||
|
||||
|
||||
### Alarme
|
||||
|
||||
<img width="2068" alt="alerts_management" src="https://user-images.githubusercontent.com/83692067/226536548-2c81e2e8-c12d-47e8-bad7-c6be79055def.png">
|
||||
|
||||

|
||||
|
||||
<br /><br />
|
||||
|
||||
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/Contributing.svg" width="50px" />
|
||||
|
||||
## Werde Teil unserer Slack Community
|
||||
|
||||
@@ -42,20 +77,22 @@ Sag Hi zu uns auf [Slack](https://signoz.io/slack) 👋
|
||||
|
||||
<br /><br />
|
||||
|
||||
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/Features.svg" width="50px" />
|
||||
|
||||
## Funktionen:
|
||||
|
||||
- Übersichtsmetriken deiner Anwendung wie RPS, 50tes/90tes/99tes Quantil Latenzen und Fehler Häufigkeiten.
|
||||
- Übersicht der langsamsten Endpunkte deiner Anwendung.
|
||||
- Sieh dir die genaue Einzelschritt-Fehlersuche deiner Abfrage an, um Fehler in nachgelagerten Diensten, langsamen Datenbank Abfragen und Aufrufen von Drittanbieter Diensten wie Zahlungsportalen, etc. zu finden.
|
||||
- Filtere Einzelschritt-Fehlersuchen nach Dienstname, Latenz, Fehler, Stichworten/ Anmerkungen.
|
||||
- Führe Aggregate auf Basis von Einzelschritt-Fehlersuche Daten (Ereignisse/Abstände) aus, um geschäftsrelevante Metriken zu erhalten. Du kannst dir z. B. die Fehlerrate und 99tes Quantil Latenz von `customer_type: gold`, `deployment_version: v2` oder `external_call: paypal` ausgeben lassen.
|
||||
- Einheitliche Benutzeroberfläche für Metriken und Einzelschritt-Fehlersuchen. Du musst nicht zwischen Prometheus und Jaeger hin und her wechseln, um Fehler zu beheben.
|
||||
- Einheitliche Benutzeroberfläche für Metriken, Traces und Logs. Keine Notwendigkeit, zwischen Prometheus und Jaeger zu wechseln, um Probleme zu debuggen oder ein separates Log-Tool wie Elastic neben Ihrer Metriken- und Traces-Stack zu verwenden.
|
||||
- Überblick über Anwendungsmetriken wie RPS, Latenzzeiten des 50tes/90tes/99tes Perzentils und Fehlerquoten.
|
||||
- Langsamste Endpunkte in Ihrer Anwendung.
|
||||
- Zeigen Sie genaue Anfragetraces an, um Probleme in nachgelagerten Diensten, langsamen Datenbankabfragen oder Aufrufen von Drittanbieterdiensten wie Zahlungsgateways zu identifizieren.
|
||||
- Filtern Sie Traces nach Dienstname, Operation, Latenz, Fehler, Tags/Annotationen.
|
||||
- Führen Sie Aggregationen auf Trace-Daten (Ereignisse/Spans) durch, um geschäftsrelevante Metriken zu erhalten. Beispielsweise können Sie die Fehlerquote und die 99tes Perzentillatenz für `customer_type: gold` oder `deployment_version: v2` oder `external_call: paypal` erhalten.
|
||||
- Native Unterstützung für OpenTelemetry-Logs, erweiterten Log-Abfrage-Builder und automatische Log-Sammlung aus dem Kubernetes-Cluster.
|
||||
- Blitzschnelle Log-Analytik ([Logs Perf. Benchmark](https://signoz.io/blog/logs-performance-benchmark/))
|
||||
- End-to-End-Sichtbarkeit der Infrastrukturleistung, Aufnahme von Metriken aus allen Arten von Host-Umgebungen.
|
||||
- Einfache Einrichtung von Benachrichtigungen mit dem selbst erstellbaren Abfrage-Builder.
|
||||
|
||||
<br /><br />
|
||||
|
||||
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/WhatsCool.svg" width="50px" />
|
||||
|
||||
## Wieso SigNoz?
|
||||
|
||||
@@ -65,24 +102,28 @@ Wir wollten eine selbst gehostete, Open Source Variante von Lösungen wie DataDo
|
||||
|
||||
Open Source gibt dir außerdem die totale Kontrolle über deine Konfiguration, Stichprobenentnahme und Betriebszeit. Du kannst des Weiteren neue Module auf Basis von SigNoz bauen, die erweiterte, geschäftsspezifische Funktionen anbieten.
|
||||
|
||||
### Unterstützte Programmiersprachen:
|
||||
### Languages supported:
|
||||
|
||||
Wir unterstützen [OpenTelemetry](https://opentelemetry.io) als die Software Library, die du nutzen kannst um deine Anwendungen auszuführen. Jedes Framework und jede Sprache die von OpenTelemetry unterstützt wird, wird auch von SigNoz unterstützt. Einige der unterstützten, größeren Programmiersprachen sind:
|
||||
Wir unterstützen [OpenTelemetry](https://opentelemetry.io) als Bibliothek, mit der Sie Ihre Anwendungen instrumentieren können. Daher wird jedes von OpenTelemetry unterstützte Framework und jede Sprache auch von SignNoz unterstützt. Einige der wichtigsten unterstützten Sprachen sind:
|
||||
|
||||
- Java
|
||||
- Python
|
||||
- NodeJS
|
||||
- Go
|
||||
- PHP
|
||||
- .NET
|
||||
- Ruby
|
||||
- Elixir
|
||||
- Rust
|
||||
|
||||
|
||||
Hier findest du die vollständige Liste von unterstützten Programmiersprachen - https://opentelemetry.io/docs/
|
||||
|
||||
<br /><br />
|
||||
|
||||
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/Philosophy.svg" width="50px" />
|
||||
|
||||
## Erste Schritte mit SigNoz
|
||||
|
||||
|
||||
|
||||
### Bereitstellung mit Docker
|
||||
|
||||
Bitte folge den [hier](https://signoz.io/docs/install/docker/) aufgelisteten Schritten um deine Anwendung mit Docker bereitzustellen.
|
||||
@@ -90,20 +131,17 @@ Bitte folge den [hier](https://signoz.io/docs/install/docker/) aufgelisteten Sch
|
||||
Die [Anleitungen zur Fehlerbehebung](https://signoz.io/docs/install/troubleshooting/) könnten hilfreich sein, falls du auf irgendwelche Schwierigkeiten stößt.
|
||||
|
||||
<p>  </p>
|
||||
|
||||
|
||||
### Bereitstellung mit Kubernetes und Helm
|
||||
|
||||
### Deploy in Kubernetes using Helm
|
||||
|
||||
Bitte folge den [hier](https://signoz.io/docs/deployment/helm_chart) aufgelisteten Schritten, um deine Anwendung mit Helm Charts bereitzustellen.
|
||||
|
||||
|
||||
<br /><br />
|
||||
|
||||
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/UseSigNoz.svg" width="50px" />
|
||||
|
||||
## Vergleiche mit anderen Lösungen
|
||||
## Vergleiche mit bekannten Tools
|
||||
|
||||
### SigNoz vs. Prometheus
|
||||
### SigNoz vs Prometheus
|
||||
|
||||
Prometheus ist gut, falls du dich nur für Metriken interessierst. Wenn du eine nahtlose Integration von Metriken und Einzelschritt-Fehlersuchen haben möchtest, ist die Kombination aus Prometheus und Jaeger nicht das Richtige für dich.
|
||||
|
||||
@@ -111,49 +149,79 @@ Unser Ziel ist es, eine integrierte Benutzeroberfläche aus Metriken und Einzels
|
||||
|
||||
<p>  </p>
|
||||
|
||||
### SigNoz vs. Jaeger
|
||||
### SigNoz vs Jaeger
|
||||
|
||||
Jaeger kümmert sich nur um verteilte Einzelschritt-Fehlersuche. SigNoz erstellt sowohl Metriken als auch Einzelschritt-Fehlersuche, daneben haben wir auch Protokoll Verwaltung auf unserem Plan.
|
||||
|
||||
Außerdem hat SigNoz noch mehr spezielle Funktionen im Vergleich zu Jaeger:
|
||||
|
||||
- Jaeger UI zeigt keine Metriken für Einzelschritt-Fehlersuchen oder für gefilterte Einzelschritt-Fehlersuchen an
|
||||
- Jaeger erstellt keine Aggregate für gefilterte Einzelschritt-Fehlersuchen, z. B. die P99 Latenz von Abfragen mit dem Tag - customer_type='premium', was hingegen mit SigNoz leicht umsetzbar ist.
|
||||
- Jaeger UI zeigt keine Metriken für Einzelschritt-Fehlersuchen oder für gefilterte Einzelschritt-Fehlersuchen an.
|
||||
- Jaeger erstellt keine Aggregate für gefilterte Einzelschritt-Fehlersuchen, z. B. die P99 Latenz von Abfragen mit dem Tag `customer_type=premium`, was hingegen mit SigNoz leicht umsetzbar ist.
|
||||
|
||||
<p>  </p>
|
||||
|
||||
### SigNoz vs Elastic
|
||||
|
||||
- Die Verwaltung von SigNoz-Protokollen basiert auf 'ClickHouse', einem spaltenbasierten OLAP-Datenspeicher, der aggregierte Protokollanalyseabfragen wesentlich effizienter macht.
|
||||
- 50 % geringerer Ressourcenbedarf im Vergleich zu Elastic während der Aufnahme.
|
||||
|
||||
Wir haben Benchmarks veröffentlicht, die Elastic mit SignNoz vergleichen. Schauen Sie es sich [hier](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark)
|
||||
|
||||
<p>  </p>
|
||||
|
||||
### SigNoz vs Loki
|
||||
|
||||
- SigNoz unterstützt Aggregationen von Daten mit hoher Kardinalität über ein großes Volumen, Loki hingegen nicht.
|
||||
- SigNoz unterstützt Indizes über Daten mit hoher Kardinalität und hat keine Beschränkungen hinsichtlich der Anzahl der Indizes, während Loki maximale Streams erreicht, wenn ein paar Indizes hinzugefügt werden.
|
||||
- Das Durchsuchen großer Datenmengen ist in Loki im Vergleich zu SigNoz schwierig und langsam.
|
||||
|
||||
Wir haben Benchmarks veröffentlicht, die Loki mit SigNoz vergleichen. Schauen Sie es sich [hier](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark)
|
||||
|
||||
<br /><br />
|
||||
|
||||
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/Contributors.svg" width="50px" />
|
||||
|
||||
## Zum Projekt beitragen
|
||||
|
||||
Wir ❤️ Beiträge zum Projekt, egal ob große oder kleine. Bitte lies dir zuerst die [CONTRIBUTING.md](CONTRIBUTING.md), durch, bevor du anfängst, Beiträge zu SigNoz zu machen.
|
||||
Du bist dir nicht sicher, wie du anfangen sollst? Schreib uns einfach auf dem #contributing Kanal in unserer [slack community](https://signoz.io/slack)
|
||||
|
||||
Wir ❤️ Beiträge zum Projekt, egal ob große oder kleine. Bitte lies dir zuerst die [CONTRIBUTING.md](CONTRIBUTING.md) durch, bevor du anfängst, Beiträge zu SigNoz zu machen.
|
||||
### Unsere Projektbetreuer
|
||||
|
||||
Du bist dir nicht sicher, wie du anfangen sollst? Schreib uns einfach auf dem `#contributing` Kanal in unserer [Slack Community](https://signoz.io/slack).
|
||||
#### Backend
|
||||
|
||||
- [Ankit Nayan](https://github.com/ankitnayan)
|
||||
- [Nityananda Gohain](https://github.com/nityanandagohain)
|
||||
- [Srikanth Chekuri](https://github.com/srikanthccv)
|
||||
- [Vishal Sharma](https://github.com/makeavish)
|
||||
|
||||
#### Frontend
|
||||
|
||||
- [Palash Gupta](https://github.com/palashgdev)
|
||||
|
||||
#### DevOps
|
||||
|
||||
- [Prashant Shahi](https://github.com/prashant-shahi)
|
||||
|
||||
<br /><br />
|
||||
|
||||
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/DevelopingLocally.svg" width="50px" />
|
||||
|
||||
## Dokumentation
|
||||
|
||||
Du findest unsere Dokumentation unter https://signoz.io/docs/. Falls etwas unverständlich ist oder fehlt, öffne gerne ein Github Issue mit dem Label `documentation` oder schreib uns über den Community Slack Channel.
|
||||
|
||||
|
||||
|
||||
<br /><br />
|
||||
|
||||
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/Contributing.svg" width="50px" />
|
||||
|
||||
## Community
|
||||
## Gemeinschaft
|
||||
|
||||
Werde Teil der [Slack Community](https://signoz.io/slack) um mehr über verteilte Einzelschritt-Fehlersuche, Messung von Systemzuständen oder SigNoz zu erfahren und sich mit anderen Nutzern und Mitwirkenden in Verbindung zu setzen.
|
||||
Werde Teil der [slack community](https://signoz.io/slack) um mehr über verteilte Einzelschritt-Fehlersuche, Messung von Systemzuständen oder SigNoz zu erfahren und sich mit anderen Nutzern und Mitwirkenden in Verbindung zu setzen.
|
||||
|
||||
Falls du irgendwelche Ideen, Fragen oder Feedback hast, kannst du sie gerne über unsere [Github Discussions](https://github.com/SigNoz/signoz/discussions) mit uns teilen.
|
||||
|
||||
Wie immer, danke an unsere großartigen Unterstützer!
|
||||
Wie immer, Dank an unsere großartigen Mitwirkenden!
|
||||
|
||||
<a href="https://github.com/signoz/signoz/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=signoz/signoz" />
|
||||
</a>
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -58,7 +58,7 @@ from the HotROD application, you should see the data generated from hotrod in Si
|
||||
```sh
|
||||
kubectl create ns sample-application
|
||||
|
||||
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/main/sample-apps/hotrod/hotrod.yaml
|
||||
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/develop/sample-apps/hotrod/hotrod.yaml
|
||||
```
|
||||
|
||||
To generate load:
|
||||
@@ -66,7 +66,7 @@ To generate load:
|
||||
```sh
|
||||
kubectl -n sample-application run strzal --image=djbingham/curl \
|
||||
--restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \
|
||||
'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm
|
||||
'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm
|
||||
```
|
||||
|
||||
To stop load:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
version: "3.9"
|
||||
|
||||
x-clickhouse-defaults: &clickhouse-defaults
|
||||
x-clickhouse-defaults:
|
||||
&clickhouse-defaults
|
||||
image: clickhouse/clickhouse-server:22.8.8-alpine
|
||||
tty: true
|
||||
deploy:
|
||||
@@ -16,7 +17,14 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
max-file: "3"
|
||||
healthcheck:
|
||||
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
|
||||
test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8123/ping"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
@@ -26,7 +34,8 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
soft: 262144
|
||||
hard: 262144
|
||||
|
||||
x-clickhouse-depend: &clickhouse-depend
|
||||
x-clickhouse-depend:
|
||||
&clickhouse-depend
|
||||
depends_on:
|
||||
- clickhouse
|
||||
# - clickhouse-2
|
||||
@@ -124,7 +133,7 @@ services:
|
||||
# - ./data/clickhouse-3/:/var/lib/clickhouse/
|
||||
|
||||
alertmanager:
|
||||
image: signoz/alertmanager:0.23.1
|
||||
image: signoz/alertmanager:0.23.2
|
||||
volumes:
|
||||
- ./data/alertmanager:/data
|
||||
command:
|
||||
@@ -137,8 +146,8 @@ services:
|
||||
condition: on-failure
|
||||
|
||||
query-service:
|
||||
image: signoz/query-service:0.20.1
|
||||
command: ["-config=/root/config/prometheus.yml"]
|
||||
image: signoz/query-service:0.26.0
|
||||
command: [ "-config=/root/config/prometheus.yml" ]
|
||||
# ports:
|
||||
# - "6060:6060" # pprof port
|
||||
# - "8080:8080" # query-service port
|
||||
@@ -156,7 +165,14 @@ services:
|
||||
- TELEMETRY_ENABLED=true
|
||||
- DEPLOYMENT_TYPE=docker-swarm
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8080/api/v1/health"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
@@ -166,7 +182,7 @@ services:
|
||||
<<: *clickhouse-depend
|
||||
|
||||
frontend:
|
||||
image: signoz/frontend:0.20.1
|
||||
image: signoz/frontend:0.26.0
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
@@ -179,8 +195,12 @@ services:
|
||||
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
||||
|
||||
otel-collector:
|
||||
image: signoz/signoz-otel-collector:0.76.1
|
||||
command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
|
||||
image: signoz/signoz-otel-collector:0.79.5
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-config.yaml",
|
||||
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
|
||||
]
|
||||
user: root # required for reading docker container logs
|
||||
volumes:
|
||||
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
|
||||
@@ -191,8 +211,8 @@ services:
|
||||
- LOW_CARDINAL_EXCEPTION_GROUPING=false
|
||||
ports:
|
||||
# - "1777:1777" # pprof extension
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
# - "8888:8888" # OtelCollector internal metrics
|
||||
# - "8889:8889" # signoz spanmetrics exposed by the agent
|
||||
# - "9411:9411" # Zipkin port
|
||||
@@ -208,8 +228,12 @@ services:
|
||||
<<: *clickhouse-depend
|
||||
|
||||
otel-collector-metrics:
|
||||
image: signoz/signoz-otel-collector:0.76.1
|
||||
command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
|
||||
image: signoz/signoz-otel-collector:0.79.5
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-metrics-config.yaml",
|
||||
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
|
||||
]
|
||||
volumes:
|
||||
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
|
||||
# ports:
|
||||
@@ -222,9 +246,22 @@ services:
|
||||
condition: on-failure
|
||||
<<: *clickhouse-depend
|
||||
|
||||
logspout:
|
||||
image: "gliderlabs/logspout:v3.2.14"
|
||||
volumes:
|
||||
- /etc/hostname:/etc/host_hostname:ro
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
command: syslog+tcp://otel-collector:2255
|
||||
depends_on:
|
||||
- otel-collector
|
||||
deploy:
|
||||
mode: global
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
hotrod:
|
||||
image: jaegertracing/example-hotrod:1.30
|
||||
command: ["all"]
|
||||
command: [ "all" ]
|
||||
environment:
|
||||
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
|
||||
logging:
|
||||
@@ -233,7 +270,7 @@ services:
|
||||
max-file: "3"
|
||||
|
||||
load-hotrod:
|
||||
image: "grubykarol/locust:1.2.3-python3.9-alpine3.12"
|
||||
image: "signoz/locust:1.2.3"
|
||||
hostname: load-hotrod
|
||||
environment:
|
||||
ATTACKED_HOST: http://hotrod:8080
|
||||
|
||||
@@ -1,29 +1,21 @@
|
||||
receivers:
|
||||
filelog/dockercontainers:
|
||||
include: [ "/var/lib/docker/containers/*/*.log" ]
|
||||
start_at: end
|
||||
include_file_path: true
|
||||
include_file_name: false
|
||||
tcplog/docker:
|
||||
listen_address: "0.0.0.0:2255"
|
||||
operators:
|
||||
- type: json_parser
|
||||
id: parser-docker
|
||||
output: extract_metadata_from_filepath
|
||||
timestamp:
|
||||
parse_from: attributes.time
|
||||
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
|
||||
- type: regex_parser
|
||||
id: extract_metadata_from_filepath
|
||||
regex: '^.*containers/(?P<container_id>[^_]+)/.*log$'
|
||||
parse_from: attributes["log.file.path"]
|
||||
output: parse_body
|
||||
- type: move
|
||||
id: parse_body
|
||||
from: attributes.log
|
||||
to: body
|
||||
output: time
|
||||
- type: remove
|
||||
id: time
|
||||
field: attributes.time
|
||||
- type: regex_parser
|
||||
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
|
||||
timestamp:
|
||||
parse_from: attributes.timestamp
|
||||
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
|
||||
- type: move
|
||||
from: attributes["body"]
|
||||
to: body
|
||||
- type: remove
|
||||
field: attributes.timestamp
|
||||
# please remove names from below if you want to collect logs from them
|
||||
- type: filter
|
||||
id: signoz_logs_filter
|
||||
expr: 'attributes.container_name matches "^signoz_(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
|
||||
opencensus:
|
||||
endpoint: 0.0.0.0:55678
|
||||
otlp/spanmetrics:
|
||||
@@ -69,6 +61,40 @@ receivers:
|
||||
job_name: otel-collector
|
||||
|
||||
processors:
|
||||
logstransform/internal:
|
||||
operators:
|
||||
- type: trace_parser
|
||||
if: '"trace_id" in attributes or "span_id" in attributes'
|
||||
trace_id:
|
||||
parse_from: attributes.trace_id
|
||||
span_id:
|
||||
parse_from: attributes.span_id
|
||||
output: remove_trace_id
|
||||
- type: trace_parser
|
||||
if: '"traceId" in attributes or "spanId" in attributes'
|
||||
trace_id:
|
||||
parse_from: attributes.traceId
|
||||
span_id:
|
||||
parse_from: attributes.spanId
|
||||
output: remove_traceId
|
||||
- id: remove_traceId
|
||||
type: remove
|
||||
if: '"traceId" in attributes'
|
||||
field: attributes.traceId
|
||||
output: remove_spanId
|
||||
- id: remove_spanId
|
||||
type: remove
|
||||
if: '"spanId" in attributes'
|
||||
field: attributes.spanId
|
||||
- id: remove_trace_id
|
||||
type: remove
|
||||
if: '"trace_id" in attributes'
|
||||
field: attributes.trace_id
|
||||
output: remove_span_id
|
||||
- id: remove_span_id
|
||||
type: remove
|
||||
if: '"span_id" in attributes'
|
||||
field: attributes.span_id
|
||||
batch:
|
||||
send_batch_size: 10000
|
||||
send_batch_max_size: 11000
|
||||
@@ -166,6 +192,6 @@ service:
|
||||
receivers: [otlp/spanmetrics]
|
||||
exporters: [prometheus]
|
||||
logs:
|
||||
receivers: [otlp, filelog/dockercontainers]
|
||||
processors: [batch]
|
||||
receivers: [otlp, tcplog/docker]
|
||||
processors: [logstransform/internal, batch]
|
||||
exporters: [clickhouselogsexporter]
|
||||
|
||||
@@ -3,7 +3,7 @@ version: "2.4"
|
||||
services:
|
||||
clickhouse:
|
||||
image: clickhouse/clickhouse-server:22.8.8-alpine
|
||||
container_name: clickhouse
|
||||
container_name: signoz-clickhouse
|
||||
# ports:
|
||||
# - "9000:9000"
|
||||
# - "8123:8123"
|
||||
@@ -20,14 +20,21 @@ services:
|
||||
max-file: "3"
|
||||
healthcheck:
|
||||
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
|
||||
test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8123/ping"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
alertmanager:
|
||||
container_name: alertmanager
|
||||
image: signoz/alertmanager:0.23.1
|
||||
container_name: signoz-alertmanager
|
||||
image: signoz/alertmanager:0.23.2
|
||||
volumes:
|
||||
- ./data/alertmanager:/data
|
||||
depends_on:
|
||||
@@ -40,9 +47,13 @@ services:
|
||||
|
||||
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
||||
otel-collector:
|
||||
container_name: otel-collector
|
||||
image: signoz/signoz-otel-collector:0.76.1
|
||||
command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
|
||||
container_name: signoz-otel-collector
|
||||
image: signoz/signoz-otel-collector:0.79.5
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-config.yaml",
|
||||
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
|
||||
]
|
||||
# user: root # required for reading docker container logs
|
||||
volumes:
|
||||
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
|
||||
@@ -50,8 +61,8 @@ services:
|
||||
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
|
||||
ports:
|
||||
# - "1777:1777" # pprof extension
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
# - "8888:8888" # OtelCollector internal metrics
|
||||
# - "8889:8889" # signoz spanmetrics exposed by the agent
|
||||
# - "9411:9411" # Zipkin port
|
||||
@@ -66,9 +77,13 @@ services:
|
||||
condition: service_healthy
|
||||
|
||||
otel-collector-metrics:
|
||||
container_name: otel-collector-metrics
|
||||
image: signoz/signoz-otel-collector:0.76.1
|
||||
command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
|
||||
container_name: signoz-otel-collector-metrics
|
||||
image: signoz/signoz-otel-collector:0.79.5
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-metrics-config.yaml",
|
||||
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
|
||||
]
|
||||
volumes:
|
||||
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
|
||||
# ports:
|
||||
@@ -81,6 +96,17 @@ services:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
|
||||
logspout:
|
||||
image: "gliderlabs/logspout:v3.2.14"
|
||||
container_name: signoz-logspout
|
||||
volumes:
|
||||
- /etc/hostname:/etc/host_hostname:ro
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
command: syslog+tcp://otel-collector:2255
|
||||
depends_on:
|
||||
- otel-collector
|
||||
restart: on-failure
|
||||
|
||||
hotrod:
|
||||
image: jaegertracing/example-hotrod:1.30
|
||||
container_name: hotrod
|
||||
@@ -88,12 +114,12 @@ services:
|
||||
options:
|
||||
max-size: 50m
|
||||
max-file: "3"
|
||||
command: ["all"]
|
||||
command: [ "all" ]
|
||||
environment:
|
||||
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
|
||||
|
||||
load-hotrod:
|
||||
image: "grubykarol/locust:1.2.3-python3.9-alpine3.12"
|
||||
image: "signoz/locust:1.2.3"
|
||||
container_name: load-hotrod
|
||||
hostname: load-hotrod
|
||||
environment:
|
||||
|
||||
@@ -9,7 +9,7 @@ services:
|
||||
args:
|
||||
LDFLAGS: ""
|
||||
TARGETPLATFORM: "${LOCAL_GOOS}/${LOCAL_GOARCH}"
|
||||
container_name: query-service
|
||||
container_name: signoz-query-service
|
||||
environment:
|
||||
- ClickHouseUrl=tcp://clickhouse:9000
|
||||
- ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
|
||||
@@ -22,13 +22,20 @@ services:
|
||||
- ./prometheus.yml:/root/config/prometheus.yml
|
||||
- ../dashboards:/root/config/dashboards
|
||||
- ./data/signoz/:/var/lib/signoz/
|
||||
command: ["-config=/root/config/prometheus.yml"]
|
||||
command: [ "-config=/root/config/prometheus.yml" ]
|
||||
ports:
|
||||
- "6060:6060"
|
||||
- "8080:8080"
|
||||
restart: on-failure
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8080/api/v1/health"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
@@ -43,7 +50,7 @@ services:
|
||||
args:
|
||||
TARGETOS: "${LOCAL_GOOS}"
|
||||
TARGETPLATFORM: "${LOCAL_GOARCH}"
|
||||
container_name: frontend
|
||||
container_name: signoz-frontend
|
||||
environment:
|
||||
- FRONTEND_API_ENDPOINT=http://query-service:8080
|
||||
restart: on-failure
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
version: "2.4"
|
||||
|
||||
x-clickhouse-defaults: &clickhouse-defaults
|
||||
x-clickhouse-defaults:
|
||||
&clickhouse-defaults
|
||||
restart: on-failure
|
||||
image: clickhouse/clickhouse-server:22.8.8-alpine
|
||||
tty: true
|
||||
@@ -14,7 +15,14 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
max-file: "3"
|
||||
healthcheck:
|
||||
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
|
||||
test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8123/ping"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
@@ -24,7 +32,8 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
soft: 262144
|
||||
hard: 262144
|
||||
|
||||
x-clickhouse-depend: &clickhouse-depend
|
||||
x-clickhouse-depend:
|
||||
&clickhouse-depend
|
||||
depends_on:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
@@ -37,7 +46,7 @@ services:
|
||||
|
||||
zookeeper-1:
|
||||
image: bitnami/zookeeper:3.7.1
|
||||
container_name: zookeeper-1
|
||||
container_name: signoz-zookeeper-1
|
||||
hostname: zookeeper-1
|
||||
user: root
|
||||
ports:
|
||||
@@ -54,7 +63,7 @@ services:
|
||||
|
||||
# zookeeper-2:
|
||||
# image: bitnami/zookeeper:3.7.0
|
||||
# container_name: zookeeper-2
|
||||
# container_name: signoz-zookeeper-2
|
||||
# hostname: zookeeper-2
|
||||
# user: root
|
||||
# ports:
|
||||
@@ -71,7 +80,7 @@ services:
|
||||
|
||||
# zookeeper-3:
|
||||
# image: bitnami/zookeeper:3.7.0
|
||||
# container_name: zookeeper-3
|
||||
# container_name: signoz-zookeeper-3
|
||||
# hostname: zookeeper-3
|
||||
# user: root
|
||||
# ports:
|
||||
@@ -88,7 +97,7 @@ services:
|
||||
|
||||
clickhouse:
|
||||
<<: *clickhouse-defaults
|
||||
container_name: clickhouse
|
||||
container_name: signoz-clickhouse
|
||||
hostname: clickhouse
|
||||
ports:
|
||||
- "9000:9000"
|
||||
@@ -105,7 +114,7 @@ services:
|
||||
|
||||
# clickhouse-2:
|
||||
# <<: *clickhouse-defaults
|
||||
# container_name: clickhouse-2
|
||||
# container_name: signoz-clickhouse-2
|
||||
# hostname: clickhouse-2
|
||||
# ports:
|
||||
# - "9001:9000"
|
||||
@@ -120,10 +129,10 @@ services:
|
||||
# - ./data/clickhouse-2/:/var/lib/clickhouse/
|
||||
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
|
||||
|
||||
|
||||
|
||||
# clickhouse-3:
|
||||
# <<: *clickhouse-defaults
|
||||
# container_name: clickhouse-3
|
||||
# container_name: signoz-clickhouse-3
|
||||
# hostname: clickhouse-3
|
||||
# ports:
|
||||
# - "9002:9000"
|
||||
@@ -139,7 +148,8 @@ services:
|
||||
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
|
||||
|
||||
alertmanager:
|
||||
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.1}
|
||||
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.2}
|
||||
container_name: signoz-alertmanager
|
||||
volumes:
|
||||
- ./data/alertmanager:/data
|
||||
depends_on:
|
||||
@@ -153,9 +163,9 @@ services:
|
||||
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
||||
|
||||
query-service:
|
||||
image: signoz/query-service:${DOCKER_TAG:-0.20.1}
|
||||
container_name: query-service
|
||||
command: ["-config=/root/config/prometheus.yml"]
|
||||
image: signoz/query-service:${DOCKER_TAG:-0.26.0}
|
||||
container_name: signoz-query-service
|
||||
command: [ "-config=/root/config/prometheus.yml" ]
|
||||
# ports:
|
||||
# - "6060:6060" # pprof port
|
||||
# - "8080:8080" # query-service port
|
||||
@@ -174,15 +184,22 @@ services:
|
||||
- DEPLOYMENT_TYPE=docker-standalone-amd
|
||||
restart: on-failure
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8080/api/v1/health"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
<<: *clickhouse-depend
|
||||
|
||||
frontend:
|
||||
image: signoz/frontend:${DOCKER_TAG:-0.20.1}
|
||||
container_name: frontend
|
||||
image: signoz/frontend:${DOCKER_TAG:-0.26.0}
|
||||
container_name: signoz-frontend
|
||||
restart: on-failure
|
||||
depends_on:
|
||||
- alertmanager
|
||||
@@ -193,8 +210,13 @@ services:
|
||||
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
||||
|
||||
otel-collector:
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.76.1}
|
||||
command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.5}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-config.yaml",
|
||||
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
|
||||
]
|
||||
user: root # required for reading docker container logs
|
||||
volumes:
|
||||
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
|
||||
@@ -205,8 +227,8 @@ services:
|
||||
- LOW_CARDINAL_EXCEPTION_GROUPING=false
|
||||
ports:
|
||||
# - "1777:1777" # pprof extension
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
# - "8888:8888" # OtelCollector internal metrics
|
||||
# - "8889:8889" # signoz spanmetrics exposed by the agent
|
||||
# - "9411:9411" # Zipkin port
|
||||
@@ -219,8 +241,13 @@ services:
|
||||
<<: *clickhouse-depend
|
||||
|
||||
otel-collector-metrics:
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.76.1}
|
||||
command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.5}
|
||||
container_name: signoz-otel-collector-metrics
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-metrics-config.yaml",
|
||||
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
|
||||
]
|
||||
volumes:
|
||||
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
|
||||
# ports:
|
||||
@@ -231,6 +258,17 @@ services:
|
||||
restart: on-failure
|
||||
<<: *clickhouse-depend
|
||||
|
||||
logspout:
|
||||
image: "gliderlabs/logspout:v3.2.14"
|
||||
container_name: signoz-logspout
|
||||
volumes:
|
||||
- /etc/hostname:/etc/host_hostname:ro
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
command: syslog+tcp://otel-collector:2255
|
||||
depends_on:
|
||||
- otel-collector
|
||||
restart: on-failure
|
||||
|
||||
hotrod:
|
||||
image: jaegertracing/example-hotrod:1.30
|
||||
container_name: hotrod
|
||||
@@ -238,12 +276,12 @@ services:
|
||||
options:
|
||||
max-size: 50m
|
||||
max-file: "3"
|
||||
command: ["all"]
|
||||
command: [ "all" ]
|
||||
environment:
|
||||
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
|
||||
|
||||
load-hotrod:
|
||||
image: "grubykarol/locust:1.2.3-python3.9-alpine3.12"
|
||||
image: "signoz/locust:1.2.3"
|
||||
container_name: load-hotrod
|
||||
hostname: load-hotrod
|
||||
environment:
|
||||
|
||||
@@ -1,29 +1,21 @@
|
||||
receivers:
|
||||
filelog/dockercontainers:
|
||||
include: [ "/var/lib/docker/containers/*/*.log" ]
|
||||
start_at: end
|
||||
include_file_path: true
|
||||
include_file_name: false
|
||||
tcplog/docker:
|
||||
listen_address: "0.0.0.0:2255"
|
||||
operators:
|
||||
- type: json_parser
|
||||
id: parser-docker
|
||||
output: extract_metadata_from_filepath
|
||||
timestamp:
|
||||
parse_from: attributes.time
|
||||
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
|
||||
- type: regex_parser
|
||||
id: extract_metadata_from_filepath
|
||||
regex: '^.*containers/(?P<container_id>[^_]+)/.*log$'
|
||||
parse_from: attributes["log.file.path"]
|
||||
output: parse_body
|
||||
- type: move
|
||||
id: parse_body
|
||||
from: attributes.log
|
||||
to: body
|
||||
output: time
|
||||
- type: remove
|
||||
id: time
|
||||
field: attributes.time
|
||||
- type: regex_parser
|
||||
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
|
||||
timestamp:
|
||||
parse_from: attributes.timestamp
|
||||
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
|
||||
- type: move
|
||||
from: attributes["body"]
|
||||
to: body
|
||||
- type: remove
|
||||
field: attributes.timestamp
|
||||
# please remove names from below if you want to collect logs from them
|
||||
- type: filter
|
||||
id: signoz_logs_filter
|
||||
expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
|
||||
opencensus:
|
||||
endpoint: 0.0.0.0:55678
|
||||
otlp/spanmetrics:
|
||||
@@ -205,6 +197,6 @@ service:
|
||||
receivers: [otlp/spanmetrics]
|
||||
exporters: [prometheus]
|
||||
logs:
|
||||
receivers: [otlp, filelog/dockercontainers]
|
||||
receivers: [otlp, tcplog/docker]
|
||||
processors: [logstransform/internal, batch]
|
||||
exporters: [clickhouselogsexporter]
|
||||
@@ -36,9 +36,9 @@ is_mac() {
|
||||
[[ $OSTYPE == darwin* ]]
|
||||
}
|
||||
|
||||
# is_arm64(){
|
||||
# [[ `uname -m` == 'arm64' ]]
|
||||
# }
|
||||
is_arm64(){
|
||||
[[ `uname -m` == 'arm64' || `uname -m` == 'aarch64' ]]
|
||||
}
|
||||
|
||||
check_os() {
|
||||
if is_mac; then
|
||||
@@ -48,6 +48,16 @@ check_os() {
|
||||
return
|
||||
fi
|
||||
|
||||
if is_arm64; then
|
||||
arch="arm64"
|
||||
arch_official="aarch64"
|
||||
else
|
||||
arch="amd64"
|
||||
arch_official="x86_64"
|
||||
fi
|
||||
|
||||
platform=$(uname -s | tr '[:upper:]' '[:lower:]')
|
||||
|
||||
os_name="$(cat /etc/*-release | awk -F= '$1 == "NAME" { gsub(/"/, ""); print $2; exit }')"
|
||||
|
||||
case "$os_name" in
|
||||
@@ -143,7 +153,7 @@ install_docker() {
|
||||
$apt_cmd install software-properties-common gnupg-agent
|
||||
curl -fsSL "https://download.docker.com/linux/$os/gpg" | $sudo_cmd apt-key add -
|
||||
$sudo_cmd add-apt-repository \
|
||||
"deb [arch=amd64] https://download.docker.com/linux/$os $(lsb_release -cs) stable"
|
||||
"deb [arch=$arch] https://download.docker.com/linux/$os $(lsb_release -cs) stable"
|
||||
$apt_cmd update
|
||||
echo "Installing docker"
|
||||
$apt_cmd install docker-ce docker-ce-cli containerd.io
|
||||
@@ -178,12 +188,20 @@ install_docker() {
|
||||
|
||||
}
|
||||
|
||||
compose_version () {
|
||||
local compose_version
|
||||
compose_version="$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep 'tag_name' | cut -d\" -f4)"
|
||||
echo "${compose_version:-v2.18.1}"
|
||||
}
|
||||
|
||||
install_docker_compose() {
|
||||
if [[ $package_manager == "apt-get" || $package_manager == "zypper" || $package_manager == "yum" ]]; then
|
||||
if [[ ! -f /usr/bin/docker-compose ]];then
|
||||
echo "++++++++++++++++++++++++"
|
||||
echo "Installing docker-compose"
|
||||
$sudo_cmd curl -L "https://github.com/docker/compose/releases/download/1.26.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
|
||||
compose_url="https://github.com/docker/compose/releases/download/$(compose_version)/docker-compose-$platform-$arch_official"
|
||||
echo "Downloading docker-compose from $compose_url"
|
||||
$sudo_cmd curl -L "$compose_url" -o /usr/local/bin/docker-compose
|
||||
$sudo_cmd chmod +x /usr/local/bin/docker-compose
|
||||
$sudo_cmd ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
|
||||
echo "docker-compose installed!"
|
||||
|
||||
@@ -39,6 +39,9 @@ COPY --from=builder /go/src/github.com/signoz/signoz/ee/query-service/bin/query-
|
||||
# copy prometheus YAML config
|
||||
COPY pkg/query-service/config/prometheus.yml /root/config/prometheus.yml
|
||||
|
||||
# Make query-service executable for non-root users
|
||||
RUN chmod 755 /root /root/query-service
|
||||
|
||||
# run the binary
|
||||
ENTRYPOINT ["./query-service"]
|
||||
|
||||
|
||||
@@ -2,12 +2,14 @@ package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"go.signoz.io/signoz/ee/query-service/dao"
|
||||
"go.signoz.io/signoz/ee/query-service/interfaces"
|
||||
"go.signoz.io/signoz/ee/query-service/license"
|
||||
baseapp "go.signoz.io/signoz/pkg/query-service/app"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
|
||||
basemodel "go.signoz.io/signoz/pkg/query-service/model"
|
||||
rules "go.signoz.io/signoz/pkg/query-service/rules"
|
||||
@@ -15,11 +17,18 @@ import (
|
||||
)
|
||||
|
||||
type APIHandlerOptions struct {
|
||||
DataConnector interfaces.DataConnector
|
||||
AppDao dao.ModelDao
|
||||
RulesManager *rules.Manager
|
||||
FeatureFlags baseint.FeatureLookup
|
||||
LicenseManager *license.Manager
|
||||
DataConnector interfaces.DataConnector
|
||||
SkipConfig *basemodel.SkipConfig
|
||||
PreferDelta bool
|
||||
PreferSpanMetrics bool
|
||||
MaxIdleConns int
|
||||
MaxOpenConns int
|
||||
DialTimeout time.Duration
|
||||
AppDao dao.ModelDao
|
||||
RulesManager *rules.Manager
|
||||
FeatureFlags baseint.FeatureLookup
|
||||
LicenseManager *license.Manager
|
||||
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
|
||||
}
|
||||
|
||||
type APIHandler struct {
|
||||
@@ -31,10 +40,18 @@ type APIHandler struct {
|
||||
func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
|
||||
|
||||
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
|
||||
Reader: opts.DataConnector,
|
||||
AppDao: opts.AppDao,
|
||||
RuleManager: opts.RulesManager,
|
||||
FeatureFlags: opts.FeatureFlags})
|
||||
Reader: opts.DataConnector,
|
||||
SkipConfig: opts.SkipConfig,
|
||||
PerferDelta: opts.PreferDelta,
|
||||
PreferSpanMetrics: opts.PreferSpanMetrics,
|
||||
MaxIdleConns: opts.MaxIdleConns,
|
||||
MaxOpenConns: opts.MaxOpenConns,
|
||||
DialTimeout: opts.DialTimeout,
|
||||
AppDao: opts.AppDao,
|
||||
RuleManager: opts.RulesManager,
|
||||
FeatureFlags: opts.FeatureFlags,
|
||||
LogsParsingPipelineController: opts.LogsParsingPipelineController,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -2,6 +2,8 @@ package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
basemodel "go.signoz.io/signoz/pkg/query-service/model"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -10,5 +12,13 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
|
||||
ah.HandleError(w, err, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if ah.opts.PreferSpanMetrics {
|
||||
for idx := range featureSet {
|
||||
feature := &featureSet[idx]
|
||||
if feature.Name == basemodel.UseSpanMetrics {
|
||||
featureSet[idx].Active = true
|
||||
}
|
||||
}
|
||||
}
|
||||
ah.Respond(w, featureSet)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
@@ -15,8 +17,15 @@ type ClickhouseReader struct {
|
||||
*basechr.ClickHouseReader
|
||||
}
|
||||
|
||||
func NewDataConnector(localDB *sqlx.DB, promConfigPath string, lm interfaces.FeatureLookup) *ClickhouseReader {
|
||||
ch := basechr.NewReader(localDB, promConfigPath, lm)
|
||||
func NewDataConnector(
|
||||
localDB *sqlx.DB,
|
||||
promConfigPath string,
|
||||
lm interfaces.FeatureLookup,
|
||||
maxIdleConns int,
|
||||
maxOpenConns int,
|
||||
dialTimeout time.Duration,
|
||||
) *ClickhouseReader {
|
||||
ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout)
|
||||
return &ClickhouseReader{
|
||||
conn: ch.GetConn(),
|
||||
appdb: localDB,
|
||||
|
||||
@@ -31,6 +31,7 @@ import (
|
||||
baseapp "go.signoz.io/signoz/pkg/query-service/app"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
|
||||
baseexplorer "go.signoz.io/signoz/pkg/query-service/app/explorer"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/opamp"
|
||||
opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model"
|
||||
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
|
||||
@@ -49,12 +50,18 @@ import (
|
||||
const AppDbEngine = "sqlite"
|
||||
|
||||
type ServerOptions struct {
|
||||
PromConfigPath string
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
PromConfigPath string
|
||||
SkipTopLvlOpsPath string
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
// alert specific params
|
||||
DisableRules bool
|
||||
RuleRepoURL string
|
||||
DisableRules bool
|
||||
RuleRepoURL string
|
||||
PreferDelta bool
|
||||
PreferSpanMetrics bool
|
||||
MaxIdleConns int
|
||||
MaxOpenConns int
|
||||
DialTimeout time.Duration
|
||||
}
|
||||
|
||||
// Server runs HTTP api service
|
||||
@@ -75,6 +82,9 @@ type Server struct {
|
||||
// feature flags
|
||||
featureLookup baseint.FeatureLookup
|
||||
|
||||
// Usage manager
|
||||
usageManager *usage.Manager
|
||||
|
||||
unavailableChannel chan healthcheck.Status
|
||||
}
|
||||
|
||||
@@ -115,11 +125,26 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
storage := os.Getenv("STORAGE")
|
||||
if storage == "clickhouse" {
|
||||
zap.S().Info("Using ClickHouse as datastore ...")
|
||||
qb := db.NewDataConnector(localDB, serverOptions.PromConfigPath, lm)
|
||||
qb := db.NewDataConnector(
|
||||
localDB,
|
||||
serverOptions.PromConfigPath,
|
||||
lm,
|
||||
serverOptions.MaxIdleConns,
|
||||
serverOptions.MaxOpenConns,
|
||||
serverOptions.DialTimeout,
|
||||
)
|
||||
go qb.Start(readerReady)
|
||||
reader = qb
|
||||
} else {
|
||||
return nil, fmt.Errorf("Storage type: %s is not supported in query service", storage)
|
||||
return nil, fmt.Errorf("storage type: %s is not supported in query service", storage)
|
||||
}
|
||||
skipConfig := &basemodel.SkipConfig{}
|
||||
if serverOptions.SkipTopLvlOpsPath != "" {
|
||||
// read skip config
|
||||
skipConfig, err = basemodel.ReadSkipConfig(serverOptions.SkipTopLvlOpsPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
<-readerReady
|
||||
@@ -146,6 +171,12 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// ingestion pipelines manager
|
||||
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(localDB, "sqlite")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// start the usagemanager
|
||||
usageManager, err := usage.New("sqlite", localDB, lm.GetRepo(), reader.GetConn())
|
||||
if err != nil {
|
||||
@@ -159,11 +190,18 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
telemetry.GetInstance().SetReader(reader)
|
||||
|
||||
apiOpts := api.APIHandlerOptions{
|
||||
DataConnector: reader,
|
||||
AppDao: modelDao,
|
||||
RulesManager: rm,
|
||||
FeatureFlags: lm,
|
||||
LicenseManager: lm,
|
||||
DataConnector: reader,
|
||||
SkipConfig: skipConfig,
|
||||
PreferDelta: serverOptions.PreferDelta,
|
||||
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
|
||||
MaxIdleConns: serverOptions.MaxIdleConns,
|
||||
MaxOpenConns: serverOptions.MaxOpenConns,
|
||||
DialTimeout: serverOptions.DialTimeout,
|
||||
AppDao: modelDao,
|
||||
RulesManager: rm,
|
||||
FeatureFlags: lm,
|
||||
LicenseManager: lm,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
}
|
||||
|
||||
apiHandler, err := api.NewAPIHandler(apiOpts)
|
||||
@@ -177,6 +215,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
ruleManager: rm,
|
||||
serverOptions: serverOptions,
|
||||
unavailableChannel: make(chan healthcheck.Status),
|
||||
usageManager: usageManager,
|
||||
}
|
||||
|
||||
httpServer, err := s.createPublicServer(apiHandler)
|
||||
@@ -277,7 +316,7 @@ func loggingMiddleware(next http.Handler) http.Handler {
|
||||
path, _ := route.GetPathTemplate()
|
||||
startTime := time.Now()
|
||||
next.ServeHTTP(w, r)
|
||||
zap.S().Info(path, "\ttimeTaken: ", time.Now().Sub(startTime))
|
||||
zap.L().Info(path+"\ttimeTaken:"+time.Now().Sub(startTime).String(), zap.Duration("timeTaken", time.Now().Sub(startTime)), zap.String("path", path))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -289,7 +328,7 @@ func loggingMiddlewarePrivate(next http.Handler) http.Handler {
|
||||
path, _ := route.GetPathTemplate()
|
||||
startTime := time.Now()
|
||||
next.ServeHTTP(w, r)
|
||||
zap.S().Info(path, "\tprivatePort: true", "\ttimeTaken: ", time.Now().Sub(startTime))
|
||||
zap.L().Info(path+"\tprivatePort: true \ttimeTaken"+time.Now().Sub(startTime).String(), zap.Duration("timeTaken", time.Now().Sub(startTime)), zap.String("path", path), zap.Bool("tprivatePort", true))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -538,6 +577,9 @@ func (s *Server) Stop() error {
|
||||
s.ruleManager.Stop()
|
||||
}
|
||||
|
||||
// stop usage manager
|
||||
s.usageManager.Stop()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -3,30 +3,78 @@ package main
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"log"
|
||||
"os"
|
||||
"os/signal"
|
||||
"strconv"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"go.opentelemetry.io/otel/sdk/resource"
|
||||
semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
|
||||
"go.signoz.io/signoz/ee/query-service/app"
|
||||
"go.signoz.io/signoz/pkg/query-service/auth"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
|
||||
"go.signoz.io/signoz/pkg/query-service/version"
|
||||
"google.golang.org/grpc"
|
||||
|
||||
zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
|
||||
zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"
|
||||
|
||||
"go.uber.org/zap"
|
||||
"go.uber.org/zap/zapcore"
|
||||
)
|
||||
|
||||
func initZapLog() *zap.Logger {
|
||||
func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
|
||||
config := zap.NewDevelopmentConfig()
|
||||
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
|
||||
defer stop()
|
||||
|
||||
config.EncoderConfig.EncodeDuration = zapcore.StringDurationEncoder
|
||||
otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
|
||||
consoleEncoder := zapcore.NewConsoleEncoder(config.EncoderConfig)
|
||||
defaultLogLevel := zapcore.DebugLevel
|
||||
config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
|
||||
config.EncoderConfig.TimeKey = "timestamp"
|
||||
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
|
||||
logger, _ := config.Build()
|
||||
|
||||
res := resource.NewWithAttributes(
|
||||
semconv.SchemaURL,
|
||||
semconv.ServiceNameKey.String("query-service"),
|
||||
)
|
||||
|
||||
core := zapcore.NewTee(
|
||||
zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
|
||||
)
|
||||
|
||||
if enableQueryServiceLogOTLPExport == true {
|
||||
conn, err := grpc.DialContext(ctx, constants.OTLPTarget, grpc.WithBlock(), grpc.WithInsecure(), grpc.WithTimeout(time.Second*30))
|
||||
if err != nil {
|
||||
log.Println("failed to connect to otlp collector to export query service logs with error:", err)
|
||||
} else {
|
||||
logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize)
|
||||
if err != nil {
|
||||
logExportBatchSizeInt = 1000
|
||||
}
|
||||
ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{
|
||||
BatchSize: logExportBatchSizeInt,
|
||||
ResourceSchema: semconv.SchemaURL,
|
||||
Resource: res,
|
||||
}))
|
||||
core = zapcore.NewTee(
|
||||
zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
|
||||
zapcore.NewCore(otlpEncoder, zapcore.NewMultiWriteSyncer(ws), defaultLogLevel),
|
||||
)
|
||||
}
|
||||
}
|
||||
logger := zap.New(core, zap.AddCaller(), zap.AddStacktrace(zapcore.ErrorLevel))
|
||||
|
||||
return logger
|
||||
}
|
||||
|
||||
func main() {
|
||||
var promConfigPath string
|
||||
var promConfigPath, skipTopLvlOpsPath string
|
||||
|
||||
// disables rule execution but allows change to the rule definition
|
||||
var disableRules bool
|
||||
@@ -34,12 +82,27 @@ func main() {
|
||||
// the url used to build link in the alert messages in slack and other systems
|
||||
var ruleRepoURL string
|
||||
|
||||
var enableQueryServiceLogOTLPExport bool
|
||||
var preferDelta bool
|
||||
var preferSpanMetrics bool
|
||||
|
||||
var maxIdleConns int
|
||||
var maxOpenConns int
|
||||
var dialTimeout time.Duration
|
||||
|
||||
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
|
||||
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
|
||||
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
|
||||
flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
|
||||
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
|
||||
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
|
||||
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
|
||||
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
|
||||
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
|
||||
flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
|
||||
flag.Parse()
|
||||
|
||||
loggerMgr := initZapLog()
|
||||
loggerMgr := initZapLog(enableQueryServiceLogOTLPExport)
|
||||
zap.ReplaceGlobals(loggerMgr)
|
||||
defer loggerMgr.Sync() // flushes buffer, if any
|
||||
|
||||
@@ -47,11 +110,17 @@ func main() {
|
||||
version.PrintVersion()
|
||||
|
||||
serverOptions := &app.ServerOptions{
|
||||
HTTPHostPort: baseconst.HTTPHostPort,
|
||||
PromConfigPath: promConfigPath,
|
||||
PrivateHostPort: baseconst.PrivateHostPort,
|
||||
DisableRules: disableRules,
|
||||
RuleRepoURL: ruleRepoURL,
|
||||
HTTPHostPort: baseconst.HTTPHostPort,
|
||||
PromConfigPath: promConfigPath,
|
||||
SkipTopLvlOpsPath: skipTopLvlOpsPath,
|
||||
PreferDelta: preferDelta,
|
||||
PreferSpanMetrics: preferSpanMetrics,
|
||||
PrivateHostPort: baseconst.PrivateHostPort,
|
||||
DisableRules: disableRules,
|
||||
RuleRepoURL: ruleRepoURL,
|
||||
MaxIdleConns: maxIdleConns,
|
||||
MaxOpenConns: maxOpenConns,
|
||||
DialTimeout: dialTimeout,
|
||||
}
|
||||
|
||||
// Read the jwt secret key
|
||||
@@ -85,6 +154,7 @@ func main() {
|
||||
logger.Info("Received HealthCheck status: ", zap.Int("status", int(status)))
|
||||
case <-signalsChannel:
|
||||
logger.Fatal("Received OS Interrupt Signal ... ")
|
||||
server.Stop()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,6 +60,41 @@ var BasicPlan = basemodel.FeatureSet{
|
||||
UsageLimit: 5,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelSlack,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelWebhook,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelPagerduty,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelMsTeams,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
|
||||
|
||||
var ProPlan = basemodel.FeatureSet{
|
||||
@@ -105,6 +140,41 @@ var ProPlan = basemodel.FeatureSet{
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelSlack,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelWebhook,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelPagerduty,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelMsTeams,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
|
||||
|
||||
var EnterprisePlan = basemodel.FeatureSet{
|
||||
@@ -150,4 +220,39 @@ var EnterprisePlan = basemodel.FeatureSet{
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelSlack,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelWebhook,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelPagerduty,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelMsTeams,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
"github.com/go-co-op/gocron"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
@@ -28,9 +29,6 @@ const (
|
||||
)
|
||||
|
||||
var (
|
||||
// send usage every 24 hour
|
||||
uploadFrequency = 24 * time.Hour
|
||||
|
||||
locker = stateUnlocked
|
||||
)
|
||||
|
||||
@@ -39,12 +37,7 @@ type Manager struct {
|
||||
|
||||
licenseRepo *license.Repo
|
||||
|
||||
// end the usage routine, this is important to gracefully
|
||||
// stopping usage reporting and protect in-consistent updates
|
||||
done chan struct{}
|
||||
|
||||
// terminated waits for the UsageExporter go routine to end
|
||||
terminated chan struct{}
|
||||
scheduler *gocron.Scheduler
|
||||
}
|
||||
|
||||
func New(dbType string, db *sqlx.DB, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn) (*Manager, error) {
|
||||
@@ -53,6 +46,7 @@ func New(dbType string, db *sqlx.DB, licenseRepo *license.Repo, clickhouseConn c
|
||||
// repository: repo,
|
||||
clickhouseConn: clickhouseConn,
|
||||
licenseRepo: licenseRepo,
|
||||
scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
@@ -64,37 +58,30 @@ func (lm *Manager) Start() error {
|
||||
return fmt.Errorf("usage exporter is locked")
|
||||
}
|
||||
|
||||
go lm.UsageExporter(context.Background())
|
||||
_, err := lm.scheduler.Do(func() { lm.UploadUsage() })
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// upload usage once when starting the service
|
||||
lm.UploadUsage()
|
||||
|
||||
lm.scheduler.StartAsync()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) UsageExporter(ctx context.Context) {
|
||||
defer close(lm.terminated)
|
||||
|
||||
uploadTicker := time.NewTicker(uploadFrequency)
|
||||
defer uploadTicker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-lm.done:
|
||||
return
|
||||
case <-uploadTicker.C:
|
||||
lm.UploadUsage(ctx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (lm *Manager) UploadUsage(ctx context.Context) error {
|
||||
func (lm *Manager) UploadUsage() {
|
||||
ctx := context.Background()
|
||||
// check if license is present or not
|
||||
license, err := lm.licenseRepo.GetActiveLicense(context.Background())
|
||||
license, err := lm.licenseRepo.GetActiveLicense(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get active license")
|
||||
zap.S().Errorf("failed to get active license: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
if license == nil {
|
||||
// we will not start the usage reporting if license is not present.
|
||||
zap.S().Info("no license present, skipping usage reporting")
|
||||
return nil
|
||||
return
|
||||
}
|
||||
|
||||
usages := []model.UsageDB{}
|
||||
@@ -120,7 +107,8 @@ func (lm *Manager) UploadUsage(ctx context.Context) error {
|
||||
dbusages := []model.UsageDB{}
|
||||
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
|
||||
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
|
||||
return err
|
||||
zap.S().Errorf("failed to get usage from clickhouse: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
for _, u := range dbusages {
|
||||
u.Type = db
|
||||
@@ -130,7 +118,7 @@ func (lm *Manager) UploadUsage(ctx context.Context) error {
|
||||
|
||||
if len(usages) <= 0 {
|
||||
zap.S().Info("no snapshots to upload, skipping.")
|
||||
return nil
|
||||
return
|
||||
}
|
||||
|
||||
zap.S().Info("uploading usage data")
|
||||
@@ -139,13 +127,15 @@ func (lm *Manager) UploadUsage(ctx context.Context) error {
|
||||
for _, usage := range usages {
|
||||
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
|
||||
if err != nil {
|
||||
return err
|
||||
zap.S().Errorf("error while decrypting usage data: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
usageData := model.Usage{}
|
||||
err = json.Unmarshal(usageDataBytes, &usageData)
|
||||
if err != nil {
|
||||
return err
|
||||
zap.S().Errorf("error while unmarshalling usage data: %v", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
usageData.CollectorID = usage.CollectorID
|
||||
@@ -160,20 +150,16 @@ func (lm *Manager) UploadUsage(ctx context.Context) error {
|
||||
LicenseKey: key,
|
||||
Usage: usagesPayload,
|
||||
}
|
||||
err = lm.UploadUsageWithExponentalBackOff(ctx, payload)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
lm.UploadUsageWithExponentalBackOff(ctx, payload)
|
||||
}
|
||||
|
||||
func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload model.UsagePayload) error {
|
||||
func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload model.UsagePayload) {
|
||||
for i := 1; i <= MaxRetries; i++ {
|
||||
apiErr := licenseserver.SendUsage(ctx, payload)
|
||||
if apiErr != nil && i == MaxRetries {
|
||||
zap.S().Errorf("retries stopped : %v", zap.Error(apiErr))
|
||||
// not returning error here since it is captured in the failed count
|
||||
return nil
|
||||
return
|
||||
} else if apiErr != nil {
|
||||
// sleeping for exponential backoff
|
||||
sleepDuration := RetryInterval * time.Duration(i)
|
||||
@@ -183,11 +169,14 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
|
||||
break
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) Stop() {
|
||||
close(lm.done)
|
||||
lm.scheduler.Stop()
|
||||
|
||||
zap.S().Debug("sending usage data before shutting down")
|
||||
// send usage before shutting down
|
||||
lm.UploadUsage()
|
||||
|
||||
atomic.StoreUint32(&locker, stateUnlocked)
|
||||
<-lm.terminated
|
||||
}
|
||||
|
||||
56
frontend/CONTRIBUTIONS.md
Normal file
@@ -0,0 +1,56 @@
# **Frontend Guidelines**

Embrace the spirit of collaboration and contribute to the success of our open-source project by adhering to these frontend development guidelines with precision and passion.

### React and Components

- Strive to create small and modular components, ensuring they are divided into individual pieces for improved maintainability and reusability.
- Avoid passing inline objects or functions as props to React components, as they are recreated with each render cycle.
  Utilize careful memoization of functions and variables, balancing optimization efforts to prevent potential performance issues. [When to useMemo and useCallback](https://kentcdodds.com/blog/usememo-and-usecallback) by Kent C. Dodds is quite helpful for this scenario.
- Minimize the use of inline functions whenever possible to enhance code readability and improve overall comprehension.
- Employ the appropriate usage of useMemo and useCallback hooks for effective memoization of values and functions.
- Determine the appropriate placement of components:
  - Pages should contain an aggregation of all components and containers.
  - Commonly used components should reside in the 'components' directory.
  - Parent components responsible for data manipulation should be placed in the 'container' directory.
- Strategically decide where to store data, either in global state or local components:
  - Begin by storing data in local components and gradually transition to global state as necessary.
- Avoid importing the default `React` namespace; the project is on React `v18`, so `import React from 'react'` is no longer needed.
- When a function requires more than three arguments (except when memoized), encapsulate them within an object to enhance readability and reduce potential parameter complexity.
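For example, here is a minimal sketch of the memoization guidance above (the `Filters` and `TracesToolbar` names are illustrative, not components from this codebase): the parent memoizes the object and the callback it passes down, so the memoized child does not re-render on every parent render.

```tsx
import { memo, useCallback, useMemo, useState } from 'react';

interface FiltersProps {
  options: { showErrors: boolean };
  onToggle: () => void;
}

// The child is memoized, so it only re-renders when its props actually change
const Filters = memo(function Filters({ options, onToggle }: FiltersProps): JSX.Element {
  return (
    <button type="button" onClick={onToggle}>
      {options.showErrors ? 'Hide errors' : 'Show errors'}
    </button>
  );
});

function TracesToolbar(): JSX.Element {
  const [showErrors, setShowErrors] = useState(false);

  // Memoized references keep <Filters /> from re-rendering needlessly
  const options = useMemo(() => ({ showErrors }), [showErrors]);
  const onToggle = useCallback(() => setShowErrors((prev) => !prev), []);

  return <Filters options={options} onToggle={onToggle} />;
}

export default TracesToolbar;
```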

### API and Services

- Avoid incorporating business logic within API/Service files to maintain flexibility for consumers to handle it according to their specific needs.
- Employ the use of the useQuery hook for fetching data and the useMutation hook for updating data, ensuring a consistent and efficient approach.
- Utilize the useQueryClient hook when updating the cache, facilitating smooth and effective management of data within the application.

**Note -** In our project, we are utilizing React Query v3. To gain a comprehensive understanding of its features and implementation, we recommend referring to the [official documentation](https://tanstack.com/query/v3/docs/react/overview) as a valuable resource.
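As an illustration only (the endpoint paths and hook names below are hypothetical, not actual SigNoz services), data hooks built on React Query v3 might look like this: the service functions stay free of business logic, and the cache is invalidated through useQueryClient after a mutation.

```tsx
import axios from 'axios';
import {
  useMutation,
  UseMutationResult,
  useQuery,
  useQueryClient,
  UseQueryResult,
} from 'react-query';

interface Channel {
  id: string;
  name: string;
}

// Hypothetical service functions: fetch/update only, no business logic
const getChannels = async (): Promise<Channel[]> => {
  const response = await axios.get<Channel[]>('/api/v1/channels');
  return response.data;
};

const deleteChannel = async (id: string): Promise<void> => {
  await axios.delete(`/api/v1/channels/${id}`);
};

export function useChannels(): UseQueryResult<Channel[]> {
  // useQuery caches the list under the 'channels' key
  return useQuery('channels', getChannels);
}

export function useDeleteChannel(): UseMutationResult<void, unknown, string> {
  const queryClient = useQueryClient();
  // useMutation for updates; invalidate the cached list on success
  return useMutation(deleteChannel, {
    onSuccess: () => queryClient.invalidateQueries('channels'),
  });
}
```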

### Styling

- Refrain from using inline styling within React components to maintain separation of concerns and promote a more maintainable codebase.
- Opt for using the rem unit instead of px values to ensure better scalability and responsiveness across different devices and screen sizes.
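A small sketch of the rem guidance, assuming a styled-components setup (the component name is illustrative):

```ts
import styled from 'styled-components';

// 1.25rem scales with the root font size; a hard-coded 20px would not
export const CardTitle = styled.h3`
  font-size: 1.25rem;
  margin-bottom: 0.5rem;
`;
```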

### Linting and Setup

- It is crucial to refrain from disabling ESLint and TypeScript errors within the project. If there is a specific rule that needs to be disabled, provide a clear and justified explanation for doing so. Maintaining the integrity of the linting and type-checking processes ensures code quality and consistency throughout the codebase.
- In our project, we rely on several essential ESLint plugins, namely:
  - [plugin:@typescript-eslint](https://typescript-eslint.io/rules/)
  - [airbnb styleguide](https://github.com/airbnb/javascript)
  - [plugin:sonarjs](https://github.com/SonarSource/eslint-plugin-sonarjs)

To ensure compliance with our coding standards and best practices, we encourage you to refer to the documentation of these plugins. Familiarizing yourself with the ESLint rules they provide will help maintain code quality and consistency throughout the project.

### Naming Conventions

- Ensure that component names are written in Capital Case, while the folder names should be in lowercase.
- Keep all other elements, such as variables, functions, and file names, in lowercase.

### Miscellaneous

- Ensure that functions are modularized and follow the Single Responsibility Principle (SRP). The function's name should accurately convey its purpose and functionality.
- Semantic division of functions into smaller units should be prioritized for improved readability and maintainability.
  Aim to keep functions concise and avoid exceeding a maximum length of 40 lines to enhance code understandability and ease of maintenance.
- Eliminate the use of hard-coded strings or enums, favoring a more flexible and maintainable approach.
- Strive to internationalize all strings within the codebase to support localization and improve accessibility for users across different languages.
- Minimize the usage of multiple if statements or switch cases within a function. Consider creating a mapper and separating logic into multiple functions for better code organization.
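For instance, a hypothetical mapper that replaces a switch over severities (the names are illustrative):

```ts
type Severity = 'critical' | 'error' | 'warning' | 'info';

// A lookup object instead of a switch: adding a severity is a one-line change
const severityColor: Record<Severity, string> = {
  critical: 'red',
  error: 'volcano',
  warning: 'orange',
  info: 'blue',
};

export function getSeverityColor(severity: Severity): string {
  return severityColor[severity];
}
```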
@@ -24,7 +24,7 @@ COPY . .
|
||||
RUN yarn build
|
||||
|
||||
|
||||
FROM nginx:1.18-alpine
|
||||
FROM nginx:1.24.0-alpine
|
||||
|
||||
COPY conf/default.conf /etc/nginx/conf.d/default.conf
|
||||
|
||||
|
||||
@@ -21,7 +21,9 @@ const config: Config.InitialOptions = {
|
||||
'^.+\\.(ts|tsx)?$': 'ts-jest',
|
||||
'^.+\\.(js|jsx)$': 'babel-jest',
|
||||
},
|
||||
transformIgnorePatterns: ['node_modules/(?!(lodash-es)/)'],
|
||||
transformIgnorePatterns: [
|
||||
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend)/)',
|
||||
],
|
||||
setupFilesAfterEnv: ['<rootDir>jest.setup.ts'],
|
||||
testPathIgnorePatterns: ['/node_modules/', '/public/'],
|
||||
moduleDirectories: ['node_modules', 'src'],
|
||||
|
||||
@@ -46,6 +46,7 @@
|
||||
"chartjs-adapter-date-fns": "^2.0.0",
|
||||
"chartjs-plugin-annotation": "^1.4.0",
|
||||
"color": "^4.2.1",
|
||||
"color-alpha": "1.1.3",
|
||||
"cross-env": "^7.0.3",
|
||||
"css-loader": "4.3.0",
|
||||
"css-minimizer-webpack-plugin": "^3.2.0",
|
||||
@@ -53,6 +54,7 @@
|
||||
"dompurify": "3.0.0",
|
||||
"dotenv": "8.2.0",
|
||||
"event-source-polyfill": "1.0.31",
|
||||
"eventemitter3": "5.0.1",
|
||||
"file-loader": "6.1.1",
|
||||
"fontfaceobserver": "2.3.0",
|
||||
"history": "4.10.1",
|
||||
@@ -68,9 +70,14 @@
|
||||
"mini-css-extract-plugin": "2.4.5",
|
||||
"papaparse": "5.4.1",
|
||||
"react": "18.2.0",
|
||||
"react-addons-update": "15.6.3",
|
||||
"react-dnd": "16.0.1",
|
||||
"react-dnd-html5-backend": "16.0.1",
|
||||
"react-dom": "18.2.0",
|
||||
"react-drag-listview": "2.0.0",
|
||||
"react-force-graph": "^1.41.0",
|
||||
"react-grid-layout": "^1.3.4",
|
||||
"react-helmet-async": "1.3.0",
|
||||
"react-i18next": "^11.16.1",
|
||||
"react-intersection-observer": "9.4.1",
|
||||
"react-query": "^3.34.19",
|
||||
@@ -132,8 +139,10 @@
|
||||
"@types/node": "^16.10.3",
|
||||
"@types/papaparse": "5.3.7",
|
||||
"@types/react": "18.0.26",
|
||||
"@types/react-addons-update": "0.14.21",
|
||||
"@types/react-dom": "18.0.10",
|
||||
"@types/react-grid-layout": "^1.1.2",
|
||||
"@types/react-helmet-async": "1.0.3",
|
||||
"@types/react-redux": "^7.1.11",
|
||||
"@types/react-resizable": "3.0.3",
|
||||
"@types/react-router-dom": "^5.1.6",
|
||||
|
||||
@@ -1,112 +1,113 @@
|
||||
{
|
||||
"target_missing": "Please enter a threshold to proceed",
|
||||
"rule_test_fired": "Test notification sent successfully",
|
||||
"no_alerts_found": "No alerts found during the evaluation. This happens when rule condition is unsatisfied. You may adjust the rule threshold and retry.",
|
||||
"button_testrule": "Test Notification",
|
||||
"label_channel_select": "Notification Channels",
|
||||
"placeholder_channel_select": "select one or more channels",
|
||||
"channel_select_tooltip": "Leave empty to send this alert on all the configured channels",
|
||||
"preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.",
|
||||
"preview_chart_threshold_label": "Threshold",
|
||||
"placeholder_label_key_pair": "Click here to enter a label (key value pairs)",
|
||||
"button_yes": "Yes",
|
||||
"button_no": "No",
|
||||
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
|
||||
"remove_label_success": "Labels cleared",
|
||||
"alert_form_step1": "Step 1 - Define the metric",
|
||||
"alert_form_step2": "Step 2 - Define Alert Conditions",
|
||||
"alert_form_step3": "Step 3 - Alert Configuration",
|
||||
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
|
||||
"confirm_save_title": "Save Changes",
|
||||
"confirm_save_content_part1": "Your alert built with",
|
||||
"confirm_save_content_part2": "query will be saved. Press OK to confirm.",
|
||||
"unexpected_error": "Sorry, an unexpected error occurred. Please contact your admin",
|
||||
"rule_created": "Rule created successfully",
|
||||
"rule_edited": "Rule edited successfully",
|
||||
"expression_missing": "expression is missing in {{where}}",
|
||||
"metricname_missing": "metric name is missing in {{where}}",
|
||||
"condition_required": "at least one metric condition is required",
|
||||
"alertname_required": "alert name is required",
|
||||
"promql_required": "promql expression is required when query format is set to PromQL",
|
||||
"chquery_required": "query is required when query format is set to ClickHouse",
|
||||
"button_savechanges": "Save Rule",
|
||||
"button_createrule": "Create Rule",
|
||||
"button_returntorules": "Return to rules",
|
||||
"button_cancelchanges": "Cancel",
|
||||
"button_discard": "Discard",
|
||||
"text_condition1": "Send a notification when the metric is",
|
||||
"text_condition2": "the threshold",
|
||||
"text_condition3": "during the last",
|
||||
"option_5min": "5 mins",
|
||||
"option_10min": "10 mins",
|
||||
"option_15min": "15 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_4hours": "4 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
"option_atleastonce": "at least once",
|
||||
"option_onaverage": "on average",
|
||||
"option_intotal": "in total",
|
||||
"option_above": "above",
|
||||
"option_below": "below",
|
||||
"option_equal": "is equal to",
|
||||
"option_notequal": "not equal to",
|
||||
"button_query": "Query",
|
||||
"button_formula": "Formula",
|
||||
"tab_qb": "Query Builder",
|
||||
"tab_promql": "PromQL",
|
||||
"tab_chquery": "ClickHouse Query",
|
||||
"title_confirm": "Confirm",
|
||||
"button_ok": "Yes",
|
||||
"button_cancel": "No",
|
||||
"field_promql_expr": "PromQL Expression",
|
||||
"field_alert_name": "Alert Name",
|
||||
"field_alert_desc": "Alert Description",
|
||||
"field_labels": "Labels",
|
||||
"field_severity": "Severity",
|
||||
"option_critical": "Critical",
|
||||
"option_error": "Error",
|
||||
"option_warning": "Warning",
|
||||
"option_info": "Info",
|
||||
"user_guide_headline": "Steps to create an Alert",
|
||||
"user_guide_qb_step1": "Step 1 - Define the metric",
|
||||
"user_guide_qb_step1a": "Choose a metric which you want to create an alert on",
|
||||
"user_guide_qb_step1b": "Filter it based on WHERE field or GROUPBY if needed",
|
||||
"user_guide_qb_step1c": "Apply an aggregatiion function like COUNT, SUM, etc. or choose NOOP to plot the raw metric",
|
||||
"user_guide_qb_step1d": "Create a formula based on Queries if needed",
|
||||
"user_guide_qb_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_qb_step2a": "Select the evaluation interval, threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_qb_step2b": "Enter the Alert threshold",
|
||||
"user_guide_qb_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_qb_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_qb_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_guide_pql_step1": "Step 1 - Define the metric",
|
||||
"user_guide_pql_step1a": "Write a PromQL query for the metric",
|
||||
"user_guide_pql_step1b": "Format the legends based on labels you want to highlight",
|
||||
"user_guide_pql_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_pql_step2a": "Select the threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_pql_step2b": "Enter the Alert threshold",
|
||||
"user_guide_pql_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_pql_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_pql_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_guide_ch_step1": "Step 1 - Define the metric",
|
||||
"user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. Follow <0>this tutorial</0> to learn about query format and supported vars.",
|
||||
"user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart",
|
||||
"user_guide_ch_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_ch_step2b": "Enter the Alert threshold",
|
||||
"user_guide_ch_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_ch_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_tooltip_more_help": "More details on how to create alerts",
|
||||
"choose_alert_type": "Choose a type for the alert:",
|
||||
"metric_based_alert": "Metric based Alert",
|
||||
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
|
||||
"log_based_alert": "Log-based Alert",
|
||||
"log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
|
||||
"traces_based_alert": "Trace-based Alert",
|
||||
"traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.",
|
||||
"exceptions_based_alert": "Exceptions-based Alert",
|
||||
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data."
|
||||
}
|
||||
"target_missing": "Please enter a threshold to proceed",
|
||||
"rule_test_fired": "Test notification sent successfully",
|
||||
"no_alerts_found": "No alerts found during the evaluation. This happens when rule condition is unsatisfied. You may adjust the rule threshold and retry.",
|
||||
"button_testrule": "Test Notification",
|
||||
"label_channel_select": "Notification Channels",
|
||||
"placeholder_channel_select": "select one or more channels",
|
||||
"channel_select_tooltip": "Leave empty to send this alert on all the configured channels",
|
||||
"preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.",
|
||||
"preview_chart_threshold_label": "Threshold",
|
||||
"placeholder_label_key_pair": "Click here to enter a label (key value pairs)",
|
||||
"button_yes": "Yes",
|
||||
"button_no": "No",
|
||||
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
|
||||
"remove_label_success": "Labels cleared",
|
||||
"alert_form_step1": "Step 1 - Define the metric",
|
||||
"alert_form_step2": "Step 2 - Define Alert Conditions",
|
||||
"alert_form_step3": "Step 3 - Alert Configuration",
|
||||
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
|
||||
"confirm_save_title": "Save Changes",
|
||||
"confirm_save_content_part1": "Your alert built with",
|
||||
"confirm_save_content_part2": "query will be saved. Press OK to confirm.",
|
||||
"unexpected_error": "Sorry, an unexpected error occurred. Please contact your admin",
|
||||
"rule_created": "Rule created successfully",
|
||||
"rule_edited": "Rule edited successfully",
|
||||
"expression_missing": "expression is missing in {{where}}",
|
||||
"metricname_missing": "metric name is missing in {{where}}",
|
||||
"condition_required": "at least one metric condition is required",
|
||||
"alertname_required": "alert name is required",
|
||||
"promql_required": "promql expression is required when query format is set to PromQL",
|
||||
"chquery_required": "query is required when query format is set to ClickHouse",
|
||||
"button_savechanges": "Save Rule",
|
||||
"button_createrule": "Create Rule",
|
||||
"button_returntorules": "Return to rules",
|
||||
"button_cancelchanges": "Cancel",
|
||||
"button_discard": "Discard",
|
||||
"text_condition1": "Send a notification when the metric is",
|
||||
"text_condition2": "the threshold",
|
||||
"text_condition3": "during the last",
|
||||
"option_5min": "5 mins",
|
||||
"option_10min": "10 mins",
|
||||
"option_15min": "15 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_4hours": "4 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
"option_atleastonce": "at least once",
|
||||
"option_onaverage": "on average",
|
||||
"option_intotal": "in total",
|
||||
"option_above": "above",
|
||||
"option_below": "below",
|
||||
"option_equal": "is equal to",
|
||||
"option_notequal": "not equal to",
|
||||
"button_query": "Query",
|
||||
"button_formula": "Formula",
|
||||
"tab_qb": "Query Builder",
|
||||
"tab_promql": "PromQL",
|
||||
"tab_chquery": "ClickHouse Query",
|
||||
"title_confirm": "Confirm",
|
||||
"button_ok": "Yes",
|
||||
"button_cancel": "No",
|
||||
"field_promql_expr": "PromQL Expression",
|
||||
"field_alert_name": "Alert Name",
|
||||
"field_alert_desc": "Alert Description",
|
||||
"field_labels": "Labels",
|
||||
"field_severity": "Severity",
|
||||
"option_critical": "Critical",
|
||||
"option_error": "Error",
|
||||
"option_warning": "Warning",
|
||||
"option_info": "Info",
|
||||
"user_guide_headline": "Steps to create an Alert",
|
||||
"user_guide_qb_step1": "Step 1 - Define the metric",
|
||||
"user_guide_qb_step1a": "Choose a metric which you want to create an alert on",
|
||||
"user_guide_qb_step1b": "Filter it based on WHERE field or GROUPBY if needed",
|
||||
"user_guide_qb_step1c": "Apply an aggregatiion function like COUNT, SUM, etc. or choose NOOP to plot the raw metric",
|
||||
"user_guide_qb_step1d": "Create a formula based on Queries if needed",
|
||||
"user_guide_qb_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_qb_step2a": "Select the evaluation interval, threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_qb_step2b": "Enter the Alert threshold",
|
||||
"user_guide_qb_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_qb_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_qb_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_guide_pql_step1": "Step 1 - Define the metric",
|
||||
"user_guide_pql_step1a": "Write a PromQL query for the metric",
|
||||
"user_guide_pql_step1b": "Format the legends based on labels you want to highlight",
|
||||
"user_guide_pql_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_pql_step2a": "Select the threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_pql_step2b": "Enter the Alert threshold",
|
||||
"user_guide_pql_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_pql_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_pql_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_guide_ch_step1": "Step 1 - Define the metric",
|
||||
"user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. Follow <0>this tutorial</0> to learn about query format and supported vars.",
|
||||
"user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart",
|
||||
"user_guide_ch_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_ch_step2b": "Enter the Alert threshold",
|
||||
"user_guide_ch_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_ch_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_tooltip_more_help": "More details on how to create alerts",
|
||||
"choose_alert_type": "Choose a type for the alert:",
|
||||
"metric_based_alert": "Metric based Alert",
|
||||
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
|
||||
"log_based_alert": "Log-based Alert",
|
||||
"log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
|
||||
"traces_based_alert": "Trace-based Alert",
|
||||
"traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.",
|
||||
"exceptions_based_alert": "Exceptions-based Alert",
|
||||
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
|
||||
"field_unit": "Threshold unit"
|
||||
}
|
||||
|
||||
@@ -5,5 +5,9 @@
|
||||
"my_settings": "My Settings",
|
||||
"overview_metrics": "Overview Metrics",
|
||||
"dbcall_metrics": "Database Calls",
|
||||
"external_metrics": "External Calls"
|
||||
"external_metrics": "External Calls",
|
||||
"pipeline": "Pipeline",
|
||||
"pipelines": "Pipelines",
|
||||
"archives": "Archives",
|
||||
"logs_to_metrics": "Logs To Metrics"
|
||||
}
|
||||
|
||||
36
frontend/public/locales/en-GB/titles.json
Normal file
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"SIGN_UP": "SigNoz | Sign Up",
|
||||
"LOGIN": "SigNoz | Login",
|
||||
"SERVICE_METRICS": "SigNoz | Service Metrics",
|
||||
"SERVICE_MAP": "SigNoz | Service Map",
|
||||
"TRACE": "SigNoz | Trace",
|
||||
"TRACE_DETAIL": "SigNoz | Trace Detail",
|
||||
"TRACES_EXPLORER": "SigNoz | Traces Explorer",
|
||||
"SETTINGS": "SigNoz | Settings",
|
||||
"INSTRUMENTATION": "SigNoz | Get Started",
|
||||
"USAGE_EXPLORER": "SigNoz | Usage Explorer",
|
||||
"APPLICATION": "SigNoz | Home",
|
||||
"ALL_DASHBOARD": "SigNoz | All Dashboards",
|
||||
"DASHBOARD": "SigNoz | Dashboard",
|
||||
"DASHBOARD_WIDGET": "SigNoz | Dashboard Widget",
|
||||
"EDIT_ALERTS": "SigNoz | Edit Alerts",
|
||||
"LIST_ALL_ALERT": "SigNoz | All Alerts",
|
||||
"ALERTS_NEW": "SigNoz | New Alert",
|
||||
"ALL_CHANNELS": "SigNoz | All Channels",
|
||||
"CHANNELS_NEW": "SigNoz | New Channel",
|
||||
"CHANNELS_EDIT": "SigNoz | Edit Channel",
|
||||
"ALL_ERROR": "SigNoz | All Errors",
|
||||
"ERROR_DETAIL": "SigNoz | Error Detail",
|
||||
"VERSION": "SigNoz | Version",
|
||||
"MY_SETTINGS": "SigNoz | My Settings",
|
||||
"ORG_SETTINGS": "SigNoz | Organization Settings",
|
||||
"SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong",
|
||||
"UN_AUTHORIZED": "SigNoz | Unauthorized",
|
||||
"NOT_FOUND": "SigNoz | Page Not Found",
|
||||
"LOGS": "SigNoz | Logs",
|
||||
"LOGS_EXPLORER": "SigNoz | Logs Explorer",
|
||||
"HOME_PAGE": "Open source Observability Platform | SigNoz",
|
||||
"PASSWORD_RESET": "SigNoz | Password Reset",
|
||||
"LIST_LICENSES": "SigNoz | List of Licenses",
|
||||
"DEFAULT": "Open source Observability Platform | SigNoz"
|
||||
}
|
||||
11
frontend/public/locales/en-GB/trace.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"options_menu": {
|
||||
"options": "Options",
|
||||
"format": "Format",
|
||||
"raw": "Raw",
|
||||
"default": "Default",
|
||||
"column": "Column",
|
||||
"maxLines": "Max lines per Row",
|
||||
"addColumn": "Add a column"
|
||||
}
|
||||
}
|
||||
@@ -12,6 +12,8 @@
|
||||
"routes": {
|
||||
"general": "General",
|
||||
"alert_channels": "Alert Channels",
|
||||
"all_errors": "All Exceptions"
|
||||
"all_errors": "All Exceptions",
|
||||
"index_fields": "Index Fields",
|
||||
"pipelines": "Pipelines"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,112 +1,113 @@
|
||||
{
|
||||
"target_missing": "Please enter a threshold to proceed",
|
||||
"rule_test_fired": "Test notification sent successfully",
|
||||
"no_alerts_found": "No alerts found during the evaluation. This happens when rule condition is unsatisfied. You may adjust the rule threshold and retry.",
|
||||
"button_testrule": "Test Notification",
|
||||
"label_channel_select": "Notification Channels",
|
||||
"placeholder_channel_select": "select one or more channels",
|
||||
"channel_select_tooltip": "Leave empty to send this alert on all the configured channels",
|
||||
"preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.",
|
||||
"preview_chart_threshold_label": "Threshold",
|
||||
"placeholder_label_key_pair": "Click here to enter a label (key value pairs)",
|
||||
"button_yes": "Yes",
|
||||
"button_no": "No",
|
||||
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
|
||||
"remove_label_success": "Labels cleared",
|
||||
"alert_form_step1": "Step 1 - Define the metric",
|
||||
"alert_form_step2": "Step 2 - Define Alert Conditions",
|
||||
"alert_form_step3": "Step 3 - Alert Configuration",
|
||||
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
|
||||
"confirm_save_title": "Save Changes",
|
||||
"confirm_save_content_part1": "Your alert built with",
|
||||
"confirm_save_content_part2": "query will be saved. Press OK to confirm.",
|
||||
"unexpected_error": "Sorry, an unexpected error occurred. Please contact your admin",
|
||||
"rule_created": "Rule created successfully",
|
||||
"rule_edited": "Rule edited successfully",
|
||||
"expression_missing": "expression is missing in {{where}}",
|
||||
"metricname_missing": "metric name is missing in {{where}}",
|
||||
"condition_required": "at least one metric condition is required",
|
||||
"alertname_required": "alert name is required",
|
||||
"promql_required": "promql expression is required when query format is set to PromQL",
|
||||
"chquery_required": "query is required when query format is set to ClickHouse",
|
||||
"button_savechanges": "Save Rule",
|
||||
"button_createrule": "Create Rule",
|
||||
"button_returntorules": "Return to rules",
|
||||
"button_cancelchanges": "Cancel",
|
||||
"button_discard": "Discard",
|
||||
"text_condition1": "Send a notification when the metric is",
|
||||
"text_condition2": "the threshold",
|
||||
"text_condition3": "during the last",
|
||||
"option_5min": "5 mins",
|
||||
"option_10min": "10 mins",
|
||||
"option_15min": "15 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_4hours": "4 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
"option_atleastonce": "at least once",
|
||||
"option_onaverage": "on average",
|
||||
"option_intotal": "in total",
|
||||
"option_above": "above",
|
||||
"option_below": "below",
|
||||
"option_equal": "is equal to",
|
||||
"option_notequal": "not equal to",
|
||||
"button_query": "Query",
|
||||
"button_formula": "Formula",
|
||||
"tab_qb": "Query Builder",
|
||||
"tab_promql": "PromQL",
|
||||
"tab_chquery": "ClickHouse Query",
|
||||
"title_confirm": "Confirm",
|
||||
"button_ok": "Yes",
|
||||
"button_cancel": "No",
|
||||
"field_promql_expr": "PromQL Expression",
|
||||
"field_alert_name": "Alert Name",
|
||||
"field_alert_desc": "Alert Description",
|
||||
"field_labels": "Labels",
|
||||
"field_severity": "Severity",
|
||||
"option_critical": "Critical",
|
||||
"option_error": "Error",
|
||||
"option_warning": "Warning",
|
||||
"option_info": "Info",
|
||||
"user_guide_headline": "Steps to create an Alert",
|
||||
"user_guide_qb_step1": "Step 1 - Define the metric",
|
||||
"user_guide_qb_step1a": "Choose a metric which you want to create an alert on",
|
||||
"user_guide_qb_step1b": "Filter it based on WHERE field or GROUPBY if needed",
|
||||
"user_guide_qb_step1c": "Apply an aggregatiion function like COUNT, SUM, etc. or choose NOOP to plot the raw metric",
|
||||
"user_guide_qb_step1d": "Create a formula based on Queries if needed",
|
||||
"user_guide_qb_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_qb_step2a": "Select the evaluation interval, threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_qb_step2b": "Enter the Alert threshold",
|
||||
"user_guide_qb_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_qb_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_qb_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_guide_pql_step1": "Step 1 - Define the metric",
|
||||
"user_guide_pql_step1a": "Write a PromQL query for the metric",
|
||||
"user_guide_pql_step1b": "Format the legends based on labels you want to highlight",
|
||||
"user_guide_pql_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_pql_step2a": "Select the threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_pql_step2b": "Enter the Alert threshold",
|
||||
"user_guide_pql_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_pql_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_pql_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_guide_ch_step1": "Step 1 - Define the metric",
|
||||
"user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. Follow <0>this tutorial</0> to learn about query format and supported vars.",
|
||||
"user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart",
|
||||
"user_guide_ch_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_ch_step2b": "Enter the Alert threshold",
|
||||
"user_guide_ch_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_ch_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_tooltip_more_help": "More details on how to create alerts",
|
||||
"choose_alert_type": "Choose a type for the alert:",
|
||||
"metric_based_alert": "Metric based Alert",
|
||||
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
|
||||
"log_based_alert": "Log-based Alert",
|
||||
"log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
|
||||
"traces_based_alert": "Trace-based Alert",
|
||||
"traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.",
|
||||
"exceptions_based_alert": "Exceptions-based Alert",
|
||||
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data."
|
||||
}
|
||||
"target_missing": "Please enter a threshold to proceed",
|
||||
"rule_test_fired": "Test notification sent successfully",
|
||||
"no_alerts_found": "No alerts found during the evaluation. This happens when rule condition is unsatisfied. You may adjust the rule threshold and retry.",
|
||||
"button_testrule": "Test Notification",
|
||||
"label_channel_select": "Notification Channels",
|
||||
"placeholder_channel_select": "select one or more channels",
|
||||
"channel_select_tooltip": "Leave empty to send this alert on all the configured channels",
|
||||
"preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.",
|
||||
"preview_chart_threshold_label": "Threshold",
|
||||
"placeholder_label_key_pair": "Click here to enter a label (key value pairs)",
|
||||
"button_yes": "Yes",
|
||||
"button_no": "No",
|
||||
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
|
||||
"remove_label_success": "Labels cleared",
|
||||
"alert_form_step1": "Step 1 - Define the metric",
|
||||
"alert_form_step2": "Step 2 - Define Alert Conditions",
|
||||
"alert_form_step3": "Step 3 - Alert Configuration",
|
||||
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
|
||||
"confirm_save_title": "Save Changes",
|
||||
"confirm_save_content_part1": "Your alert built with",
|
||||
"confirm_save_content_part2": "query will be saved. Press OK to confirm.",
|
||||
"unexpected_error": "Sorry, an unexpected error occurred. Please contact your admin",
|
||||
"rule_created": "Rule created successfully",
|
||||
"rule_edited": "Rule edited successfully",
|
||||
"expression_missing": "expression is missing in {{where}}",
|
||||
"metricname_missing": "metric name is missing in {{where}}",
|
||||
"condition_required": "at least one metric condition is required",
|
||||
"alertname_required": "alert name is required",
|
||||
"promql_required": "promql expression is required when query format is set to PromQL",
|
||||
"chquery_required": "query is required when query format is set to ClickHouse",
|
||||
"button_savechanges": "Save Rule",
|
||||
"button_createrule": "Create Rule",
|
||||
"button_returntorules": "Return to rules",
|
||||
"button_cancelchanges": "Cancel",
|
||||
"button_discard": "Discard",
|
||||
"text_condition1": "Send a notification when the metric is",
|
||||
"text_condition2": "the threshold",
|
||||
"text_condition3": "during the last",
|
||||
"option_5min": "5 mins",
|
||||
"option_10min": "10 mins",
|
||||
"option_15min": "15 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_4hours": "4 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
"option_atleastonce": "at least once",
|
||||
"option_onaverage": "on average",
|
||||
"option_intotal": "in total",
|
||||
"option_above": "above",
|
||||
"option_below": "below",
|
||||
"option_equal": "is equal to",
|
||||
"option_notequal": "not equal to",
|
||||
"button_query": "Query",
|
||||
"button_formula": "Formula",
|
||||
"tab_qb": "Query Builder",
|
||||
"tab_promql": "PromQL",
|
||||
"tab_chquery": "ClickHouse Query",
|
||||
"title_confirm": "Confirm",
|
||||
"button_ok": "Yes",
|
||||
"button_cancel": "No",
|
||||
"field_promql_expr": "PromQL Expression",
|
||||
"field_alert_name": "Alert Name",
|
||||
"field_alert_desc": "Alert Description",
|
||||
"field_labels": "Labels",
|
||||
"field_severity": "Severity",
|
||||
"option_critical": "Critical",
|
||||
"option_error": "Error",
|
||||
"option_warning": "Warning",
|
||||
"option_info": "Info",
|
||||
"user_guide_headline": "Steps to create an Alert",
|
||||
"user_guide_qb_step1": "Step 1 - Define the metric",
|
||||
"user_guide_qb_step1a": "Choose a metric which you want to create an alert on",
|
||||
"user_guide_qb_step1b": "Filter it based on WHERE field or GROUPBY if needed",
|
||||
"user_guide_qb_step1c": "Apply an aggregatiion function like COUNT, SUM, etc. or choose NOOP to plot the raw metric",
|
||||
"user_guide_qb_step1d": "Create a formula based on Queries if needed",
|
||||
"user_guide_qb_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_qb_step2a": "Select the evaluation interval, threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_qb_step2b": "Enter the Alert threshold",
|
||||
"user_guide_qb_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_qb_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_qb_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_guide_pql_step1": "Step 1 - Define the metric",
|
||||
"user_guide_pql_step1a": "Write a PromQL query for the metric",
|
||||
"user_guide_pql_step1b": "Format the legends based on labels you want to highlight",
|
||||
"user_guide_pql_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_pql_step2a": "Select the threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_pql_step2b": "Enter the Alert threshold",
|
||||
"user_guide_pql_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_pql_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_pql_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_guide_ch_step1": "Step 1 - Define the metric",
|
||||
"user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. Follow <0>this tutorial</0> to learn about query format and supported vars.",
|
||||
"user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart",
|
||||
"user_guide_ch_step2": "Step 2 - Define Alert Conditions",
|
||||
"user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value",
|
||||
"user_guide_ch_step2b": "Enter the Alert threshold",
|
||||
"user_guide_ch_step3": "Step 3 -Alert Configuration",
|
||||
"user_guide_ch_step3a": "Set alert severity, name and descriptions",
|
||||
"user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
|
||||
"user_tooltip_more_help": "More details on how to create alerts",
|
||||
"choose_alert_type": "Choose a type for the alert:",
|
||||
"metric_based_alert": "Metric based Alert",
|
||||
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
|
||||
"log_based_alert": "Log-based Alert",
|
||||
"log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
|
||||
"traces_based_alert": "Trace-based Alert",
|
||||
"traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.",
|
||||
"exceptions_based_alert": "Exceptions-based Alert",
|
||||
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
|
||||
"field_unit": "Threshold unit"
|
||||
}
|
||||
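These alert strings are consumed through i18next-style interpolation, where placeholders such as {{where}} are filled in at call time. A minimal sketch of the consuming side, assuming react-i18next and that this file is registered under an `alerts` namespace (the namespace name and component are illustrative, not part of this diff):

import { useTranslation } from 'react-i18next';

function RuleValidationError({ queryName }: { queryName: string }): JSX.Element {
	const { t } = useTranslation('alerts');
	// "expression is missing in {{where}}" -> "expression is missing in Query A"
	return <p>{t('expression_missing', { where: queryName })}</p>;
}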
44 frontend/public/locales/en/pipeline.json Normal file
@@ -0,0 +1,44 @@
{
"delete": "Delete",
"filter": "Filter",
"update": "Update",
"create": "Create",
"reorder": "Reorder",
"cancel": "Cancel",
"reorder_pipeline": "Do you want to reorder the pipeline?",
"reorder_pipeline_description": "Logs are processed sequentially in processors and pipelines. Reordering them may change how the data is processed.",
"delete_pipeline": "Do you want to delete pipeline",
"delete_pipeline_description": "Logs are processed sequentially in processors and pipelines. Deleting a pipeline may change the content of data processed by other pipelines & processors.",
"add_new_pipeline": "Add a New Pipeline",
"new_pipeline": "New Pipeline",
"enter_edit_mode": "Enter Edit Mode",
"save_configuration": "Save Configuration",
"edit_pipeline": "Edit Pipeline",
"create_pipeline": "Create New Pipeline",
"add_new_processor": "Add Processor",
"edit_processor": "Edit Processor",
"create_processor": "Create New Processor",
"processor_type": "Select Processor Type",
"reorder_processor": "Do you want to reorder the processor?",
"reorder_processor_description": "Logs are processed sequentially in processors. Reordering them may change how the data is processed.",
"delete_processor": "Do you want to delete processor",
"delete_processor_description": "Logs are processed sequentially in processors. Deleting a processor may change the content of data processed by other processors.",
"search_pipeline_placeholder": "Filter Pipelines",
"pipeline_name_placeholder": "Name",
"pipeline_tags_placeholder": "Tags",
"pipeline_description_placeholder": "Enter description for your pipeline",
"processor_name_placeholder": "Name",
"processor_regex_placeholder": "Regex",
"processor_parsefrom_placeholder": "Parse From",
"processor_parseto_placeholder": "Parse To",
"processor_onerror_placeholder": "On Error",
"processor_pattern_placeholder": "Pattern",
"processor_field_placeholder": "Field",
"processor_value_placeholder": "Value",
"processor_description_placeholder": "example rule: %{word:first}",
"processor_trace_id_placeholder": "Trace Id Parse From",
"processor_span_id_placeholder": "Span Id Parse From",
"processor_trace_flags_placeholder": "Trace Flags Parse From",
"processor_from_placeholder": "From",
"processor_to_placeholder": "To"
}
@@ -5,5 +5,9 @@
"my_settings": "My Settings",
"overview_metrics": "Overview Metrics",
"dbcall_metrics": "Database Calls",
"external_metrics": "External Calls"
"external_metrics": "External Calls",
"pipeline": "Pipeline",
"pipelines": "Pipelines",
"archives": "Archives",
"logs_to_metrics": "Logs To Metrics"
}
36 frontend/public/locales/en/titles.json Normal file
@@ -0,0 +1,36 @@
{
"SIGN_UP": "SigNoz | Sign Up",
"LOGIN": "SigNoz | Login",
"SERVICE_METRICS": "SigNoz | Service Metrics",
"SERVICE_MAP": "SigNoz | Service Map",
"TRACE": "SigNoz | Trace",
"TRACE_DETAIL": "SigNoz | Trace Detail",
"TRACES_EXPLORER": "SigNoz | Traces Explorer",
"SETTINGS": "SigNoz | Settings",
"INSTRUMENTATION": "SigNoz | Get Started",
"USAGE_EXPLORER": "SigNoz | Usage Explorer",
"APPLICATION": "SigNoz | Home",
"ALL_DASHBOARD": "SigNoz | All Dashboards",
"DASHBOARD": "SigNoz | Dashboard",
"DASHBOARD_WIDGET": "SigNoz | Dashboard Widget",
"EDIT_ALERTS": "SigNoz | Edit Alerts",
"LIST_ALL_ALERT": "SigNoz | All Alerts",
"ALERTS_NEW": "SigNoz | New Alert",
"ALL_CHANNELS": "SigNoz | All Channels",
"CHANNELS_NEW": "SigNoz | New Channel",
"CHANNELS_EDIT": "SigNoz | Edit Channel",
"ALL_ERROR": "SigNoz | All Errors",
"ERROR_DETAIL": "SigNoz | Error Detail",
"VERSION": "SigNoz | Version",
"MY_SETTINGS": "SigNoz | My Settings",
"ORG_SETTINGS": "SigNoz | Organization Settings",
"SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong",
"UN_AUTHORIZED": "SigNoz | Unauthorized",
"NOT_FOUND": "SigNoz | Page Not Found",
"LOGS": "SigNoz | Logs",
"LOGS_EXPLORER": "SigNoz | Logs Explorer",
"HOME_PAGE": "Open source Observability Platform | SigNoz",
"PASSWORD_RESET": "SigNoz | Password Reset",
"LIST_LICENSES": "SigNoz | List of Licenses",
"DEFAULT": "Open source Observability Platform | SigNoz"
}
11 frontend/public/locales/en/trace.json Normal file
@@ -0,0 +1,11 @@
{
"options_menu": {
"options": "Options",
"format": "Format",
"raw": "Raw",
"default": "Default",
"column": "Column",
"maxLines": "Max lines per Row",
"addColumn": "Add a column"
}
}
@@ -12,6 +12,8 @@
"routes": {
"general": "General",
"alert_channels": "Alert Channels",
"all_errors": "All Exceptions"
"all_errors": "All Exceptions",
"index_fields": "Index Fields",
"pipelines": "Pipelines"
}
}
@@ -1,13 +1,13 @@
import Loadable from 'components/Loadable';

export const ServicesTablePage = Loadable(
	() => import(/* webpackChunkName: "ServicesTablePage" */ 'pages/Metrics'),
	() => import(/* webpackChunkName: "ServicesTablePage" */ 'pages/Services'),
);

export const ServiceMetricsPage = Loadable(
	() =>
		import(
			/* webpackChunkName: "ServiceMetricsPage" */ 'pages/MetricApplication'
			/* webpackChunkName: "ServiceMetricsPage" */ 'pages/MetricsApplication'
		),
);

@@ -15,6 +15,11 @@ export const ServiceMapPage = Loadable(
	() => import(/* webpackChunkName: "ServiceMapPage" */ 'modules/Servicemap'),
);

export const TracesExplorer = Loadable(
	() =>
		import(/* webpackChunkName: "Traces Explorer Page" */ 'pages/TracesExplorer'),
);

export const TraceFilter = Loadable(
	() => import(/* webpackChunkName: "Trace Filter Page" */ 'pages/Trace'),
);
@@ -101,6 +106,10 @@ export const Logs = Loadable(
	() => import(/* webpackChunkName: "Logs" */ 'pages/Logs'),
);

export const LogsExplorer = Loadable(
	() => import(/* webpackChunkName: "Logs Explorer" */ 'pages/LogsExplorer'),
);

export const Login = Loadable(
	() => import(/* webpackChunkName: "Login" */ 'pages/Login'),
);
@@ -123,3 +132,12 @@ export const SomethingWentWrong = Loadable(
export const LicensePage = Loadable(
	() => import(/* webpackChunkName: "All Channels" */ 'pages/License'),
);

export const LogsIndexToFields = Loadable(
	() =>
		import(/* webpackChunkName: "LogsIndexToFields Page" */ 'pages/LogsSettings'),
);

export const PipelinePage = Loadable(
	() => import(/* webpackChunkName: "Pipelines" */ 'pages/Pipelines'),
);
@@ -16,10 +16,13 @@ import {
|
||||
ListAllALertsPage,
|
||||
Login,
|
||||
Logs,
|
||||
LogsExplorer,
|
||||
LogsIndexToFields,
|
||||
MySettings,
|
||||
NewDashboardPage,
|
||||
OrganizationSettings,
|
||||
PasswordReset,
|
||||
PipelinePage,
|
||||
ServiceMapPage,
|
||||
ServiceMetricsPage,
|
||||
ServicesTablePage,
|
||||
@@ -29,6 +32,7 @@ import {
|
||||
StatusPage,
|
||||
TraceDetail,
|
||||
TraceFilter,
|
||||
TracesExplorer,
|
||||
UnAuthorized,
|
||||
UsageExplorerPage,
|
||||
} from './pageComponents';
|
||||
@@ -41,6 +45,13 @@ const routes: AppRoutes[] = [
|
||||
isPrivate: false,
|
||||
key: 'SIGN_UP',
|
||||
},
|
||||
{
|
||||
component: LogsIndexToFields,
|
||||
path: ROUTES.LOGS_INDEX_FIELDS,
|
||||
exact: true,
|
||||
isPrivate: true,
|
||||
key: 'LOGS_INDEX_FIELDS',
|
||||
},
|
||||
{
|
||||
component: ServicesTablePage,
|
||||
path: ROUTES.APPLICATION,
|
||||
@@ -139,6 +150,13 @@ const routes: AppRoutes[] = [
|
||||
isPrivate: true,
|
||||
key: 'TRACE',
|
||||
},
|
||||
{
|
||||
path: ROUTES.TRACES_EXPLORER,
|
||||
exact: true,
|
||||
component: TracesExplorer,
|
||||
isPrivate: true,
|
||||
key: 'TRACES_EXPLORER',
|
||||
},
|
||||
{
|
||||
path: ROUTES.CHANNELS_NEW,
|
||||
exact: true,
|
||||
@@ -209,6 +227,13 @@ const routes: AppRoutes[] = [
|
||||
key: 'LOGS',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.LOGS_EXPLORER,
|
||||
exact: true,
|
||||
component: LogsExplorer,
|
||||
key: 'LOGS_EXPLORER',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.LOGIN,
|
||||
exact: true,
|
||||
@@ -237,6 +262,13 @@ const routes: AppRoutes[] = [
|
||||
key: 'SOMETHING_WENT_WRONG',
|
||||
isPrivate: false,
|
||||
},
|
||||
{
|
||||
path: ROUTES.PIPELINES,
|
||||
exact: true,
|
||||
component: PipelinePage,
|
||||
key: 'PIPELINES',
|
||||
isPrivate: true,
|
||||
},
|
||||
];
|
||||
|
||||
export interface AppRoutes {
|
||||
|
||||
@@ -16,7 +16,7 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
|
||||
return {
|
||||
statusCode,
|
||||
payload: null,
|
||||
error: 'Not Found',
|
||||
error: data.errorType,
|
||||
message: null,
|
||||
};
|
||||
}
|
||||
|
||||
34 frontend/src/api/channels/createMsTeams.ts Normal file
@@ -0,0 +1,34 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/channels/createMsTeams';
|
||||
|
||||
const create = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/channels', {
|
||||
name: props.name,
|
||||
msteams_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
webhook_url: props.webhook_url,
|
||||
title: props.title,
|
||||
text: props.text,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default create;
|
||||
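A hedged sketch of how a caller might use the new createMsTeams helper. The channel name, webhook URL, and message text below are placeholders, and the exact Props shape lives in types/api/channels/createMsTeams, which this diff does not show; note the helper resolves with an ErrorResponse rather than throwing on HTTP failures.

import createMsTeams from 'api/channels/createMsTeams';

async function saveMsTeamsChannel(): Promise<void> {
	const response = await createMsTeams({
		name: 'prod-alerts',
		// placeholder; a real Teams incoming-webhook URL goes here
		webhook_url: 'https://example.webhook.office.com/webhookb2/xxx',
		title: 'SigNoz alert',
		text: 'An alert has fired',
	});

	// non-200 responses come back as an ErrorResponse instead of a rejection
	if (response.statusCode !== 200) {
		console.error(response.error);
	}
}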
34 frontend/src/api/channels/editMsTeams.ts Normal file
@@ -0,0 +1,34 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/channels/editMsTeams';
|
||||
|
||||
const editMsTeams = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.put(`/channels/${props.id}`, {
|
||||
name: props.name,
|
||||
msteams_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
webhook_url: props.webhook_url,
|
||||
title: props.title,
|
||||
text: props.text,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default editMsTeams;
|
||||
34 frontend/src/api/channels/testMsTeams.ts Normal file
@@ -0,0 +1,34 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/channels/createMsTeams';
|
||||
|
||||
const testMsTeams = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/testChannel', {
|
||||
name: props.name,
|
||||
msteams_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
webhook_url: props.webhook_url,
|
||||
title: props.title,
|
||||
text: props.text,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default testMsTeams;
|
||||
16 frontend/src/api/metrics/ApDex/apDexSettings.ts Normal file
@@ -0,0 +1,16 @@
|
||||
import axios from 'api';
|
||||
import {
|
||||
ApDexPayloadAndSettingsProps,
|
||||
SetApDexPayloadProps,
|
||||
} from 'types/api/metrics/getApDex';
|
||||
|
||||
export const setApDexSettings = async ({
|
||||
servicename,
|
||||
threshold,
|
||||
excludeStatusCode,
|
||||
}: ApDexPayloadAndSettingsProps): Promise<SetApDexPayloadProps> =>
|
||||
axios.post('/settings/apdex', {
|
||||
servicename,
|
||||
threshold,
|
||||
excludeStatusCode,
|
||||
});
|
||||
8 frontend/src/api/metrics/ApDex/getApDexSettings.ts Normal file
@@ -0,0 +1,8 @@
|
||||
import axios from 'api';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { ApDexPayloadAndSettingsProps } from 'types/api/metrics/getApDex';
|
||||
|
||||
export const getApDexSettings = (
|
||||
servicename: string,
|
||||
): Promise<AxiosResponse<ApDexPayloadAndSettingsProps[]>> =>
|
||||
axios.get(`/settings/apdex?services=${servicename}`);
|
||||
8 frontend/src/api/metrics/ApDex/getMetricMeta.ts Normal file
@@ -0,0 +1,8 @@
|
||||
import axios from 'api';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { MetricMetaProps } from 'types/api/metrics/getApDex';
|
||||
|
||||
export const getMetricMeta = (
|
||||
metricName: string,
|
||||
): Promise<AxiosResponse<MetricMetaProps>> =>
|
||||
axios.get(`/metric_meta?metricName=${metricName}`);
|
||||
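A brief usage sketch for the new ApDex helpers. The service name, threshold, and excluded status code are illustrative values, and the exact field types come from types/api/metrics/getApDex, which is not shown in this diff.

import { setApDexSettings } from 'api/metrics/ApDex/apDexSettings';
import { getApDexSettings } from 'api/metrics/ApDex/getApDexSettings';

async function updateApdexThreshold(): Promise<void> {
	// persist an example 0.7 ApDex threshold for the "frontend" service
	await setApDexSettings({
		servicename: 'frontend',
		threshold: 0.7,
		excludeStatusCode: '404',
	});

	// read it back; the GET helper returns the raw AxiosResponse
	const { data } = await getApDexSettings('frontend');
	console.log(data[0]?.threshold);
}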
@@ -18,6 +18,7 @@ export const getMetricsQueryRange = async (
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data,
|
||||
params: props,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
|
||||
@@ -1,28 +1,13 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/metrics/getService';
|
||||
|
||||
const getService = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post(`/services`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
const getService = async (props: Props): Promise<PayloadProps> => {
|
||||
const response = await axios.post(`/services`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
return response.data;
|
||||
};
|
||||
|
||||
export default getService;
|
||||
|
||||
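After this change getService resolves with the payload directly and lets axios reject on HTTP errors, instead of returning a SuccessResponse/ErrorResponse union. A sketch of the matching caller change, assuming a react-query style data layer (the hook name and query key are illustrative, not part of this diff):

import getService from 'api/metrics/getService';
import { useQuery } from 'react-query';
import { Props } from 'types/api/metrics/getService';

// Before: callers unwrapped response.payload and checked response.statusCode.
// After: the promise resolves with the services list itself and rejects on
// failure, so the data-fetching layer owns loading and error state.
const useServices = (props: Props) =>
	useQuery(['services', props.start, props.end], () => getService(props));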
@@ -1,30 +1,16 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/metrics/getServiceOverview';
|
||||
|
||||
const getServiceOverview = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post(`/service/overview`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
service: props.service,
|
||||
step: props.step,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
const getServiceOverview = async (props: Props): Promise<PayloadProps> => {
|
||||
const response = await axios.post(`/service/overview`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
service: props.service,
|
||||
step: props.step,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
return response.data;
|
||||
};
|
||||
|
||||
export default getServiceOverview;
|
||||
|
||||
@@ -1,24 +1,12 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/metrics/getTopLevelOperations';
|
||||
|
||||
const getTopLevelOperations = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post(`/service/top_level_operations`);
|
||||
const getTopLevelOperations = async (): Promise<ServiceDataProps> => {
|
||||
const response = await axios.post(`/service/top_level_operations`);
|
||||
return response.data;
|
||||
};
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data[props.service],
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
export type ServiceDataProps = {
|
||||
[serviceName: string]: string[];
|
||||
};
|
||||
|
||||
export default getTopLevelOperations;
|
||||
|
||||
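getTopLevelOperations now returns the whole service-to-operations map (ServiceDataProps) instead of pre-selecting response.data[props.service], so picking a single service moves to the call site. A minimal sketch; the "frontend" service name is illustrative:

import getTopLevelOperations, {
	ServiceDataProps,
} from 'api/metrics/getTopLevelOperations';

async function logFrontendOperations(): Promise<void> {
	const allOperations: ServiceDataProps = await getTopLevelOperations();
	// the caller now indexes into the map itself
	console.log(allOperations.frontend ?? []);
}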
@@ -1,29 +1,15 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';
|
||||
|
||||
const getTopOperations = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post(`/service/top_operations`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
service: props.service,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
const getTopOperations = async (props: Props): Promise<PayloadProps> => {
|
||||
const response = await axios.post(`/service/top_operations`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
service: props.service,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
return response.data;
|
||||
};
|
||||
|
||||
export default getTopOperations;
|
||||
|
||||
25 frontend/src/api/pipeline/get.ts Normal file
@@ -0,0 +1,25 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { Pipeline } from 'types/api/pipeline/def';
|
||||
import { Props } from 'types/api/pipeline/get';
|
||||
|
||||
const get = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<Pipeline> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.get(`/logs/pipelines/${props.version}`);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response?.data?.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default get;
|
||||
25 frontend/src/api/pipeline/post.ts Normal file
@@ -0,0 +1,25 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { Pipeline } from 'types/api/pipeline/def';
|
||||
import { Props } from 'types/api/pipeline/post';
|
||||
|
||||
const post = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<Pipeline> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/logs/pipelines', props.data);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default post;
|
||||
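A sketch of the two new pipeline helpers used together. The 'latest' version value and the re-posting of the fetched payload are purely illustrative, since the Pipeline and Props types are defined elsewhere and not shown here.

import getPipeline from 'api/pipeline/get';
import savePipeline from 'api/pipeline/post';

async function reloadAndSavePipelines(): Promise<void> {
	const current = await getPipeline({ version: 'latest' });
	if (current.statusCode !== 200 || !current.payload) {
		console.error(current.error);
		return;
	}
	// both helpers resolve with a SuccessResponse/ErrorResponse union rather than throwing
	const saved = await savePipeline({ data: current.payload });
	console.log(saved.statusCode);
}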
@@ -9,9 +9,9 @@ const loginPrecheck = async (
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`/loginPrecheck?email=${props.email}&ref=${encodeURIComponent(
|
||||
window.location.href,
|
||||
)}`,
|
||||
`/loginPrecheck?email=${encodeURIComponent(
|
||||
props.email,
|
||||
)}&ref=${encodeURIComponent(window.location.href)}`,
|
||||
);
|
||||
|
||||
return {
|
||||
|
||||
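The loginPrecheck change wraps the email in encodeURIComponent so characters that are legal in an email address but meaningful in a query string (notably '+') survive the round trip. A quick illustration:

const email = 'user+test@example.com';
// before: the "+" reaches the server as a space
console.log(`/loginPrecheck?email=${email}`);
// after:
console.log(`/loginPrecheck?email=${encodeURIComponent(email)}`);
// -> /loginPrecheck?email=user%2Btest%40example.com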
18 frontend/src/assets/Dashboard/Table.tsx Normal file
@@ -0,0 +1,18 @@
|
||||
function Table(): JSX.Element {
|
||||
return (
|
||||
<svg
|
||||
width="48"
|
||||
height="48"
|
||||
viewBox="0 0 48 48"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path
|
||||
d="M41.0667 0H6.39993C2.87982 0 0 2.87982 0 6.39993V41.6001C0 45.1202 2.87982 48 6.39993 48H41.0667C44.5868 48 47.4667 45.1202 47.4667 41.6001V6.39993C47.4667 2.87982 44.5868 0 41.0667 0ZM44.2669 6.39993V9.60013H32.0002V3.2002H41.0668C42.8268 3.2002 44.267 4.63992 44.267 6.40003L44.2669 6.39993ZM17.6002 9.60013V3.2002H29.8669V9.60013H17.6002ZM29.8669 11.7333V44.8001H17.6002L17.6005 11.7333H29.8669ZM6.40012 3.20011H15.4667V9.60004H3.20001V6.39984C3.20001 4.63983 4.64011 3.20001 6.40022 3.20001L6.40012 3.20011ZM3.19992 41.6003V11.7335H15.4666V44.8003H6.40003C4.64002 44.8003 3.19982 43.3606 3.19982 41.6005L3.19992 41.6003ZM41.0667 44.8001H32.0001V11.7333H44.2668V41.6001C44.2668 43.3601 42.8267 44.7999 41.0666 44.7999L41.0667 44.8001ZM5.33326 18.6666C5.33326 18.08 5.81317 17.6001 6.39983 17.6001H12.2667C12.8534 17.6001 13.3333 18.08 13.3333 18.6666C13.3333 19.2533 12.8534 19.7332 12.2667 19.7332H6.39983C5.81315 19.7332 5.33326 19.2533 5.33326 18.6666ZM13.3333 25.0666C13.3333 25.6533 12.8534 26.1332 12.2667 26.1332H6.39983C5.81315 26.1332 5.33326 25.6532 5.33326 25.0666C5.33326 24.4799 5.81317 24 6.39983 24H12.2667C12.8534 24 13.3333 24.4799 13.3333 25.0666ZM13.3333 31.4665C13.3333 32.0532 12.8534 32.5331 12.2667 32.5331H6.39983C5.81315 32.5331 5.33326 32.0532 5.33326 31.4665C5.33326 30.8798 5.81317 30.3999 6.39983 30.3999H12.2667C12.8534 30.3999 13.3333 30.8798 13.3333 31.4665ZM13.3333 37.8668C13.3333 38.4535 12.8534 38.9334 12.2667 38.9334H6.39983C5.81315 38.9334 5.33326 38.4535 5.33326 37.8668C5.33326 37.2801 5.81317 36.8002 6.39983 36.8002H12.2667C12.8534 36.7999 13.3333 37.2802 13.3333 37.8668ZM19.7332 18.6667C19.7332 18.0801 20.2131 17.6002 20.7998 17.6002H26.6667C27.2534 17.6002 27.7333 18.0801 27.7333 18.6667C27.7333 19.2534 27.2533 19.7333 26.6667 19.7333H20.7998C20.2131 19.7333 19.7332 19.2534 19.7332 18.6667ZM19.7332 25.0667C19.7332 24.48 20.2131 24.0001 20.7998 24.0001H26.6667C27.2534 24.0001 27.7333 24.48 27.7333 25.0667C27.7333 25.6534 27.2533 26.1332 26.6667 26.1332H20.7998C20.2131 26.1332 19.7332 25.6533 19.7332 25.0667ZM19.7332 31.4666C19.7332 30.8799 20.2131 30.4 20.7998 30.4H26.6667C27.2534 30.4 27.7333 30.8799 27.7333 31.4666C27.7333 32.0533 27.2533 32.5332 26.6667 32.5332H20.7998C20.2131 32.5336 19.7332 32.0533 19.7332 31.4666ZM27.7333 37.8669C27.7333 38.4536 27.2533 38.9335 26.6667 38.9335H20.7998C20.2131 38.9335 19.7332 38.4536 19.7332 37.8669C19.7332 37.2802 20.2131 36.8003 20.7998 36.8003H26.6667C27.2534 36.8 27.7333 37.2803 27.7333 37.8669ZM42.1333 18.6668C42.1333 19.2535 41.6534 19.7334 41.0667 19.7334H35.1999C34.6132 19.7334 34.1333 19.2535 34.1333 18.6668C34.1333 18.0802 34.6132 17.6003 35.1999 17.6003H41.0667C41.6534 17.6003 42.1333 18.0802 42.1333 18.6668ZM42.1333 25.0668C42.1333 25.6535 41.6534 26.1333 41.0667 26.1333H35.1999C34.6132 26.1333 34.1333 25.6534 34.1333 25.0668C34.1333 24.4801 34.6132 24.0002 35.1999 24.0002H41.0667C41.6534 24.0002 42.1333 24.4801 42.1333 25.0668ZM42.1333 31.4667C42.1333 32.0534 41.6534 32.5333 41.0667 32.5333H35.1999C34.6132 32.5333 34.1333 32.0534 34.1333 31.4667C34.1333 30.88 34.6132 30.4001 35.1999 30.4001H41.0667C41.6534 30.4001 42.1333 30.88 42.1333 31.4667ZM42.1333 37.867C42.1333 38.4537 41.6534 38.9336 41.0667 38.9336H35.1999C34.6132 38.9336 34.1333 38.4537 34.1333 37.867C34.1333 37.2803 34.6132 36.8004 35.1999 36.8004H41.0667C41.6534 36.8001 42.1333 37.2803 42.1333 37.867Z"
|
||||
fill="#1668DC"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
||||
export default Table;
|
||||
File diff suppressed because one or more lines are too long
@@ -1,26 +1,22 @@
|
||||
import { CSSProperties } from 'react';
|
||||
|
||||
function Value(props: ValueProps): JSX.Element {
|
||||
const { fillColor } = props;
|
||||
|
||||
function Value(): JSX.Element {
|
||||
return (
|
||||
<svg
|
||||
width="78"
|
||||
height="32"
|
||||
viewBox="0 0 78 32"
|
||||
width="68"
|
||||
height="48"
|
||||
viewBox="0 0 68 48"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path
|
||||
d="M15.0215 17.875C14.2285 18.8184 13.2783 19.5771 12.1709 20.1514C11.0771 20.7256 9.87402 21.0127 8.56152 21.0127C6.83887 21.0127 5.33496 20.5889 4.0498 19.7412C2.77832 18.8936 1.79395 17.7041 1.09668 16.1729C0.399414 14.6279 0.0507812 12.9258 0.0507812 11.0664C0.0507812 9.07031 0.426758 7.27246 1.17871 5.67285C1.94434 4.07324 3.02441 2.84961 4.41895 2.00195C5.81348 1.1543 7.44043 0.730469 9.2998 0.730469C12.2529 0.730469 14.5771 1.83789 16.2725 4.05273C17.9814 6.25391 18.8359 9.26172 18.8359 13.0762V14.1836C18.8359 19.9941 17.6875 24.2393 15.3906 26.9189C13.0938 29.585 9.62793 30.9521 4.99316 31.0205H4.25488V27.8213H5.05469C8.18555 27.7666 10.5918 26.9531 12.2734 25.3809C13.9551 23.7949 14.8711 21.293 15.0215 17.875ZM9.17676 17.875C10.4482 17.875 11.6172 17.4854 12.6836 16.7061C13.7637 15.9268 14.5498 14.9629 15.042 13.8145V12.2969C15.042 9.80859 14.502 7.78516 13.4219 6.22656C12.3418 4.66797 10.9746 3.88867 9.32031 3.88867C7.65234 3.88867 6.3125 4.53125 5.30078 5.81641C4.28906 7.08789 3.7832 8.76953 3.7832 10.8613C3.7832 12.8984 4.26855 14.5801 5.23926 15.9062C6.22363 17.2188 7.53613 17.875 9.17676 17.875ZM24.5371 29.0107C24.5371 28.3545 24.7285 27.8076 25.1113 27.3701C25.5078 26.9326 26.0957 26.7139 26.875 26.7139C27.6543 26.7139 28.2422 26.9326 28.6387 27.3701C29.0488 27.8076 29.2539 28.3545 29.2539 29.0107C29.2539 29.6396 29.0488 30.166 28.6387 30.5898C28.2422 31.0137 27.6543 31.2256 26.875 31.2256C26.0957 31.2256 25.5078 31.0137 25.1113 30.5898C24.7285 30.166 24.5371 29.6396 24.5371 29.0107ZM51.1562 20.9717H55.2988V24.0684H51.1562V31H47.3418V24.0684H33.7451V21.833L47.1162 1.14062H51.1562V20.9717ZM38.0518 20.9717H47.3418V6.3291L46.8906 7.14941L38.0518 20.9717ZM73.6123 1.12012V4.33984H72.915C69.9619 4.39453 67.6104 5.26953 65.8604 6.96484C64.1104 8.66016 63.0986 11.0459 62.8252 14.1221C64.3975 12.3174 66.5439 11.415 69.2646 11.415C71.8623 11.415 73.9336 12.3311 75.4785 14.1631C77.0371 15.9951 77.8164 18.3604 77.8164 21.2588C77.8164 24.335 76.9756 26.7959 75.2939 28.6416C73.626 30.4873 71.3838 31.4102 68.5674 31.4102C65.71 31.4102 63.3926 30.3164 61.6152 28.1289C59.8379 25.9277 58.9492 23.0977 58.9492 19.6387V18.1826C58.9492 12.6865 60.1182 8.48926 62.4561 5.59082C64.8076 2.67871 68.3008 1.18848 72.9355 1.12012H73.6123ZM68.6289 14.5732C67.3301 14.5732 66.1338 14.9629 65.04 15.7422C63.9463 16.5215 63.1875 17.499 62.7637 18.6748V20.0693C62.7637 22.5303 63.3174 24.5127 64.4248 26.0166C65.5322 27.5205 66.9131 28.2725 68.5674 28.2725C70.2764 28.2725 71.6162 27.6436 72.5869 26.3857C73.5713 25.1279 74.0635 23.4805 74.0635 21.4434C74.0635 19.3926 73.5645 17.7383 72.5664 16.4805C71.582 15.209 70.2695 14.5732 68.6289 14.5732Z"
|
||||
fill={fillColor}
|
||||
d="M0 5.914V42.086C0 43.6542 0.62289 45.1585 1.73183 46.2675C2.84078 47.3771 4.34511 48 5.91329 48H61.5019C63.0701 48 64.5744 47.3771 65.6834 46.2675C66.7923 45.1585 67.4152 43.6542 67.4152 42.086V5.914C67.4152 4.34578 66.7923 2.84152 65.6834 1.73253C64.5744 0.623576 63.0701 0 61.5019 0H5.91329C4.34508 0 2.84082 0.623576 1.73183 1.73253C0.622872 2.84149 0 4.34581 0 5.914ZM63.4735 5.914V42.086C63.4735 42.6092 63.2659 43.1104 62.896 43.4803C62.5261 43.8495 62.0249 44.0571 61.5024 44.0571H5.91382C4.82549 44.0571 3.94277 43.175 3.94277 42.086V5.91403C3.94277 4.8257 4.82553 3.94298 5.91382 3.94298H61.5024C62.0249 3.94298 62.5261 4.15061 62.896 4.52048C63.2659 4.88968 63.4735 5.39148 63.4735 5.914Z"
|
||||
fill="#1554AD"
|
||||
/>
|
||||
<path
|
||||
d="M13.7695 17.668C10.1016 17.668 7.48828 20.1758 7.48828 23.6094V23.6328C7.48828 26.8438 9.76172 29.2109 13.0078 29.2109C15.3281 29.2109 16.8047 28.0273 17.4258 26.6914H17.6602C17.6602 26.8203 17.6484 26.9492 17.6484 27.0781C17.5195 30.3125 16.3828 32.9375 13.6992 32.9375C12.2109 32.9375 11.168 32.1641 10.7227 30.9805L10.6875 30.8633H7.71094L7.73438 30.9922C8.27344 33.582 10.5938 35.4219 13.6992 35.4219C17.9531 35.4219 20.5195 32.0469 20.5195 26.3516V26.3281C20.5195 20.2344 17.3789 17.668 13.7695 17.668ZM13.7578 26.8906C11.8359 26.8906 10.4414 25.4844 10.4414 23.5273V23.5039C10.4414 21.6172 11.9297 20.1289 13.793 20.1289C15.668 20.1289 17.1328 21.6406 17.1328 23.5742V23.5977C17.1328 25.5078 15.668 26.8906 13.7578 26.8906ZM24.832 35.2344C25.9102 35.2344 26.6953 34.4258 26.6953 33.3828C26.6953 32.3398 25.9102 31.5312 24.832 31.5312C23.7656 31.5312 22.9688 32.3398 22.9688 33.3828C22.9688 34.4258 23.7656 35.2344 24.832 35.2344ZM37.8633 35H40.7578V31.7539H43.0312V29.2578H40.7578V18.0898H36.4805C34.1836 21.582 31.7812 25.4727 29.5898 29.2812V31.7539H37.8633V35ZM32.4023 29.3281V29.1523C34.043 26.2812 36 23.1523 37.7344 20.5039H37.9102V29.3281H32.4023ZM52.6406 35.4219C56.3086 35.4219 58.9219 32.9141 58.9219 29.4805V29.457C58.9219 26.2461 56.6484 23.8789 53.4023 23.8789C51.082 23.8789 49.6055 25.0625 48.9844 26.3984H48.75C48.75 26.2695 48.7617 26.1406 48.7617 26.0117C48.8906 22.7773 50.0273 20.1523 52.7109 20.1523C54.1992 20.1523 55.2422 20.9258 55.6875 22.1094L55.7344 22.2266H58.6992L58.6758 22.0977C58.1367 19.5078 55.8164 17.668 52.7109 17.668C48.457 17.668 45.8906 21.043 45.8906 26.7383V26.7617C45.8906 32.8555 49.0312 35.4219 52.6406 35.4219ZM49.2773 29.5156V29.4922C49.2773 27.582 50.7422 26.1992 52.6523 26.1992C54.5742 26.1992 55.9688 27.6055 55.9688 29.5625V29.5859C55.9688 31.4727 54.4922 32.9609 52.6172 32.9609C50.7422 32.9609 49.2773 31.4492 49.2773 29.5156Z"
|
||||
fill="#1668DC"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
||||
interface ValueProps {
|
||||
fillColor: CSSProperties['color'];
|
||||
}
|
||||
|
||||
export default Value;
|
||||
|
||||
54 frontend/src/components/DraggableTableRow/index.tsx Normal file
@@ -0,0 +1,54 @@
|
||||
import React, { useCallback, useRef } from 'react';
|
||||
import { useDrag, useDrop } from 'react-dnd';
|
||||
|
||||
import { dragHandler, dropHandler } from './utils';
|
||||
|
||||
const type = 'DraggableTableRow';
|
||||
|
||||
function DraggableTableRow({
|
||||
index,
|
||||
moveRow,
|
||||
className,
|
||||
style,
|
||||
...restProps
|
||||
}: DraggableTableRowProps): JSX.Element {
|
||||
const ref = useRef<HTMLTableRowElement>(null);
|
||||
|
||||
const handleDrop = useCallback(
|
||||
(item: { index: number }) => {
|
||||
if (moveRow) moveRow(item.index, index);
|
||||
},
|
||||
[moveRow, index],
|
||||
);
|
||||
|
||||
const [, drop] = useDrop({
|
||||
accept: type,
|
||||
collect: dropHandler,
|
||||
drop: handleDrop,
|
||||
});
|
||||
|
||||
const [, drag] = useDrag({
|
||||
type,
|
||||
item: { index },
|
||||
collect: dragHandler,
|
||||
});
|
||||
drop(drag(ref));
|
||||
|
||||
return (
|
||||
<tr
|
||||
ref={ref}
|
||||
className={className}
|
||||
style={{ ...style }}
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
{...restProps}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
interface DraggableTableRowProps
|
||||
extends React.HTMLAttributes<HTMLTableRowElement> {
|
||||
index: number;
|
||||
moveRow: (dragIndex: number, hoverIndex: number) => void;
|
||||
}
|
||||
|
||||
export default DraggableTableRow;
|
||||
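A hedged sketch of how DraggableTableRow is typically wired into an antd Table: a react-dnd DndProvider supplies the drag context and onRow forwards index/moveRow to each row. The backend choice and the cast in onRow are assumptions for illustration, not shown in this diff.

import { Table } from 'antd';
import DraggableTableRow from 'components/DraggableTableRow';
import type { HTMLAttributes } from 'react';
import { DndProvider } from 'react-dnd';
import { HTML5Backend } from 'react-dnd-html5-backend';

interface Row {
	key: string;
	name: string;
}

function ReorderableList({
	rows,
	onMove,
}: {
	rows: Row[];
	onMove: (from: number, to: number) => void;
}): JSX.Element {
	return (
		<DndProvider backend={HTML5Backend}>
			<Table<Row>
				dataSource={rows}
				columns={[{ title: 'Name', dataIndex: 'name' }]}
				components={{ body: { row: DraggableTableRow } }}
				// antd types onRow as HTMLAttributes, so the extra drag props are cast through
				onRow={(_, index): HTMLAttributes<HTMLElement> =>
					(({ index, moveRow: onMove } as unknown) as HTMLAttributes<HTMLElement>)
				}
				pagination={false}
			/>
		</DndProvider>
	);
}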
@@ -0,0 +1,38 @@
|
||||
import { render } from '@testing-library/react';
|
||||
import { Table } from 'antd';
|
||||
import { matchMedia } from 'container/PipelinePage/tests/AddNewPipeline.test';
|
||||
import { I18nextProvider } from 'react-i18next';
|
||||
import { Provider } from 'react-redux';
|
||||
import i18n from 'ReactI18';
|
||||
import store from 'store';
|
||||
|
||||
import DraggableTableRow from '..';
|
||||
|
||||
beforeAll(() => {
|
||||
matchMedia();
|
||||
});
|
||||
|
||||
jest.mock('react-dnd', () => ({
|
||||
useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
|
||||
useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
|
||||
}));
|
||||
|
||||
describe('DraggableTableRow Snapshot test', () => {
|
||||
it('should render DraggableTableRow', async () => {
|
||||
const { asFragment } = render(
|
||||
<Provider store={store}>
|
||||
<I18nextProvider i18n={i18n}>
|
||||
<Table
|
||||
components={{
|
||||
body: {
|
||||
row: DraggableTableRow,
|
||||
},
|
||||
}}
|
||||
pagination={false}
|
||||
/>
|
||||
</I18nextProvider>
|
||||
</Provider>,
|
||||
);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,103 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`DraggableTableRow Snapshot test should render DraggableTableRow 1`] = `
|
||||
<DocumentFragment>
|
||||
<div
|
||||
class="ant-table-wrapper css-dev-only-do-not-override-1i536d8"
|
||||
>
|
||||
<div
|
||||
class="ant-spin-nested-loading css-dev-only-do-not-override-1i536d8"
|
||||
>
|
||||
<div
|
||||
class="ant-spin-container"
|
||||
>
|
||||
<div
|
||||
class="ant-table ant-table-empty"
|
||||
>
|
||||
<div
|
||||
class="ant-table-container"
|
||||
>
|
||||
<div
|
||||
class="ant-table-content"
|
||||
>
|
||||
<table
|
||||
style="table-layout: auto;"
|
||||
>
|
||||
<colgroup />
|
||||
<thead
|
||||
class="ant-table-thead"
|
||||
>
|
||||
<tr>
|
||||
<th
|
||||
class="ant-table-cell"
|
||||
/>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody
|
||||
class="ant-table-tbody"
|
||||
>
|
||||
<tr
|
||||
class="ant-table-placeholder"
|
||||
>
|
||||
<td
|
||||
class="ant-table-cell"
|
||||
>
|
||||
<div
|
||||
class="css-dev-only-do-not-override-1i536d8 ant-empty ant-empty-normal"
|
||||
>
|
||||
<div
|
||||
class="ant-empty-image"
|
||||
>
|
||||
<svg
|
||||
height="41"
|
||||
viewBox="0 0 64 41"
|
||||
width="64"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<g
|
||||
fill="none"
|
||||
fill-rule="evenodd"
|
||||
transform="translate(0 1)"
|
||||
>
|
||||
<ellipse
|
||||
cx="32"
|
||||
cy="33"
|
||||
fill="#f5f5f5"
|
||||
rx="32"
|
||||
ry="7"
|
||||
/>
|
||||
<g
|
||||
fill-rule="nonzero"
|
||||
stroke="#d9d9d9"
|
||||
>
|
||||
<path
|
||||
d="M55 12.76L44.854 1.258C44.367.474 43.656 0 42.907 0H21.093c-.749 0-1.46.474-1.947 1.257L9 12.761V22h46v-9.24z"
|
||||
/>
|
||||
<path
|
||||
d="M41.613 15.931c0-1.605.994-2.93 2.227-2.931H55v18.137C55 33.26 53.68 35 52.05 35h-40.1C10.32 35 9 33.259 9 31.137V13h11.16c1.233 0 2.227 1.323 2.227 2.928v.022c0 1.605 1.005 2.901 2.237 2.901h14.752c1.232 0 2.237-1.308 2.237-2.913v-.007z"
|
||||
fill="#fafafa"
|
||||
/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
</div>
|
||||
<div
|
||||
class="ant-empty-description"
|
||||
>
|
||||
No data
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`PipelinePage container test should render AddNewPipeline section 1`] = `<DocumentFragment />`;
|
||||
@@ -0,0 +1,44 @@
|
||||
import { dragHandler, dropHandler } from '../utils';
|
||||
|
||||
jest.mock('react-dnd', () => ({
|
||||
useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
|
||||
useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
|
||||
}));
|
||||
|
||||
describe('Utils testing of DraggableTableRow component', () => {
|
||||
test('Should dropHandler return true', () => {
|
||||
const monitor = {
|
||||
isOver: jest.fn().mockReturnValueOnce(true),
|
||||
} as never;
|
||||
const dropDataTruthy = dropHandler(monitor);
|
||||
|
||||
expect(dropDataTruthy).toEqual({ isOver: true });
|
||||
});
|
||||
|
||||
test('Should dropHandler return false', () => {
|
||||
const monitor = {
|
||||
isOver: jest.fn().mockReturnValueOnce(false),
|
||||
} as never;
|
||||
const dropDataFalsy = dropHandler(monitor);
|
||||
|
||||
expect(dropDataFalsy).toEqual({ isOver: false });
|
||||
});
|
||||
|
||||
test('Should dragHandler return true', () => {
|
||||
const monitor = {
|
||||
isDragging: jest.fn().mockReturnValueOnce(true),
|
||||
} as never;
|
||||
const dragDataTruthy = dragHandler(monitor);
|
||||
|
||||
expect(dragDataTruthy).toEqual({ isDragging: true });
|
||||
});
|
||||
|
||||
test('Should dragHandler return false', () => {
|
||||
const monitor = {
|
||||
isDragging: jest.fn().mockReturnValueOnce(false),
|
||||
} as never;
|
||||
const dragDataFalsy = dragHandler(monitor);
|
||||
|
||||
expect(dragDataFalsy).toEqual({ isDragging: false });
|
||||
});
|
||||
});
|
||||
15 frontend/src/components/DraggableTableRow/utils.ts Normal file
@@ -0,0 +1,15 @@
|
||||
import { DragSourceMonitor, DropTargetMonitor } from 'react-dnd';
|
||||
|
||||
export function dropHandler(monitor: DropTargetMonitor): { isOver: boolean } {
|
||||
return {
|
||||
isOver: monitor.isOver(),
|
||||
};
|
||||
}
|
||||
|
||||
export function dragHandler(
|
||||
monitor: DragSourceMonitor,
|
||||
): { isDragging: boolean } {
|
||||
return {
|
||||
isDragging: monitor.isDragging(),
|
||||
};
|
||||
}
|
||||
27 frontend/src/components/ExplorerCard/index.tsx Normal file
@@ -0,0 +1,27 @@
|
||||
import { Card, Space, Typography } from 'antd';
|
||||
import TextToolTip from 'components/TextToolTip';
|
||||
|
||||
function ExplorerCard({ children }: Props): JSX.Element {
|
||||
return (
|
||||
<Card
|
||||
size="small"
|
||||
title={
|
||||
<Space>
|
||||
<Typography>Query Builder</Typography>
|
||||
<TextToolTip
|
||||
url="https://signoz.io/docs/userguide/query-builder/?utm_source=product&utm_medium=new-query-builder"
|
||||
text="More details on how to use query builder"
|
||||
/>
|
||||
</Space>
|
||||
}
|
||||
>
|
||||
{children}
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
interface Props {
|
||||
children: React.ReactNode;
|
||||
}
|
||||
|
||||
export default ExplorerCard;
|
||||
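A minimal usage sketch for ExplorerCard; the child content is a stand-in:

import ExplorerCard from 'components/ExplorerCard';

function ExplorerToolbar(): JSX.Element {
	return (
		<ExplorerCard>
			{/* any query-builder controls render inside the card body */}
			<div>Query builder controls</div>
		</ExplorerCard>
	);
}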
@@ -1,6 +1,8 @@
|
||||
import { Chart, ChartType, Plugin } from 'chart.js';
|
||||
import { Events } from 'constants/events';
|
||||
import { colors } from 'lib/getRandomColor';
|
||||
import { get } from 'lodash-es';
|
||||
import { eventEmitter } from 'utils/getEventEmitter';
|
||||
|
||||
const getOrCreateLegendList = (
|
||||
chart: Chart,
|
||||
@@ -19,6 +21,7 @@ const getOrCreateLegendList = (
|
||||
listContainer.style.overflowY = 'scroll';
|
||||
listContainer.style.justifyContent = isLonger ? 'start' : 'center';
|
||||
listContainer.style.alignItems = isLonger ? 'start' : 'center';
|
||||
listContainer.style.minHeight = '2rem';
|
||||
listContainer.style.height = '100%';
|
||||
listContainer.style.flexWrap = 'wrap';
|
||||
listContainer.style.justifyContent = 'center';
|
||||
@@ -73,6 +76,10 @@ export const legend = (id: string, isLonger: boolean): Plugin<ChartType> => ({
|
||||
item.datasetIndex,
|
||||
!chart.isDatasetVisible(item.datasetIndex),
|
||||
);
|
||||
eventEmitter.emit(Events.UPDATE_GRAPH_MANAGER_TABLE, {
|
||||
name: id,
|
||||
index: item.datasetIndex,
|
||||
});
|
||||
}
|
||||
chart.update();
|
||||
};
|
||||
|
||||
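The legend plugin now broadcasts legend clicks over the shared event emitter. A sketch of a consumer, assuming the emitter exposes the usual on/off pair and reusing the same event constant (the hook itself is illustrative):

import { Events } from 'constants/events';
import { useEffect } from 'react';
import { eventEmitter } from 'utils/getEventEmitter';

function useLegendClicks(
	graphName: string,
	onToggle: (datasetIndex: number) => void,
): void {
	useEffect(() => {
		const handler = ({ name, index }: { name: string; index: number }): void => {
			// the plugin emits { name: id, index: item.datasetIndex } on every legend click
			if (name === graphName) onToggle(index);
		};
		eventEmitter.on(Events.UPDATE_GRAPH_MANAGER_TABLE, handler);
		return (): void => {
			eventEmitter.off(Events.UPDATE_GRAPH_MANAGER_TABLE, handler);
		};
	}, [graphName, onToggle]);
}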
@@ -1,13 +1,8 @@
|
||||
import {
|
||||
ActiveElement,
|
||||
BarController,
|
||||
BarElement,
|
||||
CategoryScale,
|
||||
Chart,
|
||||
ChartData,
|
||||
ChartEvent,
|
||||
ChartOptions,
|
||||
ChartType,
|
||||
Decimation,
|
||||
Filler,
|
||||
Legend,
|
||||
@@ -21,33 +16,30 @@ import {
|
||||
Title,
|
||||
Tooltip,
|
||||
} from 'chart.js';
|
||||
import * as chartjsAdapter from 'chartjs-adapter-date-fns';
|
||||
import annotationPlugin from 'chartjs-plugin-annotation';
|
||||
import dayjs from 'dayjs';
|
||||
import { generateGridTitle } from 'container/GridPanelSwitch/utils';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import isEqual from 'lodash-es/isEqual';
|
||||
import { memo, useCallback, useEffect, useRef } from 'react';
|
||||
import {
|
||||
forwardRef,
|
||||
memo,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useImperativeHandle,
|
||||
useMemo,
|
||||
useRef,
|
||||
} from 'react';
|
||||
|
||||
import { hasData } from './hasData';
|
||||
import { getAxisLabelColor } from './helpers';
|
||||
import { legend } from './Plugin';
|
||||
import {
|
||||
createDragSelectPlugin,
|
||||
createDragSelectPluginOptions,
|
||||
dragSelectPluginId,
|
||||
DragSelectPluginOptions,
|
||||
} from './Plugin/DragSelect';
|
||||
import { createDragSelectPlugin } from './Plugin/DragSelect';
|
||||
import { emptyGraph } from './Plugin/EmptyGraph';
|
||||
import {
|
||||
createIntersectionCursorPlugin,
|
||||
createIntersectionCursorPluginOptions,
|
||||
intersectionCursorPluginId,
|
||||
IntersectionCursorPluginOptions,
|
||||
} from './Plugin/IntersectionCursor';
|
||||
import { createIntersectionCursorPlugin } from './Plugin/IntersectionCursor';
|
||||
import { TooltipPosition as TooltipPositionHandler } from './Plugin/Tooltip';
|
||||
import { LegendsContainer } from './styles';
|
||||
import { CustomChartOptions, GraphProps, ToggleGraphProps } from './types';
|
||||
import { getGraphOptions, toggleGraph } from './utils';
|
||||
import { useXAxisTimeUnit } from './xAxisConfig';
|
||||
import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';
|
||||
|
||||
Chart.register(
|
||||
LineElement,
|
||||
@@ -70,263 +62,126 @@ Chart.register(
|
||||
|
||||
Tooltip.positioners.custom = TooltipPositionHandler;
|
||||
|
||||
function Graph({
|
||||
animate = true,
|
||||
data,
|
||||
type,
|
||||
title,
|
||||
isStacked,
|
||||
onClickHandler,
|
||||
name,
|
||||
yAxisUnit = 'short',
|
||||
forceReRender,
|
||||
staticLine,
|
||||
containerHeight,
|
||||
onDragSelect,
|
||||
dragSelectColor,
|
||||
}: GraphProps): JSX.Element {
|
||||
const nearestDatasetIndex = useRef<null | number>(null);
|
||||
const chartRef = useRef<HTMLCanvasElement>(null);
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
|
||||
(
|
||||
{
|
||||
animate = true,
|
||||
data,
|
||||
type,
|
||||
title,
|
||||
isStacked,
|
||||
onClickHandler,
|
||||
name,
|
||||
yAxisUnit = 'short',
|
||||
forceReRender,
|
||||
staticLine,
|
||||
containerHeight,
|
||||
onDragSelect,
|
||||
dragSelectColor,
|
||||
},
|
||||
ref,
|
||||
): JSX.Element => {
|
||||
const nearestDatasetIndex = useRef<null | number>(null);
|
||||
const chartRef = useRef<HTMLCanvasElement>(null);
|
||||
const isDarkMode = useIsDarkMode();
const gridTitle = useMemo(() => generateGridTitle(title), [title]);

const currentTheme = isDarkMode ? 'dark' : 'light';
const xAxisTimeUnit = useXAxisTimeUnit(data); // Computes the relevant time unit for x axis by analyzing the time stamp data

const lineChartRef = useRef<Chart>();

const getGridColor = useCallback(() => {
  if (currentTheme === undefined) {
    return 'rgba(231,233,237,0.1)';
  }

  if (currentTheme === 'dark') {
    return 'rgba(231,233,237,0.1)';
  }

  return 'rgba(231,233,237,0.8)';
}, [currentTheme]);

useImperativeHandle(
  ref,
  (): ToggleGraphProps => ({
    toggleGraph(graphIndex: number, isVisible: boolean): void {
      toggleGraph(graphIndex, isVisible, lineChartRef);
    },
  }),
);

const buildChart = useCallback(() => {
  if (lineChartRef.current !== undefined) {
    lineChartRef.current.destroy();
  }

  if (chartRef.current !== null) {
    const options: CustomChartOptions = getGraphOptions(
      animate,
      staticLine,
      gridTitle,
      nearestDatasetIndex,
      yAxisUnit,
      onDragSelect,
      dragSelectColor,
      currentTheme,
      getGridColor,
      xAxisTimeUnit,
      isStacked,
      onClickHandler,
      data,
    );

    const chartHasData = hasData(data);
    const chartPlugins = [];

    if (chartHasData) {
      chartPlugins.push(createIntersectionCursorPlugin());
      chartPlugins.push(createDragSelectPlugin());
    } else {
      chartPlugins.push(emptyGraph);
    }

    chartPlugins.push(legend(name, data.datasets.length > 3));

    lineChartRef.current = new Chart(chartRef.current, {
      type,
      data,
      options,
      plugins: chartPlugins,
    });
  }
}, [
  animate,
  staticLine,
  gridTitle,
  yAxisUnit,
  onDragSelect,
  dragSelectColor,
  currentTheme,
  getGridColor,
  xAxisTimeUnit,
  isStacked,
  onClickHandler,
  data,
  name,
  type,
]);

useEffect(() => {
  buildChart();
}, [buildChart, forceReRender]);

return (
  <div style={{ height: containerHeight }}>
    <canvas ref={chartRef} />
    <LegendsContainer id={name} />
  </div>
);
},
);

declare module 'chart.js' {
  interface TooltipPositionerMap {
@@ -334,45 +189,6 @@ declare module 'chart.js' {
  }
}

type CustomChartOptions = ChartOptions & {
  plugins: {
    [dragSelectPluginId]: DragSelectPluginOptions | false;
    [intersectionCursorPluginId]: IntersectionCursorPluginOptions | false;
  };
};

interface GraphProps {
  animate?: boolean;
  type: ChartType;
  data: Chart['data'];
  title?: string;
  isStacked?: boolean;
  onClickHandler?: GraphOnClickHandler;
  name: string;
  yAxisUnit?: string;
  forceReRender?: boolean | null | number;
  staticLine?: StaticLineProps | undefined;
  containerHeight?: string | number;
  onDragSelect?: (start: number, end: number) => void;
  dragSelectColor?: string;
}

export interface StaticLineProps {
  yMin: number | undefined;
  yMax: number | undefined;
  borderColor: string;
  borderWidth: number;
  lineText: string;
  textColor: string;
}

export type GraphOnClickHandler = (
  event: ChartEvent,
  elements: ActiveElement[],
  chart: Chart,
  data: ChartData,
) => void;

Graph.defaultProps = {
  animate: undefined,
  title: undefined,
@@ -386,6 +202,8 @@ Graph.defaultProps = {
  dragSelectColor: undefined,
};

Graph.displayName = 'Graph';

export default memo(Graph, (prevProps, nextProps) =>
  isEqual(prevProps.data, nextProps.data),
);
78 frontend/src/components/Graph/types.ts Normal file
@@ -0,0 +1,78 @@
|
||||
import {
|
||||
ActiveElement,
|
||||
Chart,
|
||||
ChartData,
|
||||
ChartEvent,
|
||||
ChartOptions,
|
||||
ChartType,
|
||||
TimeUnit,
|
||||
} from 'chart.js';
|
||||
import { ForwardedRef, ReactNode } from 'react';
|
||||
|
||||
import {
|
||||
dragSelectPluginId,
|
||||
DragSelectPluginOptions,
|
||||
} from './Plugin/DragSelect';
|
||||
import {
|
||||
intersectionCursorPluginId,
|
||||
IntersectionCursorPluginOptions,
|
||||
} from './Plugin/IntersectionCursor';
|
||||
|
||||
export interface StaticLineProps {
|
||||
yMin: number | undefined;
|
||||
yMax: number | undefined;
|
||||
borderColor: string;
|
||||
borderWidth: number;
|
||||
lineText: string;
|
||||
textColor: string;
|
||||
}
|
||||
|
||||
export type GraphOnClickHandler = (
|
||||
event: ChartEvent,
|
||||
elements: ActiveElement[],
|
||||
chart: Chart,
|
||||
data: ChartData,
|
||||
) => void;
|
||||
|
||||
export type ToggleGraphProps = {
|
||||
toggleGraph(graphIndex: number, isVisible: boolean): void;
|
||||
};
|
||||
|
||||
export type CustomChartOptions = ChartOptions & {
|
||||
plugins: {
|
||||
[dragSelectPluginId]: DragSelectPluginOptions | false;
|
||||
[intersectionCursorPluginId]: IntersectionCursorPluginOptions | false;
|
||||
};
|
||||
};
|
||||
|
||||
export interface GraphProps {
|
||||
animate?: boolean;
|
||||
type: ChartType;
|
||||
data: Chart['data'];
|
||||
title?: ReactNode;
|
||||
isStacked?: boolean;
|
||||
onClickHandler?: GraphOnClickHandler;
|
||||
name: string;
|
||||
yAxisUnit?: string;
|
||||
forceReRender?: boolean | null | number;
|
||||
staticLine?: StaticLineProps | undefined;
|
||||
containerHeight?: string | number;
|
||||
onDragSelect?: (start: number, end: number) => void;
|
||||
dragSelectColor?: string;
|
||||
ref?: ForwardedRef<ToggleGraphProps | undefined>;
|
||||
}
|
||||
|
||||
export interface IAxisTimeUintConfig {
|
||||
unitName: TimeUnit;
|
||||
multiplier: number;
|
||||
}
|
||||
|
||||
export interface IAxisTimeConfig {
|
||||
unitName: TimeUnit;
|
||||
stepSize: number;
|
||||
}
|
||||
|
||||
export interface ITimeRange {
|
||||
minTime: number | null;
|
||||
maxTime: number | null;
|
||||
}
|
||||
223 frontend/src/components/Graph/utils.ts Normal file
@@ -0,0 +1,223 @@
|
||||
import { Chart, ChartConfiguration, ChartData, Color } from 'chart.js';
|
||||
import * as chartjsAdapter from 'chartjs-adapter-date-fns';
|
||||
import dayjs from 'dayjs';
|
||||
import { MutableRefObject } from 'react';
|
||||
|
||||
import { getAxisLabelColor } from './helpers';
|
||||
import {
|
||||
createDragSelectPluginOptions,
|
||||
dragSelectPluginId,
|
||||
} from './Plugin/DragSelect';
|
||||
import {
|
||||
createIntersectionCursorPluginOptions,
|
||||
intersectionCursorPluginId,
|
||||
} from './Plugin/IntersectionCursor';
|
||||
import {
|
||||
CustomChartOptions,
|
||||
GraphOnClickHandler,
|
||||
IAxisTimeConfig,
|
||||
StaticLineProps,
|
||||
} from './types';
|
||||
import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';
|
||||
|
||||
export const toggleGraph = (
|
||||
graphIndex: number,
|
||||
isVisible: boolean,
|
||||
lineChartRef: MutableRefObject<Chart | undefined>,
|
||||
): void => {
|
||||
if (lineChartRef && lineChartRef.current) {
|
||||
const { type } = lineChartRef.current?.config as ChartConfiguration;
|
||||
if (type === 'pie' || type === 'doughnut') {
|
||||
lineChartRef.current?.toggleDataVisibility(graphIndex);
|
||||
} else {
|
||||
lineChartRef.current?.setDatasetVisibility(graphIndex, isVisible);
|
||||
}
|
||||
lineChartRef.current?.update();
|
||||
}
|
||||
};
|
||||
|
||||
export const getGraphOptions = (
|
||||
animate: boolean,
|
||||
staticLine: StaticLineProps | undefined,
|
||||
title: string | undefined,
|
||||
nearestDatasetIndex: MutableRefObject<number | null>,
|
||||
yAxisUnit: string,
|
||||
onDragSelect: ((start: number, end: number) => void) | undefined,
|
||||
dragSelectColor: string | undefined,
|
||||
currentTheme: 'dark' | 'light',
|
||||
getGridColor: () => 'rgba(231,233,237,0.1)' | 'rgba(231,233,237,0.8)',
|
||||
xAxisTimeUnit: IAxisTimeConfig,
|
||||
isStacked: boolean | undefined,
|
||||
onClickHandler: GraphOnClickHandler | undefined,
|
||||
data: ChartData,
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
): CustomChartOptions => ({
|
||||
animation: {
|
||||
duration: animate ? 200 : 0,
|
||||
},
|
||||
responsive: true,
|
||||
maintainAspectRatio: false,
|
||||
interaction: {
|
||||
mode: 'index',
|
||||
intersect: false,
|
||||
},
|
||||
plugins: {
|
||||
annotation: staticLine
|
||||
? {
|
||||
annotations: [
|
||||
{
|
||||
type: 'line',
|
||||
yMin: staticLine.yMin,
|
||||
yMax: staticLine.yMax,
|
||||
borderColor: staticLine.borderColor,
|
||||
borderWidth: staticLine.borderWidth,
|
||||
label: {
|
||||
content: staticLine.lineText,
|
||||
enabled: true,
|
||||
font: {
|
||||
size: 10,
|
||||
},
|
||||
borderWidth: 0,
|
||||
position: 'start',
|
||||
backgroundColor: 'transparent',
|
||||
color: staticLine.textColor,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: undefined,
|
||||
title: {
|
||||
display: title !== undefined,
|
||||
text: title,
|
||||
},
|
||||
legend: {
|
||||
display: false,
|
||||
},
|
||||
tooltip: {
|
||||
callbacks: {
|
||||
title(context): string | string[] {
|
||||
const date = dayjs(context[0].parsed.x);
|
||||
return date.format('MMM DD, YYYY, HH:mm:ss');
|
||||
},
|
||||
label(context): string | string[] {
|
||||
let label = context.dataset.label || '';
|
||||
|
||||
if (label) {
|
||||
label += ': ';
|
||||
}
|
||||
if (context.parsed.y !== null) {
|
||||
label += getToolTipValue(context.parsed.y.toString(), yAxisUnit);
|
||||
}
|
||||
|
||||
return label;
|
||||
},
|
||||
labelTextColor(labelData): Color {
|
||||
if (labelData.datasetIndex === nearestDatasetIndex.current) {
|
||||
return 'rgba(255, 255, 255, 1)';
|
||||
}
|
||||
|
||||
return 'rgba(255, 255, 255, 0.75)';
|
||||
},
|
||||
},
|
||||
position: 'custom',
|
||||
itemSort(item1, item2): number {
|
||||
return item2.parsed.y - item1.parsed.y;
|
||||
},
|
||||
},
|
||||
[dragSelectPluginId]: createDragSelectPluginOptions(
|
||||
!!onDragSelect,
|
||||
onDragSelect,
|
||||
dragSelectColor,
|
||||
),
|
||||
[intersectionCursorPluginId]: createIntersectionCursorPluginOptions(
|
||||
!!onDragSelect,
|
||||
currentTheme === 'dark' ? 'white' : 'black',
|
||||
),
|
||||
},
|
||||
layout: {
|
||||
padding: 0,
|
||||
},
|
||||
scales: {
|
||||
x: {
|
||||
grid: {
|
||||
display: true,
|
||||
color: getGridColor(),
|
||||
drawTicks: true,
|
||||
},
|
||||
adapters: {
|
||||
date: chartjsAdapter,
|
||||
},
|
||||
time: {
|
||||
unit: xAxisTimeUnit?.unitName || 'minute',
|
||||
stepSize: xAxisTimeUnit?.stepSize || 1,
|
||||
displayFormats: {
|
||||
millisecond: 'HH:mm:ss',
|
||||
second: 'HH:mm:ss',
|
||||
minute: 'HH:mm',
|
||||
hour: 'MM/dd HH:mm',
|
||||
day: 'MM/dd',
|
||||
week: 'MM/dd',
|
||||
month: 'yy-MM',
|
||||
year: 'yy',
|
||||
},
|
||||
},
|
||||
type: 'time',
|
||||
ticks: { color: getAxisLabelColor(currentTheme) },
|
||||
},
|
||||
y: {
|
||||
display: true,
|
||||
grid: {
|
||||
display: true,
|
||||
color: getGridColor(),
|
||||
},
|
||||
ticks: {
|
||||
color: getAxisLabelColor(currentTheme),
|
||||
// Include a dollar sign in the ticks
|
||||
callback(value): string {
|
||||
return getYAxisFormattedValue(value.toString(), yAxisUnit);
|
||||
},
|
||||
},
|
||||
},
|
||||
stacked: {
|
||||
display: isStacked === undefined ? false : 'auto',
|
||||
},
|
||||
},
|
||||
elements: {
|
||||
line: {
|
||||
tension: 0,
|
||||
cubicInterpolationMode: 'monotone',
|
||||
},
|
||||
point: {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
hoverBackgroundColor: (ctx: any): string => {
|
||||
if (ctx?.element?.options?.borderColor) {
|
||||
return ctx.element.options.borderColor;
|
||||
}
|
||||
return 'rgba(0,0,0,0.1)';
|
||||
},
|
||||
hoverRadius: 5,
|
||||
},
|
||||
},
|
||||
onClick: (event, element, chart): void => {
|
||||
if (onClickHandler) {
|
||||
onClickHandler(event, element, chart, data);
|
||||
}
|
||||
},
|
||||
onHover: (event, _, chart): void => {
|
||||
if (event.native) {
|
||||
const interactions = chart.getElementsAtEventForMode(
|
||||
event.native,
|
||||
'nearest',
|
||||
{
|
||||
intersect: false,
|
||||
},
|
||||
true,
|
||||
);
|
||||
|
||||
if (interactions[0]) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
nearestDatasetIndex.current = interactions[0].datasetIndex;
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
@@ -4,20 +4,7 @@ import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
interface IAxisTimeUintConfig {
|
||||
unitName: TimeUnit;
|
||||
multiplier: number;
|
||||
}
|
||||
|
||||
interface IAxisTimeConfig {
|
||||
unitName: TimeUnit;
|
||||
stepSize: number;
|
||||
}
|
||||
|
||||
export interface ITimeRange {
|
||||
minTime: number | null;
|
||||
maxTime: number | null;
|
||||
}
|
||||
import { IAxisTimeConfig, IAxisTimeUintConfig, ITimeRange } from './types';
|
||||
|
||||
export const TIME_UNITS: Record<TimeUnit, TimeUnit> = {
|
||||
millisecond: 'millisecond',
|
||||
|
||||
10 frontend/src/components/LogDetail/LogDetail.interfaces.ts Normal file
@@ -0,0 +1,10 @@
|
||||
import { DrawerProps } from 'antd';
|
||||
import { AddToQueryHOCProps } from 'components/Logs/AddToQueryHOC';
|
||||
import { ActionItemProps } from 'container/LogDetailedView/ActionItem';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
export type LogDetailProps = {
|
||||
log: ILog | null;
|
||||
} & Pick<AddToQueryHOCProps, 'onAddToQuery'> &
|
||||
Pick<ActionItemProps, 'onClickActionItem'> &
|
||||
Pick<DrawerProps, 'onClose'>;
|
||||
52 frontend/src/components/LogDetail/index.tsx Normal file
@@ -0,0 +1,52 @@
|
||||
import { Drawer, Tabs } from 'antd';
|
||||
import JSONView from 'container/LogDetailedView/JsonView';
|
||||
import TableView from 'container/LogDetailedView/TableView';
|
||||
import { useMemo } from 'react';
|
||||
|
||||
import { LogDetailProps } from './LogDetail.interfaces';
|
||||
|
||||
function LogDetail({
|
||||
log,
|
||||
onClose,
|
||||
onAddToQuery,
|
||||
onClickActionItem,
|
||||
}: LogDetailProps): JSX.Element {
|
||||
const items = useMemo(
|
||||
() => [
|
||||
{
|
||||
label: 'Table',
|
||||
key: '1',
|
||||
children: log && (
|
||||
<TableView
|
||||
logData={log}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onClickActionItem}
|
||||
/>
|
||||
),
|
||||
},
|
||||
{
|
||||
label: 'JSON',
|
||||
key: '2',
|
||||
children: log && <JSONView logData={log} />,
|
||||
},
|
||||
],
|
||||
[log, onAddToQuery, onClickActionItem],
|
||||
);
|
||||
|
||||
return (
|
||||
<Drawer
|
||||
width="60%"
|
||||
title="Log Details"
|
||||
placement="right"
|
||||
closable
|
||||
onClose={onClose}
|
||||
open={log !== null}
|
||||
style={{ overscrollBehavior: 'contain' }}
|
||||
destroyOnClose
|
||||
>
|
||||
<Tabs defaultActiveKey="1" items={items} />
|
||||
</Drawer>
|
||||
);
|
||||
}
|
||||
|
||||
export default LogDetail;
|
||||
@@ -1,39 +1,18 @@
|
||||
import { Popover } from 'antd';
|
||||
import ROUTES from 'constants/routes';
|
||||
import history from 'lib/history';
|
||||
import { generateFilterQuery } from 'lib/logs/generateFilterQuery';
|
||||
import { OPERATORS } from 'constants/queryBuilder';
|
||||
import { memo, ReactNode, useCallback, useMemo } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { ILogsReducer } from 'types/reducer/logs';
|
||||
|
||||
import { ButtonContainer } from './styles';
|
||||
|
||||
function AddToQueryHOC({
|
||||
fieldKey,
|
||||
fieldValue,
|
||||
onAddToQuery,
|
||||
children,
|
||||
}: AddToQueryHOCProps): JSX.Element {
|
||||
const {
|
||||
searchFilter: { queryString },
|
||||
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
|
||||
|
||||
const generatedQuery = useMemo(
|
||||
() => generateFilterQuery({ fieldKey, fieldValue, type: 'IN' }),
|
||||
[fieldKey, fieldValue],
|
||||
);
|
||||
|
||||
const handleQueryAdd = useCallback(() => {
|
||||
let updatedQueryString = queryString || '';
|
||||
|
||||
if (updatedQueryString.length === 0) {
|
||||
updatedQueryString += `${generatedQuery}`;
|
||||
} else {
|
||||
updatedQueryString += ` AND ${generatedQuery}`;
|
||||
}
|
||||
|
||||
history.replace(`${ROUTES.LOGS}?q=${updatedQueryString}`);
|
||||
}, [generatedQuery, queryString]);
|
||||
onAddToQuery(fieldKey, fieldValue, OPERATORS.IN);
|
||||
}, [fieldKey, fieldValue, onAddToQuery]);
|
||||
|
||||
const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
|
||||
fieldKey,
|
||||
@@ -48,9 +27,10 @@ function AddToQueryHOC({
|
||||
);
|
||||
}
|
||||
|
||||
interface AddToQueryHOCProps {
|
||||
export interface AddToQueryHOCProps {
|
||||
fieldKey: string;
|
||||
fieldValue: string;
|
||||
onAddToQuery: (fieldKey: string, fieldValue: string, operator: string) => void;
|
||||
children: ReactNode;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,23 +1,29 @@
|
||||
import { blue, grey, orange } from '@ant-design/colors';
|
||||
import { CopyFilled, ExpandAltOutlined } from '@ant-design/icons';
|
||||
import {
|
||||
CopyFilled,
|
||||
ExpandAltOutlined,
|
||||
LinkOutlined,
|
||||
MonitorOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import Convert from 'ansi-to-html';
|
||||
import { Button, Divider, Row, Typography } from 'antd';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
import LogsExplorerContext from 'container/LogsExplorerContext';
|
||||
import dayjs from 'dayjs';
|
||||
import dompurify from 'dompurify';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
// utils
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
// interfaces
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SET_DETAILED_LOG_DATA } from 'types/actions/logs';
|
||||
import { IField } from 'types/api/logs/fields';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
import { ILogsReducer } from 'types/reducer/logs';
|
||||
|
||||
// components
|
||||
import AddToQueryHOC from '../AddToQueryHOC';
|
||||
import AddToQueryHOC, { AddToQueryHOCProps } from '../AddToQueryHOC';
|
||||
import CopyClipboardHOC from '../CopyClipboardHOC';
|
||||
// styles
|
||||
import {
|
||||
@@ -36,6 +42,10 @@ interface LogFieldProps {
|
||||
fieldKey: string;
|
||||
fieldValue: string;
|
||||
}
|
||||
|
||||
type LogSelectedFieldProps = LogFieldProps &
|
||||
Pick<AddToQueryHOCProps, 'onAddToQuery'>;
|
||||
|
||||
function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element {
|
||||
const html = useMemo(
|
||||
() => ({
|
||||
@@ -59,10 +69,15 @@ function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element {
|
||||
function LogSelectedField({
|
||||
fieldKey = '',
|
||||
fieldValue = '',
|
||||
}: LogFieldProps): JSX.Element {
|
||||
onAddToQuery,
|
||||
}: LogSelectedFieldProps): JSX.Element {
|
||||
return (
|
||||
<SelectedLog>
|
||||
<AddToQueryHOC fieldKey={fieldKey} fieldValue={fieldValue}>
|
||||
<AddToQueryHOC
|
||||
fieldKey={fieldKey}
|
||||
fieldValue={fieldValue}
|
||||
onAddToQuery={onAddToQuery}
|
||||
>
|
||||
<Typography.Text>
|
||||
<span style={{ color: blue[4] }}>{fieldKey}</span>
|
||||
</Typography.Text>
|
||||
@@ -77,26 +92,41 @@ function LogSelectedField({
|
||||
);
|
||||
}
|
||||
|
||||
interface ListLogViewProps {
|
||||
type ListLogViewProps = {
|
||||
logData: ILog;
|
||||
}
|
||||
function ListLogView({ logData }: ListLogViewProps): JSX.Element {
|
||||
const {
|
||||
fields: { selected },
|
||||
} = useSelector<AppState, ILogsReducer>((state) => state.logs);
|
||||
selectedFields: IField[];
|
||||
};
|
||||
|
||||
const dispatch = useDispatch();
|
||||
function ListLogView({
|
||||
logData,
|
||||
selectedFields,
|
||||
}: ListLogViewProps): JSX.Element {
|
||||
const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
|
||||
|
||||
const [, setCopy] = useCopyToClipboard();
|
||||
const { notifications } = useNotifications();
|
||||
const { isHighlighted, isLogsExplorerPage, onLogCopy } = useCopyLogLink(
|
||||
logData.id,
|
||||
);
|
||||
const {
|
||||
activeLog: activeContextLog,
|
||||
onSetActiveLog: handleSetActiveContextLog,
|
||||
onClearActiveLog: handleClearActiveContextLog,
|
||||
} = useActiveLog();
|
||||
const {
|
||||
activeLog,
|
||||
onSetActiveLog,
|
||||
onClearActiveLog,
|
||||
onAddToQuery,
|
||||
} = useActiveLog();
|
||||
|
||||
const handleDetailedView = useCallback(() => {
|
||||
dispatch({
|
||||
type: SET_DETAILED_LOG_DATA,
|
||||
payload: logData,
|
||||
});
|
||||
}, [dispatch, logData]);
|
||||
onSetActiveLog(logData);
|
||||
}, [logData, onSetActiveLog]);
|
||||
|
||||
const handleShowContext = useCallback(() => {
|
||||
handleSetActiveContextLog(logData);
|
||||
}, [logData, handleSetActiveContextLog]);
|
||||
|
||||
const handleCopyJSON = (): void => {
|
||||
setCopy(JSON.stringify(logData, null, 2));
|
||||
@@ -106,12 +136,20 @@ function ListLogView({ logData }: ListLogViewProps): JSX.Element {
|
||||
};
|
||||
|
||||
const updatedSelecedFields = useMemo(
|
||||
() => selected.filter((e) => e.name !== 'id'),
|
||||
[selected],
|
||||
() => selectedFields.filter((e) => e.name !== 'id'),
|
||||
[selectedFields],
|
||||
);
|
||||
|
||||
const timestampValue = useMemo(
|
||||
() =>
|
||||
typeof flattenLogData.timestamp === 'string'
|
||||
? dayjs(flattenLogData.timestamp).format()
|
||||
: dayjs(flattenLogData.timestamp / 1e6).format(),
|
||||
[flattenLogData.timestamp],
|
||||
);
|
||||
|
||||
return (
|
||||
<Container>
|
||||
<Container $isActiveLog={isHighlighted}>
|
||||
<div>
|
||||
<LogContainer>
|
||||
<>
|
||||
@@ -119,10 +157,7 @@ function ListLogView({ logData }: ListLogViewProps): JSX.Element {
|
||||
{flattenLogData.stream && (
|
||||
<LogGeneralField fieldKey="stream" fieldValue={flattenLogData.stream} />
|
||||
)}
|
||||
<LogGeneralField
|
||||
fieldKey="timestamp"
|
||||
fieldValue={dayjs((flattenLogData.timestamp as never) / 1e6).format()}
|
||||
/>
|
||||
<LogGeneralField fieldKey="timestamp" fieldValue={timestampValue} />
|
||||
</>
|
||||
</LogContainer>
|
||||
<div>
|
||||
@@ -132,6 +167,7 @@ function ListLogView({ logData }: ListLogViewProps): JSX.Element {
|
||||
key={field.name}
|
||||
fieldKey={field.name}
|
||||
fieldValue={flattenLogData[field.name] as never}
|
||||
onAddToQuery={onAddToQuery}
|
||||
/>
|
||||
) : null,
|
||||
)}
|
||||
@@ -157,6 +193,42 @@ function ListLogView({ logData }: ListLogViewProps): JSX.Element {
|
||||
>
|
||||
Copy JSON
|
||||
</Button>
|
||||
|
||||
{isLogsExplorerPage && (
|
||||
<>
|
||||
<Button
|
||||
size="small"
|
||||
type="text"
|
||||
onClick={handleShowContext}
|
||||
style={{ color: grey[1] }}
|
||||
icon={<MonitorOutlined />}
|
||||
>
|
||||
Show in Context
|
||||
</Button>
|
||||
<Button
|
||||
size="small"
|
||||
type="text"
|
||||
onClick={onLogCopy}
|
||||
style={{ color: grey[1] }}
|
||||
icon={<LinkOutlined />}
|
||||
>
|
||||
Copy Link
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
|
||||
{activeContextLog && (
|
||||
<LogsExplorerContext
|
||||
log={activeContextLog}
|
||||
onClose={handleClearActiveContextLog}
|
||||
/>
|
||||
)}
|
||||
<LogDetail
|
||||
log={activeLog}
|
||||
onClose={onClearActiveLog}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
/>
|
||||
</Row>
|
||||
</Container>
|
||||
);
|
||||
|
||||
@@ -1,20 +1,17 @@
|
||||
import { Card, Typography } from 'antd';
|
||||
import styled, { keyframes } from 'styled-components';
|
||||
import styled from 'styled-components';
|
||||
import { getActiveLogBackground } from 'utils/logs';
|
||||
|
||||
const fadeInAnimation = keyframes`
|
||||
0% { opacity: 0; }
|
||||
100% { opacity: 1;}
|
||||
`;
|
||||
|
||||
export const Container = styled(Card)`
|
||||
export const Container = styled(Card)<{
|
||||
$isActiveLog: boolean;
|
||||
}>`
|
||||
width: 100% !important;
|
||||
margin-bottom: 0.3rem;
|
||||
.ant-card-body {
|
||||
padding: 0.3rem 0.6rem;
|
||||
}
|
||||
animation-name: ${fadeInAnimation};
|
||||
animation-duration: 0.2s;
|
||||
animation-timing-function: ease-in;
|
||||
|
||||
${({ $isActiveLog }): string => getActiveLogBackground($isActiveLog)}
|
||||
`;
|
||||
|
||||
export const Text = styled(Typography.Text)`
|
||||
|
||||
@@ -1,16 +1,32 @@
|
||||
import { ExpandAltOutlined } from '@ant-design/icons';
|
||||
// const Convert = require('ansi-to-html');
|
||||
import {
|
||||
ExpandAltOutlined,
|
||||
LinkOutlined,
|
||||
MonitorOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import Convert from 'ansi-to-html';
|
||||
import { Button, DrawerProps, Tooltip } from 'antd';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
import LogsExplorerContext from 'container/LogsExplorerContext';
|
||||
import dayjs from 'dayjs';
|
||||
import dompurify from 'dompurify';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||
// hooks
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import {
|
||||
KeyboardEvent,
|
||||
MouseEvent,
|
||||
MouseEventHandler,
|
||||
useCallback,
|
||||
useMemo,
|
||||
useState,
|
||||
} from 'react';
|
||||
// interfaces
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
// styles
|
||||
import {
|
||||
ActionButtonsWrapper,
|
||||
ExpandIconWrapper,
|
||||
RawLogContent,
|
||||
RawLogViewContainer,
|
||||
@@ -19,24 +35,81 @@ import {
|
||||
const convert = new Convert();
|
||||
|
||||
interface RawLogViewProps {
|
||||
isActiveLog?: boolean;
|
||||
isReadOnly?: boolean;
|
||||
data: ILog;
|
||||
linesPerRow: number;
|
||||
onClickExpand: (log: ILog) => void;
|
||||
}
|
||||
|
||||
function RawLogView(props: RawLogViewProps): JSX.Element {
|
||||
const { data, linesPerRow, onClickExpand } = props;
|
||||
const { isActiveLog = false, isReadOnly = false, data, linesPerRow } = props;
|
||||
|
||||
const { isHighlighted, isLogsExplorerPage, onLogCopy } = useCopyLogLink(
|
||||
data.id,
|
||||
);
|
||||
const {
|
||||
activeLog: activeContextLog,
|
||||
onSetActiveLog: handleSetActiveContextLog,
|
||||
onClearActiveLog: handleClearActiveContextLog,
|
||||
} = useActiveLog();
|
||||
const {
|
||||
activeLog,
|
||||
onSetActiveLog,
|
||||
onClearActiveLog,
|
||||
onAddToQuery,
|
||||
} = useActiveLog();
|
||||
|
||||
const [hasActionButtons, setHasActionButtons] = useState<boolean>(false);
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;
|
||||
|
||||
const text = useMemo(
|
||||
() => `${dayjs(data.timestamp / 1e6).format()} | ${data.body}`,
|
||||
() =>
|
||||
typeof data.timestamp === 'string'
|
||||
? `${dayjs(data.timestamp).format()} | ${data.body}`
|
||||
: `${dayjs(data.timestamp / 1e6).format()} | ${data.body}`,
|
||||
[data.timestamp, data.body],
|
||||
);
|
||||
|
||||
const handleClickExpand = useCallback(() => {
|
||||
onClickExpand(data);
|
||||
}, [onClickExpand, data]);
|
||||
if (activeContextLog || isReadOnly) return;
|
||||
|
||||
onSetActiveLog(data);
|
||||
}, [activeContextLog, isReadOnly, data, onSetActiveLog]);
|
||||
|
||||
const handleCloseLogDetail: DrawerProps['onClose'] = useCallback(
|
||||
(
|
||||
event: MouseEvent<Element, globalThis.MouseEvent> | KeyboardEvent<Element>,
|
||||
) => {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
|
||||
onClearActiveLog();
|
||||
},
|
||||
[onClearActiveLog],
|
||||
);
|
||||
|
||||
const handleMouseEnter = useCallback(() => {
|
||||
if (isReadOnlyLog) return;
|
||||
|
||||
setHasActionButtons(true);
|
||||
}, [isReadOnlyLog]);
|
||||
|
||||
const handleMouseLeave = useCallback(() => {
|
||||
if (isReadOnlyLog) return;
|
||||
|
||||
setHasActionButtons(false);
|
||||
}, [isReadOnlyLog]);
|
||||
|
||||
const handleShowContext: MouseEventHandler<HTMLElement> = useCallback(
|
||||
(event) => {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
handleSetActiveContextLog(data);
|
||||
},
|
||||
[data, handleSetActiveContextLog],
|
||||
);
|
||||
|
||||
const html = useMemo(
|
||||
() => ({
|
||||
@@ -45,19 +118,69 @@ function RawLogView(props: RawLogViewProps): JSX.Element {
|
||||
[text],
|
||||
);
|
||||
|
||||
const mouseActions = useMemo(
|
||||
() => ({ onMouseEnter: handleMouseEnter, onMouseLeave: handleMouseLeave }),
|
||||
[handleMouseEnter, handleMouseLeave],
|
||||
);
|
||||
|
||||
return (
|
||||
<RawLogViewContainer
|
||||
onClick={handleClickExpand}
|
||||
wrap={false}
|
||||
align="middle"
|
||||
$isDarkMode={isDarkMode}
|
||||
$isReadOnly={isReadOnly}
|
||||
$isActiveLog={isHighlighted}
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
{...mouseActions}
|
||||
>
|
||||
<ExpandIconWrapper flex="30px">
|
||||
<ExpandAltOutlined />
|
||||
</ExpandIconWrapper>
|
||||
<RawLogContent linesPerRow={linesPerRow} dangerouslySetInnerHTML={html} />
|
||||
{!isReadOnly && (
|
||||
<ExpandIconWrapper flex="30px">
|
||||
<ExpandAltOutlined />
|
||||
</ExpandIconWrapper>
|
||||
)}
|
||||
|
||||
<RawLogContent
|
||||
$isReadOnly={isReadOnly}
|
||||
$isActiveLog={isActiveLog}
|
||||
linesPerRow={linesPerRow}
|
||||
dangerouslySetInnerHTML={html}
|
||||
/>
|
||||
|
||||
{hasActionButtons && (
|
||||
<ActionButtonsWrapper>
|
||||
<Tooltip title="Show Context">
|
||||
<Button
|
||||
size="small"
|
||||
icon={<MonitorOutlined />}
|
||||
onClick={handleShowContext}
|
||||
/>
|
||||
</Tooltip>
|
||||
<Tooltip title="Copy Link">
|
||||
<Button size="small" icon={<LinkOutlined />} onClick={onLogCopy} />
|
||||
</Tooltip>
|
||||
</ActionButtonsWrapper>
|
||||
)}
|
||||
|
||||
{activeContextLog && (
|
||||
<LogsExplorerContext
|
||||
log={activeContextLog}
|
||||
onClose={handleClearActiveContextLog}
|
||||
/>
|
||||
)}
|
||||
<LogDetail
|
||||
log={activeLog}
|
||||
onClose={handleCloseLogDetail}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
/>
|
||||
</RawLogViewContainer>
|
||||
);
|
||||
}
|
||||
|
||||
RawLogView.defaultProps = {
|
||||
isActiveLog: false,
|
||||
isReadOnly: false,
|
||||
};
|
||||
|
||||
export default RawLogView;
|
||||
|
||||
@@ -1,8 +1,14 @@
|
||||
import { blue } from '@ant-design/colors';
|
||||
import { Col, Row } from 'antd';
|
||||
import { Col, Row, Space } from 'antd';
|
||||
import styled from 'styled-components';
|
||||
import { getActiveLogBackground, getDefaultLogBackground } from 'utils/logs';
|
||||
|
||||
export const RawLogViewContainer = styled(Row)<{ $isDarkMode: boolean }>`
|
||||
export const RawLogViewContainer = styled(Row)<{
|
||||
$isDarkMode: boolean;
|
||||
$isReadOnly: boolean;
|
||||
$isActiveLog: boolean;
|
||||
}>`
|
||||
position: relative;
|
||||
width: 100%;
|
||||
font-weight: 700;
|
||||
font-size: 0.625rem;
|
||||
@@ -10,10 +16,12 @@ export const RawLogViewContainer = styled(Row)<{ $isDarkMode: boolean }>`
|
||||
|
||||
transition: background-color 0.2s ease-in;
|
||||
|
||||
&:hover {
|
||||
background-color: ${({ $isDarkMode }): string =>
|
||||
$isDarkMode ? 'rgba(255,255,255,0.1)' : 'rgba(0, 0, 0, 0.1)'};
|
||||
}
|
||||
${({ $isActiveLog }): string => getActiveLogBackground($isActiveLog)}
|
||||
|
||||
${({ $isReadOnly, $isDarkMode, $isActiveLog }): string =>
|
||||
$isActiveLog
|
||||
? getActiveLogBackground()
|
||||
: getDefaultLogBackground($isReadOnly, $isDarkMode)}
|
||||
`;
|
||||
|
||||
export const ExpandIconWrapper = styled(Col)`
|
||||
@@ -25,6 +33,8 @@ export const ExpandIconWrapper = styled(Col)`
|
||||
|
||||
interface RawLogContentProps {
|
||||
linesPerRow: number;
|
||||
$isReadOnly: boolean;
|
||||
$isActiveLog: boolean;
|
||||
}
|
||||
|
||||
export const RawLogContent = styled.div<RawLogContentProps>`
|
||||
@@ -42,5 +52,17 @@ export const RawLogContent = styled.div<RawLogContentProps>`
|
||||
font-size: 1rem;
|
||||
line-height: 2rem;
|
||||
|
||||
cursor: ${(props): string =>
|
||||
props.$isActiveLog || props.$isReadOnly ? 'initial' : 'pointer'};
|
||||
|
||||
${(props): string =>
|
||||
props.$isReadOnly && !props.$isActiveLog ? 'padding: 0 1.5rem;' : ''}
|
||||
`;
|
||||
|
||||
export const ActionButtonsWrapper = styled(Space)`
|
||||
position: absolute;
|
||||
transform: translate(-50%, -50%);
|
||||
top: 50%;
|
||||
right: 0;
|
||||
cursor: pointer;
|
||||
`;
|
||||
|
||||
@@ -1,121 +1,18 @@
|
||||
import { ExpandAltOutlined } from '@ant-design/icons';
|
||||
import Convert from 'ansi-to-html';
|
||||
import { Table, Typography } from 'antd';
|
||||
import { ColumnsType, ColumnType } from 'antd/es/table';
|
||||
import dayjs from 'dayjs';
|
||||
import dompurify from 'dompurify';
|
||||
// utils
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
import { useMemo } from 'react';
|
||||
import { IField } from 'types/api/logs/fields';
|
||||
// interfaces
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
import { Table } from 'antd';
|
||||
|
||||
// styles
|
||||
import { ExpandIconWrapper } from '../RawLogView/styles';
|
||||
// config
|
||||
import { defaultCellStyle, defaultTableStyle, tableScroll } from './config';
|
||||
import { TableBodyContent } from './styles';
|
||||
|
||||
type ColumnTypeRender<T = unknown> = ReturnType<
|
||||
NonNullable<ColumnType<T>['render']>
|
||||
>;
|
||||
|
||||
type LogsTableViewProps = {
|
||||
logs: ILog[];
|
||||
fields: IField[];
|
||||
linesPerRow: number;
|
||||
onClickExpand: (log: ILog) => void;
|
||||
};
|
||||
|
||||
const convert = new Convert();
|
||||
import { tableScroll } from './config';
|
||||
import { LogsTableViewProps } from './types';
|
||||
import { useTableView } from './useTableView';
|
||||
|
||||
function LogsTableView(props: LogsTableViewProps): JSX.Element {
|
||||
const { logs, fields, linesPerRow, onClickExpand } = props;
|
||||
|
||||
const flattenLogData = useMemo(() => logs.map((log) => FlatLogData(log)), [
|
||||
logs,
|
||||
]);
|
||||
|
||||
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
|
||||
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
|
||||
.filter((e) => e.name !== 'id')
|
||||
.map(({ name }) => ({
|
||||
title: name,
|
||||
dataIndex: name,
|
||||
key: name,
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: defaultCellStyle,
|
||||
},
|
||||
children: (
|
||||
<Typography.Paragraph ellipsis={{ rows: linesPerRow }}>
|
||||
{field}
|
||||
</Typography.Paragraph>
|
||||
),
|
||||
}),
|
||||
}));
|
||||
|
||||
return [
|
||||
{
|
||||
title: '',
|
||||
dataIndex: 'id',
|
||||
key: 'expand',
|
||||
// https://github.com/ant-design/ant-design/discussions/36886
|
||||
render: (_, item): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: defaultCellStyle,
|
||||
},
|
||||
children: (
|
||||
<ExpandIconWrapper
|
||||
onClick={(): void => {
|
||||
onClickExpand((item as unknown) as ILog);
|
||||
}}
|
||||
>
|
||||
<ExpandAltOutlined />
|
||||
</ExpandIconWrapper>
|
||||
),
|
||||
}),
|
||||
},
|
||||
{
|
||||
title: 'timestamp',
|
||||
dataIndex: 'timestamp',
|
||||
key: 'timestamp',
|
||||
// https://github.com/ant-design/ant-design/discussions/36886
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => {
|
||||
const date = dayjs(field / 1e6).format();
|
||||
return {
|
||||
children: <Typography.Paragraph ellipsis>{date}</Typography.Paragraph>,
|
||||
};
|
||||
},
|
||||
},
|
||||
...fieldColumns,
|
||||
{
|
||||
title: 'body',
|
||||
dataIndex: 'body',
|
||||
key: 'body',
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: defaultTableStyle,
|
||||
},
|
||||
children: (
|
||||
<TableBodyContent
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: convert.toHtml(dompurify.sanitize(field)),
|
||||
}}
|
||||
linesPerRow={linesPerRow}
|
||||
/>
|
||||
),
|
||||
}),
|
||||
},
|
||||
];
|
||||
}, [fields, linesPerRow, onClickExpand]);
|
||||
const { dataSource, columns } = useTableView(props);
|
||||
|
||||
return (
|
||||
<Table
|
||||
size="small"
|
||||
columns={columns}
|
||||
dataSource={flattenLogData}
|
||||
dataSource={dataSource}
|
||||
pagination={false}
|
||||
rowKey="id"
|
||||
bordered
|
||||
|
||||
31 frontend/src/components/Logs/TableView/types.ts Normal file
@@ -0,0 +1,31 @@
|
||||
import { ColumnsType, ColumnType } from 'antd/es/table';
|
||||
import { IField } from 'types/api/logs/fields';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
export type ColumnTypeRender<T = unknown> = ReturnType<
|
||||
NonNullable<ColumnType<T>['render']>
|
||||
>;
|
||||
|
||||
export type LogsTableViewProps = {
|
||||
logs: ILog[];
|
||||
fields: IField[];
|
||||
linesPerRow: number;
|
||||
onClickExpand?: (log: ILog) => void;
|
||||
};
|
||||
|
||||
export type UseTableViewResult = {
|
||||
columns: ColumnsType<Record<string, unknown>>;
|
||||
dataSource: Record<string, string>[];
|
||||
};
|
||||
|
||||
export type UseTableViewProps = {
|
||||
appendTo?: 'center' | 'end';
|
||||
onOpenLogsContext?: (log: ILog) => void;
|
||||
onClickExpand?: (log: ILog) => void;
|
||||
} & LogsTableViewProps;
|
||||
|
||||
export type ActionsColumnProps = {
|
||||
logId: string;
|
||||
logs: ILog[];
|
||||
onOpenLogsContext?: (log: ILog) => void;
|
||||
};
|
||||
186 frontend/src/components/Logs/TableView/useTableView.tsx Normal file
@@ -0,0 +1,186 @@
|
||||
import {
|
||||
ExpandAltOutlined,
|
||||
LinkOutlined,
|
||||
MonitorOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import Convert from 'ansi-to-html';
|
||||
import { Button, Space, Typography } from 'antd';
|
||||
import { ColumnsType } from 'antd/es/table';
|
||||
import dayjs from 'dayjs';
|
||||
import dompurify from 'dompurify';
|
||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
|
||||
import { ExpandIconWrapper } from '../RawLogView/styles';
|
||||
import { defaultCellStyle, defaultTableStyle } from './config';
|
||||
import { TableBodyContent } from './styles';
|
||||
import {
|
||||
ActionsColumnProps,
|
||||
ColumnTypeRender,
|
||||
UseTableViewProps,
|
||||
UseTableViewResult,
|
||||
} from './types';
|
||||
|
||||
const convert = new Convert();
|
||||
|
||||
function ActionsColumn({
|
||||
logId,
|
||||
logs,
|
||||
onOpenLogsContext,
|
||||
}: ActionsColumnProps): JSX.Element {
|
||||
const currentLog = useMemo(() => logs.find(({ id }) => id === logId), [
|
||||
logs,
|
||||
logId,
|
||||
]);
|
||||
|
||||
const { onLogCopy } = useCopyLogLink(currentLog?.id);
|
||||
|
||||
const handleShowContext = useCallback(() => {
|
||||
if (!onOpenLogsContext || !currentLog) return;
|
||||
|
||||
onOpenLogsContext(currentLog);
|
||||
}, [currentLog, onOpenLogsContext]);
|
||||
|
||||
return (
|
||||
<Space>
|
||||
<Button
|
||||
size="small"
|
||||
onClick={handleShowContext}
|
||||
icon={<MonitorOutlined />}
|
||||
/>
|
||||
<Button size="small" onClick={onLogCopy} icon={<LinkOutlined />} />
|
||||
</Space>
|
||||
);
|
||||
}
|
||||
|
||||
export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
||||
const {
|
||||
logs,
|
||||
fields,
|
||||
linesPerRow,
|
||||
appendTo = 'center',
|
||||
onOpenLogsContext,
|
||||
onClickExpand,
|
||||
} = props;
|
||||
const { isLogsExplorerPage } = useCopyLogLink();
|
||||
|
||||
const flattenLogData = useMemo(() => logs.map((log) => FlatLogData(log)), [
|
||||
logs,
|
||||
]);
|
||||
|
||||
const handleClickExpand = useCallback(
|
||||
(index: number): void => {
|
||||
if (!onClickExpand) return;
|
||||
|
||||
onClickExpand(logs[index]);
|
||||
},
|
||||
[logs, onClickExpand],
|
||||
);
|
||||
|
||||
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
|
||||
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
|
||||
.filter((e) => e.name !== 'id')
|
||||
.map(({ name }) => ({
|
||||
title: name,
|
||||
dataIndex: name,
|
||||
key: name,
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: defaultCellStyle,
|
||||
},
|
||||
children: (
|
||||
<Typography.Paragraph ellipsis={{ rows: linesPerRow }}>
|
||||
{field}
|
||||
</Typography.Paragraph>
|
||||
),
|
||||
}),
|
||||
}));
|
||||
|
||||
return [
|
||||
{
|
||||
title: '',
|
||||
dataIndex: 'id',
|
||||
key: 'expand',
|
||||
// https://github.com/ant-design/ant-design/discussions/36886
|
||||
render: (_, item, index): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: defaultCellStyle,
|
||||
},
|
||||
children: (
|
||||
<ExpandIconWrapper
|
||||
onClick={(): void => {
|
||||
handleClickExpand(index);
|
||||
}}
|
||||
>
|
||||
<ExpandAltOutlined />
|
||||
</ExpandIconWrapper>
|
||||
),
|
||||
}),
|
||||
},
|
||||
{
|
||||
title: 'timestamp',
|
||||
dataIndex: 'timestamp',
|
||||
key: 'timestamp',
|
||||
// https://github.com/ant-design/ant-design/discussions/36886
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => {
|
||||
const date =
|
||||
typeof field === 'string'
|
||||
? dayjs(field).format()
|
||||
: dayjs(field / 1e6).format();
|
||||
return {
|
||||
children: <Typography.Paragraph ellipsis>{date}</Typography.Paragraph>,
|
||||
};
|
||||
},
|
||||
},
|
||||
...(appendTo === 'center' ? fieldColumns : []),
|
||||
{
|
||||
title: 'body',
|
||||
dataIndex: 'body',
|
||||
key: 'body',
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: defaultTableStyle,
|
||||
},
|
||||
children: (
|
||||
<TableBodyContent
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: convert.toHtml(dompurify.sanitize(field)),
|
||||
}}
|
||||
linesPerRow={linesPerRow}
|
||||
/>
|
||||
),
|
||||
}),
|
||||
},
|
||||
...(appendTo === 'end' ? fieldColumns : []),
|
||||
...(isLogsExplorerPage
|
||||
? ([
|
||||
{
|
||||
title: 'actions',
|
||||
dataIndex: 'actions',
|
||||
key: 'actions',
|
||||
render: (_, log): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
children: (
|
||||
<ActionsColumn
|
||||
logId={(log.id as unknown) as string}
|
||||
logs={logs}
|
||||
onOpenLogsContext={onOpenLogsContext}
|
||||
/>
|
||||
),
|
||||
}),
|
||||
},
|
||||
] as ColumnsType<Record<string, unknown>>)
|
||||
: []),
|
||||
];
|
||||
}, [
|
||||
logs,
|
||||
fields,
|
||||
appendTo,
|
||||
linesPerRow,
|
||||
isLogsExplorerPage,
|
||||
handleClickExpand,
|
||||
onOpenLogsContext,
|
||||
]);
|
||||
|
||||
return { columns, dataSource: flattenLogData };
|
||||
};
|
||||
@@ -1,14 +1,28 @@
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
|
||||
import { Table } from 'antd';
|
||||
import type { TableProps } from 'antd/es/table';
|
||||
import { ColumnsType } from 'antd/lib/table';
|
||||
import { SyntheticEvent, useCallback, useMemo, useState } from 'react';
|
||||
import { dragColumnParams } from 'hooks/useDragColumns/configs';
|
||||
import {
|
||||
SyntheticEvent,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useState,
|
||||
} from 'react';
|
||||
import ReactDragListView from 'react-drag-listview';
|
||||
import { ResizeCallbackData } from 'react-resizable';
|
||||
|
||||
import ResizableHeader from './ResizableHeader';
|
||||
import { DragSpanStyle } from './styles';
|
||||
import { ResizeTableProps } from './types';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function ResizeTable({ columns, ...restprops }: TableProps<any>): JSX.Element {
|
||||
const [columnsData, setColumns] = useState<ColumnsType>(columns || []);
|
||||
function ResizeTable({
|
||||
columns,
|
||||
onDragColumn,
|
||||
...restProps
|
||||
}: ResizeTableProps): JSX.Element {
|
||||
const [columnsData, setColumns] = useState<ColumnsType>([]);
|
||||
|
||||
const handleResize = useCallback(
|
||||
(index: number) => (
|
||||
@@ -25,27 +39,51 @@ function ResizeTable({ columns, ...restprops }: TableProps<any>): JSX.Element {
|
||||
[columnsData],
|
||||
);
|
||||
|
||||
const mergeColumns = useMemo(
|
||||
const mergedColumns = useMemo(
|
||||
() =>
|
||||
columnsData.map((col, index) => ({
|
||||
...col,
|
||||
...(onDragColumn && {
|
||||
title: (
|
||||
<DragSpanStyle className="dragHandler">
|
||||
{col?.title?.toString() || ''}
|
||||
</DragSpanStyle>
|
||||
),
|
||||
}),
|
||||
onHeaderCell: (column: ColumnsType<unknown>[number]): unknown => ({
|
||||
width: column.width,
|
||||
onResize: handleResize(index),
|
||||
}),
|
||||
})),
|
||||
[columnsData, handleResize],
|
||||
})) as ColumnsType<any>,
|
||||
[columnsData, onDragColumn, handleResize],
|
||||
);
|
||||
|
||||
return (
|
||||
<Table
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
{...restprops}
|
||||
components={{ header: { cell: ResizableHeader } }}
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
columns={mergeColumns as ColumnsType<any>}
|
||||
/>
|
||||
const tableParams = useMemo(
|
||||
() => ({
|
||||
...restProps,
|
||||
components: { header: { cell: ResizableHeader } },
|
||||
columns: mergedColumns,
|
||||
}),
|
||||
[mergedColumns, restProps],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (columns) {
|
||||
setColumns(columns);
|
||||
}
|
||||
}, [columns]);
|
||||
|
||||
return onDragColumn ? (
|
||||
<ReactDragListView.DragColumn {...dragColumnParams} onDragEnd={onDragColumn}>
|
||||
<Table {...tableParams} />
|
||||
</ReactDragListView.DragColumn>
|
||||
) : (
|
||||
<Table {...tableParams} />
|
||||
);
|
||||
}
|
||||
|
||||
ResizeTable.defaultProps = {
|
||||
onDragColumn: undefined,
|
||||
};
|
||||
|
||||
export default ResizeTable;
|
||||
|
||||
@@ -2,10 +2,16 @@ import styled from 'styled-components';
|
||||
|
||||
export const SpanStyle = styled.span`
|
||||
position: absolute;
|
||||
right: -5px;
|
||||
right: -0.313rem;
|
||||
bottom: 0;
|
||||
z-index: 1;
|
||||
width: 10px;
|
||||
width: 0.625rem;
|
||||
height: 100%;
|
||||
cursor: col-resize;
|
||||
`;
|
||||
|
||||
export const DragSpanStyle = styled.span`
|
||||
display: flex;
|
||||
margin: -1rem;
|
||||
padding: 1rem;
|
||||
`;
|
||||
|
||||
6 frontend/src/components/ResizeTable/types.ts Normal file
@@ -0,0 +1,6 @@
|
||||
import { TableProps } from 'antd';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export interface ResizeTableProps extends TableProps<any> {
|
||||
onDragColumn?: (fromIndex: number, toIndex: number) => void;
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import { createMemoryHistory } from 'history';
|
||||
import { Router } from 'react-router-dom';
|
||||
|
||||
import RouteTab from './index';
|
||||
import { RouteTabProps } from './types';
|
||||
|
||||
function DummyComponent1(): JSX.Element {
|
||||
return <div>Dummy Component 1</div>;
|
||||
@@ -11,16 +12,18 @@ function DummyComponent2(): JSX.Element {
|
||||
return <div>Dummy Component 2</div>;
|
||||
}
|
||||
|
||||
const testRoutes = [
|
||||
const testRoutes: RouteTabProps['routes'] = [
|
||||
{
|
||||
name: 'Tab1',
|
||||
route: '/tab1',
|
||||
Component: DummyComponent1,
|
||||
key: 'Tab1',
|
||||
},
|
||||
{
|
||||
name: 'Tab2',
|
||||
route: '/tab2',
|
||||
Component: DummyComponent2,
|
||||
key: 'Tab2',
|
||||
},
|
||||
];
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { Tabs, TabsProps } from 'antd';
|
||||
import { History } from 'history';
|
||||
|
||||
import { RouteTabProps } from './types';
|
||||
|
||||
function RouteTab({
|
||||
routes,
|
||||
@@ -13,16 +14,16 @@ function RouteTab({
|
||||
onChangeHandler();
|
||||
}
|
||||
|
||||
const selectedRoute = routes.find((e) => e.name === activeRoute);
|
||||
const selectedRoute = routes.find((e) => e.key === activeRoute);
|
||||
|
||||
if (selectedRoute) {
|
||||
history.push(selectedRoute.route);
|
||||
}
|
||||
};
|
||||
|
||||
const items = routes.map(({ Component, name, route }) => ({
|
||||
const items = routes.map(({ Component, name, route, key }) => ({
|
||||
label: name,
|
||||
key: name,
|
||||
key,
|
||||
tabKey: route,
|
||||
children: <Component />,
|
||||
}));
|
||||
@@ -32,6 +33,7 @@ function RouteTab({
|
||||
onChange={onChange}
|
||||
destroyInactiveTabPane
|
||||
activeKey={activeKey}
|
||||
defaultActiveKey={activeKey}
|
||||
animated
|
||||
items={items}
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
@@ -40,17 +42,6 @@ function RouteTab({
|
||||
);
|
||||
}
|
||||
|
||||
interface RouteTabProps {
|
||||
routes: {
|
||||
name: string;
|
||||
route: string;
|
||||
Component: () => JSX.Element;
|
||||
}[];
|
||||
activeKey: TabsProps['activeKey'];
|
||||
onChangeHandler?: VoidFunction;
|
||||
history: History<unknown>;
|
||||
}
|
||||
|
||||
RouteTab.defaultProps = {
|
||||
onChangeHandler: undefined,
|
||||
};
|
||||
|
||||
14 frontend/src/components/RouteTab/types.ts Normal file
@@ -0,0 +1,14 @@
|
||||
import { TabsProps } from 'antd';
|
||||
import { History } from 'history';
|
||||
|
||||
export interface RouteTabProps {
|
||||
routes: {
|
||||
name: React.ReactNode;
|
||||
route: string;
|
||||
Component: () => JSX.Element;
|
||||
key: string;
|
||||
}[];
|
||||
activeKey: TabsProps['activeKey'];
|
||||
onChangeHandler?: VoidFunction;
|
||||
history: History<unknown>;
|
||||
}
|
||||
5 frontend/src/components/TabLabel/TabLabel.interfaces.ts Normal file
@@ -0,0 +1,5 @@
|
||||
export type TabLabelProps = {
|
||||
isDisabled: boolean;
|
||||
label: string;
|
||||
tooltipText?: string;
|
||||
};
|
||||
29 frontend/src/components/TabLabel/index.tsx Normal file
@@ -0,0 +1,29 @@
|
||||
import { Tooltip } from 'antd';
|
||||
import { memo } from 'react';
|
||||
|
||||
import { TabLabelProps } from './TabLabel.interfaces';
|
||||
|
||||
function TabLabel({
|
||||
label,
|
||||
isDisabled,
|
||||
tooltipText,
|
||||
}: TabLabelProps): JSX.Element {
|
||||
const currentLabel = <span>{label}</span>;
|
||||
|
||||
if (isDisabled) {
|
||||
return (
|
||||
<Tooltip
|
||||
trigger="hover"
|
||||
autoAdjustOverflow
|
||||
placement="top"
|
||||
title={tooltipText}
|
||||
>
|
||||
{currentLabel}
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
|
||||
return currentLabel;
|
||||
}
|
||||
|
||||
export default memo(TabLabel);
|
||||
22 frontend/src/components/TableRenderer/utils.ts Normal file
@@ -0,0 +1,22 @@
|
||||
import { ColumnType } from 'antd/es/table';
|
||||
import { ColumnsType } from 'antd/lib/table';
|
||||
|
||||
export const generatorResizeTableColumns = <T>({
|
||||
baseColumnOptions,
|
||||
dynamicColumnOption,
|
||||
}: GeneratorResizeTableColumnsProp<T>): ColumnsType<T> =>
|
||||
baseColumnOptions.map((config: ColumnType<T>) => {
|
||||
const { key } = config;
|
||||
const extraConfig = dynamicColumnOption.find(
|
||||
(dynamicConfigItem) => dynamicConfigItem.key === key,
|
||||
);
|
||||
return {
|
||||
...config,
|
||||
...extraConfig?.columnOption,
|
||||
};
|
||||
});
|
||||
|
||||
interface GeneratorResizeTableColumnsProp<T> {
|
||||
baseColumnOptions: ColumnsType<T>;
|
||||
dynamicColumnOption: { key: string; columnOption: ColumnType<T> }[];
|
||||
}
|
||||
@@ -1,5 +1,8 @@
|
||||
import { grey } from '@ant-design/colors';
|
||||
import { QuestionCircleFilled } from '@ant-design/icons';
|
||||
import { blue, grey } from '@ant-design/colors';
|
||||
import {
|
||||
QuestionCircleFilled,
|
||||
QuestionCircleOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import { Tooltip } from 'antd';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
@@ -7,7 +10,12 @@ import { useMemo } from 'react';
|
||||
|
||||
import { style } from './styles';
|
||||
|
||||
function TextToolTip({ text, url }: TextToolTipProps): JSX.Element {
|
||||
function TextToolTip({
|
||||
text,
|
||||
url,
|
||||
useFilledIcon = true,
|
||||
urlText,
|
||||
}: TextToolTipProps): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const overlay = useMemo(
|
||||
@@ -16,12 +24,12 @@ function TextToolTip({ text, url }: TextToolTipProps): JSX.Element {
|
||||
{`${text} `}
|
||||
{url && (
|
||||
<a href={url} rel="noopener noreferrer" target="_blank">
|
||||
here
|
||||
{urlText || 'here'}
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
[text, url],
|
||||
[text, url, urlText],
|
||||
);
|
||||
|
||||
const iconStyle = useMemo(
|
||||
@@ -32,19 +40,35 @@ function TextToolTip({ text, url }: TextToolTipProps): JSX.Element {
|
||||
[isDarkMode],
|
||||
);
|
||||
|
||||
const iconOutlinedStyle = useMemo(
|
||||
() => ({
|
||||
...style,
|
||||
color: isDarkMode ? themeColors.navyBlue : blue[0],
|
||||
}),
|
||||
[isDarkMode],
|
||||
);
|
||||
|
||||
return (
|
||||
<Tooltip overlay={overlay}>
|
||||
<QuestionCircleFilled style={iconStyle} />
|
||||
{useFilledIcon ? (
|
||||
<QuestionCircleFilled style={iconStyle} />
|
||||
) : (
|
||||
<QuestionCircleOutlined style={iconOutlinedStyle} />
|
||||
)}
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
|
||||
TextToolTip.defaultProps = {
|
||||
url: '',
|
||||
urlText: '',
|
||||
useFilledIcon: true,
|
||||
};
|
||||
interface TextToolTipProps {
|
||||
url?: string;
|
||||
text: string;
|
||||
useFilledIcon?: boolean;
|
||||
urlText?: string;
|
||||
}
|
||||
|
||||
export default TextToolTip;
|
||||
|
||||
@@ -1 +1 @@
|
||||
export const style = { fontSize: '1.3125rem' };
|
||||
export const style = { fontSize: '1rem' };
|
||||
|
||||
31 frontend/src/components/Upgrade/UpgradePrompt.tsx Normal file
@@ -0,0 +1,31 @@
|
||||
import { Alert, Space } from 'antd';
|
||||
import { SIGNOZ_UPGRADE_PLAN_URL } from 'constants/app';
|
||||
|
||||
type UpgradePromptProps = {
|
||||
title?: string;
|
||||
};
|
||||
|
||||
function UpgradePrompt({ title }: UpgradePromptProps): JSX.Element {
|
||||
return (
|
||||
<Space direction="vertical" style={{ width: '100%' }}>
|
||||
<Alert
|
||||
message={title}
|
||||
description={
|
||||
<div>
|
||||
This feature is available for paid plans only.{' '}
|
||||
<a href={SIGNOZ_UPGRADE_PLAN_URL} target="_blank" rel="noreferrer">
|
||||
Click here
|
||||
</a>{' '}
|
||||
to Upgrade
|
||||
</div>
|
||||
}
|
||||
type="warning"
|
||||
/>{' '}
|
||||
</Space>
|
||||
);
|
||||
}
|
||||
|
||||
UpgradePrompt.defaultProps = {
|
||||
title: 'Upgrade to a Paid Plan',
|
||||
};
|
||||
export default UpgradePrompt;
|
||||
5 frontend/src/constants/apDex.ts Normal file
@@ -0,0 +1,5 @@
|
||||
export const apDexToolTipText =
|
||||
"Apdex is a way to measure your users' satisfaction with the response time of your web service. It's represented as a score from 0-1.";
|
||||
export const apDexToolTipUrl =
|
||||
'https://signoz.io/docs/userguide/metrics/#apdex?utm_source=product&utm_medium=frontend&utm_campaign=apdex';
|
||||
export const apDexToolTipUrlText = 'Learn more about Apdex.';
|
||||
@@ -1,3 +1,5 @@
|
||||
const SOMETHING_WENT_WRONG = 'Something went wrong';
|
||||
|
||||
const getVersion = 'version';
|
||||
|
||||
export { getVersion };
|
||||
export { getVersion, SOMETHING_WENT_WRONG };
|
||||
|
||||
4 frontend/src/constants/events.ts Normal file
@@ -0,0 +1,4 @@
|
||||
export enum Events {
|
||||
UPDATE_GRAPH_VISIBILITY_STATE = 'UPDATE_GRAPH_VISIBILITY_STATE',
|
||||
UPDATE_GRAPH_MANAGER_TABLE = 'UPDATE_GRAPH_MANAGER_TABLE',
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.