Compare commits

190 Commits (only the SHA1 column survived in this capture)

| SHA1 |
|---|
| e7e0f5b96a |
| a26ebb742a |
| 9d1305f174 |
| ab514cc0f2 |
| 1f44f089e0 |
| 174fc107c2 |
| 06a55ccdd6 |
| a3731e4c4e |
| e183cace75 |
| 23490ca7f8 |
| 9f71e732c7 |
| 23d6287594 |
| 2624ce4007 |
| 3d5134b43c |
| c657f96032 |
| c18fff6ae8 |
| 28142764af |
| 2fa265ff2e |
| dca0b11acd |
| bad80def90 |
| 8965b9b503 |
| 05076968c9 |
| 7c8afc2e1c |
| c8a1a8600e |
| 45cb1eb38f |
| 8ebb76bd0c |
| 309ffa4989 |
| d787298600 |
| 7998d474e2 |
| 7b8ff5a285 |
| cb22aef36f |
| cf93712286 |
| a906f94b8a |
| 93b6749920 |
| 8ab527b174 |
| ad163c2b61 |
| 21f909f4c0 |
| b67206dd65 |
| ce5afd31fd |
| 9a184f5740 |
| be14f1c32c |
| ae37a608f8 |
| aaeb579d0d |
| 1151e8521e |
| d779b83715 |
| 47a41473df |
| de370d7f0c |
| 8a5b26cefe |
| 2a20b6fc86 |
| 02ef1744b4 |
| 2c973adf0b |
| f7ff491d35 |
| 6cd341a887 |
| 0832bce955 |
| 62b2462e03 |
| 152846f554 |
| 846da08cbd |
| 17f32e9765 |
| 48659a2957 |
| a2a8a32d1c |
| 28f2ee2627 |
| 3b01bb2614 |
| 622e1765cf |
| faaf0a6e73 |
| 4542a51531 |
| 191a538430 |
| e6ce80213b |
| 3115b32dcd |
| af272a368b |
| b336a6cb45 |
| b72815ca2f |
| ed4a01dea6 |
| 1914c3b4a0 |
| 3811e96e23 |
| 8d16493432 |
| db2bfbb887 |
| 213838a021 |
| fd6f9a90e1 |
| 13f9922c53 |
| a654baaa5b |
| f766435acc |
| d7a65ba689 |
| 05ce03e67d |
| ba6818f487 |
| ca53136cbf |
| c46bef321c |
| ba8f804b26 |
| 6cc7025e37 |
| e62e541fc4 |
| 2f1ca93eda |
| f1c7d72fc5 |
| a405307c96 |
| c85d48d7fa |
| 75470f6bb9 |
| f75e688b32 |
| 5f3ca045df |
| 186632af69 |
| fa652be926 |
| 1e39131c38 |
| 153e859ac3 |
| d1cc29e118 |
| 972bf94dd0 |
| 3632208d45 |
| cd9768c738 |
| f01b9605db |
| eec236af50 |
| bbff2b459e |
| d9535e7a8d |
| a82bbe1a72 |
| 6812f55152 |
| 83163c17cd |
| 5ed7c9a46e |
| 2f323056d0 |
| 51b583480b |
| 7b1e2c8b98 |
| b87f3bdb50 |
| 2f5908a3dd |
| ca77820e9d |
| a4346a2d93 |
| 44360ecacf |
| b675c3cfec |
| b23d8da96c |
| 215ea8d819 |
| 0c27d5acbc |
| 435d74c37e |
| b35bdf01cc |
| 9b654143bb |
| 4841f150f4 |
| 16a49a8b04 |
| 1fd819b806 |
| cab9e04cdd |
| e8f341b850 |
| 1f6fcb9b8c |
| 1c7202b5bf |
| 24ac062bf5 |
| b776bf5b09 |
| 144076e029 |
| 835251b342 |
| ebbad5812f |
| 7b86022280 |
| da1fd4b0cd |
| 57d28be9f5 |
| 126c9238ba |
| 31a3bc09c8 |
| 6ba5c0ecad |
| 27cd514fa5 |
| f0e13784e5 |
| 742ceac32c |
| 545d46c39c |
| d134e4f4d9 |
| e03b0aa45f |
| 46e131698e |
| d1ee15c372 |
| 1e035be978 |
| 88a97fc4b8 |
| 2e58f6db7a |
| 1916fc87b0 |
| d8882acdd7 |
| 7f42b39684 |
| b11f79b4c7 |
| c717e39a1a |
| c3253687d0 |
| 895c721b37 |
| 35f5fb6957 |
| 40ec4517c2 |
| 48a6f536fa |
| 13a6d7f7c6 |
| 8b6ed0f951 |
| eef48c54f8 |
| aad962d07d |
| 18bbb3cf36 |
| a3455fb553 |
| ece2988d0d |
| db704b212d |
| 4b13b0a8a4 |
| 6f6499c267 |
| 3dcb44a758 |
| 0595cdc7af |
| 092c02762f |
| d1d2829d2b |
| ac446294e7 |
| 1cceab4d5e |
| 02898d14f9 |
| 09af6c262c |
| faeaeb61a0 |
| 9c80ba6b78 |
| dbba8b5b55 |
| 58ce838023 |
| 5260b152f5 |
| f2dd254d83 |
.github/config.yml (vendored) — 2 changes

```diff
@@ -17,7 +17,7 @@ newPRWelcomeComment: >
 # Comment to be posted to on pull requests merged by a first time user
 firstPRMergeComment: >
   Congrats on merging your first pull request!

   We here at SigNoz are proud of you! 🥳
```
.github/workflows/build.yaml (vendored) — 4 changes

```diff
@@ -32,6 +32,10 @@ jobs:
     steps:
       - name: Checkout code
        uses: actions/checkout@v2
+      - name: Run tests
+        shell: bash
+        run: |
+          make test
      - name: Build query-service image
        shell: bash
        run: |
```
.github/workflows/e2e-k3s.yaml (vendored) — 2 changes

```diff
@@ -57,7 +57,7 @@ jobs:
           --set frontend.service.type=LoadBalancer \
           --set queryService.image.tag=$DOCKER_TAG \
           --set frontend.image.tag=$DOCKER_TAG

           # get pods, services and the container images
           kubectl get pods -n platform
           kubectl get svc -n platform
```
.github/workflows/pr_verify_linked_issue.yml (vendored) — 1 change

```diff
@@ -17,4 +17,3 @@ jobs:
         uses: hattan/verify-linked-issue-action@v1.1.0
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/remove-label.yaml (vendored) — 2 changes

```diff
@@ -11,6 +11,6 @@ jobs:
       - name: Remove label
         uses: buildsville/add-remove-label@v1
         with:
-          label: ok-to-test
+          label: ok-to-test,testing-deploy
           type: remove
           token: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/repo-stats.yml (vendored) — deleted file (25 lines)

```diff
@@ -1,25 +0,0 @@
-on:
-  schedule:
-    # Run this once per day, towards the end of the day for keeping the most
-    # recent data point most meaningful (hours are interpreted in UTC).
-    - cron: "0 8 * * *"
-  workflow_dispatch: # Allow for running this manually.
-
-jobs:
-  j1:
-    name: repostats
-    runs-on: ubuntu-latest
-    steps:
-      - name: run-ghrs
-        uses: jgehrcke/github-repo-stats@v1.1.0
-        with:
-          # Define the stats repository (the repo to fetch
-          # stats for and to generate the report for).
-          # Remove the parameter when the stats repository
-          # and the data repository are the same.
-          repository: signoz/signoz
-          # Set a GitHub API token that can read the stats
-          # repository, and that can push to the data
-          # repository (which this workflow file lives in),
-          # to store data and the report files.
-          ghtoken: ${{ github.token }}
```
.github/workflows/sonar.yml (vendored) — 1 change

```diff
@@ -24,4 +24,3 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
```
.github/workflows/staging-deployment.yaml (vendored) — new file (38 lines)

```yaml
name: staging-deployment
# Trigger deployment only on push to develop branch
on:
  push:
    branches:
      - develop
jobs:
  deploy:
    name: Deploy latest develop branch to staging
    runs-on: ubuntu-latest
    environment: staging
    steps:
      - name: Executing remote ssh commands using ssh key
        uses: appleboy/ssh-action@v0.1.6
        env:
          GITHUB_BRANCH: develop
          GITHUB_SHA: ${{ github.sha }}
        with:
          host: ${{ secrets.HOST_DNS }}
          username: ${{ secrets.USERNAME }}
          key: ${{ secrets.EC2_SSH_KEY }}
          envs: GITHUB_BRANCH,GITHUB_SHA
          command_timeout: 60m
          script: |
            echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
            echo "GITHUB_SHA: ${GITHUB_SHA}"
            export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
            docker system prune --force
            cd ~/signoz
            git status
            git add .
            git stash push -m "stashed on $(date --iso-8601=seconds)"
            git fetch origin
            git checkout ${GITHUB_BRANCH}
            git pull
            make build-ee-query-service-amd64
            make build-frontend-amd64
            make run-signoz
```
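The `DOCKER_TAG` exported by the deploy script is just the first seven characters of the commit SHA. A small illustration of that bash substring expansion (the SHA value here is only an example):

```sh
# Bash substring expansion: take the first 7 characters of the commit SHA
GITHUB_SHA="e7e0f5b96a0000000000000000000000000000ab"   # example value
export DOCKER_TAG="${GITHUB_SHA:0:7}"
echo "$DOCKER_TAG"   # -> e7e0f5b
```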
.github/workflows/testing-deployment.yaml (vendored) — new file (39 lines)

```yaml
name: testing-deployment
# Trigger deployment only on testing-deploy label on pull request
on:
  pull_request:
    types: [labeled]
jobs:
  deploy:
    name: Deploy PR branch to testing
    runs-on: ubuntu-latest
    environment: testing
    if: ${{ github.event.label.name == 'testing-deploy' }}
    steps:
      - name: Executing remote ssh commands using ssh key
        uses: appleboy/ssh-action@v0.1.6
        env:
          GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
          GITHUB_SHA: ${{ github.sha }}
        with:
          host: ${{ secrets.HOST_DNS }}
          username: ${{ secrets.USERNAME }}
          key: ${{ secrets.EC2_SSH_KEY }}
          envs: GITHUB_BRANCH,GITHUB_SHA
          command_timeout: 60m
          script: |
            echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
            echo "GITHUB_SHA: ${GITHUB_SHA}"
            export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
            export DEV_BUILD="1"
            docker system prune --force
            cd ~/signoz
            git status
            git add .
            git stash push -m "stashed on $(date --iso-8601=seconds)"
            git fetch origin
            git checkout ${GITHUB_BRANCH}
            git pull
            make build-ee-query-service-amd64
            make build-frontend-amd64
            make run-signoz
```
````diff
@@ -215,9 +215,26 @@ Please ping us in the [`#contributing`](https://signoz-community.slack.com/archi

 # 4. Contribute to Backend (Query-Service) 🌑

-[**https://github.com/SigNoz/signoz/tree/develop/pkg/query-service**](https://github.com/SigNoz/signoz/tree/develop/pkg/query-service)
+**Need to Update: [https://github.com/SigNoz/signoz/tree/develop/pkg/query-service](https://github.com/SigNoz/signoz/tree/develop/pkg/query-service)**

-## 4.1 To run ClickHouse setup (recommended for local development)
+## 4.1 Prerequisites
+
+### 4.1.1 Install SQLite3
+
+- Run `sqlite3` command to check if you already have SQLite3 installed on your machine.
+
+- If not installed already, Install using below command
+  - on Linux
+    - on Debian / Ubuntu
+      ```
+      sudo apt install sqlite3
+      ```
+    - on CentOS / Fedora / RedHat
+      ```
+      sudo yum install sqlite3
+      ```
+
+## 4.2 To run ClickHouse setup (recommended for local development)

 - Clone the SigNoz repository and cd into signoz directory,
 ```
````
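The hunk ends right at the clone step. A minimal sketch of that step, assuming the standard clone of the repository linked above (the follow-up compose commands live in the full CONTRIBUTING.md):

```sh
# Clone the SigNoz repository and cd into the signoz directory
git clone https://github.com/SigNoz/signoz.git
cd signoz
```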
Makefile — 28 changes

```diff
@@ -45,7 +45,7 @@ build-frontend-amd64:
 	@echo "--> Building frontend docker image for amd64"
 	@echo "------------------"
 	@cd $(FRONTEND_DIRECTORY) && \
-	docker build --file Dockerfile --no-cache -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \
+	docker build --file Dockerfile -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \
 	--build-arg TARGETPLATFORM="linux/amd64" .

 # Step to build and push docker image of frontend(used in push pipeline)
@@ -54,7 +54,7 @@ build-push-frontend:
 	@echo "--> Building and pushing frontend docker image"
 	@echo "------------------"
 	@cd $(FRONTEND_DIRECTORY) && \
-	docker buildx build --file Dockerfile --progress plane --no-cache --push --platform linux/amd64 \
+	docker buildx build --file Dockerfile --progress plane --push --platform linux/arm64,linux/amd64 \
 	--tag $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) .

 # Steps to build and push docker image of query service
@@ -65,7 +65,7 @@ build-query-service-amd64:
 	@echo "--> Building query-service docker image for amd64"
 	@echo "------------------"
 	@docker build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile \
-	--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+	-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
 	--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="$(LD_FLAGS)" .

 # Step to build and push docker image of query in amd64 and arm64 (used in push pipeline)
@@ -73,7 +73,7 @@ build-push-query-service:
 	@echo "------------------"
 	@echo "--> Building and pushing query-service docker image"
 	@echo "------------------"
-	@docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plane --no-cache \
+	@docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plane \
 	--push --platform linux/arm64,linux/amd64 --build-arg LD_FLAGS="$(LD_FLAGS)" \
 	--tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
@@ -84,11 +84,11 @@ build-ee-query-service-amd64:
 	@echo "------------------"
 	@if [ $(DEV_BUILD) != "" ]; then \
 		docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
-		--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+		-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
 		--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="${LD_FLAGS} ${DEV_LD_FLAGS}" .; \
 	else \
 		docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
-		--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+		-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
 		--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="$(LD_FLAGS)" .; \
 	fi
@@ -98,7 +98,7 @@ build-push-ee-query-service:
 	@echo "--> Building and pushing query-service docker image"
 	@echo "------------------"
 	@docker buildx build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
-	--progress plane --no-cache --push --platform linux/arm64,linux/amd64 \
+	--progress plane --push --platform linux/arm64,linux/amd64 \
 	--build-arg LD_FLAGS="$(LD_FLAGS)" --tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .

 dev-setup:
@@ -119,16 +119,22 @@ down-local:
 	$(STANDALONE_DIRECTORY)/docker-compose-core.yaml -f $(STANDALONE_DIRECTORY)/docker-compose-local.yaml \
 	down -v

-run-x86:
+pull-signoz:
+	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml pull
+
+run-signoz:
 	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml up --build -d

-down-x86:
+down-signoz:
 	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml down -v

 clear-standalone-data:
 	@docker run --rm -v "$(PWD)/$(STANDALONE_DIRECTORY)/data:/pwd" busybox \
-	sh -c "cd /pwd && rm -rf alertmanager/* clickhous*/* signoz/* zookeeper-*/*"
+	sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*"

 clear-swarm-data:
 	@docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \
-	sh -c "cd /pwd && rm -rf alertmanager/* clickhous*/* signoz/* zookeeper-*/*"
+	sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*"
+
+test:
+	go test ./pkg/query-service/app/metrics/...
```
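The renamed targets and the new `DEV_BUILD` switch are exactly what the deployment workflows above invoke. A quick sketch of invoking them from a shell (assuming a local checkout with Docker and docker-compose available):

```sh
# pull the published images and start/stop the standalone stack (previously run-x86 / down-x86)
make pull-signoz
make run-signoz
make down-signoz

# build the EE query-service image locally; DEV_BUILD toggles the extra dev LD flags in the Makefile conditional
DEV_BUILD=1 make build-ee-query-service-amd64
```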
```diff
@@ -23,7 +23,7 @@

 ##

-SigNoz helps developers monitor applications and troubleshoot problems in their deployed applications. SigNoz uses distributed tracing to gain visibility into your software stack.
+SigNoz helps developers monitor applications and troubleshoot problems in their deployed applications. With SigNoz, you can:

 👉 Visualise Metrics, Traces and Logs in a single pane of glass

@@ -144,6 +144,8 @@ Moreover, SigNoz has few more advanced features wrt Jaeger:
 - SigNoz Logs management are based on ClickHouse, a columnar OLAP datastore which makes aggregate log analytics queries much more efficient
 - 50% lower resource requirement compared to Elastic during ingestion

+We have published benchmarks comparing Elastic with SigNoz. Check it out [here](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark)
+
 <p>  </p>

 ### SigNoz vs Loki
@@ -152,6 +154,8 @@ Moreover, SigNoz has few more advanced features wrt Jaeger:
 - SigNoz supports indexes over high cardinality data and has no limitations on the number of indexes, while Loki reaches max streams with a few indexes added to it.
 - Searching over a huge volume of data is difficult and slow in Loki compared to SigNoz

+We have published benchmarks comparing Loki with SigNoz. Check it out [here](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark)
+
 <br /><br />

 <img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/Contributors.svg" width="50px" />
```
````diff
@@ -27,12 +27,6 @@ For x86 chip (amd):
 docker-compose -f docker/clickhouse-setup/docker-compose.yaml up -d
 ```

-For Mac with Apple chip (arm):
-
-```sh
-docker-compose -f docker/clickhouse-setup/docker-compose.arm.yaml up -d
-```
-
 Open http://localhost:3301 in your favourite browser. In couple of minutes, you should see
 the data generated from hotrod in SigNoz UI.
````
```diff
@@ -35,7 +35,6 @@ x-clickhouse-depend: &clickhouse-depend
 services:
   zookeeper-1:
     image: bitnami/zookeeper:3.7.0
     container_name: zookeeper-1
     hostname: zookeeper-1
     user: root
     ports:
@@ -52,7 +51,6 @@ services:

   # zookeeper-2:
   #   image: bitnami/zookeeper:3.7.0
   #   container_name: zookeeper-2
   #   hostname: zookeeper-2
   #   user: root
   #   ports:
@@ -69,7 +67,6 @@ services:

   # zookeeper-3:
   #   image: bitnami/zookeeper:3.7.0
   #   container_name: zookeeper-3
   #   hostname: zookeeper-3
   #   user: root
   #   ports:
@@ -86,7 +83,6 @@ services:

   clickhouse:
     <<: *clickhouse-defaults
     container_name: clickhouse
     hostname: clickhouse
     # ports:
     #   - "9000:9000"
@@ -101,7 +97,6 @@ services:

   # clickhouse-2:
   #   <<: *clickhouse-defaults
   #   container_name: clickhouse-2
   #   hostname: clickhouse-2
   #   ports:
   #     - "9001:9000"
@@ -116,7 +111,6 @@ services:

   # clickhouse-3:
   #   <<: *clickhouse-defaults
   #   container_name: clickhouse-3
   #   hostname: clickhouse-3
   #   ports:
   #     - "9002:9000"
@@ -143,7 +137,7 @@ services:
       condition: on-failure

   query-service:
-    image: signoz/query-service:0.12.0
+    image: signoz/query-service:0.16.2
     command: ["-config=/root/config/prometheus.yml"]
     # ports:
     #   - "6060:6060"     # pprof port
@@ -172,7 +166,7 @@ services:
     <<: *clickhouse-depend

   frontend:
-    image: signoz/frontend:0.12.0
+    image: signoz/frontend:0.16.2
     deploy:
       restart_policy:
         condition: on-failure
@@ -185,7 +179,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.5
     command: ["--config=/etc/otel-collector-config.yaml"]
     user: root # required for reading docker container logs
     volumes:
@@ -194,6 +188,7 @@ services:
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}},dockerswarm.service.name={{.Service.Name}},dockerswarm.task.name={{.Task.Name}}
       - DOCKER_MULTI_NODE_CLUSTER=false
+      - LOW_CARDINAL_EXCEPTION_GROUPING=false
     ports:
       # - "1777:1777"     # pprof extension
       - "4317:4317"       # OTLP gRPC receiver
@@ -213,7 +208,7 @@ services:
     <<: *clickhouse-depend

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.5
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
```
```diff
@@ -64,7 +64,9 @@ receivers:
         - job_name: otel-collector
           static_configs:
             - targets:
                 - localhost:8888
+              labels:
+                job_name: otel-collector

 processors:
   batch:
@@ -78,12 +80,16 @@ processors:
   signozspanmetrics/prometheus:
     metrics_exporter: prometheus
     latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
-    dimensions_cache_size: 10000
+    dimensions_cache_size: 100000
     dimensions:
       - name: service.namespace
         default: default
      - name: deployment.environment
        default: default
+      # This is added to ensure the uniqueness of the timeseries
+      # Otherwise, identical timeseries produced by multiple replicas of
+      # collectors result in incorrect APM metrics
+      - name: 'signoz.collector.id'
   # memory_limiter:
   #   # 80% of maximum memory up to 2G
   #   limit_mib: 1500
@@ -104,11 +110,13 @@ exporters:
   clickhousetraces:
     datasource: tcp://clickhouse:9000/?database=signoz_traces
     docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
+    low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
   clickhousemetricswrite:
     endpoint: tcp://clickhouse:9000/?database=signoz_metrics
     resource_to_telemetry_conversion:
       enabled: true
+  clickhousemetricswrite/prometheus:
+    endpoint: tcp://clickhouse:9000/?database=signoz_metrics
   prometheus:
     endpoint: 0.0.0.0:8889
   # logging: {}
@@ -147,9 +155,13 @@ service:
       processors: [batch]
       exporters: [clickhousemetricswrite]
     metrics/generic:
-      receivers: [hostmetrics, prometheus]
+      receivers: [hostmetrics]
       processors: [resourcedetection, batch]
       exporters: [clickhousemetricswrite]
+    metrics/prometheus:
+      receivers: [prometheus]
+      processors: [batch]
+      exporters: [clickhousemetricswrite/prometheus]
     metrics/spanmetrics:
       receivers: [otlp/spanmetrics]
       exporters: [prometheus]

@@ -7,7 +7,9 @@ receivers:
         scrape_interval: 60s
         static_configs:
           - targets:
              - localhost:8888
+            labels:
+              job_name: otel-collector-metrics
       # SigNoz span metrics
       - job_name: signozspanmetrics-collector
         scrape_interval: 60s
```
```diff
@@ -30,6 +30,8 @@ server {

         location /api {
             proxy_pass http://query-service:8080/api;
+            # connection will be closed if no data is read for 600s between successive read operations
+            proxy_read_timeout 600s;
         }

         # redirect server error pages to the static page /50x.html
```
```diff
@@ -905,7 +905,8 @@
     <dictionaries_config>*_dictionary.xml</dictionaries_config>

     <!-- Configuration of user defined executable functions -->
-    <user_defined_executable_functions_config>*_function.xml</user_defined_executable_functions_config>
+    <user_defined_executable_functions_config>*function.xml</user_defined_executable_functions_config>
+    <user_scripts_path>/var/lib/clickhouse/user_scripts/</user_scripts_path>

     <!-- Uncomment if you want data to be compressed 30-100% better.
          Don't do that if you just started using ClickHouse.
```
deploy/docker/clickhouse-setup/custom-function.xml — new file (21 lines)

```xml
<functions>
    <function>
        <type>executable</type>
        <name>histogramQuantile</name>
        <return_type>Float64</return_type>
        <argument>
            <type>Array(Float64)</type>
            <name>buckets</name>
        </argument>
        <argument>
            <type>Array(Float64)</type>
            <name>counts</name>
        </argument>
        <argument>
            <type>Float64</type>
            <name>quantile</name>
        </argument>
        <format>CSV</format>
        <command>./histogramQuantile</command>
    </function>
</functions>
```
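This registers `histogramQuantile` as an executable ClickHouse UDF backed by the binary under `user_scripts`. A hypothetical smoke test from a shell, assuming the compose stack below is running and the ClickHouse container is named `clickhouse` as configured (the bucket bounds and counts here are made-up values):

```sh
# Call the UDF through clickhouse-client; arguments are bucket upper bounds,
# cumulative counts per bucket, and the requested quantile.
docker exec -i clickhouse clickhouse-client -q \
  "SELECT histogramQuantile([250.0, 500.0, 1000.0, inf], [10.0, 25.0, 40.0, 42.0], 0.95)"
```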
```diff
@@ -41,7 +41,7 @@ services:
 # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: otel-collector
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.5
     command: ["--config=/etc/otel-collector-config.yaml"]
     # user: root # required for reading docker container logs
     volumes:
@@ -67,7 +67,7 @@ services:

   otel-collector-metrics:
     container_name: otel-collector-metrics
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.5
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml

@@ -97,9 +97,11 @@ services:
     volumes:
       - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
       - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+      - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
       - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
       # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
       - ./data/clickhouse/:/var/lib/clickhouse/
+      - ./user_scripts:/var/lib/clickhouse/user_scripts/

   # clickhouse-2:
   #   <<: *clickhouse-defaults
@@ -112,9 +114,12 @@ services:
   #   volumes:
   #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
   #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+  #     - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
   #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
   #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
   #     - ./data/clickhouse-2/:/var/lib/clickhouse/
+  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/


   # clickhouse-3:
   #   <<: *clickhouse-defaults
@@ -127,12 +132,14 @@ services:
   #   volumes:
   #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
   #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+  #     - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
   #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
   #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
   #     - ./data/clickhouse-3/:/var/lib/clickhouse/
+  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/

   alertmanager:
-    image: signoz/alertmanager:0.23.0-0.2
+    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.0-0.2}
     volumes:
       - ./data/alertmanager:/data
     depends_on:
@@ -146,7 +153,7 @@ services:
 # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:0.12.0
+    image: signoz/query-service:${DOCKER_TAG:-0.16.2}
     container_name: query-service
     command: ["-config=/root/config/prometheus.yml"]
     # ports:
@@ -174,7 +181,7 @@ services:
     <<: *clickhouse-depend

   frontend:
-    image: signoz/frontend:0.12.0
+    image: signoz/frontend:${DOCKER_TAG:-0.16.2}
     container_name: frontend
     restart: on-failure
     depends_on:
@@ -186,7 +193,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.66.5}
     command: ["--config=/etc/otel-collector-config.yaml"]
     user: root # required for reading docker container logs
     volumes:
@@ -195,6 +202,7 @@ services:
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
       - DOCKER_MULTI_NODE_CLUSTER=false
+      - LOW_CARDINAL_EXCEPTION_GROUPING=false
     ports:
       # - "1777:1777"     # pprof extension
       - "4317:4317"       # OTLP gRPC receiver
@@ -211,7 +219,7 @@ services:
     <<: *clickhouse-depend

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.66.5}
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
@@ -224,15 +232,15 @@ services:
     <<: *clickhouse-depend

   hotrod:
     image: jaegertracing/example-hotrod:1.30
     container_name: hotrod
     logging:
       options:
         max-size: 50m
         max-file: "3"
     command: ["all"]
     environment:
       - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

   load-hotrod:
     image: "grubykarol/locust:1.2.3-python3.9-alpine3.12"
```
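The image tags in this compose file now fall back to defaults via `${VAR:-default}`, so a deployment can pin different versions purely through the environment. A hypothetical override from a shell (the tag values and the path are placeholders matching the defaults above):

```sh
# Run the standalone stack with explicit image tags instead of the baked-in defaults
DOCKER_TAG=0.16.2 OTELCOL_TAG=0.66.5 ALERTMANAGER_TAG=0.23.0-0.2 \
  docker-compose -f deploy/docker/clickhouse-setup/docker-compose.yaml up -d
```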
```diff
@@ -64,7 +64,10 @@ receivers:
         - job_name: otel-collector
           static_configs:
             - targets:
                 - localhost:8888
+              labels:
+                job_name: otel-collector


 processors:
   batch:
@@ -74,12 +77,16 @@ processors:
   signozspanmetrics/prometheus:
     metrics_exporter: prometheus
     latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
-    dimensions_cache_size: 10000
+    dimensions_cache_size: 100000
     dimensions:
       - name: service.namespace
         default: default
      - name: deployment.environment
        default: default
+      # This is added to ensure the uniqueness of the timeseries
+      # Otherwise, identical timeseries produced by multiple replicas of
+      # collectors result in incorrect APM metrics
+      - name: 'signoz.collector.id'
   # memory_limiter:
   #   # 80% of maximum memory up to 2G
   #   limit_mib: 1500
@@ -112,11 +119,13 @@ exporters:
   clickhousetraces:
     datasource: tcp://clickhouse:9000/?database=signoz_traces
     docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
+    low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
   clickhousemetricswrite:
     endpoint: tcp://clickhouse:9000/?database=signoz_metrics
     resource_to_telemetry_conversion:
       enabled: true
+  clickhousemetricswrite/prometheus:
+    endpoint: tcp://clickhouse:9000/?database=signoz_metrics
   prometheus:
     endpoint: 0.0.0.0:8889
   # logging: {}
@@ -151,9 +160,13 @@ service:
       processors: [batch]
       exporters: [clickhousemetricswrite]
     metrics/generic:
-      receivers: [hostmetrics, prometheus]
+      receivers: [hostmetrics]
       processors: [resourcedetection, batch]
       exporters: [clickhousemetricswrite]
+    metrics/prometheus:
+      receivers: [prometheus]
+      processors: [batch]
+      exporters: [clickhousemetricswrite/prometheus]
     metrics/spanmetrics:
       receivers: [otlp/spanmetrics]
       exporters: [prometheus]

@@ -11,7 +11,9 @@ receivers:
         scrape_interval: 60s
         static_configs:
           - targets:
              - localhost:8888
+            labels:
+              job_name: otel-collector-metrics
       # SigNoz span metrics
       - job_name: signozspanmetrics-collector
         scrape_interval: 60s
```
deploy/docker/clickhouse-setup/user_scripts/histogramQuantile — new executable file (binary, not shown)
deploy/docker/clickhouse-setup/user_scripts/histogramQuantile.go — new file (237 lines)

```go
package main

import (
	"bufio"
	"fmt"
	"math"
	"os"
	"sort"
	"strconv"
	"strings"
)

// NOTE: executable must be built with target OS and architecture set to linux/amd64
// env GOOS=linux GOARCH=amd64 go build -o histogramQuantile histogramQuantile.go

// The following code is adapted from the following source:
// https://github.com/prometheus/prometheus/blob/main/promql/quantile.go

type bucket struct {
	upperBound float64
	count      float64
}

// buckets implements sort.Interface.
type buckets []bucket

func (b buckets) Len() int           { return len(b) }
func (b buckets) Swap(i, j int)      { b[i], b[j] = b[j], b[i] }
func (b buckets) Less(i, j int) bool { return b[i].upperBound < b[j].upperBound }

// bucketQuantile calculates the quantile 'q' based on the given buckets. The
// buckets will be sorted by upperBound by this function (i.e. no sorting
// needed before calling this function). The quantile value is interpolated
// assuming a linear distribution within a bucket. However, if the quantile
// falls into the highest bucket, the upper bound of the 2nd highest bucket is
// returned. A natural lower bound of 0 is assumed if the upper bound of the
// lowest bucket is greater 0. In that case, interpolation in the lowest bucket
// happens linearly between 0 and the upper bound of the lowest bucket.
// However, if the lowest bucket has an upper bound less or equal 0, this upper
// bound is returned if the quantile falls into the lowest bucket.
//
// There are a number of special cases (once we have a way to report errors
// happening during evaluations of AST functions, we should report those
// explicitly):
//
// If 'buckets' has 0 observations, NaN is returned.
//
// If 'buckets' has fewer than 2 elements, NaN is returned.
//
// If the highest bucket is not +Inf, NaN is returned.
//
// If q==NaN, NaN is returned.
//
// If q<0, -Inf is returned.
//
// If q>1, +Inf is returned.
func bucketQuantile(q float64, buckets buckets) float64 {
	if math.IsNaN(q) {
		return math.NaN()
	}
	if q < 0 {
		return math.Inf(-1)
	}
	if q > 1 {
		return math.Inf(+1)
	}
	sort.Sort(buckets)
	if !math.IsInf(buckets[len(buckets)-1].upperBound, +1) {
		return math.NaN()
	}

	buckets = coalesceBuckets(buckets)
	ensureMonotonic(buckets)

	if len(buckets) < 2 {
		return math.NaN()
	}
	observations := buckets[len(buckets)-1].count
	if observations == 0 {
		return math.NaN()
	}
	rank := q * observations
	b := sort.Search(len(buckets)-1, func(i int) bool { return buckets[i].count >= rank })

	if b == len(buckets)-1 {
		return buckets[len(buckets)-2].upperBound
	}
	if b == 0 && buckets[0].upperBound <= 0 {
		return buckets[0].upperBound
	}
	var (
		bucketStart float64
		bucketEnd   = buckets[b].upperBound
		count       = buckets[b].count
	)
	if b > 0 {
		bucketStart = buckets[b-1].upperBound
		count -= buckets[b-1].count
		rank -= buckets[b-1].count
	}
	return bucketStart + (bucketEnd-bucketStart)*(rank/count)
}

// coalesceBuckets merges buckets with the same upper bound.
//
// The input buckets must be sorted.
func coalesceBuckets(buckets buckets) buckets {
	last := buckets[0]
	i := 0
	for _, b := range buckets[1:] {
		if b.upperBound == last.upperBound {
			last.count += b.count
		} else {
			buckets[i] = last
			last = b
			i++
		}
	}
	buckets[i] = last
	return buckets[:i+1]
}

// The assumption that bucket counts increase monotonically with increasing
// upperBound may be violated during:
//
//   * Recording rule evaluation of histogram_quantile, especially when rate()
//     has been applied to the underlying bucket timeseries.
//   * Evaluation of histogram_quantile computed over federated bucket
//     timeseries, especially when rate() has been applied.
//
// This is because scraped data is not made available to rule evaluation or
// federation atomically, so some buckets are computed with data from the
// most recent scrapes, but the other buckets are missing data from the most
// recent scrape.
//
// Monotonicity is usually guaranteed because if a bucket with upper bound
// u1 has count c1, then any bucket with a higher upper bound u > u1 must
// have counted all c1 observations and perhaps more, so that c >= c1.
//
// Randomly interspersed partial sampling breaks that guarantee, and rate()
// exacerbates it. Specifically, suppose bucket le=1000 has a count of 10 from
// 4 samples but the bucket with le=2000 has a count of 7 from 3 samples. The
// monotonicity is broken. It is exacerbated by rate() because under normal
// operation, cumulative counting of buckets will cause the bucket counts to
// diverge such that small differences from missing samples are not a problem.
// rate() removes this divergence.)
//
// bucketQuantile depends on that monotonicity to do a binary search for the
// bucket with the φ-quantile count, so breaking the monotonicity
// guarantee causes bucketQuantile() to return undefined (nonsense) results.
//
// As a somewhat hacky solution until ingestion is atomic per scrape, we
// calculate the "envelope" of the histogram buckets, essentially removing
// any decreases in the count between successive buckets.
func ensureMonotonic(buckets buckets) {
	max := buckets[0].count
	for i := 1; i < len(buckets); i++ {
		switch {
		case buckets[i].count > max:
			max = buckets[i].count
		case buckets[i].count < max:
			buckets[i].count = max
		}
	}
}

// End of copied code.

func readLines() []string {
	r := bufio.NewReader(os.Stdin)
	bytes := []byte{}
	lines := []string{}
	for {
		line, isPrefix, err := r.ReadLine()
		if err != nil {
			break
		}
		bytes = append(bytes, line...)
		if !isPrefix {
			str := strings.TrimSpace(string(bytes))
			if len(str) > 0 {
				lines = append(lines, str)
				bytes = []byte{}
			}
		}
	}
	if len(bytes) > 0 {
		lines = append(lines, string(bytes))
	}
	return lines
}

func main() {
	lines := readLines()
	for _, text := range lines {
		// Example input
		// "[1, 2, 4, 8, 16]", "[1, 5, 8, 10, 14]", 0.9"
		// bounds - counts - quantile
		parts := strings.Split(text, "\",")

		var bucketNumbers []float64
		// Strip the ends with square brackets
		text = parts[0][2 : len(parts[0])-1]
		// Parse the bucket bounds
		for _, num := range strings.Split(text, ",") {
			num = strings.TrimSpace(num)
			number, err := strconv.ParseFloat(num, 64)
			if err == nil {
				bucketNumbers = append(bucketNumbers, number)
			}
		}

		var bucketCounts []float64
		// Strip the ends with square brackets
		text = parts[1][2 : len(parts[1])-1]
		// Parse the bucket counts
		for _, num := range strings.Split(text, ",") {
			num = strings.TrimSpace(num)
			number, err := strconv.ParseFloat(num, 64)
			if err == nil {
				bucketCounts = append(bucketCounts, number)
			}
		}

		// Parse the quantile
		q, err := strconv.ParseFloat(parts[2], 64)
		var b buckets

		if err == nil {
			for i := 0; i < len(bucketNumbers); i++ {
				b = append(b, bucket{upperBound: bucketNumbers[i], count: bucketCounts[i]})
			}
		}
		fmt.Println(bucketQuantile(q, b))
	}
}
```
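The comment at the top of the file pins the build target. A quick sketch of building the binary and exercising its stdin protocol locally — one CSV row of bucket bounds, cumulative counts, and a quantile; the numbers below are arbitrary:

```sh
# Build for the ClickHouse container (it runs linux/amd64)
env GOOS=linux GOARCH=amd64 go build -o histogramQuantile histogramQuantile.go

# Smoke-test the parser and quantile logic on the host
echo '"[1, 2, 4, 8, +Inf]","[1, 5, 8, 10, 14]",0.9' | go run histogramQuantile.go
# prints 8 — the rank falls into the +Inf bucket, so the second-highest bound is returned
```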
```diff
@@ -30,6 +30,8 @@ server {

         location /api {
             proxy_pass http://query-service:8080/api;
+            # connection will be closed if no data is read for 600s between successive read operations
+            proxy_read_timeout 600s;
         }

         # redirect server error pages to the static page /50x.html
```
```diff
@@ -81,6 +81,11 @@ check_os() {
             os="centos"
             package_manager="yum"
             ;;
+        Rocky*)
+            desired_os=1
+            os="centos"
+            package_manager="yum"
+            ;;
         SLES*)
             desired_os=1
             os="sles"
@@ -223,7 +228,7 @@ wait_for_containers_start() {

     # The while loop is important because for-loops don't work for dynamic values
     while [[ $timeout -gt 0 ]]; do
-        status_code="$(curl -s -o /dev/null -w "%{http_code}" http://localhost:3301/api/v1/services/list || true)"
+        status_code="$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:3301/api/v1/health?live=1" || true)"
         if [[ status_code -eq 200 ]]; then
             break
         else
@@ -511,13 +516,15 @@ else
     echo ""
     echo -e "🟢 Your frontend is running on http://localhost:3301"
     echo ""
+    echo "ℹ️ By default, retention period is set to 7 days for logs and traces, and 30 days for metrics."
+    echo -e "To change this, navigate to the General tab on the Settings page of SigNoz UI. For more details, refer to https://signoz.io/docs/userguide/retention-period \n"
+
     echo "ℹ️ To bring down SigNoz and clean volumes : $sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml down -v"

     echo ""
     echo "+++++++++++++++++++++++++++++++++++++++++++++++++"
     echo ""
-    echo "👉 Need help Getting Started?"
+    echo "👉 Need help in Getting Started?"
     echo -e "Join us on Slack https://signoz.io/slack"
     echo ""
     echo -e "\n📨 Please share your email to receive support & updates about SigNoz!"
```
```diff
@@ -9,6 +9,7 @@ import (
 	"go.signoz.io/signoz/ee/query-service/license"
 	baseapp "go.signoz.io/signoz/pkg/query-service/app"
 	baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
+	basemodel "go.signoz.io/signoz/pkg/query-service/model"
 	rules "go.signoz.io/signoz/pkg/query-service/rules"
 	"go.signoz.io/signoz/pkg/query-service/version"
 )
@@ -96,7 +97,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router) {
 	router.HandleFunc("/api/v1/complete/google",
 		baseapp.OpenAccess(ah.receiveGoogleAuth)).
 		Methods(http.MethodGet)

 	router.HandleFunc("/api/v1/orgs/{orgId}/domains",
 		baseapp.AdminAccess(ah.listDomainsByOrg)).
 		Methods(http.MethodGet)
@@ -127,5 +128,11 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router) {

 func (ah *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {
 	version := version.GetVersion()
-	ah.WriteJSON(w, r, map[string]string{"version": version, "ee": "Y"})
+	versionResponse := basemodel.GetVersionResponse{
+		Version:        version,
+		EE:             "Y",
+		SetupCompleted: ah.SetupCompleted,
+	}
+
+	ah.WriteJSON(w, r, versionResponse)
 }
```
```diff
@@ -8,6 +8,7 @@ import (
 	"io/ioutil"
 	"net/http"
 	"net/url"

 	"github.com/gorilla/mux"
 	"go.signoz.io/signoz/ee/query-service/constants"
 	"go.signoz.io/signoz/ee/query-service/model"
@@ -87,9 +88,16 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {

 	// get invite object
 	invite, err := baseauth.ValidateInvite(ctx, req)
-	if err != nil || invite == nil {
+	if err != nil {
 		zap.S().Errorf("failed to validate invite token", err)
 		RespondError(w, model.BadRequest(err), nil)
 		return
 	}
+
+	if invite == nil {
+		zap.S().Errorf("failed to validate invite token: it is either empty or invalid", err)
+		RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
+		return
+	}

 	// get auth domain from email domain
@@ -190,7 +198,7 @@ func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string)
 }

 // receiveGoogleAuth completes google OAuth response and forwards a request
 // to front-end to sign user in
 func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) {
 	redirectUri := constants.GetDefaultSiteURL()
 	ctx := context.Background()
@@ -221,15 +229,15 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
 	// upgrade redirect url from the relay state for better accuracy
 	redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")

 	// fetch domain by parsing relay state.
 	domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
 	if err != nil {
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	// now that we have domain, use domain to fetch sso settings.
 	// prepare google callback handler using parsedState -
 	// which contains redirect URL (front-end endpoint)
 	callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState)
@@ -239,7 +247,7 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email)
 	if err != nil {
 		zap.S().Errorf("[receiveGoogleAuth] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
@@ -250,15 +258,12 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
 	http.Redirect(w, r, nextPage, http.StatusSeeOther)
 }

 // receiveSAML completes a SAML request and gets user logged in
 func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 	// this is the source url that initiated the login request
 	redirectUri := constants.GetDefaultSiteURL()
 	ctx := context.Background()

 	if !ah.CheckFeature(model.SSO) {
 		zap.S().Errorf("[receiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
 		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
@@ -287,13 +292,13 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 	// upgrade redirect url from the relay state for better accuracy
 	redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")

 	// fetch domain by parsing relay state.
 	domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
 	if err != nil {
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	sp, err := domain.PrepareSamlRequest(parsedState)
 	if err != nil {
 		zap.S().Errorf("[receiveSAML] failed to prepare saml request for domain (%s): %v", domain.String(), err)
@@ -327,6 +332,6 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	http.Redirect(w, r, nextPage, http.StatusSeeOther)
 }
```
```diff
@@ -1,8 +1,11 @@
 package app

 import (
+	"bytes"
 	"context"
+	"encoding/json"
 	"fmt"
+	"io/ioutil"
 	"net"
 	"net/http"
 	_ "net/http/pprof" // http profiler
@@ -27,6 +30,7 @@ import (
 	"go.signoz.io/signoz/pkg/query-service/healthcheck"
 	basealm "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
 	baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
+	"go.signoz.io/signoz/pkg/query-service/model"
 	pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine"
 	rules "go.signoz.io/signoz/pkg/query-service/rules"
 	"go.signoz.io/signoz/pkg/query-service/telemetry"
@@ -266,15 +270,82 @@ func (lrw *loggingResponseWriter) Flush() {
 	lrw.ResponseWriter.(http.Flusher).Flush()
 }

+func extractDashboardMetaData(path string, r *http.Request) (map[string]interface{}, bool) {
+	pathToExtractBodyFrom := "/api/v2/metrics/query_range"
+
+	data := map[string]interface{}{}
+	var postData *model.QueryRangeParamsV2
+
+	if path == pathToExtractBodyFrom && (r.Method == "POST") {
+		if r.Body != nil {
+			bodyBytes, err := ioutil.ReadAll(r.Body)
+			if err != nil {
+				return nil, false
+			}
+			r.Body.Close() // must close
+			r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))
+			json.Unmarshal(bodyBytes, &postData)
+
+		} else {
+			return nil, false
+		}
+
+	} else {
+		return nil, false
+	}
+
+	signozMetricNotFound := false
+
+	if postData != nil {
+		signozMetricNotFound = telemetry.GetInstance().CheckSigNozMetricsV2(postData.CompositeMetricQuery)
+
+		if postData.CompositeMetricQuery != nil {
+			data["queryType"] = postData.CompositeMetricQuery.QueryType
+			data["panelType"] = postData.CompositeMetricQuery.PanelType
+		}
+
+		data["datasource"] = postData.DataSource
+	}
+
+	if signozMetricNotFound {
+		telemetry.GetInstance().AddActiveMetricsUser()
+		telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_DASHBOARDS_METADATA, data, true)
+	}
+
+	return data, true
+}
+
+func getActiveLogs(path string, r *http.Request) {
+	// if path == "/api/v1/dashboards/{uuid}" {
+	// 	telemetry.GetInstance().AddActiveMetricsUser()
+	// }
+	if path == "/api/v1/logs" {
+		hasFilters := len(r.URL.Query().Get("q"))
+		if hasFilters > 0 {
+			telemetry.GetInstance().AddActiveLogsUser()
+		}
+
+	}
+
+}
+
 func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		route := mux.CurrentRoute(r)
 		path, _ := route.GetPathTemplate()

+		dashboardMetadata, metadataExists := extractDashboardMetaData(path, r)
+		getActiveLogs(path, r)
+
 		lrw := NewLoggingResponseWriter(w)
 		next.ServeHTTP(lrw, r)

 		data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
+		if metadataExists {
+			for key, value := range dashboardMetadata {
+				data[key] = value
+			}
+		}

 		if _, ok := telemetry.IgnoredPaths()[path]; !ok {
 			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)
```
```diff
@@ -58,7 +58,7 @@ module.exports = {
 		'react/no-array-index-key': 'error',
 		'linebreak-style': [
 			'error',
-			process.platform === 'win32' ? 'windows' : 'unix',
+			process.env.platform === 'win32' ? 'windows' : 'unix',
 		],
 		'@typescript-eslint/default-param-last': 'off',
@@ -102,9 +102,10 @@ module.exports = {
 			},
 		],
 		'@typescript-eslint/no-unused-vars': 'error',
 		'func-style': ['error', 'declaration', { allowArrowFunctions: true }],
+		'arrow-body-style': ['error', 'as-needed'],

 		// eslint rules need to remove
 		'no-shadow': 'off',
 		'@typescript-eslint/no-shadow': 'off',
 		'import/no-cycle': 'off',
```
```diff
@@ -1,4 +1,4 @@
 #!/bin/sh
 . "$(dirname "$0")/_/husky.sh"

-cd frontend && npm run commitlint
+cd frontend && yarn run commitlint --edit $1
```
frontend/.yarnrc — new file (1 line)

```
network-timeout 600000
```
```diff
@@ -9,8 +9,9 @@ ARG TARGETARCH

 WORKDIR /frontend

-# Copy the package.json to install dependencies
+# Copy the package.json and .yarnrc files prior to install dependencies
 COPY package.json ./
+COPY .yarnrc ./

 # Install the dependencies and make the folder
 RUN CI=1 yarn install
```
```diff
@@ -15,7 +15,7 @@ const config: Config.InitialOptions = {
 			useESM: true,
 		},
 	},
-	testMatch: ['<rootDir>/src/**/?(*.)(test).(ts|js)?(x)'],
+	testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
 	preset: 'ts-jest/presets/js-with-ts-esm',
 	transform: {
 		'^.+\\.(ts|tsx)?$': 'ts-jest',
@@ -25,6 +25,7 @@ const config: Config.InitialOptions = {
 	setupFilesAfterEnv: ['<rootDir>jest.setup.ts'],
 	testPathIgnorePatterns: ['/node_modules/', '/public/'],
 	moduleDirectories: ['node_modules', 'src'],
+	testEnvironment: 'jest-environment-jsdom',
 	testEnvironmentOptions: {
 		'jest-playwright': {
 			browsers: ['chromium', 'firefox', 'webkit'],
```
```diff
@@ -27,16 +27,12 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "@ant-design/colors": "^6.0.0",
-    "@ant-design/icons": "^4.6.2",
+    "@ant-design/colors": "6.0.0",
+    "@ant-design/icons": "4.8.0",
     "@grafana/data": "^8.4.3",
     "@monaco-editor/react": "^4.3.1",
-    "@testing-library/jest-dom": "^5.11.4",
-    "@testing-library/react": "^11.1.0",
-    "@testing-library/user-event": "^12.1.10",
     "@welldone-software/why-did-you-render": "^6.2.1",
     "@xstate/react": "^3.0.0",
-    "antd": "4.19.2",
+    "antd": "5.0.5",
     "axios": "^0.21.0",
     "babel-eslint": "^10.1.0",
     "babel-jest": "^26.6.0",
@@ -44,7 +40,7 @@
     "babel-plugin-named-asset-import": "^0.3.7",
     "babel-preset-minify": "^0.5.1",
     "babel-preset-react-app": "^10.0.0",
-    "chart.js": "^3.4.0",
+    "chart.js": "3.9.1",
     "chartjs-adapter-date-fns": "^2.0.0",
     "chartjs-plugin-annotation": "^1.4.0",
     "color": "^4.2.1",
@@ -59,6 +55,7 @@
     "event-source-polyfill": "1.0.31",
     "file-loader": "6.1.1",
     "flat": "^5.0.2",
+    "fontfaceobserver": "2.3.0",
     "history": "4.10.1",
     "html-webpack-plugin": "5.1.0",
     "i18next": "^21.6.12",
@@ -70,17 +67,18 @@
     "less-loader": "^10.2.0",
     "lodash-es": "^4.17.21",
     "mini-css-extract-plugin": "2.4.5",
-    "react": "17.0.0",
-    "react-dom": "17.0.0",
+    "react": "18.2.0",
+    "react-dom": "18.2.0",
     "react-force-graph": "^1.41.0",
     "react-graph-vis": "^1.0.5",
     "react-grid-layout": "^1.3.4",
     "react-i18next": "^11.16.1",
+    "react-intersection-observer": "9.4.1",
     "react-query": "^3.34.19",
     "react-redux": "^7.2.2",
     "react-router-dom": "^5.2.0",
     "react-use": "^17.3.2",
     "react-vis": "^1.11.7",
+    "react-virtuoso": "4.0.3",
     "redux": "^4.0.5",
     "redux-thunk": "^2.3.0",
     "stream": "^0.0.2",
@@ -120,7 +118,9 @@
     "@commitlint/config-conventional": "^16.2.4",
     "@jest/globals": "^27.5.1",
     "@playwright/test": "^1.22.0",
-    "@testing-library/react-hooks": "^7.0.2",
+    "@testing-library/jest-dom": "5.16.5",
+    "@testing-library/react": "13.4.0",
+    "@testing-library/user-event": "14.4.3",
     "@types/color": "^3.0.3",
     "@types/compression-webpack-plugin": "^9.0.0",
     "@types/copy-webpack-plugin": "^8.0.1",
@@ -128,16 +128,17 @@
     "@types/d3-tip": "^3.5.5",
     "@types/event-source-polyfill": "^1.0.0",
     "@types/flat": "^5.0.2",
+    "@types/fontfaceobserver": "2.1.0",
     "@types/jest": "^27.5.1",
     "@types/lodash-es": "^4.17.4",
     "@types/mini-css-extract-plugin": "^2.5.1",
     "@types/node": "^16.10.3",
-    "@types/react": "^17.0.0",
-    "@types/react-dom": "^16.9.9",
+    "@types/react": "18.0.26",
+    "@types/react-dom": "18.0.10",
     "@types/react-grid-layout": "^1.1.2",
     "@types/react-redux": "^7.1.11",
     "@types/react-resizable": "3.0.3",
     "@types/react-router-dom": "^5.1.6",
     "@types/redux": "^3.6.0",
     "@types/styled-components": "^5.1.4",
     "@types/uuid": "^8.3.1",
     "@types/vis": "^4.21.21",
@@ -145,6 +146,7 @@
     "@types/webpack-dev-server": "^4.3.0",
     "@typescript-eslint/eslint-plugin": "^4.28.2",
     "@typescript-eslint/parser": "^4.28.2",
+    "@welldone-software/why-did-you-render": "6.2.1",
     "autoprefixer": "^9.0.0",
     "babel-plugin-styled-components": "^1.12.0",
     "compression-webpack-plugin": "9.0.0",
@@ -173,7 +175,9 @@
     "lint-staged": "^12.3.7",
     "portfinder-sync": "^0.0.2",
     "prettier": "2.2.1",
     "react-hooks-testing-library": "0.6.0",
     "react-hot-loader": "^4.13.0",
     "react-resizable": "3.0.4",
     "ts-jest": "^27.1.4",
     "ts-node": "^10.2.1",
     "typescript-plugin-css-modules": "^3.4.0",
@@ -186,7 +190,7 @@
     ]
   },
   "resolutions": {
-    "@types/react": "17.0.0",
-    "@types/react-dom": "17.0.0"
+    "@types/react": "18.0.26",
+    "@types/react-dom": "18.0.10"
   }
 }
```
10
frontend/public/css/antd.dark.min.css
vendored
File diff suppressed because one or more lines are too long
10
frontend/public/css/antd.min.css
vendored
File diff suppressed because one or more lines are too long
@@ -6,7 +6,7 @@
"release_notes": "Release Notes",
"read_how_to_upgrade": "Read instructions on how to upgrade",
"latest_version_signoz": "You are running the latest version of SigNoz.",
"stale_version": "You are on an older version and may be losing out on the latest features we have shipped. We recommend to upgrade to the latest version",
"stale_version": "You are on an older version and may be missing out on the latest features we have shipped. We recommend to upgrade to the latest version",
"oops_something_went_wrong_version": "Oops.. facing issues with fetching updated version information",
"n_a": "N/A",
"routes": {
@@ -1,11 +1,11 @@
/* eslint-disable react-hooks/exhaustive-deps */
import { notification } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import loginApi from 'api/user/login';
import { Logout } from 'api/utils';
import Spinner from 'components/Spinner';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import React, { useEffect, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
@@ -47,6 +47,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {

const dispatch = useDispatch<Dispatch<AppActions>>();

const { notifications } = useNotifications();

const currentRoute = mapRoutes.get('current');

const navigateToLoginIfNotLoggedIn = (isLoggedIn = isLoggedInState): void => {
@@ -106,7 +108,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
} else {
Logout();

notification.error({
notifications.error({
message: response.error || t('something_went_wrong'),
});
}
@@ -1,6 +1,9 @@
import { ConfigProvider } from 'antd';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import AppLayout from 'container/AppLayout';
import { useThemeConfig } from 'hooks/useDarkMode';
import { NotificationProvider } from 'hooks/useNotifications';
import history from 'lib/history';
import React, { Suspense } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
@@ -9,29 +12,33 @@ import PrivateRoute from './Private';
import routes from './routes';

function App(): JSX.Element {
return (
<Router history={history}>
<PrivateRoute>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => {
return (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
);
})}
const themeConfig = useThemeConfig();

<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</PrivateRoute>
</Router>
return (
<ConfigProvider theme={themeConfig}>
<NotificationProvider>
<Router history={history}>
<PrivateRoute>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}

<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</PrivateRoute>
</Router>
</NotificationProvider>
</ConfigProvider>
);
}
@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -8,9 +8,7 @@ const query = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(
`/variables/query?query=${encodeURIComponent(props.query)}`,
);
const response = await axios.post(`/variables/query`, props);

return {
statusCode: 200,
@@ -4,14 +4,16 @@ import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import { EventSourcePolyfill } from 'event-source-polyfill';

export const LiveTail = (queryParams: string): EventSourcePolyfill => {
const dict = {
headers: {
Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
},
};
return new EventSourcePolyfill(
// 10 min in ms
const TIMEOUT_IN_MS = 10 * 60 * 1000;

export const LiveTail = (queryParams: string): EventSourcePolyfill =>
new EventSourcePolyfill(
`${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`,
dict,
{
headers: {
Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
},
heartbeatTimeout: TIMEOUT_IN_MS,
},
);
};
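For orientation, a minimal sketch of how a caller might consume the LiveTail helper above; the query string and handlers are illustrative placeholders, not part of this changeset.

const source = LiveTail('q=error&limit=100'); // hypothetical query params
source.onmessage = (event): void => {
	// each SSE message carries a batch of log lines
	console.log(JSON.parse(event.data));
};
source.onerror = (): void => source.close(); // stop tailing on stream errors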
@@ -32,6 +32,7 @@ const getFilters = async (
maxDuration: String((duration.duration || [])[0] || ''),
minDuration: String((duration.duration || [])[1] || ''),
exclude,
spanKind: props.spanKind,
});

return {
@@ -10,9 +10,11 @@ const getSpans = async (
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const updatedSelectedTags = props.selectedTags.map((e) => ({
Key: e.Key[0],
Key: e.Key,
Operator: e.Operator,
Values: e.Values,
StringValues: e.StringValues,
NumberValues: e.NumberValues,
BoolValues: e.BoolValues,
}));

const exclude: string[] = [];
@@ -35,13 +37,14 @@ const getSpans = async (
start: String(props.start),
end: String(props.end),
function: props.function,
groupBy: props.groupBy,
groupBy: props.groupBy === 'none' ? '' : props.groupBy,
step: props.step,
tags: updatedSelectedTags,
...nonDuration,
maxDuration: String((duration.duration || [])[0] || ''),
minDuration: String((duration.duration || [])[1] || ''),
exclude,
spanKind: props.spanKind,
},
);
@@ -28,9 +28,11 @@ const getSpanAggregate = async (
});

const updatedSelectedTags = props.selectedTags.map((e) => ({
Key: e.Key[0],
Key: e.Key,
Operator: e.Operator,
Values: e.Values,
StringValues: e.StringValues,
NumberValues: e.NumberValues,
BoolValues: e.BoolValues,
}));

const other = Object.fromEntries(props.selectedFilter);
@@ -46,6 +48,7 @@ const getSpanAggregate = async (
maxDuration: String((duration.duration || [])[0] || ''),
minDuration: String((duration.duration || [])[1] || ''),
exclude,
spanKind: props.spanKind,
});

return {
@@ -32,6 +32,7 @@ const getTagFilters = async (
maxDuration: String((duration.duration || [])[0] || ''),
minDuration: String((duration.duration || [])[1] || ''),
exclude,
spanKind: props.spanKind,
});

return {
@@ -11,9 +11,12 @@ const getTagValue = async (
const response = await axios.post<PayloadProps>(`/getTagValues`, {
start: props.start.toString(),
end: props.end.toString(),
tagKey: props.tagKey,
tagKey: {
Key: props.tagKey.Key,
Type: props.tagKey.Type,
},
spanKind: props.spanKind,
});

return {
statusCode: 200,
error: null,
@@ -1,9 +0,0 @@
import generatePicker from 'antd/es/date-picker/generatePicker';
import { Dayjs } from 'dayjs';
// included in antd
// eslint-disable-next-line import/no-extraneous-dependencies
import dayjsGenerateConfig from 'rc-picker/lib/generate/dayjs';

const DatePicker = generatePicker<Dayjs>(dayjsGenerateConfig);

export default DatePicker;
@@ -1,8 +1,6 @@
import MEditor, { EditorProps } from '@monaco-editor/react';
import { useIsDarkMode } from 'hooks/useDarkMode';
import React from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import AppReducer from 'types/reducer/app';

function Editor({
value,
@@ -12,7 +10,7 @@ function Editor({
height,
options,
}: MEditorProps): JSX.Element {
const { isDarkMode } = useSelector<AppState, AppReducer>((state) => state.app);
const isDarkMode = useIsDarkMode();
return (
<MEditor
theme={isDarkMode ? 'vs-dark' : 'vs-light'}
321
frontend/src/components/Graph/Plugin/DragSelect.ts
Normal file
@@ -0,0 +1,321 @@
|
||||
import { Chart, ChartTypeRegistry, Plugin } from 'chart.js';
|
||||
import * as ChartHelpers from 'chart.js/helpers';
|
||||
|
||||
// utils
|
||||
import { ChartEventHandler, mergeDefaultOptions } from './utils';
|
||||
|
||||
export const dragSelectPluginId = 'drag-select-plugin';
|
||||
|
||||
type ChartDragHandlers = {
|
||||
mousedown: ChartEventHandler;
|
||||
mousemove: ChartEventHandler;
|
||||
mouseup: ChartEventHandler;
|
||||
globalMouseup: () => void;
|
||||
};
|
||||
|
||||
export type DragSelectPluginOptions = {
|
||||
color?: string;
|
||||
onSelect?: (startValueX: number, endValueX: number) => void;
|
||||
};
|
||||
|
||||
const defaultDragSelectPluginOptions: Required<DragSelectPluginOptions> = {
|
||||
color: 'rgba(0, 0, 0, 0.5)',
|
||||
onSelect: () => {},
|
||||
};
|
||||
|
||||
export function createDragSelectPluginOptions(
|
||||
isEnabled: boolean,
|
||||
onSelect?: (start: number, end: number) => void,
|
||||
color?: string,
|
||||
): DragSelectPluginOptions | false {
|
||||
if (!isEnabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return {
|
||||
onSelect,
|
||||
color,
|
||||
};
|
||||
}
|
||||
|
||||
function createMousedownHandler(
|
||||
chart: Chart,
|
||||
dragData: DragSelectData,
|
||||
): ChartEventHandler {
|
||||
return (ev): void => {
|
||||
const { left, right } = chart.chartArea;
|
||||
|
||||
let { x: startDragPositionX } = ChartHelpers.getRelativePosition(ev, chart);
|
||||
|
||||
if (left > startDragPositionX) {
|
||||
startDragPositionX = left;
|
||||
}
|
||||
|
||||
if (right < startDragPositionX) {
|
||||
startDragPositionX = right;
|
||||
}
|
||||
|
||||
const startValuePositionX = chart.scales.x.getValueForPixel(
|
||||
startDragPositionX,
|
||||
);
|
||||
|
||||
dragData.onDragStart(startDragPositionX, startValuePositionX);
|
||||
};
|
||||
}
|
||||
|
||||
function createMousemoveHandler(
|
||||
chart: Chart,
|
||||
dragData: DragSelectData,
|
||||
): ChartEventHandler {
|
||||
return (ev): void => {
|
||||
if (!dragData.isMouseDown) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { left, right } = chart.chartArea;
|
||||
|
||||
let { x: dragPositionX } = ChartHelpers.getRelativePosition(ev, chart);
|
||||
|
||||
if (left > dragPositionX) {
|
||||
dragPositionX = left;
|
||||
}
|
||||
|
||||
if (right < dragPositionX) {
|
||||
dragPositionX = right;
|
||||
}
|
||||
|
||||
const valuePositionX = chart.scales.x.getValueForPixel(dragPositionX);
|
||||
|
||||
dragData.onDrag(dragPositionX, valuePositionX);
|
||||
chart.update('none');
|
||||
};
|
||||
}
|
||||
|
||||
function createMouseupHandler(
|
||||
chart: Chart,
|
||||
options: DragSelectPluginOptions,
|
||||
dragData: DragSelectData,
|
||||
): ChartEventHandler {
|
||||
return (ev): void => {
|
||||
const { left, right } = chart.chartArea;
|
||||
|
||||
let { x: endRelativePostionX } = ChartHelpers.getRelativePosition(ev, chart);
|
||||
|
||||
if (left > endRelativePostionX) {
|
||||
endRelativePostionX = left;
|
||||
}
|
||||
|
||||
if (right < endRelativePostionX) {
|
||||
endRelativePostionX = right;
|
||||
}
|
||||
|
||||
const endValuePositionX = chart.scales.x.getValueForPixel(
|
||||
endRelativePostionX,
|
||||
);
|
||||
|
||||
dragData.onDragEnd(endRelativePostionX, endValuePositionX);
|
||||
|
||||
chart.update('none');
|
||||
|
||||
if (
|
||||
typeof options.onSelect === 'function' &&
|
||||
typeof dragData.startValuePositionX === 'number' &&
|
||||
typeof dragData.endValuePositionX === 'number'
|
||||
) {
|
||||
const start = Math.min(
|
||||
dragData.startValuePositionX,
|
||||
dragData.endValuePositionX,
|
||||
);
|
||||
const end = Math.max(
|
||||
dragData.startValuePositionX,
|
||||
dragData.endValuePositionX,
|
||||
);
|
||||
|
||||
options.onSelect(start, end);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function createGlobalMouseupHandler(
|
||||
options: DragSelectPluginOptions,
|
||||
dragData: DragSelectData,
|
||||
): () => void {
|
||||
return (): void => {
|
||||
const { isDragging, endRelativePixelPositionX, endValuePositionX } = dragData;
|
||||
|
||||
if (!isDragging) {
|
||||
return;
|
||||
}
|
||||
|
||||
dragData.onDragEnd(
|
||||
endRelativePixelPositionX as number,
|
||||
endValuePositionX as number,
|
||||
);
|
||||
|
||||
if (
|
||||
typeof options.onSelect === 'function' &&
|
||||
typeof dragData.startValuePositionX === 'number' &&
|
||||
typeof dragData.endValuePositionX === 'number'
|
||||
) {
|
||||
const start = Math.min(
|
||||
dragData.startValuePositionX,
|
||||
dragData.endValuePositionX,
|
||||
);
|
||||
const end = Math.max(
|
||||
dragData.startValuePositionX,
|
||||
dragData.endValuePositionX,
|
||||
);
|
||||
|
||||
options.onSelect(start, end);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
class DragSelectData {
|
||||
public isDragging = false;
|
||||
|
||||
public isMouseDown = false;
|
||||
|
||||
public startRelativePixelPositionX: number | null = null;
|
||||
|
||||
public startValuePositionX: number | null | undefined = null;
|
||||
|
||||
public endRelativePixelPositionX: number | null = null;
|
||||
|
||||
public endValuePositionX: number | null | undefined = null;
|
||||
|
||||
public initialize(): void {
|
||||
this.isDragging = false;
|
||||
this.isMouseDown = false;
|
||||
this.startRelativePixelPositionX = null;
|
||||
this.startValuePositionX = null;
|
||||
this.endRelativePixelPositionX = null;
|
||||
this.endValuePositionX = null;
|
||||
}
|
||||
|
||||
public onDragStart(
|
||||
startRelativePixelPositionX: number,
|
||||
startValuePositionX: number | undefined,
|
||||
): void {
|
||||
this.isDragging = false;
|
||||
this.isMouseDown = true;
|
||||
this.startRelativePixelPositionX = startRelativePixelPositionX;
|
||||
this.startValuePositionX = startValuePositionX;
|
||||
this.endRelativePixelPositionX = null;
|
||||
this.endValuePositionX = null;
|
||||
}
|
||||
|
||||
public onDrag(
|
||||
endRelativePixelPositionX: number,
|
||||
endValuePositionX: number | undefined,
|
||||
): void {
|
||||
this.isDragging = true;
|
||||
this.endRelativePixelPositionX = endRelativePixelPositionX;
|
||||
this.endValuePositionX = endValuePositionX;
|
||||
}
|
||||
|
||||
public onDragEnd(
|
||||
endRelativePixelPositionX: number,
|
||||
endValuePositionX: number | undefined,
|
||||
): void {
|
||||
if (!this.isDragging) {
|
||||
this.initialize();
|
||||
return;
|
||||
}
|
||||
|
||||
this.isDragging = false;
|
||||
this.isMouseDown = false;
|
||||
this.endRelativePixelPositionX = endRelativePixelPositionX;
|
||||
this.endValuePositionX = endValuePositionX;
|
||||
}
|
||||
}
|
||||
|
||||
export const createDragSelectPlugin = (): Plugin<
|
||||
keyof ChartTypeRegistry,
|
||||
DragSelectPluginOptions
|
||||
> => {
|
||||
const dragData = new DragSelectData();
|
||||
let pluginOptions: Required<DragSelectPluginOptions>;
|
||||
|
||||
const handlers: ChartDragHandlers = {
|
||||
mousedown: () => {},
|
||||
mousemove: () => {},
|
||||
mouseup: () => {},
|
||||
globalMouseup: () => {},
|
||||
};
|
||||
|
||||
const dragSelectPlugin: Plugin<
|
||||
keyof ChartTypeRegistry,
|
||||
DragSelectPluginOptions
|
||||
> = {
|
||||
id: dragSelectPluginId,
|
||||
start: (chart: Chart, _, passedOptions) => {
|
||||
pluginOptions = mergeDefaultOptions(
|
||||
passedOptions,
|
||||
defaultDragSelectPluginOptions,
|
||||
);
|
||||
|
||||
const { canvas } = chart;
|
||||
|
||||
dragData.initialize();
|
||||
|
||||
const mousedownHandler = createMousedownHandler(chart, dragData);
|
||||
const mousemoveHandler = createMousemoveHandler(chart, dragData);
|
||||
const mouseupHandler = createMouseupHandler(chart, pluginOptions, dragData);
|
||||
const globalMouseupHandler = createGlobalMouseupHandler(
|
||||
pluginOptions,
|
||||
dragData,
|
||||
);
|
||||
|
||||
canvas.addEventListener('mousedown', mousedownHandler, { passive: true });
|
||||
canvas.addEventListener('mousemove', mousemoveHandler, { passive: true });
|
||||
canvas.addEventListener('mouseup', mouseupHandler, { passive: true });
|
||||
document.addEventListener('mouseup', globalMouseupHandler, {
|
||||
passive: true,
|
||||
});
|
||||
|
||||
handlers.mousedown = mousedownHandler;
|
||||
handlers.mousemove = mousemoveHandler;
|
||||
handlers.mouseup = mouseupHandler;
|
||||
handlers.globalMouseup = globalMouseupHandler;
|
||||
},
|
||||
beforeDestroy: (chart: Chart) => {
|
||||
const { canvas } = chart;
|
||||
|
||||
if (!canvas) {
|
||||
return;
|
||||
}
|
||||
|
||||
canvas.removeEventListener('mousedown', handlers.mousedown);
|
||||
canvas.removeEventListener('mousemove', handlers.mousemove);
|
||||
canvas.removeEventListener('mouseup', handlers.mouseup);
|
||||
document.removeEventListener('mouseup', handlers.globalMouseup);
|
||||
},
|
||||
afterDatasetsDraw: (chart: Chart) => {
|
||||
const {
|
||||
startRelativePixelPositionX,
|
||||
endRelativePixelPositionX,
|
||||
isDragging,
|
||||
} = dragData;
|
||||
|
||||
if (startRelativePixelPositionX && endRelativePixelPositionX && isDragging) {
|
||||
const left = Math.min(
|
||||
startRelativePixelPositionX,
|
||||
endRelativePixelPositionX,
|
||||
);
|
||||
const right = Math.max(
|
||||
startRelativePixelPositionX,
|
||||
endRelativePixelPositionX,
|
||||
);
|
||||
const top = chart.chartArea.top - 5;
|
||||
const bottom = chart.chartArea.bottom + 5;
|
||||
|
||||
/* eslint-disable-next-line no-param-reassign */
|
||||
chart.ctx.fillStyle = pluginOptions.color;
|
||||
chart.ctx.fillRect(left, top, right - left, bottom - top);
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
return dragSelectPlugin;
|
||||
};
|
||||
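For orientation, a minimal sketch of how the drag-select plugin added above can be attached to a Chart.js chart, mirroring the wiring the Graph container uses later in this diff; the canvas element, dataset, and callback below are illustrative placeholders, not part of this changeset.

import { Chart } from 'chart.js';

import {
	createDragSelectPlugin,
	createDragSelectPluginOptions,
	dragSelectPluginId,
} from 'components/Graph/Plugin/DragSelect';

// Sketch only: per-chart options live under the plugin id, and the plugin
// instance itself is registered through the chart-level `plugins` array.
const chart = new Chart(canvasElement, {
	type: 'line',
	data: chartData,
	options: {
		plugins: {
			[dragSelectPluginId]: createDragSelectPluginOptions(
				true,
				(start, end) => console.log('selected x-range', start, end),
				'rgba(0, 100, 255, 0.2)',
			),
		},
	},
	plugins: [createDragSelectPlugin()],
});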
@@ -11,7 +11,7 @@ export const emptyGraph = {
ctx.textBaseline = 'middle';
ctx.font = '1.5rem sans-serif';
ctx.fillStyle = `${grey.primary}`;
ctx.fillText('No data to display', width / 2, height / 2);
ctx.fillText('No data', width / 2, height / 2);
ctx.restore();
},
};
164
frontend/src/components/Graph/Plugin/IntersectionCursor.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import { Chart, ChartEvent, ChartTypeRegistry, Plugin } from 'chart.js';
|
||||
import * as ChartHelpers from 'chart.js/helpers';
|
||||
|
||||
// utils
|
||||
import { ChartEventHandler, mergeDefaultOptions } from './utils';
|
||||
|
||||
export const intersectionCursorPluginId = 'intersection-cursor-plugin';
|
||||
|
||||
export type IntersectionCursorPluginOptions = {
|
||||
color?: string;
|
||||
dashSize?: number;
|
||||
gapSize?: number;
|
||||
};
|
||||
|
||||
export const defaultIntersectionCursorPluginOptions: Required<IntersectionCursorPluginOptions> = {
|
||||
color: 'white',
|
||||
dashSize: 3,
|
||||
gapSize: 3,
|
||||
};
|
||||
|
||||
export function createIntersectionCursorPluginOptions(
|
||||
isEnabled: boolean,
|
||||
color?: string,
|
||||
dashSize?: number,
|
||||
gapSize?: number,
|
||||
): IntersectionCursorPluginOptions | false {
|
||||
if (!isEnabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return {
|
||||
color,
|
||||
dashSize,
|
||||
gapSize,
|
||||
};
|
||||
}
|
||||
|
||||
function createMousemoveHandler(
|
||||
chart: Chart,
|
||||
cursorData: IntersectionCursorData,
|
||||
): ChartEventHandler {
|
||||
return (ev: ChartEvent | MouseEvent): void => {
|
||||
const { left, right, top, bottom } = chart.chartArea;
|
||||
|
||||
let { x, y } = ChartHelpers.getRelativePosition(ev, chart);
|
||||
|
||||
if (left > x) {
|
||||
x = left;
|
||||
}
|
||||
|
||||
if (right < x) {
|
||||
x = right;
|
||||
}
|
||||
|
||||
if (y < top) {
|
||||
y = top;
|
||||
}
|
||||
|
||||
if (y > bottom) {
|
||||
y = bottom;
|
||||
}
|
||||
|
||||
cursorData.onMouseMove(x, y);
|
||||
};
|
||||
}
|
||||
|
||||
function createMouseoutHandler(
|
||||
cursorData: IntersectionCursorData,
|
||||
): ChartEventHandler {
|
||||
return (): void => {
|
||||
cursorData.onMouseOut();
|
||||
};
|
||||
}
|
||||
|
||||
class IntersectionCursorData {
|
||||
public positionX: number | null | undefined;
|
||||
|
||||
public positionY: number | null | undefined;
|
||||
|
||||
public initialize(): void {
|
||||
this.positionX = null;
|
||||
this.positionY = null;
|
||||
}
|
||||
|
||||
public onMouseMove(x: number | undefined, y: number | undefined): void {
|
||||
this.positionX = x;
|
||||
this.positionY = y;
|
||||
}
|
||||
|
||||
public onMouseOut(): void {
|
||||
this.positionX = null;
|
||||
this.positionY = null;
|
||||
}
|
||||
}
|
||||
|
||||
export const createIntersectionCursorPlugin = (): Plugin<
|
||||
keyof ChartTypeRegistry,
|
||||
IntersectionCursorPluginOptions
|
||||
> => {
|
||||
const cursorData = new IntersectionCursorData();
|
||||
let pluginOptions: Required<IntersectionCursorPluginOptions>;
|
||||
|
||||
let mousemoveHandler: (ev: ChartEvent | MouseEvent) => void;
|
||||
let mouseoutHandler: (ev: ChartEvent | MouseEvent) => void;
|
||||
|
||||
const intersectionCursorPlugin: Plugin<
|
||||
keyof ChartTypeRegistry,
|
||||
IntersectionCursorPluginOptions
|
||||
> = {
|
||||
id: intersectionCursorPluginId,
|
||||
start: (chart: Chart, _, passedOptions) => {
|
||||
const { canvas } = chart;
|
||||
|
||||
cursorData.initialize();
|
||||
pluginOptions = mergeDefaultOptions(
|
||||
passedOptions,
|
||||
defaultIntersectionCursorPluginOptions,
|
||||
);
|
||||
|
||||
mousemoveHandler = createMousemoveHandler(chart, cursorData);
|
||||
mouseoutHandler = createMouseoutHandler(cursorData);
|
||||
|
||||
canvas.addEventListener('mousemove', mousemoveHandler, { passive: true });
|
||||
canvas.addEventListener('mouseout', mouseoutHandler, { passive: true });
|
||||
},
|
||||
beforeDestroy: (chart: Chart) => {
|
||||
const { canvas } = chart;
|
||||
|
||||
if (!canvas) {
|
||||
return;
|
||||
}
|
||||
|
||||
canvas.removeEventListener('mousemove', mousemoveHandler);
|
||||
canvas.removeEventListener('mouseout', mouseoutHandler);
|
||||
},
|
||||
afterDatasetsDraw: (chart: Chart) => {
|
||||
const { positionX, positionY } = cursorData;
|
||||
|
||||
const lineDashData = [pluginOptions.dashSize, pluginOptions.gapSize];
|
||||
|
||||
if (typeof positionX === 'number' && typeof positionY === 'number') {
|
||||
const { top, bottom, left, right } = chart.chartArea;
|
||||
|
||||
chart.ctx.beginPath();
|
||||
/* eslint-disable-next-line no-param-reassign */
|
||||
chart.ctx.strokeStyle = pluginOptions.color;
|
||||
chart.ctx.setLineDash(lineDashData);
|
||||
chart.ctx.moveTo(left, positionY);
|
||||
chart.ctx.lineTo(right, positionY);
|
||||
chart.ctx.stroke();
|
||||
|
||||
chart.ctx.beginPath();
|
||||
chart.ctx.setLineDash(lineDashData);
|
||||
/* eslint-disable-next-line no-param-reassign */
|
||||
chart.ctx.strokeStyle = pluginOptions.color;
|
||||
chart.ctx.moveTo(positionX, top);
|
||||
chart.ctx.lineTo(positionX, bottom);
|
||||
chart.ctx.stroke();
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
return intersectionCursorPlugin;
|
||||
};
|
||||
@@ -22,87 +22,86 @@ const getOrCreateLegendList = (
|
||||
listContainer.style.height = '100%';
|
||||
listContainer.style.flexWrap = 'wrap';
|
||||
listContainer.style.justifyContent = 'center';
|
||||
listContainer.style.fontSize = '0.75rem';
|
||||
legendContainer?.appendChild(listContainer);
|
||||
}
|
||||
|
||||
return listContainer;
|
||||
};
|
||||
|
||||
export const legend = (id: string, isLonger: boolean): Plugin<ChartType> => {
|
||||
return {
|
||||
id: 'htmlLegend',
|
||||
afterUpdate(chart): void {
|
||||
const ul = getOrCreateLegendList(chart, id || 'legend', isLonger);
|
||||
export const legend = (id: string, isLonger: boolean): Plugin<ChartType> => ({
|
||||
id: 'htmlLegend',
|
||||
afterUpdate(chart): void {
|
||||
const ul = getOrCreateLegendList(chart, id || 'legend', isLonger);
|
||||
|
||||
// Remove old legend items
|
||||
while (ul.firstChild) {
|
||||
ul.firstChild.remove();
|
||||
}
|
||||
// Remove old legend items
|
||||
while (ul.firstChild) {
|
||||
ul.firstChild.remove();
|
||||
}
|
||||
|
||||
// Reuse the built-in legendItems generator
|
||||
const items = get(chart, [
|
||||
'options',
|
||||
'plugins',
|
||||
'legend',
|
||||
'labels',
|
||||
'generateLabels',
|
||||
])
|
||||
? get(chart, ['options', 'plugins', 'legend', 'labels', 'generateLabels'])(
|
||||
chart,
|
||||
)
|
||||
: null;
|
||||
// Reuse the built-in legendItems generator
|
||||
const items = get(chart, [
|
||||
'options',
|
||||
'plugins',
|
||||
'legend',
|
||||
'labels',
|
||||
'generateLabels',
|
||||
])
|
||||
? get(chart, ['options', 'plugins', 'legend', 'labels', 'generateLabels'])(
|
||||
chart,
|
||||
)
|
||||
: null;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
items?.forEach((item: Record<any, any>, index: number) => {
|
||||
const li = document.createElement('li');
|
||||
li.style.alignItems = 'center';
|
||||
li.style.cursor = 'pointer';
|
||||
li.style.display = 'flex';
|
||||
li.style.marginLeft = '10px';
|
||||
li.style.marginTop = '5px';
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
items?.forEach((item: Record<any, any>, index: number) => {
|
||||
const li = document.createElement('li');
|
||||
li.style.alignItems = 'center';
|
||||
li.style.cursor = 'pointer';
|
||||
li.style.display = 'flex';
|
||||
li.style.marginLeft = '10px';
|
||||
// li.style.marginTop = '5px';
|
||||
|
||||
li.onclick = (): void => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
const { type } = chart.config;
|
||||
if (type === 'pie' || type === 'doughnut') {
|
||||
// Pie and doughnut charts only have a single dataset and visibility is per item
|
||||
chart.toggleDataVisibility(index);
|
||||
} else {
|
||||
chart.setDatasetVisibility(
|
||||
item.datasetIndex,
|
||||
!chart.isDatasetVisible(item.datasetIndex),
|
||||
);
|
||||
}
|
||||
chart.update();
|
||||
};
|
||||
|
||||
// Color box
|
||||
const boxSpan = document.createElement('span');
|
||||
boxSpan.style.background = `${item.strokeStyle}` || `${colors[0]}`;
|
||||
boxSpan.style.borderColor = `${item?.strokeStyle}`;
|
||||
boxSpan.style.borderWidth = `${item.lineWidth}px`;
|
||||
boxSpan.style.display = 'inline-block';
|
||||
boxSpan.style.minHeight = '20px';
|
||||
boxSpan.style.marginRight = '10px';
|
||||
boxSpan.style.minWidth = '20px';
|
||||
boxSpan.style.borderRadius = '50%';
|
||||
|
||||
if (item.text) {
|
||||
// Text
|
||||
const textContainer = document.createElement('span');
|
||||
textContainer.style.margin = '0';
|
||||
textContainer.style.padding = '0';
|
||||
textContainer.style.textDecoration = item.hidden ? 'line-through' : '';
|
||||
|
||||
const text = document.createTextNode(item.text);
|
||||
textContainer.appendChild(text);
|
||||
|
||||
li.appendChild(boxSpan);
|
||||
li.appendChild(textContainer);
|
||||
ul.appendChild(li);
|
||||
li.onclick = (): void => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
const { type } = chart.config;
|
||||
if (type === 'pie' || type === 'doughnut') {
|
||||
// Pie and doughnut charts only have a single dataset and visibility is per item
|
||||
chart.toggleDataVisibility(index);
|
||||
} else {
|
||||
chart.setDatasetVisibility(
|
||||
item.datasetIndex,
|
||||
!chart.isDatasetVisible(item.datasetIndex),
|
||||
);
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
};
|
||||
chart.update();
|
||||
};
|
||||
|
||||
// Color box
|
||||
const boxSpan = document.createElement('span');
|
||||
boxSpan.style.background = `${item.strokeStyle}` || `${colors[0]}`;
|
||||
boxSpan.style.borderColor = `${item?.strokeStyle}`;
|
||||
boxSpan.style.borderWidth = `${item.lineWidth}px`;
|
||||
boxSpan.style.display = 'inline-block';
|
||||
boxSpan.style.minHeight = '0.75rem';
|
||||
boxSpan.style.marginRight = '0.5rem';
|
||||
boxSpan.style.minWidth = '0.75rem';
|
||||
boxSpan.style.borderRadius = '50%';
|
||||
|
||||
if (item.text) {
|
||||
// Text
|
||||
const textContainer = document.createElement('span');
|
||||
textContainer.style.margin = '0';
|
||||
textContainer.style.padding = '0';
|
||||
textContainer.style.textDecoration = item.hidden ? 'line-through' : '';
|
||||
|
||||
const text = document.createTextNode(item.text);
|
||||
textContainer.appendChild(text);
|
||||
|
||||
li.appendChild(boxSpan);
|
||||
li.appendChild(textContainer);
|
||||
ul.appendChild(li);
|
||||
}
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
20
frontend/src/components/Graph/Plugin/utils.ts
Normal file
@@ -0,0 +1,20 @@
import { ChartEvent } from 'chart.js';

export type ChartEventHandler = (ev: ChartEvent | MouseEvent) => void;

export function mergeDefaultOptions<T extends Record<string, unknown>>(
options: T,
defaultOptions: Required<T>,
): Required<T> {
const sanitizedOptions = { ...options };
Object.keys(options).forEach((key) => {
if (sanitizedOptions[key as keyof T] === undefined) {
delete sanitizedOptions[key as keyof T];
}
});

return {
...defaultOptions,
...sanitizedOptions,
};
}
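To illustrate the helper above: keys that are explicitly passed as undefined are dropped before spreading, so the defaults win for them. The option values below are illustrative, not taken from this changeset.

import { DragSelectPluginOptions } from 'components/Graph/Plugin/DragSelect';

const passed: DragSelectPluginOptions = {
	color: undefined, // left unset by the caller
	onSelect: (start, end): void => console.log(start, end),
};

const merged = mergeDefaultOptions(passed, {
	color: 'rgba(0, 0, 0, 0.5)',
	onSelect: (): void => {},
});
// merged.color === 'rgba(0, 0, 0, 0.5)' (default retained);
// merged.onSelect is the caller's handler.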
8
frontend/src/components/Graph/helpers.ts
Normal file
@@ -0,0 +1,8 @@
import { themeColors } from 'constants/theme';

export const getAxisLabelColor = (currentTheme: string): string => {
if (currentTheme === 'light') {
return themeColors.black;
}
return themeColors.whiteCream;
};
@@ -23,14 +23,27 @@ import {
|
||||
} from 'chart.js';
|
||||
import * as chartjsAdapter from 'chartjs-adapter-date-fns';
|
||||
import annotationPlugin from 'chartjs-plugin-annotation';
|
||||
import React, { useCallback, useEffect, useRef } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppReducer from 'types/reducer/app';
|
||||
import dayjs from 'dayjs';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import isEqual from 'lodash-es/isEqual';
|
||||
import React, { memo, useCallback, useEffect, useRef } from 'react';
|
||||
|
||||
import { hasData } from './hasData';
|
||||
import { getAxisLabelColor } from './helpers';
|
||||
import { legend } from './Plugin';
|
||||
import {
|
||||
createDragSelectPlugin,
|
||||
createDragSelectPluginOptions,
|
||||
dragSelectPluginId,
|
||||
DragSelectPluginOptions,
|
||||
} from './Plugin/DragSelect';
|
||||
import { emptyGraph } from './Plugin/EmptyGraph';
|
||||
import {
|
||||
createIntersectionCursorPlugin,
|
||||
createIntersectionCursorPluginOptions,
|
||||
intersectionCursorPluginId,
|
||||
IntersectionCursorPluginOptions,
|
||||
} from './Plugin/IntersectionCursor';
|
||||
import { LegendsContainer } from './styles';
|
||||
import { useXAxisTimeUnit } from './xAxisConfig';
|
||||
import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';
|
||||
@@ -66,9 +79,13 @@ function Graph({
|
||||
forceReRender,
|
||||
staticLine,
|
||||
containerHeight,
|
||||
onDragSelect,
|
||||
dragSelectColor,
|
||||
}: GraphProps): JSX.Element {
|
||||
const { isDarkMode } = useSelector<AppState, AppReducer>((state) => state.app);
|
||||
const nearestDatasetIndex = useRef<null | number>(null);
|
||||
const chartRef = useRef<HTMLCanvasElement>(null);
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const currentTheme = isDarkMode ? 'dark' : 'light';
|
||||
const xAxisTimeUnit = useXAxisTimeUnit(data); // Computes the relevant time unit for x axis by analyzing the time stamp data
|
||||
|
||||
@@ -92,7 +109,7 @@ function Graph({
|
||||
}
|
||||
|
||||
if (chartRef.current !== null) {
|
||||
const options: ChartOptions = {
|
||||
const options: CustomChartOptions = {
|
||||
animation: {
|
||||
duration: animate ? 200 : 0,
|
||||
},
|
||||
@@ -136,6 +153,10 @@ function Graph({
|
||||
},
|
||||
tooltip: {
|
||||
callbacks: {
|
||||
title(context) {
|
||||
const date = dayjs(context[0].parsed.x);
|
||||
return date.format('MMM DD, YYYY, HH:mm:ss');
|
||||
},
|
||||
label(context) {
|
||||
let label = context.dataset.label || '';
|
||||
|
||||
@@ -145,10 +166,27 @@ function Graph({
|
||||
if (context.parsed.y !== null) {
|
||||
label += getToolTipValue(context.parsed.y.toString(), yAxisUnit);
|
||||
}
|
||||
|
||||
return label;
|
||||
},
|
||||
labelTextColor(labelData) {
|
||||
if (labelData.datasetIndex === nearestDatasetIndex.current) {
|
||||
return 'rgba(255, 255, 255, 1)';
|
||||
}
|
||||
|
||||
return 'rgba(255, 255, 255, 0.75)';
|
||||
},
|
||||
},
|
||||
},
|
||||
[dragSelectPluginId]: createDragSelectPluginOptions(
|
||||
!!onDragSelect,
|
||||
onDragSelect,
|
||||
dragSelectColor,
|
||||
),
|
||||
[intersectionCursorPluginId]: createIntersectionCursorPluginOptions(
|
||||
!!onDragSelect,
|
||||
currentTheme === 'dark' ? 'white' : 'black',
|
||||
),
|
||||
},
|
||||
layout: {
|
||||
padding: 0,
|
||||
@@ -178,6 +216,7 @@ function Graph({
|
||||
},
|
||||
},
|
||||
type: 'time',
|
||||
ticks: { color: getAxisLabelColor(currentTheme) },
|
||||
},
|
||||
y: {
|
||||
display: true,
|
||||
@@ -186,6 +225,7 @@ function Graph({
|
||||
color: getGridColor(),
|
||||
},
|
||||
ticks: {
|
||||
color: getAxisLabelColor(currentTheme),
|
||||
// Include a dollar sign in the ticks
|
||||
callback(value) {
|
||||
return getYAxisFormattedValue(value.toString(), yAxisUnit);
|
||||
@@ -201,18 +241,50 @@ function Graph({
|
||||
tension: 0,
|
||||
cubicInterpolationMode: 'monotone',
|
||||
},
|
||||
point: {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
hoverBackgroundColor: (ctx: any) => {
|
||||
if (ctx?.element?.options?.borderColor) {
|
||||
return ctx.element.options.borderColor;
|
||||
}
|
||||
return 'rgba(0,0,0,0.1)';
|
||||
},
|
||||
hoverRadius: 5,
|
||||
},
|
||||
},
|
||||
onClick: (event, element, chart) => {
|
||||
if (onClickHandler) {
|
||||
onClickHandler(event, element, chart, data);
|
||||
}
|
||||
},
|
||||
onHover: (event, _, chart) => {
|
||||
if (event.native) {
|
||||
const interactions = chart.getElementsAtEventForMode(
|
||||
event.native,
|
||||
'nearest',
|
||||
{
|
||||
intersect: false,
|
||||
},
|
||||
true,
|
||||
);
|
||||
|
||||
if (interactions[0]) {
|
||||
nearestDatasetIndex.current = interactions[0].datasetIndex;
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const chartHasData = hasData(data);
|
||||
const chartPlugins = [];
|
||||
|
||||
if (!chartHasData) chartPlugins.push(emptyGraph);
|
||||
if (chartHasData) {
|
||||
chartPlugins.push(createIntersectionCursorPlugin());
|
||||
chartPlugins.push(createDragSelectPlugin());
|
||||
} else {
|
||||
chartPlugins.push(emptyGraph);
|
||||
}
|
||||
|
||||
chartPlugins.push(legend(name, data.datasets.length > 3));
|
||||
|
||||
lineChartRef.current = new Chart(chartRef.current, {
|
||||
@@ -235,6 +307,9 @@ function Graph({
|
||||
yAxisUnit,
|
||||
onClickHandler,
|
||||
staticLine,
|
||||
onDragSelect,
|
||||
dragSelectColor,
|
||||
currentTheme,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -249,6 +324,13 @@ function Graph({
|
||||
);
|
||||
}
|
||||
|
||||
type CustomChartOptions = ChartOptions & {
|
||||
plugins: {
|
||||
[dragSelectPluginId]: DragSelectPluginOptions | false;
|
||||
[intersectionCursorPluginId]: IntersectionCursorPluginOptions | false;
|
||||
};
|
||||
};
|
||||
|
||||
interface GraphProps {
|
||||
animate?: boolean;
|
||||
type: ChartType;
|
||||
@@ -261,6 +343,8 @@ interface GraphProps {
|
||||
forceReRender?: boolean | null | number;
|
||||
staticLine?: StaticLineProps | undefined;
|
||||
containerHeight?: string | number;
|
||||
onDragSelect?: (start: number, end: number) => void;
|
||||
dragSelectColor?: string;
|
||||
}
|
||||
|
||||
export interface StaticLineProps {
|
||||
@@ -287,6 +371,11 @@ Graph.defaultProps = {
|
||||
yAxisUnit: undefined,
|
||||
forceReRender: undefined,
|
||||
staticLine: undefined,
|
||||
containerHeight: '85%',
|
||||
containerHeight: '90%',
|
||||
onDragSelect: undefined,
|
||||
dragSelectColor: undefined,
|
||||
};
|
||||
export default Graph;
|
||||
|
||||
export default memo(Graph, (prevProps, nextProps) =>
|
||||
isEqual(prevProps.data, nextProps.data),
|
||||
);
|
||||
|
||||
@@ -1,14 +1,28 @@
|
||||
import { themeColors } from 'constants/theme';
|
||||
import styled from 'styled-components';
|
||||
|
||||
export const LegendsContainer = styled.div`
|
||||
height: 15%;
|
||||
height: 10%;
|
||||
|
||||
* {
|
||||
::-webkit-scrollbar {
|
||||
display: none !important;
|
||||
width: 0.3rem;
|
||||
}
|
||||
::-webkit-scrollbar:horizontal {
|
||||
height: 0.3rem;
|
||||
}
|
||||
::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
::-webkit-scrollbar-thumb {
|
||||
background: ${themeColors.royalGrey};
|
||||
border-radius: 0.625rem;
|
||||
}
|
||||
::-webkit-scrollbar-thumb:hover {
|
||||
background: ${themeColors.matterhornGrey};
|
||||
}
|
||||
::-webkit-scrollbar-corner {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
-ms-overflow-style: none !important; /* IE and Edge */
|
||||
scrollbar-width: none !important; /* Firefox */
|
||||
}
|
||||
`;
|
||||
|
||||
@@ -1,46 +1,23 @@
|
||||
import { Button, Popover } from 'antd';
|
||||
import getStep from 'lib/getStep';
|
||||
import { generateFilterQuery } from 'lib/logs/generateFilterQuery';
|
||||
import React, { memo, useCallback, useMemo } from 'react';
|
||||
import { connect, useDispatch, useSelector } from 'react-redux';
|
||||
import { bindActionCreators, Dispatch } from 'redux';
|
||||
import { ThunkDispatch } from 'redux-thunk';
|
||||
import { getLogs } from 'store/actions/logs/getLogs';
|
||||
import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { Dispatch } from 'redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppActions from 'types/actions';
|
||||
import { SET_SEARCH_QUERY_STRING, TOGGLE_LIVE_TAIL } from 'types/actions/logs';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs';
|
||||
import { ILogsReducer } from 'types/reducer/logs';
|
||||
|
||||
interface AddToQueryHOCProps {
|
||||
fieldKey: string;
|
||||
fieldValue: string;
|
||||
children: React.ReactNode;
|
||||
getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
|
||||
getLogsAggregate: (
|
||||
props: Parameters<typeof getLogsAggregate>[0],
|
||||
) => ReturnType<typeof getLogsAggregate>;
|
||||
}
|
||||
function AddToQueryHOC({
|
||||
fieldKey,
|
||||
fieldValue,
|
||||
children,
|
||||
getLogs,
|
||||
getLogsAggregate,
|
||||
}: AddToQueryHOCProps): JSX.Element {
|
||||
const {
|
||||
searchFilter: { queryString },
|
||||
logLinesPerPage,
|
||||
idStart,
|
||||
idEnd,
|
||||
liveTail,
|
||||
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
|
||||
const dispatch = useDispatch();
|
||||
const dispatch = useDispatch<Dispatch<AppActions>>();
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
const generatedQuery = useMemo(
|
||||
() => generateFilterQuery({ fieldKey, fieldValue, type: 'IN' }),
|
||||
[fieldKey, fieldValue],
|
||||
@@ -56,71 +33,18 @@ function AddToQueryHOC({
|
||||
}
|
||||
dispatch({
|
||||
type: SET_SEARCH_QUERY_STRING,
|
||||
payload: updatedQueryString,
|
||||
payload: {
|
||||
searchQueryString: updatedQueryString,
|
||||
},
|
||||
});
|
||||
if (liveTail === 'STOPPED') {
|
||||
getLogs({
|
||||
q: updatedQueryString,
|
||||
limit: logLinesPerPage,
|
||||
orderBy: 'timestamp',
|
||||
order: 'desc',
|
||||
timestampStart: minTime,
|
||||
timestampEnd: maxTime,
|
||||
...(idStart ? { idGt: idStart } : {}),
|
||||
...(idEnd ? { idLt: idEnd } : {}),
|
||||
});
|
||||
getLogsAggregate({
|
||||
timestampStart: minTime,
|
||||
timestampEnd: maxTime,
|
||||
step: getStep({
|
||||
start: minTime,
|
||||
end: maxTime,
|
||||
inputFormat: 'ns',
|
||||
}),
|
||||
q: updatedQueryString,
|
||||
...(idStart ? { idGt: idStart } : {}),
|
||||
...(idEnd ? { idLt: idEnd } : {}),
|
||||
});
|
||||
} else if (liveTail === 'PLAYING') {
|
||||
dispatch({
|
||||
type: TOGGLE_LIVE_TAIL,
|
||||
payload: 'PAUSED',
|
||||
});
|
||||
setTimeout(
|
||||
() =>
|
||||
dispatch({
|
||||
type: TOGGLE_LIVE_TAIL,
|
||||
payload: liveTail,
|
||||
}),
|
||||
0,
|
||||
);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [
|
||||
dispatch,
|
||||
generatedQuery,
|
||||
getLogs,
|
||||
idEnd,
|
||||
idStart,
|
||||
logLinesPerPage,
|
||||
maxTime,
|
||||
minTime,
|
||||
queryString,
|
||||
}, [dispatch, generatedQuery, queryString]);
|
||||
|
||||
const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
|
||||
fieldKey,
|
||||
]);
|
||||
|
||||
const popOverContent = (
|
||||
<span style={{ fontSize: '0.9rem' }}>Add to query: {fieldKey}</span>
|
||||
);
|
||||
return (
|
||||
<Button
|
||||
size="small"
|
||||
type="text"
|
||||
style={{
|
||||
margin: 0,
|
||||
padding: 0,
|
||||
}}
|
||||
onClick={handleQueryAdd}
|
||||
>
|
||||
<Button size="small" type="text" onClick={handleQueryAdd}>
|
||||
<Popover placement="top" content={popOverContent}>
|
||||
{children}
|
||||
</Popover>
|
||||
@@ -128,20 +52,10 @@ function AddToQueryHOC({
|
||||
);
|
||||
}
|
||||
|
||||
interface DispatchProps {
|
||||
getLogs: (
|
||||
props: Parameters<typeof getLogs>[0],
|
||||
) => (dispatch: Dispatch<AppActions>) => void;
|
||||
getLogsAggregate: (
|
||||
props: Parameters<typeof getLogsAggregate>[0],
|
||||
) => (dispatch: Dispatch<AppActions>) => void;
|
||||
interface AddToQueryHOCProps {
|
||||
fieldKey: string;
|
||||
fieldValue: string;
|
||||
children: React.ReactNode;
|
||||
}
|
||||
|
||||
const mapDispatchToProps = (
|
||||
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
|
||||
): DispatchProps => ({
|
||||
getLogs: bindActionCreators(getLogs, dispatch),
|
||||
getLogsAggregate: bindActionCreators(getLogsAggregate, dispatch),
|
||||
});
|
||||
|
||||
export default connect(null, mapDispatchToProps)(memo(AddToQueryHOC));
|
||||
export default memo(AddToQueryHOC);
|
||||
|
||||
@@ -1,29 +1,28 @@
|
||||
import { Popover } from 'antd';
|
||||
import React from 'react';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import React, { useCallback, useEffect } from 'react';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
|
||||
interface CopyClipboardHOCProps {
|
||||
textToCopy: string;
|
||||
children: React.ReactNode;
|
||||
}
|
||||
function CopyClipboardHOC({
|
||||
textToCopy,
|
||||
children,
|
||||
}: CopyClipboardHOCProps): JSX.Element {
|
||||
const [, setCopy] = useCopyToClipboard();
|
||||
const [value, setCopy] = useCopyToClipboard();
|
||||
const { notifications } = useNotifications();
|
||||
useEffect(() => {
|
||||
if (value.value) {
|
||||
notifications.success({
|
||||
message: 'Copied to clipboard',
|
||||
});
|
||||
}
|
||||
}, [value, notifications]);
|
||||
|
||||
const onClick = useCallback((): void => {
|
||||
setCopy(textToCopy);
|
||||
}, [setCopy, textToCopy]);
|
||||
|
||||
return (
|
||||
<span
|
||||
style={{
|
||||
margin: 0,
|
||||
padding: 0,
|
||||
cursor: 'pointer',
|
||||
}}
|
||||
onClick={(): void => setCopy(textToCopy)}
|
||||
onKeyDown={(): void => setCopy(textToCopy)}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
>
|
||||
<span onClick={onClick} onKeyDown={onClick} role="button" tabIndex={0}>
|
||||
<Popover
|
||||
placement="top"
|
||||
content={<span style={{ fontSize: '0.9rem' }}>Copy to clipboard</span>}
|
||||
@@ -34,4 +33,9 @@ function CopyClipboardHOC({
|
||||
);
|
||||
}
|
||||
|
||||
interface CopyClipboardHOCProps {
|
||||
textToCopy: string;
|
||||
children: React.ReactNode;
|
||||
}
|
||||
|
||||
export default CopyClipboardHOC;
|
||||
|
||||
@@ -3,18 +3,23 @@ import { CopyFilled, ExpandAltOutlined } from '@ant-design/icons';
|
||||
import { Button, Divider, Row, Typography } from 'antd';
|
||||
import { map } from 'd3';
|
||||
import dayjs from 'dayjs';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
// utils
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
// interfaces
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SET_DETAILED_LOG_DATA } from 'types/actions/logs';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
import { ILogsReducer } from 'types/reducer/logs';
|
||||
|
||||
// components
|
||||
import AddToQueryHOC from '../AddToQueryHOC';
|
||||
import CopyClipboardHOC from '../CopyClipboardHOC';
|
||||
import { Container } from './styles';
|
||||
// styles
|
||||
import { Container, LogContainer, Text, TextContainer } from './styles';
|
||||
import { isValidLogField } from './util';
|
||||
|
||||
interface LogFieldProps {
|
||||
@@ -23,23 +28,20 @@ interface LogFieldProps {
|
||||
}
|
||||
function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element {
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
overflow: 'hidden',
|
||||
width: '100%',
|
||||
}}
|
||||
>
|
||||
<Typography.Text type="secondary">{fieldKey}</Typography.Text>
|
||||
<TextContainer>
|
||||
<Text ellipsis type="secondary">
|
||||
{fieldKey}
|
||||
</Text>
|
||||
<CopyClipboardHOC textToCopy={fieldValue}>
|
||||
<Typography.Text ellipsis>
|
||||
{': '}
|
||||
{fieldValue}
|
||||
</Typography.Text>
|
||||
</CopyClipboardHOC>
|
||||
</div>
|
||||
</TextContainer>
|
||||
);
|
||||
}
|
||||
|
||||
function LogSelectedField({
|
||||
fieldKey = '',
|
||||
fieldValue = '',
|
||||
@@ -73,16 +75,19 @@ function LogSelectedField({
|
||||
);
|
||||
}
|
||||
|
||||
interface LogItemProps {
|
||||
interface ListLogViewProps {
|
||||
logData: ILog;
|
||||
}
|
||||
function LogItem({ logData }: LogItemProps): JSX.Element {
|
||||
function ListLogView({ logData }: ListLogViewProps): JSX.Element {
|
||||
const {
|
||||
fields: { selected },
|
||||
} = useSelector<AppState, ILogsReducer>((state) => state.logs);
|
||||
|
||||
const dispatch = useDispatch();
|
||||
const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
|
||||
|
||||
const [, setCopy] = useCopyToClipboard();
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const handleDetailedView = useCallback(() => {
|
||||
dispatch({
|
||||
@@ -93,40 +98,40 @@ function LogItem({ logData }: LogItemProps): JSX.Element {
|
||||
|
||||
const handleCopyJSON = (): void => {
|
||||
setCopy(JSON.stringify(logData, null, 2));
|
||||
notifications.success({
|
||||
message: 'Copied to clipboard',
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<Container>
|
||||
<div style={{ maxWidth: '100%' }}>
|
||||
<div>
|
||||
<div>
|
||||
{'{'}
|
||||
<div style={{ marginLeft: '0.5rem' }}>
|
||||
<LogGeneralField
|
||||
fieldKey="log"
|
||||
fieldValue={flattenLogData.body as never}
|
||||
/>
|
||||
{flattenLogData.stream && (
|
||||
<LogContainer>
|
||||
<>
|
||||
<LogGeneralField fieldKey="log" fieldValue={flattenLogData.body} />
|
||||
{flattenLogData.stream && (
|
||||
<LogGeneralField fieldKey="stream" fieldValue={flattenLogData.stream} />
|
||||
)}
|
||||
<LogGeneralField
|
||||
fieldKey="stream"
|
||||
fieldValue={flattenLogData.stream as never}
|
||||
fieldKey="timestamp"
|
||||
fieldValue={dayjs((flattenLogData.timestamp as never) / 1e6).format()}
|
||||
/>
|
||||
)}
|
||||
<LogGeneralField
|
||||
fieldKey="timestamp"
|
||||
fieldValue={dayjs((flattenLogData.timestamp as never) / 1e6).format()}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
</LogContainer>
|
||||
{'}'}
|
||||
</div>
|
||||
<div>
|
||||
{map(selected, (field) => {
|
||||
return isValidLogField(flattenLogData[field.name] as never) ? (
|
||||
{map(selected, (field) =>
|
||||
isValidLogField(flattenLogData[field.name] as never) ? (
|
||||
<LogSelectedField
|
||||
key={field.name}
|
||||
fieldKey={field.name}
|
||||
fieldValue={flattenLogData[field.name] as never}
|
||||
/>
|
||||
) : null;
|
||||
})}
|
||||
) : null,
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<Divider style={{ padding: 0, margin: '0.4rem 0', opacity: 0.5 }} />
|
||||
@@ -154,4 +159,4 @@ function LogItem({ logData }: LogItemProps): JSX.Element {
|
||||
);
|
||||
}
|
||||
|
||||
export default LogItem;
|
||||
export default ListLogView;
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Card } from 'antd';
|
||||
import { Card, Typography } from 'antd';
|
||||
import styled, { keyframes } from 'styled-components';
|
||||
|
||||
const fadeInAnimation = keyframes`
|
||||
@@ -16,3 +16,20 @@ export const Container = styled(Card)`
|
||||
animation-duration: 0.2s;
|
||||
animation-timing-function: ease-in;
|
||||
`;
|
||||
|
||||
export const Text = styled(Typography.Text)`
|
||||
&&& {
|
||||
min-width: 1.5rem;
|
||||
white-space: nowrap;
|
||||
}
|
||||
`;
|
||||
|
||||
export const TextContainer = styled.div`
|
||||
display: flex;
|
||||
overflow: hidden;
|
||||
width: 100%;
|
||||
`;
|
||||
|
||||
export const LogContainer = styled.div`
|
||||
margin-left: 0.5rem;
|
||||
`;
|
||||
5
frontend/src/components/Logs/RawLogView/config.ts
Normal file
@@ -0,0 +1,5 @@
export const rawLineStyle: React.CSSProperties = {
marginBottom: 0,
fontFamily: "'Fira Code', monospace",
fontWeight: 300,
};
48
frontend/src/components/Logs/RawLogView/index.tsx
Normal file
@@ -0,0 +1,48 @@
|
||||
import { ExpandAltOutlined } from '@ant-design/icons';
|
||||
import { Typography } from 'antd';
|
||||
import dayjs from 'dayjs';
|
||||
// hooks
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
// interfaces
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
import { rawLineStyle } from './config';
|
||||
// styles
|
||||
import { ExpandIconWrapper, RawLogViewContainer } from './styles';
|
||||
|
||||
interface RawLogViewProps {
|
||||
data: ILog;
|
||||
linesPerRow: number;
|
||||
onClickExpand: (log: ILog) => void;
|
||||
}
|
||||
|
||||
function RawLogView(props: RawLogViewProps): JSX.Element {
|
||||
const { data, linesPerRow, onClickExpand } = props;
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const text = useMemo(
|
||||
() => `${dayjs(data.timestamp / 1e6).format()} | ${data.body}`,
|
||||
[data.timestamp, data.body],
|
||||
);
|
||||
|
||||
const ellipsis = useMemo(() => ({ rows: linesPerRow }), [linesPerRow]);
|
||||
|
||||
const handleClickExpand = useCallback(() => {
|
||||
onClickExpand(data);
|
||||
}, [onClickExpand, data]);
|
||||
|
||||
return (
|
||||
<RawLogViewContainer wrap={false} align="middle" $isDarkMode={isDarkMode}>
|
||||
<ExpandIconWrapper flex="30px" onClick={handleClickExpand}>
|
||||
<ExpandAltOutlined />
|
||||
</ExpandIconWrapper>
|
||||
<Typography.Paragraph style={rawLineStyle} ellipsis={ellipsis}>
|
||||
{text}
|
||||
</Typography.Paragraph>
|
||||
</RawLogViewContainer>
|
||||
);
|
||||
}
|
||||
|
||||
export default RawLogView;
|
||||
24
frontend/src/components/Logs/RawLogView/styles.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { blue } from '@ant-design/colors';
|
||||
import { Col, Row } from 'antd';
|
||||
import styled from 'styled-components';
|
||||
|
||||
export const RawLogViewContainer = styled(Row)<{ $isDarkMode: boolean }>`
|
||||
width: 100%;
|
||||
font-weight: 700;
|
||||
font-size: 0.625rem;
|
||||
line-height: 1.25rem;
|
||||
|
||||
transition: background-color 0.2s ease-in;
|
||||
|
||||
&:hover {
|
||||
background-color: ${({ $isDarkMode }): string =>
|
||||
$isDarkMode ? 'rgba(255,255,255,0.1)' : 'rgba(0, 0, 0, 0.1)'};
|
||||
}
|
||||
`;
|
||||
|
||||
export const ExpandIconWrapper = styled(Col)`
|
||||
color: ${blue[6]};
|
||||
padding: 0.25rem 0.375rem;
|
||||
cursor: pointer;
|
||||
font-size: 12px;
|
||||
`;
|
||||
12
frontend/src/components/Logs/TableView/config.ts
Normal file
@@ -0,0 +1,12 @@
import { TableProps } from 'antd';

export const defaultCellStyle: React.CSSProperties = {
paddingTop: 4,
paddingBottom: 6,
paddingRight: 8,
paddingLeft: 8,
};

export const tableScroll: TableProps<Record<string, unknown>>['scroll'] = {
x: true,
};
106
frontend/src/components/Logs/TableView/index.tsx
Normal file
@@ -0,0 +1,106 @@
|
||||
import { ExpandAltOutlined } from '@ant-design/icons';
|
||||
import { Table, Typography } from 'antd';
|
||||
import { ColumnsType, ColumnType } from 'antd/es/table';
|
||||
import dayjs from 'dayjs';
|
||||
// utils
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
import React, { useMemo } from 'react';
|
||||
import { IField } from 'types/api/logs/fields';
|
||||
// interfaces
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
// styles
|
||||
import { ExpandIconWrapper } from '../RawLogView/styles';
|
||||
// config
|
||||
import { defaultCellStyle, tableScroll } from './config';
|
||||
|
||||
type ColumnTypeRender<T = unknown> = ReturnType<
|
||||
NonNullable<ColumnType<T>['render']>
|
||||
>;
|
||||
|
||||
type LogsTableViewProps = {
|
||||
logs: ILog[];
|
||||
fields: IField[];
|
||||
linesPerRow: number;
|
||||
onClickExpand: (log: ILog) => void;
|
||||
};
|
||||
|
||||
function LogsTableView(props: LogsTableViewProps): JSX.Element {
|
||||
const { logs, fields, linesPerRow, onClickExpand } = props;
|
||||
|
||||
const flattenLogData = useMemo(() => logs.map((log) => FlatLogData(log)), [
|
||||
logs,
|
||||
]);
|
||||
|
||||
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
|
||||
const fieldColumns: ColumnsType<Record<string, unknown>> = fields.map(
|
||||
({ name }) => ({
|
||||
title: name,
|
||||
dataIndex: name,
|
||||
key: name,
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: defaultCellStyle,
|
||||
},
|
||||
children: (
|
||||
<Typography.Paragraph ellipsis={{ rows: linesPerRow }}>
|
||||
{field}
|
||||
</Typography.Paragraph>
|
||||
),
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
return [
|
||||
{
|
||||
title: '',
|
||||
dataIndex: 'id',
|
||||
key: 'expand',
|
||||
// https://github.com/ant-design/ant-design/discussions/36886
|
||||
render: (_, item): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: defaultCellStyle,
|
||||
},
|
||||
children: (
|
||||
<ExpandIconWrapper
|
||||
onClick={(): void => {
|
||||
onClickExpand((item as unknown) as ILog);
|
||||
}}
|
||||
>
|
||||
<ExpandAltOutlined />
|
||||
</ExpandIconWrapper>
|
||||
),
|
||||
}),
|
||||
},
|
||||
{
|
||||
title: 'Timestamp',
|
||||
dataIndex: 'timestamp',
|
||||
key: 'timestamp',
|
||||
// https://github.com/ant-design/ant-design/discussions/36886
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => {
|
||||
const date = dayjs(field / 1e6).format();
|
||||
return {
|
||||
props: {
|
||||
style: defaultCellStyle,
|
||||
},
|
||||
children: <span>{date}</span>,
|
||||
};
|
||||
},
|
||||
},
|
||||
...fieldColumns,
|
||||
];
|
||||
}, [fields, linesPerRow, onClickExpand]);
|
||||
|
||||
return (
|
||||
<Table
|
||||
columns={columns}
|
||||
dataSource={flattenLogData}
|
||||
pagination={false}
|
||||
rowKey="id"
|
||||
bordered
|
||||
scroll={tableScroll}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default LogsTableView;
|
||||
@@ -11,7 +11,7 @@ function CustomModal({
  return (
    <Modal
      title={title}
      visible={isModalVisible}
      open={isModalVisible}
      footer={footer}
      closable={closable}
    >

@@ -1,7 +1,3 @@
/**
 * @jest-environment jsdom
 */

import { render } from '@testing-library/react';
import React from 'react';
import { Provider } from 'react-redux';

1 frontend/src/components/NotFound/constant.ts Normal file
@@ -0,0 +1 @@
export const defaultText = 'Ah, seems like we reached a dead end!';

@@ -2,45 +2,52 @@ import getLocalStorageKey from 'api/browser/localstorage/get';
import NotFoundImage from 'assets/NotFound';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import React from 'react';
import React, { useCallback } from 'react';
import { useDispatch } from 'react-redux';
import { Dispatch } from 'redux';
import AppActions from 'types/actions';
import { LOGGED_IN } from 'types/actions/app';

import { defaultText } from './constant';
import { Button, Container, Text, TextContainer } from './styles';

function NotFound(): JSX.Element {
function NotFound({ text = defaultText }: Props): JSX.Element {
  const dispatch = useDispatch<Dispatch<AppActions>>();
  const isLoggedIn = getLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN);

  const onClickHandler = useCallback(() => {
    if (isLoggedIn) {
      dispatch({
        type: LOGGED_IN,
        payload: {
          isLoggedIn: true,
        },
      });
    }
  }, [dispatch, isLoggedIn]);

  return (
    <Container>
      <NotFoundImage />

      <TextContainer>
        <Text>Ah, seems like we reached a dead end!</Text>
        <Text>{text}</Text>
        <Text>Page Not Found</Text>
      </TextContainer>

      <Button
        onClick={(): void => {
          if (isLoggedIn) {
            dispatch({
              type: LOGGED_IN,
              payload: {
                isLoggedIn: true,
              },
            });
          }
        }}
        to={ROUTES.APPLICATION}
        tabIndex={0}
      >
      <Button onClick={onClickHandler} to={ROUTES.APPLICATION} tabIndex={0}>
        Return To Services Page
      </Button>
    </Container>
  );
}

interface Props {
  text?: string;
}

NotFound.defaultProps = {
  text: defaultText,
};

export default NotFound;

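Not part of the diff — the practical effect of this change is that NotFound now accepts an optional text prop and falls back to defaultText; a hypothetical call site:

<NotFound />                                      // 'Ah, seems like we reached a dead end!'
<NotFound text="This dashboard does not exist" /> // custom message, same layout
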
@@ -48,9 +48,9 @@ function ReleaseNote({ path }: ReleaseNoteProps): JSX.Element | null {
    (state) => state.app,
  );

  const c = allComponentMap.find((item) => {
    return item.match(path, currentVersion, userFlags);
  });
  const c = allComponentMap.find((item) =>
    item.match(path, currentVersion, userFlags),
  );

  if (!c) {
    return null;

44 frontend/src/components/ResizeTable/ResizableHeader.tsx Normal file
@@ -0,0 +1,44 @@
import React, { useMemo } from 'react';
import { Resizable, ResizeCallbackData } from 'react-resizable';

import { enableUserSelectHack } from './config';
import { SpanStyle } from './styles';

function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
  const { onResize, width, ...restProps } = props;

  const handle = useMemo(
    () => (
      <SpanStyle
        className="react-resizable-handle"
        onClick={(e): void => e.stopPropagation()}
      />
    ),
    [],
  );

  if (!width) {
    // eslint-disable-next-line react/jsx-props-no-spreading
    return <th {...restProps} />;
  }

  return (
    <Resizable
      width={width}
      height={0}
      handle={handle}
      onResize={onResize}
      draggableOpts={enableUserSelectHack}
    >
      {/* eslint-disable-next-line react/jsx-props-no-spreading */}
      <th {...restProps} />
    </Resizable>
  );
}

interface ResizableHeaderProps {
  onResize: (e: React.SyntheticEvent<Element>, data: ResizeCallbackData) => void;
  width: number;
}

export default ResizableHeader;

51 frontend/src/components/ResizeTable/ResizeTable.tsx Normal file
@@ -0,0 +1,51 @@
import { Table } from 'antd';
import type { TableProps } from 'antd/es/table';
import { ColumnsType } from 'antd/lib/table';
import React, { useCallback, useMemo, useState } from 'react';
import { ResizeCallbackData } from 'react-resizable';

import ResizableHeader from './ResizableHeader';

// eslint-disable-next-line @typescript-eslint/no-explicit-any
function ResizeTable({ columns, ...restprops }: TableProps<any>): JSX.Element {
  const [columnsData, setColumns] = useState<ColumnsType>(columns || []);

  const handleResize = useCallback(
    (index: number) => (
      _e: React.SyntheticEvent<Element>,
      { size }: ResizeCallbackData,
    ): void => {
      const newColumns = [...columnsData];
      newColumns[index] = {
        ...newColumns[index],
        width: size.width,
      };
      setColumns(newColumns);
    },
    [columnsData],
  );

  const mergeColumns = useMemo(
    () =>
      columnsData.map((col, index) => ({
        ...col,
        onHeaderCell: (column: ColumnsType<unknown>[number]): unknown => ({
          width: column.width,
          onResize: handleResize(index),
        }),
      })),
    [columnsData, handleResize],
  );

  return (
    <Table
      // eslint-disable-next-line react/jsx-props-no-spreading
      {...restprops}
      components={{ header: { cell: ResizableHeader } }}
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      columns={mergeColumns as ColumnsType<any>}
    />
  );
}

export default ResizeTable;

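Not part of the diff — a minimal usage sketch for ResizeTable; the column definitions and rows variable below are illustrative only. Note that columns need explicit width values, since ResizableHeader falls back to a plain <th> when width is missing:

const columns: ColumnsType<Record<string, unknown>> = [
  { title: 'Name', dataIndex: 'name', key: 'name', width: 100 },
  { title: 'Type', dataIndex: 'type', key: 'type', width: 80 },
];

// Drop-in replacement for antd's Table; all other TableProps are spread through.
<ResizeTable columns={columns} dataSource={rows} rowKey="id" />;
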
1 frontend/src/components/ResizeTable/config.ts Normal file
@@ -0,0 +1 @@
export const enableUserSelectHack = { enableUserSelectHack: false };

4 frontend/src/components/ResizeTable/index.ts Normal file
@@ -0,0 +1,4 @@
import ResizableHeader from './ResizableHeader';
import ResizeTable from './ResizeTable';

export { ResizableHeader, ResizeTable };

11 frontend/src/components/ResizeTable/styles.ts Normal file
@@ -0,0 +1,11 @@
import styled from 'styled-components';

export const SpanStyle = styled.span`
  position: absolute;
  right: -5px;
  bottom: 0;
  z-index: 1;
  width: 10px;
  height: 100%;
  cursor: col-resize;
`;

@@ -4,9 +4,9 @@ import React from 'react';

import { SpinerStyle } from './styles';

function Spinner({ size, tip, height }: SpinnerProps): JSX.Element {
function Spinner({ size, tip, height, style }: SpinnerProps): JSX.Element {
  return (
    <SpinerStyle height={height}>
    <SpinerStyle height={height} style={style}>
      <Spin spinning size={size} tip={tip} indicator={<LoadingOutlined spin />} />
    </SpinerStyle>
  );
@@ -16,11 +16,13 @@ interface SpinnerProps {
  size?: SpinProps['size'];
  tip?: SpinProps['tip'];
  height?: React.CSSProperties['height'];
  style?: React.CSSProperties;
}
Spinner.defaultProps = {
  size: undefined,
  tip: undefined,
  height: undefined,
  style: {},
};

export default Spinner;

@@ -6,18 +6,16 @@ import React from 'react';
function TextToolTip({ text, url }: TextToolTipProps): JSX.Element {
  return (
    <Tooltip
      overlay={(): JSX.Element => {
        return (
          <div>
            {`${text} `}
            {url && (
              <a href={url} rel="noopener noreferrer" target="_blank">
                here
              </a>
            )}
          </div>
        );
      }}
      overlay={(): JSX.Element => (
        <div>
          {`${text} `}
          {url && (
            <a href={url} rel="noopener noreferrer" target="_blank">
              here
            </a>
          )}
        </div>
      )}
    >
      <QuestionCircleFilled style={{ fontSize: '1.3125rem' }} />
    </Tooltip>

@@ -8,7 +8,6 @@ export const TextContainer = styled.div<TextContainerProps>`
  display: flex;

  > button {
    margin-left: ${({ noButtonMargin }): string => {
      return noButtonMargin ? '0' : '0.5rem';
    }}
    margin-left: ${({ noButtonMargin }): string =>
      noButtonMargin ? '0' : '0.5rem'}
`;

@@ -3,4 +3,7 @@ export enum LOCALSTORAGE {
  IS_LOGGED_IN = 'IS_LOGGED_IN',
  AUTH_TOKEN = 'AUTH_TOKEN',
  REFRESH_AUTH_TOKEN = 'REFRESH_AUTH_TOKEN',
  THEME = 'THEME',
  LOGS_VIEW_MODE = 'LOGS_VIEW_MODE',
  LOGS_LINES_PER_ROW = 'LOGS_LINES_PER_ROW',
}

@@ -8,11 +8,11 @@ export const OperatorConversions: Array<{
  {
    label: 'IN',
    metricValue: '=~',
    traceValue: 'in',
    traceValue: 'In',
  },
  {
    label: 'Not IN',
    metricValue: '!~',
    traceValue: 'not in',
    traceValue: 'NotIn',
  },
];

43 frontend/src/constants/theme.ts Normal file
@@ -0,0 +1,43 @@
const themeColors = {
  chartcolors: {
    dodgerBlue: '#2F80ED',
    mediumOrchid: '#BB6BD9',
    seaBuckthorn: '#F2994A',
    seaGreen: '#219653',
    turquoiseBlue: '#56CCF2',
    festivalOrange: '#F2C94C',
    silver: '#BDBDBD',
    outrageousOrange: '#FF6633',
    roseBud: '#FFB399',
    magentaPink: '#FF33FF',
    canary: '#FFFF99',
    deepSkyBlue: '#00B3E6',
    goldTips: '#E6B333',
    royalBlue: '#3366E6',
    avocado: '#999966',
    mintGreen: '#99FF99',
    chestnut: '#B34D4D',
    lima: '#80B300',
    olive: '#809900',
    beautyBush: '#E6B3B3',
    danube: '#6680B3',
    oliveDrab: '#66991A',
    lavenderRose: '#FF99E6',
    electricLime: '#CCFF1A',
    radicalRed: '#FF1A66',
    harleyOrange: '#E6331A',
    turquoise: '#33FFCC',
    gladeGreen: '#66994D',
    hemlock: '#66664D',
    vidaLoca: '#4D8000',
    rust: '#B33300',
  },
  errorColor: '#d32f2f',
  royalGrey: '#888888',
  matterhornGrey: '#555555',
  whiteCream: '#ffffffd5',
  black: '#000000',
  lightgrey: '#ddd',
};

export { themeColors };

@@ -1,8 +1,10 @@
/* eslint-disable react/display-name */
import { Button, notification, Table } from 'antd';
import { Button } from 'antd';
import { ColumnsType } from 'antd/lib/table';
import { ResizeTable } from 'components/ResizeTable';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import React, { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
@@ -16,7 +18,7 @@ import Delete from './Delete';

function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
  const { t } = useTranslation(['channels']);
  const [notifications, Element] = notification.useNotification();
  const { notifications } = useNotifications();
  const [channels, setChannels] = useState<Channels[]>(allChannels);
  const { role } = useSelector<AppState, AppReducer>((state) => state.app);
  const [action] = useComponentPermission(['new_alert_action'], role);
@@ -34,11 +36,13 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
      title: t('column_channel_name'),
      dataIndex: 'name',
      key: 'name',
      width: 100,
    },
    {
      title: t('column_channel_type'),
      dataIndex: 'type',
      key: 'type',
      width: 80,
    },
  ];

@@ -48,6 +52,7 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
      dataIndex: 'id',
      key: 'action',
      align: 'center',
      width: 80,
      render: (id: string): JSX.Element => (
        <>
          <Button onClick={(): void => onClickEditHandler(id)} type="link">
@@ -59,13 +64,7 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
    });
  }

  return (
    <>
      {Element}

      <Table rowKey="id" dataSource={channels} columns={columns} />
    </>
  );
  return <ResizeTable columns={columns} dataSource={channels} rowKey="id" />;
}

interface AlertChannelsProps {

@@ -1,5 +1,5 @@
import { Button } from 'antd';
import { NotificationInstance } from 'antd/lib/notification';
import { NotificationInstance } from 'antd/es/notification/interface';
import deleteChannel from 'api/channels/delete';
import React, { useState } from 'react';
import { useTranslation } from 'react-i18next';

9 frontend/src/container/AllError/constant.ts Normal file
@@ -0,0 +1,9 @@
const DEFAULT_FILTER_VALUE = '';
const EXCEPTION_TYPE_FILTER_NAME = 'exceptionType';
const SERVICE_NAME_FILTER_NAME = 'serviceName';

export {
  DEFAULT_FILTER_VALUE,
  EXCEPTION_TYPE_FILTER_NAME,
  SERVICE_NAME_FILTER_NAME,
};

@@ -3,20 +3,22 @@ import {
  Button,
  Card,
  Input,
  notification,
  Space,
  Table,
  TableProps,
  Tooltip,
  Typography,
} from 'antd';
import { ColumnType } from 'antd/es/table';
import { ColumnType, TablePaginationConfig } from 'antd/es/table';
import { FilterValue, SorterResult } from 'antd/es/table/interface';
import { ColumnsType } from 'antd/lib/table';
import { FilterConfirmProps } from 'antd/lib/table/interface';
import getAll from 'api/errors/getAll';
import getErrorCounts from 'api/errors/getErrorCounts';
import { ResizeTable } from 'components/ResizeTable';
import ROUTES from 'constants/routes';
import dayjs from 'dayjs';
import { useNotifications } from 'hooks/useNotifications';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import React, { useCallback, useEffect, useMemo } from 'react';
@@ -29,8 +31,13 @@ import { ErrorResponse, SuccessResponse } from 'types/api';
import { Exception, PayloadProps } from 'types/api/errors/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';

import { FilterDropdownExtendsProps } from './types';
import {
  extractFilterValues,
  getDefaultFilterValue,
  getDefaultOrder,
  getFilterString,
  getFilterValues,
  getNanoSeconds,
  getOffSet,
  getOrder,
@@ -43,15 +50,27 @@ function AllErrors(): JSX.Element {
  const { maxTime, minTime, loading } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );
  const { search, pathname } = useLocation();
  const params = useMemo(() => new URLSearchParams(search), [search]);

  const { pathname } = useLocation();
  const params = useUrlQuery();
  const { t } = useTranslation(['common']);

  const updatedOrder = getOrder(params.get(urlKey.order));
  const getUpdatedOffset = getOffSet(params.get(urlKey.offset));
  const getUpdatedParams = getOrderParams(params.get(urlKey.orderParam));
  const getUpdatedPageSize = getUpdatePageSize(params.get(urlKey.pageSize));
  const {
    updatedOrder,
    getUpdatedOffset,
    getUpdatedParams,
    getUpdatedPageSize,
    getUpdatedExceptionType,
    getUpdatedServiceName,
  } = useMemo(
    () => ({
      updatedOrder: getOrder(params.get(urlKey.order)),
      getUpdatedOffset: getOffSet(params.get(urlKey.offset)),
      getUpdatedParams: getOrderParams(params.get(urlKey.orderParam)),
      getUpdatedPageSize: getUpdatePageSize(params.get(urlKey.pageSize)),
      getUpdatedExceptionType: getFilterString(params.get(urlKey.exceptionType)),
      getUpdatedServiceName: getFilterString(params.get(urlKey.serviceName)),
    }),
    [params],
  );

  const updatedPath = useMemo(
    () =>
@@ -60,6 +79,8 @@ function AllErrors(): JSX.Element {
        offset: getUpdatedOffset,
        orderParam: getUpdatedParams,
        pageSize: getUpdatedPageSize,
        exceptionType: getUpdatedExceptionType,
        serviceName: getUpdatedServiceName,
      })}`,
    [
      pathname,
@@ -67,6 +88,8 @@ function AllErrors(): JSX.Element {
      getUpdatedOffset,
      getUpdatedParams,
      getUpdatedPageSize,
      getUpdatedExceptionType,
      getUpdatedServiceName,
    ],
  );

@@ -81,26 +104,38 @@ function AllErrors(): JSX.Element {
        limit: getUpdatedPageSize,
        offset: getUpdatedOffset,
        orderParam: getUpdatedParams,
        exceptionType: getUpdatedExceptionType,
        serviceName: getUpdatedServiceName,
      }),
      enabled: !loading,
    },
    {
      queryKey: ['getErrorCounts', maxTime, minTime],
      queryKey: [
        'getErrorCounts',
        maxTime,
        minTime,
        getUpdatedExceptionType,
        getUpdatedServiceName,
      ],
      queryFn: (): Promise<ErrorResponse | SuccessResponse<number>> =>
        getErrorCounts({
          end: maxTime,
          start: minTime,
          exceptionType: getUpdatedExceptionType,
          serviceName: getUpdatedServiceName,
        }),
      enabled: !loading,
    },
  ]);
  const { notifications } = useNotifications();

  useEffect(() => {
    if (data?.error) {
      notification.error({
      notifications.error({
        message: data.error || t('something_went_wrong'),
      });
    }
  }, [data?.error, data?.payload, t]);
  }, [data?.error, data?.payload, t, notifications]);

  const getDateValue = (value: string): JSX.Element => (
    <Typography>{dayjs(value).format('DD/MM/YYYY HH:mm:ss A')}</Typography>
@@ -108,43 +143,81 @@ function AllErrors(): JSX.Element {

  const filterIcon = useCallback(() => <SearchOutlined />, []);

  const handleSearch = (
    confirm: (param?: FilterConfirmProps) => void,
  ): VoidFunction => (): void => {
    confirm();
  };

  const filterDropdownWrapper = useCallback(
    ({ setSelectedKeys, selectedKeys, confirm, placeholder }) => {
      return (
        <Card size="small">
          <Space align="start" direction="vertical">
            <Input
              placeholder={placeholder}
              value={selectedKeys[0]}
              onChange={(e): void =>
                setSelectedKeys(e.target.value ? [e.target.value] : [])
              }
              allowClear
              onPressEnter={handleSearch(confirm)}
            />
            <Button
              type="primary"
              onClick={handleSearch(confirm)}
              icon={<SearchOutlined />}
              size="small"
            >
              Search
            </Button>
          </Space>
        </Card>
  const handleSearch = useCallback(
    (
      confirm: (param?: FilterConfirmProps) => void,
      filterValue: string,
      filterKey: string,
    ): VoidFunction => (): void => {
      const { exceptionFilterValue, serviceFilterValue } = getFilterValues(
        getUpdatedServiceName || '',
        getUpdatedExceptionType || '',
        filterKey,
        filterValue || '',
      );
      history.replace(
        `${pathname}?${createQueryParams({
          order: updatedOrder,
          offset: getUpdatedOffset,
          orderParam: getUpdatedParams,
          pageSize: getUpdatedPageSize,
          exceptionType: exceptionFilterValue,
          serviceName: serviceFilterValue,
        })}`,
      );
      confirm();
    },
    [],
    [
      getUpdatedExceptionType,
      getUpdatedOffset,
      getUpdatedPageSize,
      getUpdatedParams,
      getUpdatedServiceName,
      pathname,
      updatedOrder,
    ],
  );

  const onExceptionTypeFilter = useCallback(
    (value, record: Exception): boolean => {
  const filterDropdownWrapper = useCallback(
    ({
      setSelectedKeys,
      selectedKeys,
      confirm,
      placeholder,
      filterKey,
    }: FilterDropdownExtendsProps) => (
      <Card size="small">
        <Space align="start" direction="vertical">
          <Input
            placeholder={placeholder}
            value={selectedKeys[0]}
            onChange={(e): void =>
              setSelectedKeys(e.target.value ? [e.target.value] : [])
            }
            allowClear
            defaultValue={getDefaultFilterValue(
              filterKey,
              getUpdatedServiceName,
              getUpdatedExceptionType,
            )}
            onPressEnter={handleSearch(confirm, String(selectedKeys[0]), filterKey)}
          />
          <Button
            type="primary"
            onClick={handleSearch(confirm, String(selectedKeys[0]), filterKey)}
            icon={<SearchOutlined />}
            size="small"
          >
            Search
          </Button>
        </Space>
      </Card>
    ),
    [getUpdatedExceptionType, getUpdatedServiceName, handleSearch],
  );

  const onExceptionTypeFilter: ColumnType<Exception>['onFilter'] = useCallback(
    (value: unknown, record: Exception): boolean => {
      if (record.exceptionType && typeof value === 'string') {
        return record.exceptionType.toLowerCase().includes(value.toLowerCase());
      }
@@ -154,7 +227,7 @@ function AllErrors(): JSX.Element {
  );

  const onApplicationTypeFilter = useCallback(
    (value, record: Exception): boolean => {
    (value: unknown, record: Exception): boolean => {
      if (record.serviceName && typeof value === 'string') {
        return record.serviceName.toLowerCase().includes(value.toLowerCase());
      }
@@ -167,6 +240,7 @@ function AllErrors(): JSX.Element {
    (
      onFilter: ColumnType<Exception>['onFilter'],
      placeholder: string,
      filterKey: string,
    ): ColumnType<Exception> => ({
      onFilter,
      filterIcon,
@@ -176,6 +250,7 @@ function AllErrors(): JSX.Element {
        selectedKeys,
        confirm,
        placeholder,
        filterKey,
      }),
    }),
    [filterIcon, filterDropdownWrapper],
@@ -184,9 +259,10 @@ function AllErrors(): JSX.Element {
  const columns: ColumnsType<Exception> = [
    {
      title: 'Exception Type',
      width: 100,
      dataIndex: 'exceptionType',
      key: 'exceptionType',
      ...getFilter(onExceptionTypeFilter, 'Search By Exception'),
      ...getFilter(onExceptionTypeFilter, 'Search By Exception', 'exceptionType'),
      render: (value, record): JSX.Element => (
        <Tooltip overlay={(): JSX.Element => value}>
          <Link
@@ -209,6 +285,7 @@ function AllErrors(): JSX.Element {
      title: 'Error Message',
      dataIndex: 'exceptionMessage',
      key: 'exceptionMessage',
      width: 100,
      render: (value): JSX.Element => (
        <Tooltip overlay={(): JSX.Element => value}>
          <Typography.Paragraph
@@ -223,6 +300,7 @@ function AllErrors(): JSX.Element {
    },
    {
      title: 'Count',
      width: 50,
      dataIndex: 'exceptionCount',
      key: 'exceptionCount',
      sorter: true,
@@ -235,6 +313,7 @@ function AllErrors(): JSX.Element {
    {
      title: 'Last Seen',
      dataIndex: 'lastSeen',
      width: 80,
      key: 'lastSeen',
      render: getDateValue,
      sorter: true,
@@ -247,6 +326,7 @@ function AllErrors(): JSX.Element {
    {
      title: 'First Seen',
      dataIndex: 'firstSeen',
      width: 80,
      key: 'firstSeen',
      render: getDateValue,
      sorter: true,
@@ -259,6 +339,7 @@ function AllErrors(): JSX.Element {
    {
      title: 'Application',
      dataIndex: 'serviceName',
      width: 100,
      key: 'serviceName',
      sorter: true,
      defaultSortOrder: getDefaultOrder(
@@ -266,36 +347,49 @@ function AllErrors(): JSX.Element {
        updatedOrder,
        'serviceName',
      ),
      ...getFilter(onApplicationTypeFilter, 'Search By Application'),
      ...getFilter(
        onApplicationTypeFilter,
        'Search By Application',
        'serviceName',
      ),
    },
  ];

  const onChangeHandler: TableProps<Exception>['onChange'] = (
    paginations,
    _,
    sorter,
  ) => {
    if (!Array.isArray(sorter)) {
      const { pageSize = 0, current = 0 } = paginations;
      const { columnKey = '', order } = sorter;
      const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';

      history.replace(
        `${pathname}?${createQueryParams({
          order: updatedOrder,
          offset: (current - 1) * pageSize,
          orderParam: columnKey,
          pageSize,
        })}`,
      );
    }
  };
  const onChangeHandler: TableProps<Exception>['onChange'] = useCallback(
    (
      paginations: TablePaginationConfig,
      filters: Record<string, FilterValue | null>,
      sorter: SorterResult<Exception>[] | SorterResult<Exception>,
    ) => {
      if (!Array.isArray(sorter)) {
        const { pageSize = 0, current = 0 } = paginations;
        const { columnKey = '', order } = sorter;
        const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';
        const params = new URLSearchParams(window.location.search);
        const { exceptionType, serviceName } = extractFilterValues(filters, {
          serviceName: getFilterString(params.get(urlKey.serviceName)),
          exceptionType: getFilterString(params.get(urlKey.exceptionType)),
        });
        history.replace(
          `${pathname}?${createQueryParams({
            order: updatedOrder,
            offset: (current - 1) * pageSize,
            orderParam: columnKey,
            pageSize,
            exceptionType,
            serviceName,
          })}`,
        );
      }
    },
    [pathname],
  );

  return (
    <Table
    <ResizeTable
      columns={columns}
      tableLayout="fixed"
      dataSource={data?.payload as Exception[]}
      columns={columns}
      rowKey="firstSeen"
      loading={isLoading || false || errorCountResponse.status === 'loading'}
      pagination={{

9 frontend/src/container/AllError/types.ts Normal file
@@ -0,0 +1,9 @@
import { FilterDropdownProps } from 'antd/es/table/interface';

export interface FilterDropdownExtendsProps {
  placeholder: string;
  filterKey: string;
  confirm: FilterDropdownProps['confirm'];
  setSelectedKeys: FilterDropdownProps['setSelectedKeys'];
  selectedKeys: FilterDropdownProps['selectedKeys'];
}

@@ -1,7 +1,13 @@
import { SortOrder } from 'antd/lib/table/interface';
import { FilterValue, SortOrder } from 'antd/lib/table/interface';
import Timestamp from 'timestamp-nano';
import { Order, OrderBy } from 'types/api/errors/getAll';

import {
  DEFAULT_FILTER_VALUE,
  EXCEPTION_TYPE_FILTER_NAME,
  SERVICE_NAME_FILTER_NAME,
} from './constant';

export const isOrder = (order: string | null): order is Order =>
  !!(order === 'ascending' || order === 'descending');

@@ -10,17 +16,18 @@ export const urlKey = {
  offset: 'offset',
  orderParam: 'orderParam',
  pageSize: 'pageSize',
  exceptionType: 'exceptionType',
  serviceName: 'serviceName',
};

export const isOrderParams = (orderBy: string | null): orderBy is OrderBy => {
  return !!(
export const isOrderParams = (orderBy: string | null): orderBy is OrderBy =>
  !!(
    orderBy === 'serviceName' ||
    orderBy === 'exceptionCount' ||
    orderBy === 'lastSeen' ||
    orderBy === 'firstSeen' ||
    orderBy === 'exceptionType'
  );
};

export const getOrder = (order: string | null): Order => {
  if (isOrder(order)) {
@@ -74,12 +81,9 @@ export const getDefaultOrder = (
  return undefined;
};

export const getNanoSeconds = (date: string): string => {
  return (
    Math.floor(new Date(date).getTime() / 1e3).toString() +
    String(Timestamp.fromString(date).getNano().toString()).padStart(9, '0')
  );
};
export const getNanoSeconds = (date: string): string =>
  Math.floor(new Date(date).getTime() / 1e3).toString() +
  String(Timestamp.fromString(date).getNano().toString()).padStart(9, '0');

export const getUpdatePageSize = (pageSize: string | null): number => {
  if (pageSize) {
@@ -87,3 +91,94 @@ export const getUpdatePageSize = (pageSize: string | null): number => {
  }
  return 10;
};

export const getFilterString = (filter: string | null): string => {
  if (filter) {
    return filter;
  }
  return '';
};

export const getDefaultFilterValue = (
  filterKey: string | null,
  serviceName: string,
  exceptionType: string,
): string | undefined => {
  let defaultValue: string | undefined;
  switch (filterKey) {
    case SERVICE_NAME_FILTER_NAME:
      defaultValue = serviceName;
      break;
    case EXCEPTION_TYPE_FILTER_NAME:
      defaultValue = exceptionType;
      break;
    default:
      break;
  }
  return defaultValue;
};

export const getFilterValues = (
  serviceName: string,
  exceptionType: string,
  filterKey: string,
  filterValue: string,
): { exceptionFilterValue: string; serviceFilterValue: string } => {
  let serviceFilterValue = serviceName;
  let exceptionFilterValue = exceptionType;
  switch (filterKey) {
    case EXCEPTION_TYPE_FILTER_NAME:
      exceptionFilterValue = filterValue;
      break;
    case SERVICE_NAME_FILTER_NAME:
      serviceFilterValue = filterValue;
      break;
    default:
      break;
  }
  return { exceptionFilterValue, serviceFilterValue };
};

type FilterValues = { exceptionType: string; serviceName: string };

const extractSingleFilterValue = (
  filterName: string,
  filters: Filter,
): string => {
  const filterValues = filters[filterName];

  if (
    !filterValues ||
    !Array.isArray(filterValues) ||
    filterValues.length === 0
  ) {
    return DEFAULT_FILTER_VALUE;
  }

  return String(filterValues[0]);
};

type Filter = Record<string, FilterValue | null>;

export const extractFilterValues = (
  filters: Filter,
  prefilledFilters: FilterValues,
): FilterValues => {
  const filterValues: FilterValues = {
    exceptionType: prefilledFilters.exceptionType,
    serviceName: prefilledFilters.serviceName,
  };
  if (filters[EXCEPTION_TYPE_FILTER_NAME]) {
    filterValues.exceptionType = extractSingleFilterValue(
      EXCEPTION_TYPE_FILTER_NAME,
      filters,
    );
  }
  if (filters[SERVICE_NAME_FILTER_NAME]) {
    filterValues.serviceName = extractSingleFilterValue(
      SERVICE_NAME_FILTER_NAME,
      filters,
    );
  }
  return filterValues;
};

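Not part of the diff — a quick sketch of how the new filter helpers behave, using made-up values:

// getFilterValues replaces only the value addressed by filterKey:
getFilterValues('frontend', 'ValueError', 'exceptionType', 'TypeError');
// -> { exceptionFilterValue: 'TypeError', serviceFilterValue: 'frontend' }

// extractFilterValues prefers values coming from the antd table filters,
// falling back to the prefilled (URL) values otherwise:
extractFilterValues(
  { exceptionType: ['TypeError'], serviceName: null },
  { exceptionType: 'ValueError', serviceName: 'frontend' },
);
// -> { exceptionType: 'TypeError', serviceName: 'frontend' }
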
Some files were not shown because too many files have changed in this diff.