Compare commits

142 commits: `feat/make-...` → `v0.56.0`
| Author | SHA1 | Date |
|---|---|---|
|  | b70d50f2b3 |  |
|  | 728f699051 |  |
|  | 3bbbc759d3 |  |
|  | 2230ca1740 |  |
|  | 440fd4e02b |  |
|  | 78a924d378 |  |
|  | b03fadc2ec |  |
|  | 4b79d3b785 |  |
|  | a24fb5d84f |  |
|  | 137059ded6 |  |
|  | f1ce82ac25 |  |
|  | 4aeed392d7 |  |
|  | 4356ddae8c |  |
|  | 76e7de3aed |  |
|  | ae5e63cc64 |  |
|  | 5ef05891ce |  |
|  | c452e23b18 |  |
|  | 69aab87d72 |  |
|  | a60674cf1b |  |
|  | 022b9226a7 |  |
|  | 36e2404814 |  |
|  | 2eb3f6cb06 |  |
|  | 98cbdf570f |  |
|  | d380894c35 |  |
|  | ea0263cc73 |  |
|  | f38a1d9f1c |  |
|  | 9390a815a8 |  |
|  | 4f76e13dbe |  |
|  | 6a4643558c |  |
|  | a98c8db949 |  |
|  | 5ba9c9d48c |  |
|  | e1ca71dcea |  |
|  | 266ed58908 |  |
|  | 1411ae41c3 |  |
|  | bc8891d2f8 |  |
|  | 3b7455ac4c |  |
|  | 5a0a7c2c60 |  |
|  | 794d6fc0ca |  |
|  | 4c95df44d5 |  |
|  | 717545e14c |  |
|  | e4d1452f5f |  |
|  | 88ace79a64 |  |
|  | 9b42326f80 |  |
|  | 44a3469b9b |  |
|  | ef4b70f67b |  |
|  | 7a125e31ec |  |
|  | c7bd7566c5 |  |
|  | f4fbe62169 |  |
|  | 6e3141a4ce |  |
|  | fc8391c5aa |  |
|  | 87499d1ead |  |
|  | 5fa8686fcf |  |
|  | dc2db524c7 |  |
|  | b3545b767a |  |
|  | 55f653d92e |  |
|  | 35f8e133a9 |  |
|  | 58d6487f77 |  |
|  | 6685482ea6 |  |
|  | 708158f50f |  |
|  | 0feab5aa93 |  |
|  | b49ed913c7 |  |
|  | 419d2da363 |  |
|  | df2844ea74 |  |
|  | 5e5f0f167f |  |
|  | a6b05f0a3d |  |
|  | f69aaa2cfb |  |
|  | 3866f89d3e |  |
|  | f9ac41b865 |  |
|  | c5b5bfe540 |  |
|  | f3c01a5155 |  |
|  | 033b64a62a |  |
|  | 4aabfe7cf5 |  |
|  | 0218f701b2 |  |
|  | f6d3f95768 |  |
|  | cb1cd3555b |  |
|  | ced72f86a4 |  |
|  | 54d5666b92 |  |
|  | 4edc6dbeae |  |
|  | e203276678 |  |
|  | 8eb2cf144e |  |
|  | 2f7d208eb5 |  |
|  | 70fb5af19f |  |
|  | fc7a94fa66 |  |
|  | 0077714cb0 |  |
|  | 723c31f6c5 |  |
|  | 1024483e58 |  |
|  | cbcef2c880 |  |
|  | 0711c8855e |  |
|  | 72cbc1a9e7 |  |
|  | a9841755a7 |  |
|  | 03e6c33f82 |  |
|  | 3c5aa86ee2 |  |
|  | 06a89b21da |  |
|  | 8c891f0e87 |  |
|  | 49dd5f2ef7 |  |
|  | 83d01e7a0d |  |
|  | f8e97c9c5c |  |
|  | b78ade2cf2 |  |
|  | 1b59719891 |  |
|  | 540a2c6712 |  |
|  | 481c4e1271 |  |
|  | fe0d2a967f |  |
|  | e77a6f4d7a |  |
|  | a023a7514e |  |
|  | 3573c0863c |  |
|  | b444c1e6b1 |  |
|  | 5698628839 |  |
|  | 3596f73fb1 |  |
|  | 5b22490d6d |  |
|  | 39f9fc6900 |  |
|  | f854cdd9d3 |  |
|  | 011b2167ba |  |
|  | a5f3a189f8 |  |
|  | 3fdfb51e02 |  |
|  | 43577c7ead |  |
|  | 6661aa7686 |  |
|  | 8d54e3b766 |  |
|  | 6c446226eb |  |
|  | 90b5f88413 |  |
|  | 08f3b089f4 |  |
|  | 1d8e5b6c0f |  |
|  | 0dcded59e5 |  |
|  | bfb63ca8c4 |  |
|  | 71e24483dd |  |
|  | 317c41a166 |  |
|  | ed4613cb1b |  |
|  | 6c06fea1aa |  |
|  | 6bc2f9125c |  |
|  | 262beef8f9 |  |
|  | 43cc6dea92 |  |
|  | 6684640abe |  |
|  | 0a146910d6 |  |
|  | 690ed0f7f1 |  |
|  | 5bcf7de440 |  |
|  | 703983a5f9 |  |
|  | 766a2123c5 |  |
|  | a476c68f7e |  |
|  | fc15aa6f1c |  |
|  | 4192fd573d |  |
|  | ca13d80205 |  |
|  | 8d84ce8f06 |  |
|  | 09ea7b9eb5 |  |
`.github/ISSUE_TEMPLATE/request_dashboard.md` (new file, vendored, 49 lines)
```markdown
---
name: Request Dashboard
about: Request a new dashboard for the SigNoz Dashboards repository
title: '[Dashboard Request] '
labels: 'dashboard-template'
assignees: ''

---

<!-- Use this template to request a new dashboard for the SigNoz Dashboards repository. Providing detailed information will help us understand your needs better and speed up the dashboard creation process. -->

## Dashboard Name

<!-- Provide the name for the requested dashboard. Be specific (e.g., "MySQL Monitoring Dashboard"). -->

## Expected Dashboard Sections and Panels

(Can be tweaked (add or remove panels/sections) according to available metrics)

### Section Name

<!-- Brief description of what this section should display (e.g., "Resource usage metrics for MySQL database"). -->

### Panel Name

<!-- Description of the panel (e.g., "Displays current CPU usage, memory usage, etc."). -->

<!-- - **Example:**
  - **Section**: Resource Metrics
  - **Panel**: CPU Usage - Displays the current CPU usage across all database instances.
  - **Panel**: Memory Usage - Displays the total memory used by the MySQL process. -->

<!-- Repeat this format for any additional sections or panels. -->

## Expected Dashboard Variables

<!-- List any dashboard variables that should be included in the dashboard. Examples could be `deployment.environment`, `hostname`, `region`, etc. -->

## Additional Comments or Requirements

<!-- Include any other details, special requirements, or specific visualizations you'd like to request for this dashboard. -->

## References or Screenshots

<!-- Add any references or screenshots of requested dashboard if available. -->

## 📋 Notes

Please review the [CONTRIBUTING.md](https://github.com/SigNoz/dashboards/blob/main/CONTRIBUTING.md) for guidelines on dashboard structure, naming conventions, and how to submit a pull request.
```
`.github/workflows/build.yaml` (vendored, 7 changes)
```diff
@@ -11,9 +11,9 @@ jobs:
   check-no-ee-references:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - name: Run check
-        run: make check-no-ee-references
+      - uses: actions/checkout@v4
+      - name: Run check
+        run: make check-no-ee-references

   build-frontend:
     runs-on: ubuntu-latest
@@ -43,7 +43,6 @@ jobs:
       run: |
         echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
         echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
         echo 'CLARITY_PROJECT_ID="${{ secrets.CLARITY_PROJECT_ID }}"' >> frontend/.env
       - name: Install dependencies
         run: cd frontend && yarn install
       - name: Run ESLint
```
`.github/workflows/push.yaml` (vendored, 2 changes)
```diff
@@ -9,7 +9,6 @@ on:
       - v*

 jobs:

   image-build-and-push-query-service:
     runs-on: ubuntu-latest
     steps:
@@ -151,7 +150,6 @@ jobs:
       run: |
         echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
         echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
         echo 'CLARITY_PROJECT_ID="${{ secrets.CLARITY_PROJECT_ID }}"' >> frontend/.env
         echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
         echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
         echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
```
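One detail worth noting in these workflow steps: the first `echo` uses `>`, which creates or truncates `frontend/.env`, while every later line appends with `>>`, so the env file is rebuilt from scratch on each CI run.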
`CONTRIBUTING.md`

````diff
@@ -30,6 +30,7 @@ Also, have a look at these [good first issues label](https://github.com/SigNoz/s
   - [To run ClickHouse setup](#41-to-run-clickhouse-setup-recommended-for-local-development)
 - [Contribute to SigNoz Helm Chart](#5-contribute-to-signoz-helm-chart-)
   - [To run helm chart for local development](#51-to-run-helm-chart-for-local-development)
+- [Contribute to Dashboards](#6-contribute-to-dashboards-)
 - [Other Ways to Contribute](#other-ways-to-contribute)

 # 1. General Instructions 📝
@@ -37,7 +38,7 @@ Also, have a look at these [good first issues label](https://github.com/SigNoz/s
 ## 1.1 For Creating Issue(s)
 Before making any significant changes and before filing a new issue, please check [existing open](https://github.com/SigNoz/signoz/issues?q=is%3Aopen+is%3Aissue), or [recently closed](https://github.com/SigNoz/signoz/issues?q=is%3Aissue+is%3Aclosed) issues to make sure somebody else hasn't already reported the issue. Please try to include as much information as you can.

-**Issue Types** - [Bug Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=) | [Feature Request](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=) | [Performance Issue Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=performance-issue-report.md&title=) | [Report a Security Vulnerability](https://github.com/SigNoz/signoz/security/policy)
+**Issue Types** - [Bug Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=) | [Feature Request](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=) | [Performance Issue Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=performance-issue-report.md&title=) | [Request Dashboard](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=dashboard-template&projects=&template=request_dashboard.md&title=%5BDashboard+Request%5D+) | [Report a Security Vulnerability](https://github.com/SigNoz/signoz/security/policy)

 #### Details like these are incredibly useful:

@@ -56,7 +57,7 @@ Before making any significant changes and before filing a new issue, please chec
 Discussing your proposed changes ahead of time will make the contribution
 process smooth for everyone 🙌.

-**[`^top^`](#)**
+**[`^top^`](#contributing-guidelines)**

 <hr>

@@ -97,13 +98,14 @@ GitHub provides additional document on [forking a repository](https://help.githu
 stability and quality of the component.

-You can always reach out to `ankit@signoz.io` to understand more about the repo and product. We are very responsive over email and [SLACK](https://signoz.io/slack).
+You can always reach out to `ankit@signoz.io` to understand more about the repo and product. We are very responsive over email and [slack community](https://signoz.io/slack).

 ### Pointers:
 - If you find any **bugs** → please create an [**issue.**](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=)
 - If you find anything **missing** in documentation → you can create an issue with the label **`documentation`**.
 - If you want to build any **new feature** → please create an [issue with the label **`enhancement`**.](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=)
 - If you want to **discuss** something about the product, start a new [**discussion**.](https://github.com/SigNoz/signoz/discussions)
+- If you want to request a new **dashboard template** → please create an issue [here](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=dashboard-template&projects=&template=request_dashboard.md&title=%5BDashboard+Request%5D+).

 <hr>

@@ -117,7 +119,7 @@ e.g. If you are submitting a fix for an issue in frontend, the PR name should be

 - Feel free to ping us on [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) or [`#contributing-frontend`](https://signoz-community.slack.com/archives/C027134DM8B) on our slack community if you need any help on this :)

-**[`^top^`](#)**
+**[`^top^`](#contributing-guidelines)**

 <hr>

@@ -127,14 +129,13 @@ e.g. If you are submitting a fix for an issue in frontend, the PR name should be

 - [**Frontend**](#3-develop-frontend-) (Written in Typescript, React)
 - [**Backend**](#4-contribute-to-backend-query-service-) (Query Service, written in Go)
+- [**Dashboard Templates**](#6-contribute-to-dashboards-) (JSON dashboard templates built with SigNoz)

 Depending upon your area of expertise & interest, you can choose one or more to contribute. Below are detailed instructions to contribute in each area.

-**Please note:** If you want to work on an issue, please ask the maintainers to assign the issue to you before starting work on it. This would help us understand who is working on an issue and prevent duplicate work. 🙏🏻
+**Please note:** If you want to work on an issue, please add a brief description of your solution on the issue before starting work on it.

-⚠️ If you just raise a PR, without the corresponding issue being assigned to you - it may not be accepted.

-**[`^top^`](#)**
+**[`^top^`](#contributing-guidelines)**

 <hr>

@@ -188,7 +189,7 @@ Also, have a look at [Frontend README.md](https://github.com/SigNoz/signoz/blob/
 ### Important Notes:
 The Maintainers / Contributors who will change Line Numbers of `Frontend` & `Query-Section`, please update line numbers in [`/.scripts/commentLinesForSetup.sh`](https://github.com/SigNoz/signoz/blob/develop/.scripts/commentLinesForSetup.sh)

-**[`^top^`](#)**
+**[`^top^`](#contributing-guidelines)**

 ## 3.2 Contribute to Frontend without installing SigNoz backend

@@ -209,7 +210,7 @@ Please ping us in the [`#contributing`](https://signoz-community.slack.com/archi

 **Frontend should now be accessible at** [`http://localhost:3301/services`](http://localhost:3301/services)

-**[`^top^`](#)**
+**[`^top^`](#contributing-guidelines)**

 <hr>

@@ -309,7 +310,7 @@ Click the button below. A workspace with all required environments will be creat

 > To use it on your forked repo, edit the 'Open in Gitpod' button URL to `https://gitpod.io/#https://github.com/<your-github-username>/signoz` -->

-**[`^top^`](#)**
+**[`^top^`](#contributing-guidelines)**

 <hr>

@@ -365,10 +366,21 @@ curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-
   | HOTROD_NAMESPACE=sample-application bash
 ```

-**[`^top^`](#)**
+**[`^top^`](#contributing-guidelines)**

 ---

+# 6. Contribute to Dashboards 📈
+
+**Need to Update: [https://github.com/SigNoz/dashboards](https://github.com/SigNoz/dashboards)**
+
+To contribute a new dashboard template for any service, follow the contribution guidelines in the [Dashboard Contributing Guide](https://github.com/SigNoz/dashboards/blob/main/CONTRIBUTING.md). In brief:
+
+1. Create a dashboard JSON file.
+2. Add a README file explaining the dashboard, the metrics ingested, and the configurations needed.
+3. Include screenshots of the dashboard in the `assets/` directory.
+4. Submit a pull request for review.
+
 ## Other Ways to Contribute

 There are many other ways to get involved with the community and to participate in this project:
@@ -379,7 +391,6 @@ There are many other ways to get involved with the community and to participate
 - Help answer questions on forums such as Stack Overflow and [SigNoz Community Slack Channel](https://signoz.io/slack).
 - Tell others about the project on Twitter, your blog, etc.

 Again, Feel free to ping us on [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) or [`#contributing-frontend`](https://signoz-community.slack.com/archives/C027134DM8B) on our slack community if you need any help on this :)

 Thank You!
````
```diff
@@ -133,7 +133,7 @@ services:
       # - ./data/clickhouse-3/:/var/lib/clickhouse/

   alertmanager:
-    image: signoz/alertmanager:0.23.5
+    image: signoz/alertmanager:0.23.7
     volumes:
       - ./data/alertmanager:/data
     command:
@@ -146,11 +146,11 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.49.1
+    image: signoz/query-service:0.56.0
     command:
       [
         "-config=/root/config/prometheus.yml",
-        # "--prefer-delta=true"
+        "--use-logs-new-schema=true"
       ]
     # ports:
     #   - "6060:6060" # pprof port
@@ -186,7 +186,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:0.48.0
+    image: signoz/frontend:0.56.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -199,7 +199,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.102.2
+    image: signoz/signoz-otel-collector:0.102.12
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -238,7 +238,7 @@ services:
       - query-service

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:0.102.2
+    image: signoz/signoz-schema-migrator:0.102.12
     deploy:
       restart_policy:
         condition: on-failure
@@ -144,6 +144,7 @@ exporters:
     dsn: tcp://clickhouse:9000/signoz_logs
     docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
     timeout: 10s
+    use_new_schema: true
 extensions:
   health_check:
     endpoint: 0.0.0.0:13133
```
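Taken together, these bumps switch the logs pipeline to the new ClickHouse schema from both ends: the query-service now runs with `--use-logs-new-schema=true`, and the collector's ClickHouse logs exporter (the `dsn: tcp://clickhouse:9000/signoz_logs` block above) sets `use_new_schema: true`, so the read path and the write path target the same tables.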
```diff
@@ -1,5 +1,8 @@
 version: "2.4"

+include:
+  - test-app-docker-compose.yaml
+
 services:
   zookeeper-1:
     image: bitnami/zookeeper:3.7.1
@@ -54,7 +57,7 @@ services:

   alertmanager:
     container_name: signoz-alertmanager
-    image: signoz/alertmanager:0.23.5
+    image: signoz/alertmanager:0.23.7
     volumes:
       - ./data/alertmanager:/data
     depends_on:
@@ -66,7 +69,7 @@ services:
       - --storage.path=/data

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.12}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -81,7 +84,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: signoz-otel-collector
-    image: signoz/signoz-otel-collector:0.102.2
+    image: signoz/signoz-otel-collector:0.102.12
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -128,29 +131,3 @@ services:
     depends_on:
       - otel-collector
     restart: on-failure
-
-  hotrod:
-    image: jaegertracing/example-hotrod:1.30
-    container_name: hotrod
-    logging:
-      options:
-        max-size: 50m
-        max-file: "3"
-    command: [ "all" ]
-    environment:
-      - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
-
-  load-hotrod:
-    image: "signoz/locust:1.2.3"
-    container_name: load-hotrod
-    hostname: load-hotrod
-    environment:
-      ATTACKED_HOST: http://hotrod:8080
-      LOCUST_MODE: standalone
-      NO_PROXY: standalone
-      TASK_DELAY_FROM: 5
-      TASK_DELAY_TO: 30
-      QUIET_MODE: "${QUIET_MODE:-false}"
-      LOCUST_OPTS: "--headless -u 10 -r 1"
-    volumes:
-      - ../common/locust-scripts:/locust
@@ -25,7 +25,7 @@ services:
     command:
       [
         "-config=/root/config/prometheus.yml",
-        # "--prefer-delta=true"
+        "--use-logs-new-schema=true"
      ]
     ports:
       - "6060:6060"
```
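The `hotrod` and `load-hotrod` services removed here are not dropped from the stack: the same definitions reappear in the new `test-app-docker-compose.yaml` shown later in this diff, and the `include:` entry added at the top of the file pulls them back in. The sample-application services are simply factored out into their own file.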
`deploy/docker/clickhouse-setup/docker-compose-minimal.yaml` (new file, 279 lines)
```yaml
x-clickhouse-defaults: &clickhouse-defaults
  restart: on-failure
  # adding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
  image: clickhouse/clickhouse-server:24.1.2-alpine
  tty: true
  depends_on:
    - zookeeper-1
    # - zookeeper-2
    # - zookeeper-3
  logging:
    options:
      max-size: 50m
      max-file: "3"
  healthcheck:
    # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
    test:
      [
        "CMD",
        "wget",
        "--spider",
        "-q",
        "0.0.0.0:8123/ping"
      ]
    interval: 30s
    timeout: 5s
    retries: 3
  ulimits:
    nproc: 65535
    nofile:
      soft: 262144
      hard: 262144

x-db-depend: &db-depend
  depends_on:
    clickhouse:
      condition: service_healthy
    otel-collector-migrator:
      condition: service_completed_successfully
    # clickhouse-2:
    #   condition: service_healthy
    # clickhouse-3:
    #   condition: service_healthy

services:

  zookeeper-1:
    image: bitnami/zookeeper:3.7.1
    container_name: signoz-zookeeper-1
    hostname: zookeeper-1
    user: root
    ports:
      - "2181:2181"
      - "2888:2888"
      - "3888:3888"
    volumes:
      - ./data/zookeeper-1:/bitnami/zookeeper
    environment:
      - ZOO_SERVER_ID=1
      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
      - ALLOW_ANONYMOUS_LOGIN=yes
      - ZOO_AUTOPURGE_INTERVAL=1

  # zookeeper-2:
  #   image: bitnami/zookeeper:3.7.0
  #   container_name: signoz-zookeeper-2
  #   hostname: zookeeper-2
  #   user: root
  #   ports:
  #     - "2182:2181"
  #     - "2889:2888"
  #     - "3889:3888"
  #   volumes:
  #     - ./data/zookeeper-2:/bitnami/zookeeper
  #   environment:
  #     - ZOO_SERVER_ID=2
  #     - ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888
  #     - ALLOW_ANONYMOUS_LOGIN=yes
  #     - ZOO_AUTOPURGE_INTERVAL=1

  # zookeeper-3:
  #   image: bitnami/zookeeper:3.7.0
  #   container_name: signoz-zookeeper-3
  #   hostname: zookeeper-3
  #   user: root
  #   ports:
  #     - "2183:2181"
  #     - "2890:2888"
  #     - "3890:3888"
  #   volumes:
  #     - ./data/zookeeper-3:/bitnami/zookeeper
  #   environment:
  #     - ZOO_SERVER_ID=3
  #     - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
  #     - ALLOW_ANONYMOUS_LOGIN=yes
  #     - ZOO_AUTOPURGE_INTERVAL=1

  clickhouse:
    <<: *clickhouse-defaults
    container_name: signoz-clickhouse
    hostname: clickhouse
    ports:
      - "9000:9000"
      - "8123:8123"
      - "9181:9181"
    volumes:
      - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
      - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
      - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
      - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
      # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
      - ./data/clickhouse/:/var/lib/clickhouse/
      - ./user_scripts:/var/lib/clickhouse/user_scripts/

  # clickhouse-2:
  #   <<: *clickhouse-defaults
  #   container_name: signoz-clickhouse-2
  #   hostname: clickhouse-2
  #   ports:
  #     - "9001:9000"
  #     - "8124:8123"
  #     - "9182:9181"
  #   volumes:
  #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
  #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
  #     - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
  #     - ./data/clickhouse-2/:/var/lib/clickhouse/
  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/

  # clickhouse-3:
  #   <<: *clickhouse-defaults
  #   container_name: signoz-clickhouse-3
  #   hostname: clickhouse-3
  #   ports:
  #     - "9002:9000"
  #     - "8125:8123"
  #     - "9183:9181"
  #   volumes:
  #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
  #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
  #     - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
  #     - ./data/clickhouse-3/:/var/lib/clickhouse/
  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/

  alertmanager:
    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.7}
    container_name: signoz-alertmanager
    volumes:
      - ./data/alertmanager:/data
    depends_on:
      query-service:
        condition: service_healthy
    restart: on-failure
    command:
      - --queryService.url=http://query-service:8085
      - --storage.path=/data

  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

  query-service:
    image: signoz/query-service:${DOCKER_TAG:-0.56.0}
    container_name: signoz-query-service
    command:
      [
        "-config=/root/config/prometheus.yml",
        "--use-logs-new-schema=true"
      ]
    # ports:
    #   - "6060:6060" # pprof port
    #   - "8080:8080" # query-service port
    volumes:
      - ./prometheus.yml:/root/config/prometheus.yml
      - ../dashboards:/root/config/dashboards
      - ./data/signoz/:/var/lib/signoz/
    environment:
      - ClickHouseUrl=tcp://clickhouse:9000
      - ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
      - SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db
      - DASHBOARDS_PATH=/root/config/dashboards
      - STORAGE=clickhouse
      - GODEBUG=netdns=go
      - TELEMETRY_ENABLED=true
      - DEPLOYMENT_TYPE=docker-standalone-amd
    restart: on-failure
    healthcheck:
      test:
        [
          "CMD",
          "wget",
          "--spider",
          "-q",
          "localhost:8080/api/v1/health"
        ]
      interval: 30s
      timeout: 5s
      retries: 3
    <<: *db-depend

  frontend:
    image: signoz/frontend:${DOCKER_TAG:-0.56.0}
    container_name: signoz-frontend
    restart: on-failure
    depends_on:
      - alertmanager
      - query-service
    ports:
      - "3301:3301"
    volumes:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

  otel-collector-migrator:
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.12}
    container_name: otel-migrator
    command:
      - "--dsn=tcp://clickhouse:9000"
    depends_on:
      clickhouse:
        condition: service_healthy
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy

  otel-collector:
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.12}
    container_name: signoz-otel-collector
    command:
      [
        "--config=/etc/otel-collector-config.yaml",
        "--manager-config=/etc/manager-config.yaml",
        "--copy-path=/var/tmp/collector-config.yaml",
        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
      ]
    user: root # required for reading docker container logs
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
      - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
      - /var/lib/docker/containers:/var/lib/docker/containers:ro
      - /:/hostfs:ro
    environment:
      - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
      - DOCKER_MULTI_NODE_CLUSTER=false
      - LOW_CARDINAL_EXCEPTION_GROUPING=false
    ports:
      # - "1777:1777" # pprof extension
      - "4317:4317" # OTLP gRPC receiver
      - "4318:4318" # OTLP HTTP receiver
      # - "8888:8888" # OtelCollector internal metrics
      # - "8889:8889" # signoz spanmetrics exposed by the agent
      # - "9411:9411" # Zipkin port
      # - "13133:13133" # health check extension
      # - "14250:14250" # Jaeger gRPC
      # - "14268:14268" # Jaeger thrift HTTP
      # - "55678:55678" # OpenCensus receiver
      # - "55679:55679" # zPages extension
    restart: on-failure
    depends_on:
      clickhouse:
        condition: service_healthy
      otel-collector-migrator:
        condition: service_completed_successfully
      query-service:
        condition: service_healthy

  logspout:
    image: "gliderlabs/logspout:v3.2.14"
    container_name: signoz-logspout
    volumes:
      - /etc/hostname:/etc/host_hostname:ro
      - /var/run/docker.sock:/var/run/docker.sock
    command: syslog+tcp://otel-collector:2255
    depends_on:
      - otel-collector
    restart: on-failure
```
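A few Compose idioms in this new file are worth spelling out. The `x-clickhouse-defaults: &clickhouse-defaults` extension block defines a YAML anchor that each ClickHouse service splices in with the merge key `<<: *clickhouse-defaults`, so the image, healthcheck, logging, and ulimits are declared once. `depends_on` uses the long form: `condition: service_healthy` holds a dependent back until the target's `wget --spider` healthcheck passes, and `condition: service_completed_successfully` (used for `otel-collector-migrator`) waits for the one-shot migration container to exit with status 0 before the query-service and collector start. Image tags such as `${DOCKER_TAG:-0.56.0}` use standard variable substitution with a default, so setting `DOCKER_TAG` in the environment would override the query-service and frontend tags at once.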
```diff
@@ -1,5 +1,8 @@
 version: "2.4"

+include:
+  - test-app-docker-compose.yaml
+
 x-clickhouse-defaults: &clickhouse-defaults
   restart: on-failure
   # adding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
@@ -149,7 +152,7 @@ services:
     # - ./user_scripts:/var/lib/clickhouse/user_scripts/

   alertmanager:
-    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5}
+    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.7}
     container_name: signoz-alertmanager
     volumes:
       - ./data/alertmanager:/data
@@ -164,13 +167,13 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.49.1}
+    image: signoz/query-service:${DOCKER_TAG:-0.56.0}
     container_name: signoz-query-service
     command:
       [
         "-config=/root/config/prometheus.yml",
-        "-gateway-url=https://api.staging.signoz.cloud"
-        # "--prefer-delta=true"
+        "-gateway-url=https://api.staging.signoz.cloud",
+        "--use-logs-new-schema=true"
       ]
     # ports:
     #   - "6060:6060" # pprof port
@@ -204,7 +207,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.49.1}
+    image: signoz/frontend:${DOCKER_TAG:-0.56.0}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -216,7 +219,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.12}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -230,7 +233,7 @@ services:


   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.12}
     container_name: signoz-otel-collector
     command:
       [
@@ -280,29 +283,3 @@ services:
     depends_on:
       - otel-collector
     restart: on-failure
-
-  hotrod:
-    image: jaegertracing/example-hotrod:1.30
-    container_name: hotrod
-    logging:
-      options:
-        max-size: 50m
-        max-file: "3"
-    command: [ "all" ]
-    environment:
-      - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
-
-  load-hotrod:
-    image: "signoz/locust:1.2.3"
-    container_name: load-hotrod
-    hostname: load-hotrod
-    environment:
-      ATTACKED_HOST: http://hotrod:8080
-      LOCUST_MODE: standalone
-      NO_PROXY: standalone
-      TASK_DELAY_FROM: 5
-      TASK_DELAY_TO: 30
-      QUIET_MODE: "${QUIET_MODE:-false}"
-      LOCUST_OPTS: "--headless -u 10 -r 1"
-    volumes:
-      - ../common/locust-scripts:/locust
```
```diff
@@ -1,307 +1,3 @@
-version: "2.4"
-
-x-clickhouse-defaults: &clickhouse-defaults
-  restart: on-failure
-  # adding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
-  image: clickhouse/clickhouse-server:24.1.2-alpine
-  tty: true
-  depends_on:
-    - zookeeper-1
-    # - zookeeper-2
-    # - zookeeper-3
-  logging:
-    options:
-      max-size: 50m
-      max-file: "3"
-  healthcheck:
-    # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
-    test:
-      [
-        "CMD",
-        "wget",
-        "--spider",
-        "-q",
-        "0.0.0.0:8123/ping"
-      ]
-    interval: 30s
-    timeout: 5s
-    retries: 3
-  ulimits:
-    nproc: 65535
-    nofile:
-      soft: 262144
-      hard: 262144
-
-x-db-depend: &db-depend
-  depends_on:
-    clickhouse:
-      condition: service_healthy
-    otel-collector-migrator:
-      condition: service_completed_successfully
-    # clickhouse-2:
-    #   condition: service_healthy
-    # clickhouse-3:
-    #   condition: service_healthy
-
-services:
-
-  zookeeper-1:
-    image: bitnami/zookeeper:3.7.1
-    container_name: signoz-zookeeper-1
-    hostname: zookeeper-1
-    user: root
-    ports:
-      - "2181:2181"
-      - "2888:2888"
-      - "3888:3888"
-    volumes:
-      - ./data/zookeeper-1:/bitnami/zookeeper
-    environment:
-      - ZOO_SERVER_ID=1
-      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
-      - ALLOW_ANONYMOUS_LOGIN=yes
-      - ZOO_AUTOPURGE_INTERVAL=1
-
-  # zookeeper-2:
-  #   image: bitnami/zookeeper:3.7.0
-  #   container_name: signoz-zookeeper-2
-  #   hostname: zookeeper-2
-  #   user: root
-  #   ports:
-  #     - "2182:2181"
-  #     - "2889:2888"
-  #     - "3889:3888"
-  #   volumes:
-  #     - ./data/zookeeper-2:/bitnami/zookeeper
-  #   environment:
-  #     - ZOO_SERVER_ID=2
-  #     - ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888
-  #     - ALLOW_ANONYMOUS_LOGIN=yes
-  #     - ZOO_AUTOPURGE_INTERVAL=1
-
-  # zookeeper-3:
-  #   image: bitnami/zookeeper:3.7.0
-  #   container_name: signoz-zookeeper-3
-  #   hostname: zookeeper-3
-  #   user: root
-  #   ports:
-  #     - "2183:2181"
-  #     - "2890:2888"
-  #     - "3890:3888"
-  #   volumes:
-  #     - ./data/zookeeper-3:/bitnami/zookeeper
-  #   environment:
-  #     - ZOO_SERVER_ID=3
-  #     - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
-  #     - ALLOW_ANONYMOUS_LOGIN=yes
-  #     - ZOO_AUTOPURGE_INTERVAL=1
-
-  clickhouse:
-    <<: *clickhouse-defaults
-    container_name: signoz-clickhouse
-    hostname: clickhouse
-    ports:
-      - "9000:9000"
-      - "8123:8123"
-      - "9181:9181"
-    volumes:
-      - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
-      - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
-      - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
-      - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
-      # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
-      - ./data/clickhouse/:/var/lib/clickhouse/
-      - ./user_scripts:/var/lib/clickhouse/user_scripts/
-
-  # clickhouse-2:
-  #   <<: *clickhouse-defaults
-  #   container_name: signoz-clickhouse-2
-  #   hostname: clickhouse-2
-  #   ports:
-  #     - "9001:9000"
-  #     - "8124:8123"
-  #     - "9182:9181"
-  #   volumes:
-  #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
-  #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
-  #     - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
-  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
-  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
-  #     - ./data/clickhouse-2/:/var/lib/clickhouse/
-  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/
-
-  # clickhouse-3:
-  #   <<: *clickhouse-defaults
-  #   container_name: signoz-clickhouse-3
-  #   hostname: clickhouse-3
-  #   ports:
-  #     - "9002:9000"
-  #     - "8125:8123"
-  #     - "9183:9181"
-  #   volumes:
-  #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
-  #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
-  #     - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
-  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
-  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
-  #     - ./data/clickhouse-3/:/var/lib/clickhouse/
-  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/
-
-  alertmanager:
-    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5}
-    container_name: signoz-alertmanager
-    volumes:
-      - ./data/alertmanager:/data
-    depends_on:
-      query-service:
-        condition: service_healthy
-    restart: on-failure
-    command:
-      - --queryService.url=http://query-service:8085
-      - --storage.path=/data
-
-  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
-
-  query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.49.1}
-    container_name: signoz-query-service
-    command:
-      [
-        "-config=/root/config/prometheus.yml"
-        # "--prefer-delta=true"
-      ]
-    # ports:
-    #   - "6060:6060" # pprof port
-    #   - "8080:8080" # query-service port
-    volumes:
-      - ./prometheus.yml:/root/config/prometheus.yml
-      - ../dashboards:/root/config/dashboards
-      - ./data/signoz/:/var/lib/signoz/
-    environment:
-      - ClickHouseUrl=tcp://clickhouse:9000
-      - ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
-      - SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db
-      - DASHBOARDS_PATH=/root/config/dashboards
-      - STORAGE=clickhouse
-      - GODEBUG=netdns=go
-      - TELEMETRY_ENABLED=true
-      - DEPLOYMENT_TYPE=docker-standalone-amd
-    restart: on-failure
-    healthcheck:
-      test:
-        [
-          "CMD",
-          "wget",
-          "--spider",
-          "-q",
-          "localhost:8080/api/v1/health"
-        ]
-      interval: 30s
-      timeout: 5s
-      retries: 3
-    <<: *db-depend
-
-  frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.49.1}
-    container_name: signoz-frontend
-    restart: on-failure
-    depends_on:
-      - alertmanager
-      - query-service
-    ports:
-      - "3301:3301"
-    volumes:
-      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
-
-  otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
-    container_name: otel-migrator
-    command:
-      - "--dsn=tcp://clickhouse:9000"
-    depends_on:
-      clickhouse:
-        condition: service_healthy
-      # clickhouse-2:
-      #   condition: service_healthy
-      # clickhouse-3:
-      #   condition: service_healthy
-
-  otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
-    container_name: signoz-otel-collector
-    command:
-      [
-        "--config=/etc/otel-collector-config.yaml",
-        "--manager-config=/etc/manager-config.yaml",
-        "--copy-path=/var/tmp/collector-config.yaml",
-        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
-      ]
-    user: root # required for reading docker container logs
-    volumes:
-      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
-      - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
-      - /var/lib/docker/containers:/var/lib/docker/containers:ro
-      - /:/hostfs:ro
-    environment:
-      - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
-      - DOCKER_MULTI_NODE_CLUSTER=false
-      - LOW_CARDINAL_EXCEPTION_GROUPING=false
-    ports:
-      # - "1777:1777" # pprof extension
-      - "4317:4317" # OTLP gRPC receiver
-      - "4318:4318" # OTLP HTTP receiver
-      # - "8888:8888" # OtelCollector internal metrics
-      # - "8889:8889" # signoz spanmetrics exposed by the agent
-      # - "9411:9411" # Zipkin port
-      # - "13133:13133" # health check extension
-      # - "14250:14250" # Jaeger gRPC
-      # - "14268:14268" # Jaeger thrift HTTP
-      # - "55678:55678" # OpenCensus receiver
-      # - "55679:55679" # zPages extension
-    restart: on-failure
-    depends_on:
-      clickhouse:
-        condition: service_healthy
-      otel-collector-migrator:
-        condition: service_completed_successfully
-      query-service:
-        condition: service_healthy
-
-  logspout:
-    image: "gliderlabs/logspout:v3.2.14"
-    container_name: signoz-logspout
-    volumes:
-      - /etc/hostname:/etc/host_hostname:ro
-      - /var/run/docker.sock:/var/run/docker.sock
-    command: syslog+tcp://otel-collector:2255
-    depends_on:
-      - otel-collector
-    restart: on-failure
-
-  hotrod:
-    image: jaegertracing/example-hotrod:1.30
-    container_name: hotrod
-    logging:
-      options:
-        max-size: 50m
-        max-file: "3"
-    command: [ "all" ]
-    environment:
-      - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
-
-  load-hotrod:
-    image: "signoz/locust:1.2.3"
-    container_name: load-hotrod
-    hostname: load-hotrod
-    environment:
-      ATTACKED_HOST: http://hotrod:8080
-      LOCUST_MODE: standalone
-      NO_PROXY: standalone
-      TASK_DELAY_FROM: 5
-      TASK_DELAY_TO: 30
-      QUIET_MODE: "${QUIET_MODE:-false}"
-      LOCUST_OPTS: "--headless -u 10 -r 1"
-    volumes:
-      - ../common/locust-scripts:/locust
+include:
+  - test-app-docker-compose.yaml
+  - docker-compose-minimal.yaml
```
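The net effect of this hunk is that the old monolithic compose file shrinks to a three-line `include:` stanza. Compose's top-level `include` (available in recent Docker Compose v2 releases) merges each listed file into the current model as if its services were declared inline, so `docker compose up` still brings up the full stack, now sourced from `docker-compose-minimal.yaml` plus the test-app file.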
```diff
@@ -154,6 +154,7 @@ exporters:
     dsn: tcp://clickhouse:9000/signoz_logs
     docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
     timeout: 10s
+    use_new_schema: true
   # logging: {}

 service:
```
`deploy/docker/clickhouse-setup/test-app-docker-compose.yaml` (new file, 26 lines)
```yaml
services:
  hotrod:
    image: jaegertracing/example-hotrod:1.30
    container_name: hotrod
    logging:
      options:
        max-size: 50m
        max-file: "3"
    command: [ "all" ]
    environment:
      - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

  load-hotrod:
    image: "signoz/locust:1.2.3"
    container_name: load-hotrod
    hostname: load-hotrod
    environment:
      ATTACKED_HOST: http://hotrod:8080
      LOCUST_MODE: standalone
      NO_PROXY: standalone
      TASK_DELAY_FROM: 5
      TASK_DELAY_TO: 30
      QUIET_MODE: "${QUIET_MODE:-false}"
      LOCUST_OPTS: "--headless -u 10 -r 1"
    volumes:
      - ../common/locust-scripts:/locust
```
`ee/query-service/anomaly/daily.go` (new file, 44 lines)
```go
package anomaly

import (
	"context"

	querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
)

type DailyProvider struct {
	BaseSeasonalProvider
}

var _ BaseProvider = (*DailyProvider)(nil)

func (dp *DailyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
	return &dp.BaseSeasonalProvider
}

// NewDailyProvider uses the same generic option type
func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvider {
	dp := &DailyProvider{
		BaseSeasonalProvider: BaseSeasonalProvider{},
	}

	for _, opt := range opts {
		opt(dp)
	}

	dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
		Reader:        dp.reader,
		Cache:         dp.cache,
		KeyGenerator:  queryBuilder.NewKeyGenerator(),
		FluxInterval:  dp.fluxInterval,
		FeatureLookup: dp.ff,
	})

	return dp
}

func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
	req.Seasonality = SeasonalityDaily
	return p.getAnomalies(ctx, req)
}
```
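The blank-identifier declaration `var _ BaseProvider = (*DailyProvider)(nil)` is a compile-time assertion: it forces the compiler to verify that `*DailyProvider` satisfies the `BaseProvider` interface without allocating anything. The same guard appears in `hourly.go` below.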
`ee/query-service/anomaly/hourly.go` (new file, 44 lines)
```go
package anomaly

import (
	"context"

	querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
)

type HourlyProvider struct {
	BaseSeasonalProvider
}

var _ BaseProvider = (*HourlyProvider)(nil)

func (hp *HourlyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
	return &hp.BaseSeasonalProvider
}

// NewHourlyProvider now uses the generic option type
func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyProvider {
	hp := &HourlyProvider{
		BaseSeasonalProvider: BaseSeasonalProvider{},
	}

	for _, opt := range opts {
		opt(hp)
	}

	hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
		Reader:        hp.reader,
		Cache:         hp.cache,
		KeyGenerator:  queryBuilder.NewKeyGenerator(),
		FluxInterval:  hp.fluxInterval,
		FeatureLookup: hp.ff,
	})

	return hp
}

func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
	req.Seasonality = SeasonalityHourly
	return p.getAnomalies(ctx, req)
}
```
`ee/query-service/anomaly/params.go` (new file, 248 lines)
```go
package anomaly

import (
	"math"
	"time"

	"go.signoz.io/signoz/pkg/query-service/common"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

type Seasonality string

const (
	SeasonalityHourly Seasonality = "hourly"
	SeasonalityDaily  Seasonality = "daily"
	SeasonalityWeekly Seasonality = "weekly"
)

func (s Seasonality) String() string {
	return string(s)
}

var (
	oneWeekOffset = 24 * 7 * time.Hour.Milliseconds()
	oneDayOffset  = 24 * time.Hour.Milliseconds()
	oneHourOffset = time.Hour.Milliseconds()
	fiveMinOffset = 5 * time.Minute.Milliseconds()
)

func (s Seasonality) IsValid() bool {
	switch s {
	case SeasonalityHourly, SeasonalityDaily, SeasonalityWeekly:
		return true
	default:
		return false
	}
}

type GetAnomaliesRequest struct {
	Params      *v3.QueryRangeParamsV3
	Seasonality Seasonality
}

type GetAnomaliesResponse struct {
	Results []*v3.Result
}

// anomalyQueryParams holds the query params for anomaly detection
//
// prediction = avg(past_period_query) + avg(current_season_query) - mean(past_season_query, past2_season_query, past3_season_query)
//                       ^                            ^
//                       |                            |
//       (rounded value for past period)       (seasonal growth)
//
// score = abs(value - prediction) / stddev(current_season_query)
type anomalyQueryParams struct {
	// CurrentPeriodQuery is the query range params for the period the user is looking at, i.e. the eval window
	// Example: (now-5m, now), (now-30m, now), (now-1h, now)
	// The results obtained from this query are compared with the predicted values to detect anomalies
	CurrentPeriodQuery *v3.QueryRangeParamsV3
	// PastPeriodQuery is the query range params for the same window one seasonal period back
	// Example: For weekly seasonality, (now-1w-5m, now-1w)
	//        : For daily seasonality, (now-1d-5m, now-1d)
	//        : For hourly seasonality, (now-1h-5m, now-1h)
	PastPeriodQuery *v3.QueryRangeParamsV3
	// CurrentSeasonQuery is the query range params for the current season
	// Example: For weekly seasonality, this is the query range params for (now-1w-5m, now)
	//        : For daily seasonality, this is the query range params for (now-1d-5m, now)
	//        : For hourly seasonality, this is the query range params for (now-1h-5m, now)
	CurrentSeasonQuery *v3.QueryRangeParamsV3
	// PastSeasonQuery is the query range params for the season preceding the current one
	// Example: For weekly seasonality, this is the query range params for (now-2w-5m, now-1w)
	//        : For daily seasonality, this is the query range params for (now-2d-5m, now-1d)
	//        : For hourly seasonality, this is the query range params for (now-2h-5m, now-1h)
	PastSeasonQuery *v3.QueryRangeParamsV3

	// Past2SeasonQuery is the query range params for two seasons before the current one
	// Example: For weekly seasonality, this is the query range params for (now-3w-5m, now-2w)
	//        : For daily seasonality, this is the query range params for (now-3d-5m, now-2d)
	//        : For hourly seasonality, this is the query range params for (now-3h-5m, now-2h)
	Past2SeasonQuery *v3.QueryRangeParamsV3
	// Past3SeasonQuery is the query range params for three seasons before the current one
	// Example: For weekly seasonality, this is the query range params for (now-4w-5m, now-3w)
	//        : For daily seasonality, this is the query range params for (now-4d-5m, now-3d)
	//        : For hourly seasonality, this is the query range params for (now-4h-5m, now-3h)
	Past3SeasonQuery *v3.QueryRangeParamsV3
}

func updateStepInterval(req *v3.QueryRangeParamsV3) {
	start := req.Start
	end := req.End

	req.Step = int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60))
	for _, q := range req.CompositeQuery.BuilderQueries {
		// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
		if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
			q.StepInterval = minStep
		}
	}
}

func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonality) *anomalyQueryParams {
	start := req.Start
	end := req.End

	currentPeriodQuery := &v3.QueryRangeParamsV3{
		Start:          start,
		End:            end,
		CompositeQuery: req.CompositeQuery.Clone(),
		Variables:      make(map[string]interface{}, 0),
		NoCache:        false,
	}
	updateStepInterval(currentPeriodQuery)

	var pastPeriodStart, pastPeriodEnd int64

	switch seasonality {
	// for one week period, we fetch the data from the past week with 5 min offset
	case SeasonalityWeekly:
		pastPeriodStart = start - oneWeekOffset - fiveMinOffset
		pastPeriodEnd = end - oneWeekOffset
	// for one day period, we fetch the data from the past day with 5 min offset
	case SeasonalityDaily:
		pastPeriodStart = start - oneDayOffset - fiveMinOffset
		pastPeriodEnd = end - oneDayOffset
	// for one hour period, we fetch the data from the past hour with 5 min offset
	case SeasonalityHourly:
		pastPeriodStart = start - oneHourOffset - fiveMinOffset
		pastPeriodEnd = end - oneHourOffset
	}

	pastPeriodQuery := &v3.QueryRangeParamsV3{
		Start:          pastPeriodStart,
		End:            pastPeriodEnd,
		CompositeQuery: req.CompositeQuery.Clone(),
		Variables:      make(map[string]interface{}, 0),
		NoCache:        false,
	}
	updateStepInterval(pastPeriodQuery)

	// seasonality growth trend
	var currentGrowthPeriodStart, currentGrowthPeriodEnd int64
	switch seasonality {
	case SeasonalityWeekly:
		currentGrowthPeriodStart = start - oneWeekOffset
		currentGrowthPeriodEnd = end
	case SeasonalityDaily:
		currentGrowthPeriodStart = start - oneDayOffset
		currentGrowthPeriodEnd = end
	case SeasonalityHourly:
		currentGrowthPeriodStart = start - oneHourOffset
		currentGrowthPeriodEnd = end
	}

	currentGrowthQuery := &v3.QueryRangeParamsV3{
		Start:          currentGrowthPeriodStart,
		End:            currentGrowthPeriodEnd,
		CompositeQuery: req.CompositeQuery.Clone(),
		Variables:      make(map[string]interface{}, 0),
		NoCache:        false,
	}
	updateStepInterval(currentGrowthQuery)

	var pastGrowthPeriodStart, pastGrowthPeriodEnd int64
	switch seasonality {
	case SeasonalityWeekly:
		pastGrowthPeriodStart = start - 2*oneWeekOffset
		pastGrowthPeriodEnd = start - 1*oneWeekOffset
	case SeasonalityDaily:
		pastGrowthPeriodStart = start - 2*oneDayOffset
		pastGrowthPeriodEnd = start - 1*oneDayOffset
	case SeasonalityHourly:
		pastGrowthPeriodStart = start - 2*oneHourOffset
		pastGrowthPeriodEnd = start - 1*oneHourOffset
	}

	pastGrowthQuery := &v3.QueryRangeParamsV3{
		Start:          pastGrowthPeriodStart,
		End:            pastGrowthPeriodEnd,
		CompositeQuery: req.CompositeQuery.Clone(),
		Variables:      make(map[string]interface{}, 0),
		NoCache:        false,
	}
	updateStepInterval(pastGrowthQuery)

	var past2GrowthPeriodStart, past2GrowthPeriodEnd int64
	switch seasonality {
	case SeasonalityWeekly:
		past2GrowthPeriodStart = start - 3*oneWeekOffset
		past2GrowthPeriodEnd = start - 2*oneWeekOffset
	case SeasonalityDaily:
		past2GrowthPeriodStart = start - 3*oneDayOffset
		past2GrowthPeriodEnd = start - 2*oneDayOffset
	case SeasonalityHourly:
		past2GrowthPeriodStart = start - 3*oneHourOffset
		past2GrowthPeriodEnd = start - 2*oneHourOffset
	}

	past2GrowthQuery := &v3.QueryRangeParamsV3{
		Start:          past2GrowthPeriodStart,
		End:            past2GrowthPeriodEnd,
		CompositeQuery: req.CompositeQuery.Clone(),
		Variables:      make(map[string]interface{}, 0),
		NoCache:        false,
	}
	updateStepInterval(past2GrowthQuery)

	var past3GrowthPeriodStart, past3GrowthPeriodEnd int64
	switch seasonality {
	case SeasonalityWeekly:
		past3GrowthPeriodStart = start - 4*oneWeekOffset
		past3GrowthPeriodEnd = start - 3*oneWeekOffset
	case SeasonalityDaily:
		past3GrowthPeriodStart = start - 4*oneDayOffset
		past3GrowthPeriodEnd = start - 3*oneDayOffset
	case SeasonalityHourly:
		past3GrowthPeriodStart = start - 4*oneHourOffset
		past3GrowthPeriodEnd = start - 3*oneHourOffset
	}

	past3GrowthQuery := &v3.QueryRangeParamsV3{
		Start:          past3GrowthPeriodStart,
		End:            past3GrowthPeriodEnd,
		CompositeQuery: req.CompositeQuery.Clone(),
		Variables:      make(map[string]interface{}, 0),
		NoCache:        false,
	}
	updateStepInterval(past3GrowthQuery)

	return &anomalyQueryParams{
		CurrentPeriodQuery: currentPeriodQuery,
		PastPeriodQuery:    pastPeriodQuery,
		CurrentSeasonQuery: currentGrowthQuery,
		PastSeasonQuery:    pastGrowthQuery,
		Past2SeasonQuery:   past2GrowthQuery,
		Past3SeasonQuery:   past3GrowthQuery,
	}
}

type anomalyQueryResults struct {
	CurrentPeriodResults []*v3.Result
	PastPeriodResults    []*v3.Result
	CurrentSeasonResults []*v3.Result
	PastSeasonResults    []*v3.Result
	Past2SeasonResults   []*v3.Result
	Past3SeasonResults   []*v3.Result
}
```
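The scoring comment at the top of this file is easy to verify by hand. Below is a minimal, self-contained sketch of that arithmetic with made-up numbers; the values and the whole `main` function are illustrative assumptions, not part of the SigNoz codebase. For weekly seasonality, the aggregates would come from the windows `prepareAnomalyQueryParams` builds above: the past period is `(start-1w-5m, end-1w)`, the current season `(start-1w, end)`, and the three prior seasons each step back one more week.

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	// Hypothetical aggregates, named after the terms in the params.go comment.
	avgPastPeriod := 100.0                        // avg(past_period_query)
	avgCurrentSeason := 120.0                     // avg(current_season_query)
	pastSeasons := []float64{110.0, 105.0, 100.0} // past, past2, past3 season averages

	var sum float64
	for _, v := range pastSeasons {
		sum += v
	}
	meanPastSeasons := sum / float64(len(pastSeasons)) // 105

	// prediction = avg(past_period) + avg(current_season) - mean(past seasons)
	prediction := avgPastPeriod + avgCurrentSeason - meanPastSeasons // 115

	observed := 140.0 // the value actually measured in the current period
	stdDev := 10.0    // stddev(current_season_query), assumed precomputed

	// score = abs(value - prediction) / stddev(current_season)
	score := math.Abs(observed-prediction) / stdDev // 2.5
	fmt.Printf("prediction=%.1f score=%.1f\n", prediction, score)
}
```

With these numbers the prediction is 115 and the score 2.5, i.e. the observed value sits two and a half season standard deviations away from the forecast.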
`ee/query-service/anomaly/provider.go` (new file, 9 lines)
```go
package anomaly

import (
	"context"
)

type Provider interface {
	GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
}
```
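`provider.go` only pins down the `GetAnomalies` contract; concrete providers are assembled with the generic functional options defined in `seasonal.go` below. Here is a hypothetical wiring sketch: the helper name and its arguments (`reader`, `c`, `keyGen`, `ff`) are assumptions standing in for whatever the query-service bootstrap actually passes.

```go
// newDailyAnomalyProvider is a hypothetical helper showing how the generic
// options compose; reader, c, keyGen, and ff would come from the existing
// query-service setup.
func newDailyAnomalyProvider(reader interfaces.Reader, c cache.Cache, keyGen cache.KeyGenerator, ff interfaces.FeatureLookup) Provider {
	return NewDailyProvider(
		WithReader[*DailyProvider](reader),
		WithCache[*DailyProvider](c),
		WithKeyGenerator[*DailyProvider](keyGen),
		WithFeatureLookup[*DailyProvider](ff),
	)
}
```

The explicit `[*DailyProvider]` type argument is what lets a single option set serve the daily, hourly, and weekly providers; each option mutates the embedded `BaseSeasonalProvider` through `GetBaseSeasonalProvider`, and `NewDailyProvider` then builds its `querierV2` from the reader, cache, flux-interval, and feature-lookup fields the options filled in.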
`ee/query-service/anomaly/seasonal.go` (new file, 466 lines)
@@ -0,0 +1,466 @@
|
||||
package anomaly
|
||||
|
||||
import (
|
||||
"context"
|
||||
"math"
|
||||
"time"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/cache"
|
||||
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/postprocess"
|
||||
"go.signoz.io/signoz/pkg/query-service/utils/labels"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
var (
|
||||
// TODO(srikanthccv): make this configurable?
|
||||
movingAvgWindowSize = 7
|
||||
)
|
||||
|
||||
// BaseProvider is an interface that includes common methods for all provider types
|
||||
type BaseProvider interface {
|
||||
GetBaseSeasonalProvider() *BaseSeasonalProvider
|
||||
}
|
||||
|
||||
// GenericProviderOption is a generic type for provider options
|
||||
type GenericProviderOption[T BaseProvider] func(T)
|
||||
|
||||
func WithCache[T BaseProvider](cache cache.Cache) GenericProviderOption[T] {
|
||||
return func(p T) {
|
||||
p.GetBaseSeasonalProvider().cache = cache
|
||||
}
|
||||
}
|
||||
|
||||
func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericProviderOption[T] {
|
||||
return func(p T) {
|
||||
p.GetBaseSeasonalProvider().keyGenerator = keyGenerator
|
||||
}
|
||||
}
|
||||
|
||||
func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
|
||||
return func(p T) {
|
||||
p.GetBaseSeasonalProvider().ff = ff
|
||||
}
|
||||
}
|
||||
|
||||
func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
|
||||
return func(p T) {
|
||||
p.GetBaseSeasonalProvider().reader = reader
|
||||
}
|
||||
}
|
||||
|
||||
type BaseSeasonalProvider struct {
|
||||
querierV2 interfaces.Querier
|
||||
reader interfaces.Reader
|
||||
fluxInterval time.Duration
|
||||
cache cache.Cache
|
||||
keyGenerator cache.KeyGenerator
|
||||
ff interfaces.FeatureLookup
|
||||
}
|
||||
|
||||
func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
    if !req.Seasonality.IsValid() {
        req.Seasonality = SeasonalityDaily
    }
    return prepareAnomalyQueryParams(req.Params, req.Seasonality)
}

func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
    zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
    currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery)
    if err != nil {
        return nil, err
    }

    currentPeriodResults, err = postprocess.PostProcessResult(currentPeriodResults, params.CurrentPeriodQuery)
    if err != nil {
        return nil, err
    }

    zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
    pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery)
    if err != nil {
        return nil, err
    }

    pastPeriodResults, err = postprocess.PostProcessResult(pastPeriodResults, params.PastPeriodQuery)
    if err != nil {
        return nil, err
    }

    zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
    currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery)
    if err != nil {
        return nil, err
    }

    currentSeasonResults, err = postprocess.PostProcessResult(currentSeasonResults, params.CurrentSeasonQuery)
    if err != nil {
        return nil, err
    }

    zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
    pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery)
    if err != nil {
        return nil, err
    }

    pastSeasonResults, err = postprocess.PostProcessResult(pastSeasonResults, params.PastSeasonQuery)
    if err != nil {
        return nil, err
    }

    zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
    past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past2SeasonQuery)
    if err != nil {
        return nil, err
    }

    past2SeasonResults, err = postprocess.PostProcessResult(past2SeasonResults, params.Past2SeasonQuery)
    if err != nil {
        return nil, err
    }

    zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
    past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past3SeasonQuery)
    if err != nil {
        return nil, err
    }

    past3SeasonResults, err = postprocess.PostProcessResult(past3SeasonResults, params.Past3SeasonQuery)
    if err != nil {
        return nil, err
    }

    return &anomalyQueryResults{
        CurrentPeriodResults: currentPeriodResults,
        PastPeriodResults:    pastPeriodResults,
        CurrentSeasonResults: currentSeasonResults,
        PastSeasonResults:    pastSeasonResults,
        Past2SeasonResults:   past2SeasonResults,
        Past3SeasonResults:   past3SeasonResults,
    }, nil
}
// getMatchingSeries gets the matching series from the query result
// for the given series
func (p *BaseSeasonalProvider) getMatchingSeries(queryResult *v3.Result, series *v3.Series) *v3.Series {
    if queryResult == nil || len(queryResult.Series) == 0 {
        return nil
    }

    for _, curr := range queryResult.Series {
        currLabels := labels.FromMap(curr.Labels)
        seriesLabels := labels.FromMap(series.Labels)
        if currLabels.Hash() == seriesLabels.Hash() {
            return curr
        }
    }
    return nil
}

func (p *BaseSeasonalProvider) getAvg(series *v3.Series) float64 {
    if series == nil || len(series.Points) == 0 {
        return 0
    }
    var sum float64
    for _, smpl := range series.Points {
        sum += smpl.Value
    }
    return sum / float64(len(series.Points))
}

func (p *BaseSeasonalProvider) getStdDev(series *v3.Series) float64 {
    if series == nil || len(series.Points) == 0 {
        return 0
    }
    avg := p.getAvg(series)
    var sum float64
    for _, smpl := range series.Points {
        sum += math.Pow(smpl.Value-avg, 2)
    }
    return math.Sqrt(sum / float64(len(series.Points)))
}
// getMovingAvg gets the moving average for the given series
// for the given window size and start index
func (p *BaseSeasonalProvider) getMovingAvg(series *v3.Series, movingAvgWindowSize, startIdx int) float64 {
    if series == nil || len(series.Points) == 0 {
        return 0
    }
    if startIdx >= len(series.Points)-movingAvgWindowSize {
        startIdx = int(math.Max(0, float64(len(series.Points)-movingAvgWindowSize)))
    }
    var sum float64
    points := series.Points[startIdx:]
    for i := 0; i < movingAvgWindowSize && i < len(points); i++ {
        sum += points[i].Value
    }
    avg := sum / float64(movingAvgWindowSize)
    return avg
}

func (p *BaseSeasonalProvider) getMean(floats ...float64) float64 {
    if len(floats) == 0 {
        return 0
    }
    var sum float64
    for _, f := range floats {
        sum += f
    }
    return sum / float64(len(floats))
}
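    // A worked example of the clamping in getMovingAvg above (a sketch with
    // assumed values, not part of the file): with points [2, 4, 6, 8] and a
    // window of 3, startIdx 0 averages the first window, (2+4+6)/3 = 4, while
    // startIdx 3 is clamped to 1 so the last full window is used,
    // (4+6+8)/3 = 6.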
func (p *BaseSeasonalProvider) getPredictedSeries(
    series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series,
) *v3.Series {
    predictedSeries := &v3.Series{
        Labels:      series.Labels,
        LabelsArray: series.LabelsArray,
        Points:      []v3.Point{},
    }

    // for each point in the series, get the predicted value
    // the predicted value is the moving average (with window size = 7) of the previous period series
    // plus the average of the current season series
    // minus the mean of the past season series, past2 season series and past3 season series
    for idx, curr := range series.Points {
        predictedValue :=
            p.getMovingAvg(prevSeries, movingAvgWindowSize, idx) +
                p.getAvg(currentSeasonSeries) -
                p.getMean(p.getAvg(pastSeasonSeries), p.getAvg(past2SeasonSeries), p.getAvg(past3SeasonSeries))

        if predictedValue < 0 {
            predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
        }

        zap.L().Info("predictedSeries",
            zap.Float64("movingAvg", p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)),
            zap.Float64("avg", p.getAvg(currentSeasonSeries)),
            zap.Float64("mean", p.getMean(p.getAvg(pastSeasonSeries), p.getAvg(past2SeasonSeries), p.getAvg(past3SeasonSeries))),
            zap.Any("labels", series.Labels),
            zap.Float64("predictedValue", predictedValue),
        )
        predictedSeries.Points = append(predictedSeries.Points, v3.Point{
            Timestamp: curr.Timestamp,
            Value:     predictedValue,
        })
    }

    return predictedSeries
}
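    // A numeric sketch of the prediction rule above (assumed values): with a
    // previous-period moving average of 100, a current-season average of 120,
    // and past-season averages of 110, 105, and 100, the predicted value is
    // 100 + 120 - mean(110, 105, 100) = 100 + 120 - 105 = 115. Were the sum
    // negative, the code falls back to the moving average alone.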
// getBounds gets the upper and lower bounds for the given series
// for the given z score threshold
// moving avg of the previous period series + z score threshold * std dev of the series
// moving avg of the previous period series - z score threshold * std dev of the series
func (p *BaseSeasonalProvider) getBounds(
    series, predictedSeries *v3.Series,
    zScoreThreshold float64,
) (*v3.Series, *v3.Series) {
    upperBoundSeries := &v3.Series{
        Labels:      series.Labels,
        LabelsArray: series.LabelsArray,
        Points:      []v3.Point{},
    }

    lowerBoundSeries := &v3.Series{
        Labels:      series.Labels,
        LabelsArray: series.LabelsArray,
        Points:      []v3.Point{},
    }

    for idx, curr := range series.Points {
        upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
        lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
        upperBoundSeries.Points = append(upperBoundSeries.Points, v3.Point{
            Timestamp: curr.Timestamp,
            Value:     upperBound,
        })
        lowerBoundSeries.Points = append(lowerBoundSeries.Points, v3.Point{
            Timestamp: curr.Timestamp,
            Value:     math.Max(lowerBound, 0),
        })
    }

    return upperBoundSeries, lowerBoundSeries
}
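    // Continuing the sketch: if the moving average of the predicted series is
    // 115, the series' standard deviation is 10, and zScoreThreshold is 3,
    // the band is 115 ± 3*10, i.e. [85, 145]; a negative lower bound would be
    // clamped to 0 by the math.Max call above.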
// getExpectedValue gets the expected value for the given series
// for the given index
// prevSeriesAvg + currentSeasonSeriesAvg - mean of past season series, past2 season series and past3 season series
func (p *BaseSeasonalProvider) getExpectedValue(
    _, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, idx int,
) float64 {
    prevSeriesAvg := p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
    currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
    pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
    past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
    past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
    return prevSeriesAvg + currentSeasonSeriesAvg - p.getMean(pastSeasonSeriesAvg, past2SeasonSeriesAvg, past3SeasonSeriesAvg)
}

// getScore gets the anomaly score for the given series
// for the given index
// (value - expectedValue) / std dev of the series
func (p *BaseSeasonalProvider) getScore(
    series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
) float64 {
    expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
    return (value - expectedValue) / p.getStdDev(weekSeries)
}
// getAnomalyScores gets the anomaly score for each point in the given series
// as its z score: (value - expectedValue) / std dev of the current season series
func (p *BaseSeasonalProvider) getAnomalyScores(
    series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series,
) *v3.Series {
    anomalyScoreSeries := &v3.Series{
        Labels:      series.Labels,
        LabelsArray: series.LabelsArray,
        Points:      []v3.Point{},
    }

    for idx, curr := range series.Points {
        anomalyScore := p.getScore(series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries, curr.Value, idx)
        anomalyScoreSeries.Points = append(anomalyScoreSeries.Points, v3.Point{
            Timestamp: curr.Timestamp,
            Value:     anomalyScore,
        })
    }

    return anomalyScoreSeries
}
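    // Score-to-alert sketch (assumed numbers): if a point's value is 150, its
    // expected value is 115, and the season's standard deviation is 10, the
    // score is (150-115)/10 = 3.5, which crosses the default
    // z_score_threshold of 3 applied in getAnomalies below.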
func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
    anomalyParams := p.getQueryParams(req)
    anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
    if err != nil {
        return nil, err
    }

    currentPeriodResultsMap := make(map[string]*v3.Result)
    for _, result := range anomalyQueryResults.CurrentPeriodResults {
        currentPeriodResultsMap[result.QueryName] = result
    }

    pastPeriodResultsMap := make(map[string]*v3.Result)
    for _, result := range anomalyQueryResults.PastPeriodResults {
        pastPeriodResultsMap[result.QueryName] = result
    }

    currentSeasonResultsMap := make(map[string]*v3.Result)
    for _, result := range anomalyQueryResults.CurrentSeasonResults {
        currentSeasonResultsMap[result.QueryName] = result
    }

    pastSeasonResultsMap := make(map[string]*v3.Result)
    for _, result := range anomalyQueryResults.PastSeasonResults {
        pastSeasonResultsMap[result.QueryName] = result
    }

    past2SeasonResultsMap := make(map[string]*v3.Result)
    for _, result := range anomalyQueryResults.Past2SeasonResults {
        past2SeasonResultsMap[result.QueryName] = result
    }

    past3SeasonResultsMap := make(map[string]*v3.Result)
    for _, result := range anomalyQueryResults.Past3SeasonResults {
        past3SeasonResultsMap[result.QueryName] = result
    }

    for _, result := range currentPeriodResultsMap {
        funcs := req.Params.CompositeQuery.BuilderQueries[result.QueryName].Functions

        var zScoreThreshold float64
        for _, f := range funcs {
            if f.Name == v3.FunctionNameAnomaly {
                value, ok := f.NamedArgs["z_score_threshold"]
                if ok {
                    zScoreThreshold = value.(float64)
                } else {
                    zScoreThreshold = 3
                }
                break
            }
        }

        pastPeriodResult, ok := pastPeriodResultsMap[result.QueryName]
        if !ok {
            continue
        }
        currentSeasonResult, ok := currentSeasonResultsMap[result.QueryName]
        if !ok {
            continue
        }
        pastSeasonResult, ok := pastSeasonResultsMap[result.QueryName]
        if !ok {
            continue
        }
        past2SeasonResult, ok := past2SeasonResultsMap[result.QueryName]
        if !ok {
            continue
        }
        past3SeasonResult, ok := past3SeasonResultsMap[result.QueryName]
        if !ok {
            continue
        }

        for _, series := range result.Series {
            stdDev := p.getStdDev(series)
            zap.L().Info("stdDev", zap.Float64("stdDev", stdDev), zap.Any("labels", series.Labels))

            pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
            currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
            pastSeasonSeries := p.getMatchingSeries(pastSeasonResult, series)
            past2SeasonSeries := p.getMatchingSeries(past2SeasonResult, series)
            past3SeasonSeries := p.getMatchingSeries(past3SeasonResult, series)

            prevSeriesAvg := p.getAvg(pastPeriodSeries)
            currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
            pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
            past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
            past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
            zap.L().Info("getAvg", zap.Float64("prevSeriesAvg", prevSeriesAvg), zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg), zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg), zap.Float64("past2SeasonSeriesAvg", past2SeasonSeriesAvg), zap.Float64("past3SeasonSeriesAvg", past3SeasonSeriesAvg), zap.Any("labels", series.Labels))

            predictedSeries := p.getPredictedSeries(
                series,
                pastPeriodSeries,
                currentSeasonSeries,
                pastSeasonSeries,
                past2SeasonSeries,
                past3SeasonSeries,
            )
            result.PredictedSeries = append(result.PredictedSeries, predictedSeries)

            upperBoundSeries, lowerBoundSeries := p.getBounds(
                series,
                predictedSeries,
                zScoreThreshold,
            )
            result.UpperBoundSeries = append(result.UpperBoundSeries, upperBoundSeries)
            result.LowerBoundSeries = append(result.LowerBoundSeries, lowerBoundSeries)

            anomalyScoreSeries := p.getAnomalyScores(
                series,
                pastPeriodSeries,
                currentSeasonSeries,
                pastSeasonSeries,
                past2SeasonSeries,
                past3SeasonSeries,
            )
            result.AnomalyScores = append(result.AnomalyScores, anomalyScoreSeries)
        }
    }

    results := make([]*v3.Result, 0, len(currentPeriodResultsMap))
    for _, result := range currentPeriodResultsMap {
        results = append(results, result)
    }

    return &GetAnomaliesResponse{
        Results: results,
    }, nil
}
43 ee/query-service/anomaly/weekly.go Normal file
@@ -0,0 +1,43 @@
package anomaly

import (
    "context"

    querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
    "go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
)

type WeeklyProvider struct {
    BaseSeasonalProvider
}

var _ BaseProvider = (*WeeklyProvider)(nil)

func (wp *WeeklyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
    return &wp.BaseSeasonalProvider
}

func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyProvider {
    wp := &WeeklyProvider{
        BaseSeasonalProvider: BaseSeasonalProvider{},
    }

    for _, opt := range opts {
        opt(wp)
    }

    wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
        Reader:        wp.reader,
        Cache:         wp.cache,
        KeyGenerator:  queryBuilder.NewKeyGenerator(),
        FluxInterval:  wp.fluxInterval,
        FeatureLookup: wp.ff,
    })

    return wp
}

func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
    req.Seasonality = SeasonalityWeekly
    return p.getAnomalies(ctx, req)
}
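Constructing a provider goes through the generic functional options defined in seasonal.go; a minimal usage sketch, assuming a cache.Cache and an interfaces.Reader are already in hand:

    wp := anomaly.NewWeeklyProvider(
        anomaly.WithCache[*anomaly.WeeklyProvider](myCache),   // myCache: assumed cache.Cache
        anomaly.WithReader[*anomaly.WeeklyProvider](myReader), // myReader: assumed interfaces.Reader
    )

The type parameter pins each option to the concrete provider type, so the same WithCache/WithReader helpers serve the hourly, daily, and weekly providers alike.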
@@ -38,8 +38,7 @@ type APIHandlerOptions struct {
 	Cache        cache.Cache
 	Gateway      *httputil.ReverseProxy
-	// Querier Influx Interval
-	FluxInterval time.Duration
-
+	FluxInterval     time.Duration
+	UseLogsNewSchema bool
 }

@@ -178,6 +177,8 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
 		am.ViewAccess(ah.listLicensesV2)).
 		Methods(http.MethodGet)

+	router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
+
 	// Gateway
 	router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.AdminAccess(ah.ServeGatewayHTTP))
118 ee/query-service/app/api/queryrange.go Normal file
@@ -0,0 +1,118 @@
package api

import (
    "bytes"
    "fmt"
    "io"
    "net/http"

    "go.signoz.io/signoz/ee/query-service/anomaly"
    baseapp "go.signoz.io/signoz/pkg/query-service/app"
    "go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
    "go.signoz.io/signoz/pkg/query-service/model"
    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
    "go.uber.org/zap"
)

func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {

    bodyBytes, _ := io.ReadAll(r.Body)
    r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))

    queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)

    if apiErrorObj != nil {
        zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
        RespondError(w, apiErrorObj, nil)
        return
    }
    queryRangeParams.Version = "v4"

    // add temporality for each metric
    temporalityErr := aH.PopulateTemporality(r.Context(), queryRangeParams)
    if temporalityErr != nil {
        zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
        RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
        return
    }

    anomalyQueryExists := false
    anomalyQuery := &v3.BuilderQuery{}
    if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
        for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
            for _, fn := range query.Functions {
                if fn.Name == v3.FunctionNameAnomaly {
                    anomalyQueryExists = true
                    anomalyQuery = query
                    break
                }
            }
        }
    }

    if anomalyQueryExists {
        // ensure all queries have metric data source, and there should be only one anomaly query
        for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
            if query.DataSource != v3.DataSourceMetrics {
                RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("all queries must have metric data source")}, nil)
                return
            }
        }

        // get the threshold, and seasonality from the anomaly query
        var seasonality anomaly.Seasonality
        for _, fn := range anomalyQuery.Functions {
            if fn.Name == v3.FunctionNameAnomaly {
                seasonalityStr, ok := fn.NamedArgs["seasonality"].(string)
                if !ok {
                    seasonalityStr = "daily"
                }
                if seasonalityStr == "weekly" {
                    seasonality = anomaly.SeasonalityWeekly
                } else if seasonalityStr == "daily" {
                    seasonality = anomaly.SeasonalityDaily
                } else {
                    seasonality = anomaly.SeasonalityHourly
                }
                break
            }
        }
        var provider anomaly.Provider
        switch seasonality {
        case anomaly.SeasonalityWeekly:
            provider = anomaly.NewWeeklyProvider(
                anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
                anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
                anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
                anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
            )
        case anomaly.SeasonalityDaily:
            provider = anomaly.NewDailyProvider(
                anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
                anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
                anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
                anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
            )
        case anomaly.SeasonalityHourly:
            provider = anomaly.NewHourlyProvider(
                anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
                anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
                anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
                anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
            )
        }
        anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
        if err != nil {
            RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
            return
        }
        resp := v3.QueryRangeResponse{
            Result:     anomalies.Results,
            ResultType: "anomaly",
        }
        aH.Respond(w, resp)
    } else {
        r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
        aH.QueryRangeV4(w, r)
    }
}
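The handler branches on whether any builder query carries the anomaly function. A hedged sketch of such a function spec, expressed as the Go values the handler reads (whether v3.Function is the exact struct name is an assumption; the Name and NamedArgs fields match how the handler and getAnomalies access them):

    fn := v3.Function{
        Name: v3.FunctionNameAnomaly,
        NamedArgs: map[string]interface{}{
            "seasonality":       "weekly", // falls back to "daily" when absent
            "z_score_threshold": 3.0,      // falls back to 3 in getAnomalies
        },
    }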
@@ -1,401 +0,0 @@
package db

import (
    "context"
    "crypto/md5"
    "encoding/json"
    "fmt"
    "reflect"
    "regexp"
    "sort"
    "strings"
    "time"

    "go.signoz.io/signoz/ee/query-service/model"
    baseconst "go.signoz.io/signoz/pkg/query-service/constants"
    basemodel "go.signoz.io/signoz/pkg/query-service/model"
    "go.signoz.io/signoz/pkg/query-service/utils"
    "go.uber.org/zap"
)

// GetMetricResultEE runs the query and returns list of time series
func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {

    defer utils.Elapsed("GetMetricResult", nil)()
    zap.L().Info("Executing metric result query: ", zap.String("query", query))

    var hash string
    // If getSubTreeSpans function is used in the clickhouse query
    if strings.Contains(query, "getSubTreeSpans(") {
        var err error
        query, hash, err = r.getSubTreeSpansCustomFunction(ctx, query, hash)
        if err == fmt.Errorf("no spans found for the given query") {
            return nil, "", nil
        }
        if err != nil {
            return nil, "", err
        }
    }

    rows, err := r.conn.Query(ctx, query)
    if err != nil {
        zap.L().Error("Error in processing query", zap.Error(err))
        return nil, "", fmt.Errorf("error in processing query")
    }

    var (
        columnTypes = rows.ColumnTypes()
        columnNames = rows.Columns()
        vars        = make([]interface{}, len(columnTypes))
    )
    for i := range columnTypes {
        vars[i] = reflect.New(columnTypes[i].ScanType()).Interface()
    }
    // when group by is applied, each combination of cartesian product
    // of attributes is separate series. each item in metricPointsMap
    // represent a unique series.
    metricPointsMap := make(map[string][]basemodel.MetricPoint)
    // attribute key-value pairs for each group selection
    attributesMap := make(map[string]map[string]string)

    defer rows.Close()
    for rows.Next() {
        if err := rows.Scan(vars...); err != nil {
            return nil, "", err
        }
        var groupBy []string
        var metricPoint basemodel.MetricPoint
        groupAttributes := make(map[string]string)
        // Assuming that the end result row contains a timestamp, value and optional labels
        // Label key and value are both strings.
        for idx, v := range vars {
            colName := columnNames[idx]
            switch v := v.(type) {
            case *string:
                // special case for returning all labels
                if colName == "fullLabels" {
                    var metric map[string]string
                    err := json.Unmarshal([]byte(*v), &metric)
                    if err != nil {
                        return nil, "", err
                    }
                    for key, val := range metric {
                        groupBy = append(groupBy, val)
                        groupAttributes[key] = val
                    }
                } else {
                    groupBy = append(groupBy, *v)
                    groupAttributes[colName] = *v
                }
            case *time.Time:
                metricPoint.Timestamp = v.UnixMilli()
            case *float64:
                metricPoint.Value = *v
            case **float64:
                // ch seems to return this type when column is derived from
                // SELECT count(*)/ SELECT count(*)
                floatVal := *v
                if floatVal != nil {
                    metricPoint.Value = *floatVal
                }
            case *float32:
                float32Val := float32(*v)
                metricPoint.Value = float64(float32Val)
            case *uint8, *uint64, *uint16, *uint32:
                if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok {
                    metricPoint.Value = float64(reflect.ValueOf(v).Elem().Uint())
                } else {
                    groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
                    groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
                }
            case *int8, *int16, *int32, *int64:
                if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok {
                    metricPoint.Value = float64(reflect.ValueOf(v).Elem().Int())
                } else {
                    groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
                    groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
                }
            default:
                zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName))
            }
        }
        sort.Strings(groupBy)
        key := strings.Join(groupBy, "")
        attributesMap[key] = groupAttributes
        metricPointsMap[key] = append(metricPointsMap[key], metricPoint)
    }

    var seriesList []*basemodel.Series
    for key := range metricPointsMap {
        points := metricPointsMap[key]
        // first point in each series could be invalid since the
        // aggregations are applied with point from prev series
        if len(points) != 0 && len(points) > 1 {
            points = points[1:]
        }
        attributes := attributesMap[key]
        series := basemodel.Series{Labels: attributes, Points: points}
        seriesList = append(seriesList, &series)
    }
    // err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash)
    // if err != nil {
    //     zap.L().Error("Error in dropping temporary table: ", err)
    //     return nil, err
    // }
    if hash == "" {
        return seriesList, hash, nil
    } else {
        return seriesList, "getSubTreeSpans" + hash, nil
    }
}

func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) {

    zap.L().Debug("Executing getSubTreeSpans function")

    // str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;`

    // process the query to fetch subTree query
    var subtreeInput string
    query, subtreeInput, hash = processQuery(query, hash)

    err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash)
    if err != nil {
        zap.L().Error("Error in dropping temporary table", zap.Error(err))
        return query, hash, err
    }

    // Create temporary table to store the getSubTreeSpans() results
    zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash))
    err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)")
    if err != nil {
        zap.L().Error("Error in creating temporary table", zap.Error(err))
        return query, hash, err
    }

    var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse
    getSpansSubQuery := subtreeInput
    // Execute the subTree query
    zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery))
    err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery)

    // zap.L().Info(getSpansSubQuery)

    if err != nil {
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return query, hash, fmt.Errorf("error in processing sql query")
    }

    var searchScanResponses []basemodel.SearchSpanDBResponseItem

    // TODO : @ankit: I think the algorithm does not need to assume that subtrees are from the same TraceID. We can take this as an improvement later.
    // Fetch all the spans of the same TraceID so that we can build the subtree
    modelQuery := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)

    if len(getSpansSubQueryDBResponses) == 0 {
        return query, hash, fmt.Errorf("no spans found for the given query")
    }
    zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery))
    err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)

    if err != nil {
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return query, hash, fmt.Errorf("error in processing sql query")
    }

    // Process model to fetch the spans
    zap.L().Debug("Processing model to fetch the spans")
    searchSpanResponses := []basemodel.SearchSpanResponseItem{}
    for _, item := range searchScanResponses {
        var jsonItem basemodel.SearchSpanResponseItem
        json.Unmarshal([]byte(item.Model), &jsonItem)
        jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano())
        if jsonItem.Events == nil {
            jsonItem.Events = []string{}
        }
        searchSpanResponses = append(searchSpanResponses, jsonItem)
    }
    // Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash
    // Use map to store pointer to the spans to avoid duplicates and save memory
    zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))

    treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses)
    if err != nil {
        zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err))
        return query, hash, err
    }
    zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
    statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash))
    if err != nil {
        zap.L().Error("Error in preparing batch statement", zap.Error(err))
        return query, hash, err
    }
    for _, span := range treeSearchResponse {
        var parentID string
        if len(span.References) > 0 && span.References[0].RefType == "CHILD_OF" {
            parentID = span.References[0].SpanId
        }
        err = statement.Append(
            time.Unix(0, int64(span.TimeUnixNano)),
            span.TraceID,
            span.SpanID,
            parentID,
            span.RootSpanID,
            span.ServiceName,
            span.Name,
            span.RootName,
            uint64(span.DurationNano),
            int8(span.Kind),
            span.TagMap,
            span.Events,
        )
        if err != nil {
            zap.L().Error("Error in processing sql query", zap.Error(err))
            return query, hash, err
        }
    }
    zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
    err = statement.Send()
    if err != nil {
        zap.L().Error("Error in sending statement", zap.Error(err))
        return query, hash, err
    }
    return query, hash, nil
}

//lint:ignore SA4009 return hash is fed to the query
func processQuery(query string, hash string) (string, string, string) {
    re3 := regexp.MustCompile(`getSubTreeSpans`)

    submatchall3 := re3.FindAllStringIndex(query, -1)
    getSubtreeSpansMatchIndex := submatchall3[0][1]

    query2countParenthesis := query[getSubtreeSpansMatchIndex:]

    sqlCompleteIndex := 0
    countParenthesisImbalance := 0
    for i, char := range query2countParenthesis {
        if string(char) == "(" {
            countParenthesisImbalance += 1
        }
        if string(char) == ")" {
            countParenthesisImbalance -= 1
        }
        if countParenthesisImbalance == 0 {
            sqlCompleteIndex = i
            break
        }
    }
    subtreeInput := query2countParenthesis[1:sqlCompleteIndex]

    // hash the subtreeInput
    hmd5 := md5.Sum([]byte(subtreeInput))
    hash = fmt.Sprintf("%x", hmd5)

    // Reformat the query to use the getSubTreeSpans function
    query = query[:getSubtreeSpansMatchIndex] + hash + " " + query2countParenthesis[sqlCompleteIndex+1:]
    return query, subtreeInput, hash
}
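    // To make the rewrite in processQuery concrete, a sketch with an assumed
    // hash "abc123": the parenthesis walk finds the balanced argument of the
    // call, extracts it as subtreeInput, and splices the md5 hash in place of
    // the call so the outer query reads from the temporary table built above.
    //
    //   in:  "... from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where ...) where ..."
    //   out: "... from getSubTreeSpansabc123 where ..."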
// getSubTreeAlgorithm is an algorithm to build the subtrees of the spans and return the list of spans
func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse) (map[string]*basemodel.SearchSpanResponseItem, error) {

    var spans []*model.SpanForTraceDetails
    for _, spanItem := range payload {
        var parentID string
        if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
            parentID = spanItem.References[0].SpanId
        }
        span := &model.SpanForTraceDetails{
            TimeUnixNano: spanItem.TimeUnixNano,
            SpanID:       spanItem.SpanID,
            TraceID:      spanItem.TraceID,
            ServiceName:  spanItem.ServiceName,
            Name:         spanItem.Name,
            Kind:         spanItem.Kind,
            DurationNano: spanItem.DurationNano,
            TagMap:       spanItem.TagMap,
            ParentID:     parentID,
            Events:       spanItem.Events,
            HasError:     spanItem.HasError,
        }
        spans = append(spans, span)
    }

    zap.L().Debug("Building Tree")
    roots, err := buildSpanTrees(&spans)
    if err != nil {
        return nil, err
    }
    searchSpansResult := make(map[string]*basemodel.SearchSpanResponseItem)
    // Every span which was fetched from getSubTree Input SQL query is considered root
    // For each root, get the subtree spans
    for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses {
        targetSpan := &model.SpanForTraceDetails{}
        // zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
        // Search target span object in the tree
        for _, root := range roots {
            targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID)
            if targetSpan != nil {
                break
            }
            if err != nil {
                zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
                return nil, err
            }
        }
        if targetSpan == nil {
            return nil, nil
        }
        // Build subtree for the target span
        // Mark the target span as root by setting parent ID as empty string
        targetSpan.ParentID = ""
        preParents := []*model.SpanForTraceDetails{targetSpan}
        children := []*model.SpanForTraceDetails{}

        // Get the subtree child spans
        for i := 0; len(preParents) != 0; i++ {
            parents := []*model.SpanForTraceDetails{}
            for _, parent := range preParents {
                children = append(children, parent.Children...)
                parents = append(parents, parent.Children...)
            }
            preParents = parents
        }

        resultSpans := children
        // Add the target span to the result spans
        resultSpans = append(resultSpans, targetSpan)

        for _, item := range resultSpans {
            references := []basemodel.OtelSpanRef{
                {
                    TraceId: item.TraceID,
                    SpanId:  item.ParentID,
                    RefType: "CHILD_OF",
                },
            }

            if item.Events == nil {
                item.Events = []string{}
            }
            searchSpansResult[item.SpanID] = &basemodel.SearchSpanResponseItem{
                TimeUnixNano: item.TimeUnixNano,
                SpanID:       item.SpanID,
                TraceID:      item.TraceID,
                ServiceName:  item.ServiceName,
                Name:         item.Name,
                Kind:         item.Kind,
                References:   references,
                DurationNano: item.DurationNano,
                TagMap:       item.TagMap,
                Events:       item.Events,
                HasError:     item.HasError,
                RootSpanID:   getSpansSubQueryDBResponse.SpanID,
                RootName:     targetSpan.Name,
            }
        }
    }
    return searchSpansResult, nil
}
@@ -170,6 +170,14 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 			return nil, err
 		}
 	}
+	var c cache.Cache
+	if serverOptions.CacheConfigPath != "" {
+		cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
+		if err != nil {
+			return nil, err
+		}
+		c = cache.NewCache(cacheOpts)
+	}

 	<-readerReady
 	rm, err := makeRulesManager(serverOptions.PromConfigPath,

@@ -177,6 +185,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 		serverOptions.RuleRepoURL,
 		localDB,
 		reader,
+		c,
 		serverOptions.DisableRules,
 		lm,
 		serverOptions.UseLogsNewSchema,

@@ -237,15 +246,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	telemetry.GetInstance().SetReader(reader)
 	telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)

-	var c cache.Cache
-	if serverOptions.CacheConfigPath != "" {
-		cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
-		if err != nil {
-			return nil, err
-		}
-		c = cache.NewCache(cacheOpts)
-	}

 	fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)

 	if err != nil {

@@ -732,6 +732,7 @@ func makeRulesManager(
 	ruleRepoURL string,
 	db *sqlx.DB,
 	ch baseint.Reader,
+	cache cache.Cache,
 	disableRules bool,
 	fm baseint.FeatureLookup,
 	useLogsNewSchema bool) (*baserules.Manager, error) {

@@ -756,10 +757,11 @@
 		RepoURL: ruleRepoURL,
 		DBConn:  db,
 		Context: context.Background(),
-		Logger:  nil,
+		Logger:  zap.L(),
 		DisableRules: disableRules,
 		FeatureFlags: fm,
 		Reader:       ch,
+		Cache:        cache,
 		EvalDelay:    baseconst.GetEvalDelay(),

 		PrepareTaskFunc: rules.PrepareTaskFunc,
@@ -20,6 +20,8 @@ import (
 	"google.golang.org/grpc"
 	"google.golang.org/grpc/credentials/insecure"

+	prommodel "github.com/prometheus/common/model"
+
 	zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
 	zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"

@@ -77,6 +79,10 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
 	return logger
 }

+func init() {
+	prommodel.NameValidationScheme = prommodel.UTF8Validation
+}
+
 func main() {
 	var promConfigPath, skipTopLvlOpsPath string
@@ -13,7 +13,6 @@ const Onboarding = "ONBOARDING"
 const ChatSupport = "CHAT_SUPPORT"
 const Gateway = "GATEWAY"
 const PremiumSupport = "PREMIUM_SUPPORT"
-const QueryBuilderSearchV2 = "QUERY_BUILDER_SEARCH_V2"

 var BasicPlan = basemodel.FeatureSet{
 	basemodel.Feature{

@@ -129,7 +128,7 @@ var BasicPlan = basemodel.FeatureSet{
 		Route: "",
 	},
 	basemodel.Feature{
-		Name:       QueryBuilderSearchV2,
+		Name:       basemodel.AnomalyDetection,
 		Active:     false,
 		Usage:      0,
 		UsageLimit: -1,

@@ -244,8 +243,8 @@ var ProPlan = basemodel.FeatureSet{
 		Route: "",
 	},
 	basemodel.Feature{
-		Name:       QueryBuilderSearchV2,
-		Active:     false,
+		Name:       basemodel.AnomalyDetection,
+		Active:     true,
 		Usage:      0,
 		UsageLimit: -1,
 		Route:      "",

@@ -373,8 +372,8 @@ var EnterprisePlan = basemodel.FeatureSet{
 		Route: "",
 	},
 	basemodel.Feature{
-		Name:       QueryBuilderSearchV2,
-		Active:     false,
+		Name:       basemodel.AnomalyDetection,
+		Active:     true,
 		Usage:      0,
 		UsageLimit: -1,
 		Route:      "",
393 ee/query-service/rules/anomaly.go Normal file
@@ -0,0 +1,393 @@
package rules

import (
    "context"
    "encoding/json"
    "fmt"
    "math"
    "strings"
    "sync"
    "time"

    "go.uber.org/zap"

    "go.signoz.io/signoz/ee/query-service/anomaly"
    "go.signoz.io/signoz/pkg/query-service/cache"
    "go.signoz.io/signoz/pkg/query-service/common"
    "go.signoz.io/signoz/pkg/query-service/model"

    querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
    "go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
    "go.signoz.io/signoz/pkg/query-service/interfaces"
    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
    "go.signoz.io/signoz/pkg/query-service/utils/labels"
    "go.signoz.io/signoz/pkg/query-service/utils/times"
    "go.signoz.io/signoz/pkg/query-service/utils/timestamp"

    "go.signoz.io/signoz/pkg/query-service/formatter"

    baserules "go.signoz.io/signoz/pkg/query-service/rules"

    yaml "gopkg.in/yaml.v2"
)

const (
    RuleTypeAnomaly = "anomaly_rule"
)

type AnomalyRule struct {
    *baserules.BaseRule

    mtx sync.Mutex

    reader interfaces.Reader

    // querierV2 is used for alerts created after the introduction of the new metrics query builder
    querierV2 interfaces.Querier

    provider anomaly.Provider

    seasonality anomaly.Seasonality
}

func NewAnomalyRule(
    id string,
    p *baserules.PostableRule,
    featureFlags interfaces.FeatureLookup,
    reader interfaces.Reader,
    cache cache.Cache,
    opts ...baserules.RuleOption,
) (*AnomalyRule, error) {

    zap.L().Info("creating new AnomalyRule", zap.String("id", id), zap.Any("opts", opts))

    baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
    if err != nil {
        return nil, err
    }

    t := AnomalyRule{
        BaseRule: baseRule,
    }

    switch strings.ToLower(p.RuleCondition.Seasonality) {
    case "hourly":
        t.seasonality = anomaly.SeasonalityHourly
    case "daily":
        t.seasonality = anomaly.SeasonalityDaily
    case "weekly":
        t.seasonality = anomaly.SeasonalityWeekly
    default:
        t.seasonality = anomaly.SeasonalityDaily
    }

    zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))

    querierOptsV2 := querierV2.QuerierOptions{
        Reader:        reader,
        Cache:         cache,
        KeyGenerator:  queryBuilder.NewKeyGenerator(),
        FeatureLookup: featureFlags,
    }

    t.querierV2 = querierV2.NewQuerier(querierOptsV2)
    t.reader = reader
    if t.seasonality == anomaly.SeasonalityHourly {
        t.provider = anomaly.NewHourlyProvider(
            anomaly.WithCache[*anomaly.HourlyProvider](cache),
            anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
            anomaly.WithReader[*anomaly.HourlyProvider](reader),
            anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
        )
    } else if t.seasonality == anomaly.SeasonalityDaily {
        t.provider = anomaly.NewDailyProvider(
            anomaly.WithCache[*anomaly.DailyProvider](cache),
            anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
            anomaly.WithReader[*anomaly.DailyProvider](reader),
            anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
        )
    } else if t.seasonality == anomaly.SeasonalityWeekly {
        t.provider = anomaly.NewWeeklyProvider(
            anomaly.WithCache[*anomaly.WeeklyProvider](cache),
            anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
            anomaly.WithReader[*anomaly.WeeklyProvider](reader),
            anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
        )
    }
    return &t, nil
}

func (r *AnomalyRule) Type() baserules.RuleType {
    return RuleTypeAnomaly
}

func (r *AnomalyRule) prepareQueryRange(ts time.Time) (*v3.QueryRangeParamsV3, error) {

    zap.L().Info("prepareQueryRange", zap.Int64("ts", ts.UnixMilli()), zap.Int64("evalWindow", r.EvalWindow().Milliseconds()), zap.Int64("evalDelay", r.EvalDelay().Milliseconds()))

    start := ts.Add(-time.Duration(r.EvalWindow())).UnixMilli()
    end := ts.UnixMilli()

    if r.EvalDelay() > 0 {
        start = start - int64(r.EvalDelay().Milliseconds())
        end = end - int64(r.EvalDelay().Milliseconds())
    }
    // round to minute otherwise we could potentially miss data
    start = start - (start % (60 * 1000))
    end = end - (end % (60 * 1000))

    compositeQuery := r.Condition().CompositeQuery

    if compositeQuery.PanelType != v3.PanelTypeGraph {
        compositeQuery.PanelType = v3.PanelTypeGraph
    }

    // default mode
    return &v3.QueryRangeParamsV3{
        Start:          start,
        End:            end,
        Step:           int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
        CompositeQuery: compositeQuery,
        Variables:      make(map[string]interface{}, 0),
        NoCache:        false,
    }, nil
}
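    // A quick sketch of the rounding above (assumed timestamps): an
    // evaluation at 10:07:23.500 with a 5m eval window and no eval delay
    // yields start 10:02:23.500 and end 10:07:23.500, which the modulo-60000
    // arithmetic floors to 10:02:00 and 10:07:00 respectively.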
|
||||
func (r *AnomalyRule) GetSelectedQuery() string {
|
||||
return r.Condition().GetSelectedQueryName()
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (baserules.Vector, error) {
|
||||
|
||||
params, err := r.prepareQueryRange(ts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = r.PopulateTemporality(ctx, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("internal error while setting temporality")
|
||||
}
|
||||
|
||||
anomalies, err := r.provider.GetAnomalies(ctx, &anomaly.GetAnomaliesRequest{
|
||||
Params: params,
|
||||
Seasonality: r.seasonality,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var queryResult *v3.Result
|
||||
for _, result := range anomalies.Results {
|
||||
if result.QueryName == r.GetSelectedQuery() {
|
||||
queryResult = result
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var resultVector baserules.Vector
|
||||
|
||||
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
|
||||
zap.L().Info("anomaly scores", zap.String("scores", string(scoresJSON)))
|
||||
|
||||
for _, series := range queryResult.AnomalyScores {
|
||||
smpl, shouldAlert := r.ShouldAlert(*series)
|
||||
if shouldAlert {
|
||||
resultVector = append(resultVector, smpl)
|
||||
}
|
||||
}
|
||||
return resultVector, nil
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) {
|
||||
|
||||
prevState := r.State()
|
||||
|
||||
valueFormatter := formatter.FromUnit(r.Unit())
|
||||
res, err := r.buildAndRunQuery(ctx, ts)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
r.mtx.Lock()
|
||||
defer r.mtx.Unlock()
|
||||
|
||||
resultFPs := map[uint64]struct{}{}
|
||||
var alerts = make(map[uint64]*baserules.Alert, len(res))
|
||||
|
||||
for _, smpl := range res {
|
||||
l := make(map[string]string, len(smpl.Metric))
|
||||
for _, lbl := range smpl.Metric {
|
||||
l[lbl.Name] = lbl.Value
|
||||
}
|
||||
|
||||
value := valueFormatter.Format(smpl.V, r.Unit())
|
||||
threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
|
||||
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))
|
||||
|
||||
tmplData := baserules.AlertTemplateData(l, value, threshold)
|
||||
// Inject some convenience variables that are easier to remember for users
|
||||
// who are not used to Go's templating system.
|
||||
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
|
||||
|
||||
// utility function to apply go template on labels and annotations
|
||||
		expand := func(text string) string {
			tmpl := baserules.NewTemplateExpander(
				ctx,
				defs+text,
				"__alert_"+r.Name(),
				tmplData,
				times.Time(timestamp.FromTime(ts)),
				nil,
			)
			result, err := tmpl.Expand()
			if err != nil {
				result = fmt.Sprintf("<error expanding template: %s>", err)
				zap.L().Error("Expanding alert template failed", zap.Error(err), zap.Any("data", tmplData))
			}
			return result
		}

		lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel)
		resultLabels := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel).Labels()

		for name, value := range r.Labels().Map() {
			lb.Set(name, expand(value))
		}

		lb.Set(labels.AlertNameLabel, r.Name())
		lb.Set(labels.AlertRuleIdLabel, r.ID())
		lb.Set(labels.RuleSourceLabel, r.GeneratorURL())

		annotations := make(labels.Labels, 0, len(r.Annotations().Map()))
		for name, value := range r.Annotations().Map() {
			annotations = append(annotations, labels.Label{Name: name, Value: expand(value)})
		}
		if smpl.IsMissing {
			lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
		}

		lbs := lb.Labels()
		h := lbs.Hash()
		resultFPs[h] = struct{}{}

		if _, ok := alerts[h]; ok {
			zap.L().Error("the alert query returns duplicate records", zap.String("ruleid", r.ID()), zap.Any("alert", alerts[h]))
			err = fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
			return nil, err
		}

		alerts[h] = &baserules.Alert{
			Labels:            lbs,
			QueryResultLables: resultLabels,
			Annotations:       annotations,
			ActiveAt:          ts,
			State:             model.StatePending,
			Value:             smpl.V,
			GeneratorURL:      r.GeneratorURL(),
			Receivers:         r.PreferredChannels(),
			Missing:           smpl.IsMissing,
		}
	}

	zap.L().Info("number of alerts found", zap.String("name", r.Name()), zap.Int("count", len(alerts)))

	// alerts[h] is ready, add or update active list now
	for h, a := range alerts {
		// Check whether we already have alerting state for the identifying label set.
		// Update the last value and annotations if so, create a new alert entry otherwise.
		if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
			alert.Value = a.Value
			alert.Annotations = a.Annotations
			alert.Receivers = r.PreferredChannels()
			continue
		}

		r.Active[h] = a
	}

	itemsToAdd := []model.RuleStateHistory{}

	// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
	for fp, a := range r.Active {
		labelsJSON, err := json.Marshal(a.QueryResultLables)
		if err != nil {
			zap.L().Error("error marshaling labels", zap.Error(err), zap.Any("labels", a.Labels))
		}
		if _, ok := resultFPs[fp]; !ok {
			// If the alert was previously firing, keep it around for a given
			// retention time so it is reported as resolved to the AlertManager.
			if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > baserules.ResolvedRetention) {
				delete(r.Active, fp)
			}
			if a.State != model.StateInactive {
				a.State = model.StateInactive
				a.ResolvedAt = ts
				itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
					RuleID:       r.ID(),
					RuleName:     r.Name(),
					State:        model.StateInactive,
					StateChanged: true,
					UnixMilli:    ts.UnixMilli(),
					Labels:       model.LabelsString(labelsJSON),
					Fingerprint:  a.QueryResultLables.Hash(),
					Value:        a.Value,
				})
			}
			continue
		}

		if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
			a.State = model.StateFiring
			a.FiredAt = ts
			state := model.StateFiring
			if a.Missing {
				state = model.StateNoData
			}
			itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
				RuleID:       r.ID(),
				RuleName:     r.Name(),
				State:        state,
				StateChanged: true,
				UnixMilli:    ts.UnixMilli(),
				Labels:       model.LabelsString(labelsJSON),
				Fingerprint:  a.QueryResultLables.Hash(),
				Value:        a.Value,
			})
		}
	}

	currentState := r.State()

	overallStateChanged := currentState != prevState
	for idx, item := range itemsToAdd {
		item.OverallStateChanged = overallStateChanged
		item.OverallState = currentState
		itemsToAdd[idx] = item
	}

	r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)

	return len(r.Active), nil
}

func (r *AnomalyRule) String() string {

	ar := baserules.PostableRule{
		AlertName:         r.Name(),
		RuleCondition:     r.Condition(),
		EvalWindow:        baserules.Duration(r.EvalWindow()),
		Labels:            r.Labels().Map(),
		Annotations:       r.Annotations().Map(),
		PreferredChannels: r.PreferredChannels(),
	}

	byt, err := yaml.Marshal(ar)
	if err != nil {
		return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
	}

	return string(byt)
}
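A note on the lifecycle implemented above: each label set's alert starts Pending, is promoted to Firing (or NoData when the sample is missing) once it has stayed active past the rule's hold duration, and is flipped to Inactive when its fingerprint drops out of the query results, lingering for a retention window so the resolution still reaches the Alertmanager. A minimal TypeScript sketch of those transitions — illustrative only, the real implementation is the Go above, and the type names here are invented:

type AlertState = 'pending' | 'firing' | 'inactive';

interface ActiveAlert {
	state: AlertState;
	activeAt: number; // ms epoch when the alert first appeared
	resolvedAt?: number; // ms epoch when it stopped matching
}

// One evaluation tick; holdMs/retentionMs mirror r.HoldDuration()
// and baserules.ResolvedRetention in the Go code above.
function evaluate(
	active: Map<string, ActiveAlert>,
	currentFingerprints: Set<string>,
	now: number,
	holdMs: number,
	retentionMs: number,
): void {
	for (const [fp, alert] of active) {
		if (!currentFingerprints.has(fp)) {
			// No longer matching: pending alerts are dropped immediately,
			// resolved ones linger for the retention window.
			if (
				alert.state === 'pending' ||
				(alert.resolvedAt !== undefined && now - alert.resolvedAt > retentionMs)
			) {
				active.delete(fp);
			}
			if (alert.state !== 'inactive') {
				alert.state = 'inactive';
				alert.resolvedAt = now;
			}
			continue;
		}
		if (alert.state === 'pending' && now - alert.activeAt >= holdMs) {
			alert.state = 'firing';
		}
	}
}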
@@ -53,8 +53,27 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
		// create promql rule task for evaluation
		task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)

	} else if opts.Rule.RuleType == baserules.RuleTypeAnomaly {
		// create anomaly rule
		ar, err := NewAnomalyRule(
			ruleId,
			opts.Rule,
			opts.FF,
			opts.Reader,
			opts.Cache,
			baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
		)
		if err != nil {
			return task, err
		}

		rules = append(rules, ar)

		// create anomaly rule task for evaluation
		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)

	} else {
		return nil, fmt.Errorf("unsupported rule type. Supported types: %s, %s", baserules.RuleTypeProm, baserules.RuleTypeThreshold)
		return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, baserules.RuleTypeProm, baserules.RuleTypeThreshold)
	}

	return task, nil
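The branch added here mirrors the existing ones: pick a rule constructor, then wrap the rules in a task whose type selects the evaluation backend (the error path now also names the offending type). A hedged TypeScript sketch of that dispatch shape — names hypothetical, the real code is the Go above:

type RuleType = 'promql_rule' | 'threshold_rule' | 'anomaly_rule';

interface Task {
	run(): void;
}

// Stand-ins for newTask(baserules.TaskTypeProm, ...) and newTask(baserules.TaskTypeCh, ...).
const newPromTask = (name: string): Task => ({ run: (): void => console.log(`prom:${name}`) });
const newChTask = (name: string): Task => ({ run: (): void => console.log(`ch:${name}`) });

function prepareTask(ruleType: RuleType, name: string): Task {
	switch (ruleType) {
		case 'promql_rule':
			return newPromTask(name);
		case 'threshold_rule':
		case 'anomaly_rule':
			// anomaly rules reuse the ClickHouse task type (TaskTypeCh above);
			// threshold rules are assumed here to do the same
			return newChTask(name);
		default:
			// like the hunk above, surface the offending type in the error
			throw new Error(`unsupported rule type ${ruleType as string}`);
	}
}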
@@ -207,7 +207,6 @@
		"eslint-plugin-sonarjs": "^0.12.0",
		"husky": "^7.0.4",
		"is-ci": "^3.0.1",
		"jest-playwright-preset": "^1.7.2",
		"jest-styled-components": "^7.0.8",
		"lint-staged": "^12.5.0",
		"msw": "1.3.2",
@@ -1,11 +1,7 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_2048_2251)">
<path opacity="0.9" d="M8.02226 15.9866C3.56539 15.9866 -6.10352e-05 12.4896 -6.10352e-05 8.11832C-6.10352e-05 3.79075 3.56539 0.25 8.02226 0.25H13.0584C14.7075 0.25 15.9999 1.56139 15.9999 3.13506V8.11832C15.9999 12.4896 12.4345 15.9866 8.02226 15.9866Z" fill="#F25733"/>
<path d="M7.95919 4.71207C4.63025 4.71207 2.75514 7.46868 2.67693 7.58603C2.48413 7.87508 2.48413 8.24888 2.67707 8.53816C2.75514 8.65528 4.63025 11.4119 7.95919 11.4119C11.2881 11.4119 13.1633 8.65528 13.2414 8.53792C13.4342 8.24888 13.4342 7.87508 13.2413 7.58582C13.1632 7.46868 11.2881 4.71207 7.95919 4.71207ZM3.13771 8.23088C3.06925 8.12832 3.06925 7.99571 3.13771 7.89307C3.20059 7.79867 4.53564 5.83764 6.92256 5.36723C5.84092 5.78476 5.07127 6.83485 5.07127 8.062C5.07127 9.28912 5.84092 10.3392 6.92256 10.7567C4.53564 10.2863 3.20059 8.32528 3.13771 8.23088ZM6.62838 8.062C6.62838 8.21488 6.50443 8.3388 6.35151 8.3388C6.19859 8.3388 6.07465 8.21488 6.07465 8.062C6.07465 7.02287 6.92003 6.17748 7.95916 6.17748C8.11207 6.17748 8.23599 6.30141 8.23599 6.45434C8.23599 6.60727 8.11207 6.73119 7.95916 6.73119C7.22535 6.73119 6.62838 7.32815 6.62838 8.062ZM7.95919 8.73504C7.58803 8.73504 7.2861 8.43312 7.2861 8.062C7.2861 7.69085 7.58803 7.3889 7.95919 7.3889C8.33039 7.3889 8.63231 7.69083 8.63231 8.062C8.63231 8.43312 8.33039 8.73504 7.95919 8.73504ZM12.7806 8.23088C12.7178 8.32528 11.3827 10.2863 8.99583 10.7567C10.0775 10.3392 10.8471 9.28912 10.8471 8.062C10.8471 6.83487 10.0775 5.78477 8.99583 5.36724C11.3827 5.83768 12.7178 7.7987 12.7806 7.89307C12.8491 7.99571 12.8491 8.12832 12.7806 8.23088Z" fill="#F9F2F9"/>
</g>
<defs>
<clipPath id="clip0_2048_2251">
<rect width="16" height="16" fill="white"/>
</clipPath>
</defs>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="100" height="100" fill="none"><rect width="100" height="100" fill="url(#a)" rx="20"/><g fill="#fff" fill-rule="evenodd" clip-rule="evenodd" filter="url(#b)"><path d="M11 49.941v-.003l.002-.005.003-.014.007-.035a8.37 8.37 0 0 1 .105-.42c.073-.263.184-.624.348-1.072.328-.896.866-2.135 1.73-3.617 1.732-2.97 4.753-6.883 9.95-10.955 5.223-4.092 10.295-6.293 14.08-7.471a35.328 35.328 0 0 1 4.585-1.114 23.628 23.628 0 0 1 1.687-.223 9.17 9.17 0 0 1 .108-.009l.034-.002h.011l.007-.001s.002 0 .133 2.217c.13 2.218.132 2.218.132 2.218h-.002l-.053.004a19.098 19.098 0 0 0-1.326.178c-.809.136-1.937.37-3.302.763l-.127.043c-2.745.94-6.666 2.775-11.249 6.362-4.572 3.577-7.142 6.95-8.563 9.393-.711 1.222-1.137 2.215-1.383 2.889a9.995 9.995 0 0 0-.29.933c.008.037.022.095.044.173.046.166.123.423.246.76.246.674.672 1.667 1.383 2.89 1.421 2.441 3.991 5.815 8.563 9.392 4.584 3.587 8.504 5.423 11.25 6.362l.126.043c1.365.393 2.493.627 3.302.763a19.098 19.098 0 0 0 1.326.178l.053.004h.002s-.002 0-.133 2.218C43.66 75 43.657 75 43.657 75h-.007l-.011-.001-.034-.002a9.17 9.17 0 0 1-.478-.046 23.628 23.628 0 0 1-1.317-.186 35.328 35.328 0 0 1-4.584-1.114c-3.786-1.178-8.858-3.38-14.081-7.471-5.197-4.072-8.218-7.985-9.95-10.955-.864-1.482-1.402-2.72-1.73-3.617-.164-.448-.275-.81-.348-1.072a8.37 8.37 0 0 1-.105-.42l-.007-.035-.003-.014-.002-.005v-.121Zm78 0v-.003l-.002-.005-.002-.014-.008-.035a8.532 8.532 0 0 0-.105-.42 14.049 14.049 0 0 0-.348-1.072c-.328-.896-.866-2.135-1.73-3.617-1.732-2.97-4.753-6.883-9.95-10.955-5.223-4.092-10.295-6.293-14.08-7.471a35.328 35.328 0 0 0-4.585-1.114 23.628 23.628 0 0 0-1.687-.223 9.17 9.17 0 0 0-.108-.009l-.034-.002h-.011L56.343 25s-.002 0-.133 2.217c-.13 2.218-.132 2.218-.132 2.218h.002l.053.004a19.098 19.098 0 0 1 1.326.178c.809.136 1.937.37 3.302.763l.127.043c2.745.94 6.666 2.775 11.249 6.362 4.572 3.577 7.141 6.95 8.563 9.393.711 1.222 1.137 2.215 1.383 2.889a9.995 9.995 0 0 1 .29.933 9.995 9.995 0 0 1-.29.934c-.246.673-.672 1.666-1.383 2.888-1.422 2.442-3.991 5.816-8.563 9.393-4.584 3.587-8.504 5.423-11.25 6.362l-.126.043a30.108 30.108 0 0 1-3.302.763 19.098 19.098 0 0 1-1.326.178l-.053.004h-.002s.002 0 .133 2.218C56.34 75 56.343 75 56.343 75h.007l.011-.001.034-.002a9.17 9.17 0 0 0 .478-.046c.314-.034.758-.092 1.317-.186a35.328 35.328 0 0 0 4.584-1.114c3.786-1.178 8.858-3.38 14.081-7.471 5.197-4.072 8.218-7.985 9.95-10.955.864-1.482 1.402-2.72 1.73-3.617.164-.448.275-.81.348-1.072a8.532 8.532 0 0 0 .105-.42l.008-.035.002-.014.001-.005.001-.003v-.118Z"/><path d="M68.342 49.998c0 9.846-7.924 17.827-17.7 17.827-9.775 0-17.7-7.981-17.7-17.827 0-9.846 7.925-17.827 17.7-17.827 9.776 0 17.7 7.981 17.7 17.827ZM46.218 39.97s-2.127 2.508-2.766 4.457c-.412 1.257-.553 3.343-.553 3.343h-5.531s0-1.672 1.106-4.457c1.106-2.786 2.212-3.343 2.212-3.343h5.532Zm-2.766 15.6c.639 1.949 2.766 4.457 2.766 4.457h-5.532s-1.106-.557-2.212-3.343c-1.106-2.785-1.106-4.457-1.106-4.457h5.53s.142 2.086.554 3.343Z"/></g><defs><radialGradient id="a" cx="0" cy="0" r="1" gradientTransform="matrix(40.99997 42 -42 40.99997 50 50)" gradientUnits="userSpaceOnUse"><stop offset=".33" stop-color="#F76526"/><stop offset="1" stop-color="#F43030"/></radialGradient><filter id="b" width="90" height="62" x="5" y="23" color-interpolation-filters="sRGB" filterUnits="userSpaceOnUse"><feFlood flood-opacity="0" result="BackgroundImageFix"/><feColorMatrix in="SourceAlpha" result="hardAlpha" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/><feOffset dy="4"/><feGaussianBlur 
stdDeviation="3"/><feComposite in2="hardAlpha" operator="out"/><feColorMatrix values="0 0 0 0 0.368384 0 0 0 0 0.0623777 0 0 0 0 0.0623777 0 0 0 0.25 0"/><feBlend in2="BackgroundImageFix" result="effect1_dropShadow_3909_18731"/><feBlend in="SourceGraphic" in2="effect1_dropShadow_3909_18731" result="shape"/><feColorMatrix in="SourceAlpha" result="hardAlpha" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/><feOffset dy="4"/><feGaussianBlur stdDeviation="3"/><feComposite in2="hardAlpha" k2="-1" k3="1" operator="arithmetic"/><feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.06 0"/><feBlend in2="shape" result="effect2_innerShadow_3909_18731"/></filter></defs></svg>
Before Width: | Height: | Size: 1.8 KiB After Width: | Height: | Size: 4.1 KiB |
Binary file not shown.
Before Width: | Height: | Size: 2.2 KiB After Width: | Height: | Size: 13 KiB |
@@ -53,6 +53,7 @@
	"option_atleastonce": "at least once",
	"option_onaverage": "on average",
	"option_intotal": "in total",
	"option_last": "last",
	"option_above": "above",
	"option_below": "below",
	"option_equal": "is equal to",
@@ -117,6 +118,8 @@
	"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
	"field_unit": "Threshold unit",
	"text_alert_on_absent": "Send a notification if data is missing for",
	"text_require_min_points": "Run alert evaluation only when there are minimum of",
	"text_num_points": "data points in each result group",
	"text_alert_frequency": "Run alert every",
	"text_for": "minutes",
	"selected_query_placeholder": "Select query"
@@ -40,6 +40,7 @@
	"option_atleastonce": "at least once",
	"option_onaverage": "on average",
	"option_intotal": "in total",
	"option_last": "last",
	"option_above": "above",
	"option_below": "below",
	"option_equal": "is equal to",
@@ -1,3 +1,3 @@
{
	"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support."
	"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
}
@@ -53,6 +53,7 @@
	"option_atleastonce": "at least once",
	"option_onaverage": "on average",
	"option_intotal": "in total",
	"option_last": "last",
	"option_above": "above",
	"option_below": "below",
	"option_equal": "is equal to",
@@ -117,6 +118,8 @@
	"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
	"field_unit": "Threshold unit",
	"text_alert_on_absent": "Send a notification if data is missing for",
	"text_require_min_points": "Run alert evaluation only when there are minimum of",
	"text_num_points": "data points in each result group",
	"text_alert_frequency": "Run alert every",
	"text_for": "minutes",
	"selected_query_placeholder": "Select query"
@@ -40,6 +40,7 @@
	"option_atleastonce": "at least once",
	"option_onaverage": "on average",
	"option_intotal": "in total",
	"option_last": "last",
	"option_above": "above",
	"option_below": "below",
	"option_equal": "is equal to",
@@ -1,3 +1,3 @@
{
	"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support."
	"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
}
File diff suppressed because one or more lines are too long
Before Width: | Height: | Size: 10 KiB |
@@ -12,6 +12,7 @@ import useAnalytics from 'hooks/analytics/useAnalytics';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import useFeatureFlags from 'hooks/useFeatureFlag';
import useGetFeatureFlag from 'hooks/useGetFeatureFlag';
import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
@@ -58,23 +59,13 @@ function App(): JSX.Element {

	const isDarkMode = useIsDarkMode();

	const isChatSupportEnabled =
		useFeatureFlags(FeatureKeys.CHAT_SUPPORT)?.active || false;

	const isPremiumSupportEnabled =
		useFeatureFlags(FeatureKeys.PREMIUM_SUPPORT)?.active || false;

	const featureResponse = useGetFeatureFlag((allFlags) => {
		const isOnboardingEnabled =
			allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
			false;

		const isChatSupportEnabled =
			allFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)?.active ||
			false;

		const isPremiumSupportEnabled =
			allFlags.find((flag) => flag.name === FeatureKeys.PREMIUM_SUPPORT)?.active ||
			false;

		const showAddCreditCardModal =
			!isPremiumSupportEnabled &&
			!licenseData?.payload?.trialConvertedToSubscription;

		dispatch({
			type: UPDATE_FEATURE_FLAG_RESPONSE,
			payload: {
@@ -83,6 +74,10 @@ function App(): JSX.Element {
			},
		});

		const isOnboardingEnabled =
			allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
			false;

		if (!isOnboardingEnabled || !isCloudUserVal) {
			const newRoutes = routes.filter(
				(route) => route?.path !== ROUTES.GET_STARTED,
@@ -90,16 +85,6 @@ function App(): JSX.Element {

			setRoutes(newRoutes);
		}

		if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
			// eslint-disable-next-line @typescript-eslint/ban-ts-comment
			// @ts-ignore
			window.Intercom('boot', {
				app_id: process.env.INTERCOM_APP_ID,
				email: user?.email || '',
				name: user?.name || '',
			});
		}
	});

	const isOnBasicPlan =
@@ -137,7 +122,6 @@ function App(): JSX.Element {

		window.analytics.identify(email, sanitizedIdentifyPayload);
		window.analytics.group(domain, groupTraits);
		window.clarity('identify', email, name);

		posthog?.identify(email, {
			email,
@@ -202,6 +186,26 @@ function App(): JSX.Element {
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [pathname]);

	useEffect(() => {
		const showAddCreditCardModal =
			!isPremiumSupportEnabled &&
			!licenseData?.payload?.trialConvertedToSubscription;

		if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
			window.Intercom('boot', {
				app_id: process.env.INTERCOM_APP_ID,
				email: user?.email || '',
				name: user?.name || '',
			});
		}
	}, [
		isLoggedInState,
		isChatSupportEnabled,
		user,
		licenseData,
		isPremiumSupportEnabled,
	]);

	useEffect(() => {
		if (user && user?.email && user?.userId && user?.name) {
			try {
@@ -228,6 +232,10 @@ function App(): JSX.Element {
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [user]);

	useEffect(() => {
		console.info('We are hiring! https://jobs.gem.com/signoz');
	}, []);

	return (
		<ConfigProvider theme={themeConfig}>
			<Router history={history}>
@@ -0,0 +1,5 @@
.client-side-qb-search {
	.ant-select-selection-search {
		width: max-content !important;
	}
}
@@ -0,0 +1,654 @@
/* eslint-disable sonarjs/cognitive-complexity */

import './ClientSideQBSearch.styles.scss';

import { Color } from '@signozhq/design-tokens';
import { Select, Tag, Tooltip } from 'antd';
import {
	OPERATORS,
	QUERY_BUILDER_OPERATORS_BY_TYPES,
	QUERY_BUILDER_SEARCH_VALUES,
} from 'constants/queryBuilder';
import { CustomTagProps } from 'container/QueryBuilder/filters/QueryBuilderSearch';
import { selectStyle } from 'container/QueryBuilder/filters/QueryBuilderSearch/config';
import { PLACEHOLDER } from 'container/QueryBuilder/filters/QueryBuilderSearch/constant';
import { TypographyText } from 'container/QueryBuilder/filters/QueryBuilderSearch/style';
import {
	checkCommaInValue,
	getOperatorFromValue,
	getOperatorValue,
	getTagToken,
	isInNInOperator,
} from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
import {
	DropdownState,
	ITag,
	Option,
} from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
import Suggestions from 'container/QueryBuilder/filters/QueryBuilderSearchV2/Suggestions';
import { WhereClauseConfig } from 'hooks/queryBuilder/useAutoComplete';
import { validationMapper } from 'hooks/queryBuilder/useIsValidTag';
import { operatorTypeMapper } from 'hooks/queryBuilder/useOperatorType';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { isArray, isEmpty, isEqual, isObject } from 'lodash-es';
import { ChevronDown, ChevronUp } from 'lucide-react';
import type { BaseSelectRef } from 'rc-select';
import {
	KeyboardEvent,
	useCallback,
	useEffect,
	useMemo,
	useRef,
	useState,
} from 'react';
import {
	BaseAutocompleteData,
	DataTypes,
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
	IBuilderQuery,
	TagFilter,
} from 'types/api/queryBuilder/queryBuilderData';
import { popupContainer } from 'utils/selectPopupContainer';
import { v4 as uuid } from 'uuid';

export interface AttributeKey {
	key: string;
}

export interface AttributeValuesMap {
	[key: string]: AttributeValue;
}

interface ClientSideQBSearchProps {
	filters: TagFilter;
	onChange: (value: TagFilter) => void;
	whereClauseConfig?: WhereClauseConfig;
	placeholder?: string;
	className?: string;
	suffixIcon?: React.ReactNode;
	attributeValuesMap?: AttributeValuesMap;
	attributeKeys: AttributeKey[];
}

interface AttributeValue {
	stringAttributeValues: string[] | [];
	numberAttributeValues: number[] | [];
	boolAttributeValues: boolean[] | [];
}

function ClientSideQBSearch(
	props: ClientSideQBSearchProps,
): React.ReactElement {
	const {
		onChange,
		placeholder,
		className,
		suffixIcon,
		whereClauseConfig,
		attributeValuesMap,
		attributeKeys,
		filters,
	} = props;

	const isDarkMode = useIsDarkMode();

	const selectRef = useRef<BaseSelectRef>(null);

	const [isOpen, setIsOpen] = useState<boolean>(false);

	// create the tags from the initial query here, this should only be computed on the first load as post that tags and query will be always in sync.
	const [tags, setTags] = useState<ITag[]>(filters.items as ITag[]);

	// this will maintain the current state of in process filter item
	const [currentFilterItem, setCurrentFilterItem] = useState<ITag | undefined>();

	const [currentState, setCurrentState] = useState<DropdownState>(
		DropdownState.ATTRIBUTE_KEY,
	);

	// to maintain the current running state until the tokenization happens for the tag
	const [searchValue, setSearchValue] = useState<string>('');

	const [dropdownOptions, setDropdownOptions] = useState<Option[]>([]);

	const attributeValues = useMemo(() => {
		if (currentFilterItem?.key?.key) {
			return attributeValuesMap?.[currentFilterItem.key.key];
		}
		return {
			stringAttributeValues: [],
			numberAttributeValues: [],
			boolAttributeValues: [],
		};
	}, [attributeValuesMap, currentFilterItem?.key?.key]);

	const handleDropdownSelect = useCallback(
		(value: string) => {
			let parsedValue: BaseAutocompleteData | string;

			try {
				parsedValue = JSON.parse(value);
			} catch {
				parsedValue = value;
			}
			if (currentState === DropdownState.ATTRIBUTE_KEY) {
				setCurrentFilterItem((prev) => ({
					...prev,
					key: parsedValue as BaseAutocompleteData,
					op: '',
					value: '',
				}));
				setCurrentState(DropdownState.OPERATOR);
				setSearchValue((parsedValue as BaseAutocompleteData)?.key);
			} else if (currentState === DropdownState.OPERATOR) {
				if (value === OPERATORS.EXISTS || value === OPERATORS.NOT_EXISTS) {
					setTags((prev) => [
						...prev,
						{
							key: currentFilterItem?.key,
							op: value,
							value: '',
						} as ITag,
					]);
					setCurrentFilterItem(undefined);
					setSearchValue('');
					setCurrentState(DropdownState.ATTRIBUTE_KEY);
				} else {
					setCurrentFilterItem((prev) => ({
						key: prev?.key as BaseAutocompleteData,
						op: value as string,
						value: '',
					}));
					setCurrentState(DropdownState.ATTRIBUTE_VALUE);
					setSearchValue(`${currentFilterItem?.key?.key} ${value}`);
				}
			} else if (currentState === DropdownState.ATTRIBUTE_VALUE) {
				const operatorType =
					operatorTypeMapper[currentFilterItem?.op || ''] || 'NOT_VALID';
				const isMulti = operatorType === QUERY_BUILDER_SEARCH_VALUES.MULTIPLY;

				if (isMulti) {
					const { tagKey, tagOperator, tagValue } = getTagToken(searchValue);
					// this condition takes care of adding the IN/NIN multi values when pressed enter on an already existing value.
					// not the best interaction but in sync with what we have today!
					if (tagValue.includes(String(value))) {
						setSearchValue('');
						setCurrentState(DropdownState.ATTRIBUTE_KEY);
						setCurrentFilterItem(undefined);
						setTags((prev) => [
							...prev,
							{
								key: currentFilterItem?.key,
								op: currentFilterItem?.op,
								value: tagValue,
							} as ITag,
						]);
						return;
					}
					// this is for adding subsequent comma separated values
					const newSearch = [...tagValue];
					newSearch[newSearch.length === 0 ? 0 : newSearch.length - 1] = value;
					const newSearchValue = newSearch.join(',');
					setSearchValue(`${tagKey} ${tagOperator} ${newSearchValue},`);
				} else {
					setSearchValue('');
					setCurrentState(DropdownState.ATTRIBUTE_KEY);
					setCurrentFilterItem(undefined);
					setTags((prev) => [
						...prev,
						{
							key: currentFilterItem?.key,
							op: currentFilterItem?.op,
							value,
						} as ITag,
					]);
				}
			}
		},
		[currentFilterItem?.key, currentFilterItem?.op, currentState, searchValue],
	);

	const handleSearch = useCallback((value: string) => {
		setSearchValue(value);
	}, []);

	const onInputKeyDownHandler = useCallback(
		(event: KeyboardEvent<Element>): void => {
			if (event.key === 'Backspace' && !searchValue) {
				event.stopPropagation();
				setTags((prev) => prev.slice(0, -1));
			}
		},
		[searchValue],
	);

	const handleOnBlur = useCallback((): void => {
		if (searchValue) {
			const operatorType =
				operatorTypeMapper[currentFilterItem?.op || ''] || 'NOT_VALID';
			// if key is added and operator is not present then convert to body CONTAINS key
			if (
				currentFilterItem?.key &&
				isEmpty(currentFilterItem?.op) &&
				whereClauseConfig?.customKey === 'body' &&
				whereClauseConfig?.customOp === OPERATORS.CONTAINS
			) {
				setTags((prev) => [
					...prev,
					{
						key: {
							key: 'body',
							dataType: DataTypes.String,
							type: '',
							isColumn: true,
							isJSON: false,
							id: 'body--string----true',
						},
						op: OPERATORS.CONTAINS,
						value: currentFilterItem?.key?.key,
					},
				]);
				setCurrentFilterItem(undefined);
				setSearchValue('');
				setCurrentState(DropdownState.ATTRIBUTE_KEY);
			} else if (
				currentFilterItem?.op === OPERATORS.EXISTS ||
				currentFilterItem?.op === OPERATORS.NOT_EXISTS
			) {
				// if exists or not exists operator is present then convert directly to tag! no need of value here
				setTags((prev) => [
					...prev,
					{
						key: currentFilterItem?.key,
						op: currentFilterItem?.op,
						value: '',
					},
				]);
				setCurrentFilterItem(undefined);
				setSearchValue('');
				setCurrentState(DropdownState.ATTRIBUTE_KEY);
			} else if (
				// if the current state is in sync with the kind of operator used then convert into a tag
				validationMapper[operatorType]?.(
					isArray(currentFilterItem?.value)
						? currentFilterItem?.value.length || 0
						: 1,
				)
			) {
				setTags((prev) => [
					...prev,
					{
						key: currentFilterItem?.key as BaseAutocompleteData,
						op: currentFilterItem?.op as string,
						value: currentFilterItem?.value || '',
					},
				]);
				setCurrentFilterItem(undefined);
				setSearchValue('');
				setCurrentState(DropdownState.ATTRIBUTE_KEY);
			}
		}
	}, [
		currentFilterItem?.key,
		currentFilterItem?.op,
		currentFilterItem?.value,
		searchValue,
		whereClauseConfig?.customKey,
		whereClauseConfig?.customOp,
	]);

	// this useEffect takes care of tokenisation based on the search state
	useEffect(() => {
		// if there is no search value reset to the default state
		if (!searchValue) {
			setCurrentFilterItem(undefined);
			setCurrentState(DropdownState.ATTRIBUTE_KEY);
		}

		// split the current search value based on delimiters
		const { tagKey, tagOperator, tagValue } = getTagToken(searchValue);

		if (
			// Case 1 - if key is defined but the search text doesn't match with the set key,
			// can happen when user selects from dropdown and then deletes a few characters
			currentFilterItem?.key &&
			currentFilterItem?.key?.key !== tagKey.split(' ')[0]
		) {
			setCurrentFilterItem(undefined);
			setCurrentState(DropdownState.ATTRIBUTE_KEY);
		} else if (tagOperator && isEmpty(currentFilterItem?.op)) {
			// Case 2 -> key is set and now typing for the operator
			if (
				tagOperator === OPERATORS.EXISTS ||
				tagOperator === OPERATORS.NOT_EXISTS
			) {
				setTags((prev) => [
					...prev,
					{
						key: currentFilterItem?.key,
						op: tagOperator,
						value: '',
					} as ITag,
				]);
				setCurrentFilterItem(undefined);
				setSearchValue('');
				setCurrentState(DropdownState.ATTRIBUTE_KEY);
			} else {
				setCurrentFilterItem((prev) => ({
					key: prev?.key as BaseAutocompleteData,
					op: tagOperator,
					value: '',
				}));

				setCurrentState(DropdownState.ATTRIBUTE_VALUE);
			}
		} else if (
			// Case 3 -> selected operator from dropdown and then erased a part of it
			!isEmpty(currentFilterItem?.op) &&
			tagOperator !== currentFilterItem?.op
		) {
			setCurrentFilterItem((prev) => ({
				key: prev?.key as BaseAutocompleteData,
				op: '',
				value: '',
			}));
			setCurrentState(DropdownState.OPERATOR);
		} else if (currentState === DropdownState.ATTRIBUTE_VALUE) {
			// Case 4 -> the final value state where we set the current filter values and the tokenisation happens on either
			// dropdown click or blur event
			const currentValue = {
				key: currentFilterItem?.key as BaseAutocompleteData,
				op: currentFilterItem?.op as string,
				value: tagValue,
			};
			if (!isEqual(currentValue, currentFilterItem)) {
				setCurrentFilterItem((prev) => ({
					key: prev?.key as BaseAutocompleteData,
					op: prev?.op as string,
					value: tagValue,
				}));
			}
		}
	}, [
		currentFilterItem,
		currentFilterItem?.key,
		currentFilterItem?.op,
		searchValue,
		currentState,
	]);

	// the useEffect takes care of setting the dropdown values correctly on change of the current state
	useEffect(() => {
		if (currentState === DropdownState.ATTRIBUTE_KEY) {
			const filteredAttributeKeys = attributeKeys.filter((key) =>
				key.key.startsWith(searchValue),
			);
			setDropdownOptions(
				filteredAttributeKeys?.map(
					(key) =>
						({
							label: key.key,
							value: key,
						} as Option),
				) || [],
			);
		}
		if (currentState === DropdownState.OPERATOR) {
			const keyOperator = searchValue.split(' ');
			const partialOperator = keyOperator?.[1];
			const strippedKey = keyOperator?.[0];

			let operatorOptions;
			if (currentFilterItem?.key?.dataType) {
				operatorOptions = QUERY_BUILDER_OPERATORS_BY_TYPES[
					currentFilterItem.key
						.dataType as keyof typeof QUERY_BUILDER_OPERATORS_BY_TYPES
				].map((operator) => ({
					label: operator,
					value: operator,
				}));

				if (partialOperator) {
					operatorOptions = operatorOptions.filter((op) =>
						op.label.startsWith(partialOperator.toLocaleUpperCase()),
					);
				}
				setDropdownOptions(operatorOptions);
			} else if (strippedKey.endsWith('[*]') && strippedKey.startsWith('body.')) {
				operatorOptions = [OPERATORS.HAS, OPERATORS.NHAS].map((operator) => ({
					label: operator,
					value: operator,
				}));
				setDropdownOptions(operatorOptions);
			} else {
				operatorOptions = QUERY_BUILDER_OPERATORS_BY_TYPES.universal.map(
					(operator) => ({
						label: operator,
						value: operator,
					}),
				);

				if (partialOperator) {
					operatorOptions = operatorOptions.filter((op) =>
						op.label.startsWith(partialOperator.toLocaleUpperCase()),
					);
				}
				setDropdownOptions(operatorOptions);
			}
		}

		if (currentState === DropdownState.ATTRIBUTE_VALUE) {
			const values: Array<string | number | boolean> = [];
			const { tagValue } = getTagToken(searchValue);
			if (isArray(tagValue)) {
				if (!isEmpty(tagValue[tagValue.length - 1]))
					values.push(tagValue[tagValue.length - 1]);
			} else if (!isEmpty(tagValue)) values.push(tagValue);

			const currentAttributeValues =
				attributeValues?.stringAttributeValues ||
				attributeValues?.numberAttributeValues ||
				attributeValues?.boolAttributeValues ||
				[];

			values.push(...currentAttributeValues);

			if (attributeValuesMap) {
				setDropdownOptions(
					values.map(
						(val) =>
							({
								label: checkCommaInValue(String(val)),
								value: val,
							} as Option),
					),
				);
			} else {
				// If attributeValuesMap is not provided, don't set dropdown options
				setDropdownOptions([]);
			}
		}
	}, [
		attributeValues,
		currentFilterItem?.key?.dataType,
		currentState,
		attributeKeys,
		searchValue,
		attributeValuesMap,
	]);

	useEffect(() => {
		const filterTags: IBuilderQuery['filters'] = {
			op: 'AND',
			items: [],
		};
		tags.forEach((tag) => {
			const computedTagValue =
				tag.value &&
				Array.isArray(tag.value) &&
				tag.value[tag.value.length - 1] === ''
					? tag.value?.slice(0, -1)
					: tag.value ?? '';
			filterTags.items.push({
				id: tag.id || uuid().slice(0, 8),
				key: tag.key,
				op: getOperatorValue(tag.op),
				value: computedTagValue,
			});
		});

		if (!isEqual(filters, filterTags)) {
			onChange(filterTags);
			setTags(
				filterTags.items.map((tag) => ({
					...tag,
					op: getOperatorFromValue(tag.op),
				})) as ITag[],
			);
		}
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [tags]);

	const queryTags = useMemo(
		() => tags.map((tag) => `${tag.key.key} ${tag.op} ${tag.value}`),
		[tags],
	);

	const onTagRender = ({
		value,
		closable,
		onClose,
	}: CustomTagProps): React.ReactElement => {
		const { tagOperator } = getTagToken(value);
		const isInNin = isInNInOperator(tagOperator);
		const chipValue = isInNin
			? value?.trim()?.replace(/,\s*$/, '')
			: value?.trim();

		const indexInQueryTags = queryTags.findIndex((qTag) => isEqual(qTag, value));
		const tagDetails = tags[indexInQueryTags];

		const onCloseHandler = (): void => {
			onClose();
			setSearchValue('');
			setTags((prev) => prev.filter((t) => !isEqual(t, tagDetails)));
		};

		const tagEditHandler = (value: string): void => {
			setCurrentFilterItem(tagDetails);
			setSearchValue(value);
			setCurrentState(DropdownState.ATTRIBUTE_VALUE);
			setTags((prev) => prev.filter((t) => !isEqual(t, tagDetails)));
		};

		const isDisabled = !!searchValue;

		return (
			<span className="qb-search-bar-tokenised-tags">
				<Tag
					closable={!searchValue && closable}
					onClose={onCloseHandler}
					className={tagDetails?.key?.type || ''}
				>
					<Tooltip title={chipValue}>
						<TypographyText
							ellipsis
							$isInNin={isInNin}
							disabled={isDisabled}
							$isEnabled={!!searchValue}
							onClick={(): void => {
								if (!isDisabled) tagEditHandler(value);
							}}
						>
							{chipValue}
						</TypographyText>
					</Tooltip>
				</Tag>
			</span>
		);
	};

	const suffixIconContent = useMemo(() => {
		if (suffixIcon) {
			return suffixIcon;
		}
		return isOpen ? (
			<ChevronUp
				size={14}
				color={isDarkMode ? Color.TEXT_VANILLA_100 : Color.TEXT_INK_100}
			/>
		) : (
			<ChevronDown
				size={14}
				color={isDarkMode ? Color.TEXT_VANILLA_100 : Color.TEXT_INK_100}
			/>
		);
	}, [isDarkMode, isOpen, suffixIcon]);

	return (
		<div className="query-builder-search-v2 ">
			<Select
				ref={selectRef}
				getPopupContainer={popupContainer}
				virtual={false}
				showSearch
				tagRender={onTagRender}
				transitionName=""
				choiceTransitionName=""
				filterOption={false}
				open={isOpen}
				suffixIcon={suffixIconContent}
				onDropdownVisibleChange={setIsOpen}
				autoClearSearchValue={false}
				mode="multiple"
				placeholder={placeholder}
				value={queryTags}
				searchValue={searchValue}
				className={className}
				rootClassName="query-builder-search client-side-qb-search"
				disabled={!attributeKeys.length}
				style={selectStyle}
				onSearch={handleSearch}
				onSelect={handleDropdownSelect}
				onInputKeyDown={onInputKeyDownHandler}
				notFoundContent={null}
				showAction={['focus']}
				onBlur={handleOnBlur}
			>
				{dropdownOptions.map((option) => {
					let val = option.value;
					try {
						if (isObject(option.value)) {
							val = JSON.stringify(option.value);
						} else {
							val = option.value;
						}
					} catch {
						val = option.value;
					}
					return (
						<Select.Option key={isObject(val) ? `select-option` : val} value={val}>
							<Suggestions
								label={option.label}
								value={option.value}
								option={currentState}
								searchValue={searchValue}
							/>
						</Select.Option>
					);
				})}
			</Select>
		</div>
	);
}

ClientSideQBSearch.defaultProps = {
	placeholder: PLACEHOLDER,
	className: '',
	suffixIcon: null,
	whereClauseConfig: {},
	attributeValuesMap: {},
};

export default ClientSideQBSearch;
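Since ClientSideQBSearch resolves keys and values purely from props rather than calling autocomplete APIs, a caller supplies both up front. A hypothetical usage sketch based on the props interface above — the keys and values here are invented for illustration:

import { useState } from 'react';
import ClientSideQBSearch from 'components/ClientSideQBSearch/ClientSideQBSearch';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

function AlertLabelSearch(): JSX.Element {
	// 'AND' with an empty items list mirrors the initialFilters shape used elsewhere.
	const [filters, setFilters] = useState<TagFilter>({ op: 'AND', items: [] });

	return (
		<ClientSideQBSearch
			filters={filters}
			onChange={setFilters}
			attributeKeys={[{ key: 'severity' }, { key: 'service' }]}
			attributeValuesMap={{
				severity: {
					stringAttributeValues: ['critical', 'warning'],
					numberAttributeValues: [],
					boolAttributeValues: [],
				},
			}}
		/>
	);
}

export default AlertLabelSearch;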
@@ -12,6 +12,20 @@ beforeAll(() => {
	matchMedia();
});

jest.mock('uplot', () => {
	const paths = {
		spline: jest.fn(),
		bars: jest.fn(),
	};
	const uplotMock = jest.fn(() => ({
		paths,
	}));
	return {
		paths,
		default: uplotMock,
	};
});

jest.mock('react-dnd', () => ({
	useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
	useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
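The factory above exports the path-builder spies alongside the mocked constructor, so assertions can reach both. A hypothetical test consuming this mock:

// The mocked module hands back the same spy objects the factory created,
// so both the constructor and the path builders are assertable.
const { default: uplotMock, paths } = require('uplot');

test('chart code constructs uplot and uses its path builders', () => {
	const chart = uplotMock();
	chart.paths.spline();

	expect(uplotMock).toHaveBeenCalled();
	expect(paths.spline).toHaveBeenCalled();
});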
@@ -22,7 +22,13 @@ export type GetViewDetailsUsingViewKey = (
	viewKey: string,
	data: ViewProps[] | undefined,
) =>
	| { query: Query; name: string; uuid: string; panelType: PANEL_TYPES }
	| {
			query: Query;
			name: string;
			uuid: string;
			panelType: PANEL_TYPES;
			extraData?: string;
	  }
	| undefined;

export interface IsQueryUpdatedInViewProps {
@@ -29,9 +29,9 @@ export const getViewDetailsUsingViewKey: GetViewDetailsUsingViewKey = (
) => {
	const selectedView = data?.find((view) => view.uuid === viewKey);
	if (selectedView) {
		const { compositeQuery, name, uuid } = selectedView;
		const { compositeQuery, name, uuid, extraData } = selectedView;
		const query = mapQueryDataFromApi(compositeQuery);
		return { query, name, uuid, panelType: compositeQuery.panelType };
		return { query, name, uuid, panelType: compositeQuery.panelType, extraData };
	}
	return undefined;
};
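A hypothetical consumer of the widened return type; `getViewDetailsUsingViewKey` and `ViewProps` come from the modules in the hunks above (imports omitted), and JSON is just one plausible encoding for `extraData`:

function openSavedView(viewKey: string, savedViews: ViewProps[] | undefined): void {
	const details = getViewDetailsUsingViewKey(viewKey, savedViews);

	if (details && 'extraData' in details && details.extraData) {
		// Older saved views carry no extraData, so absence must be handled.
		const uiState = JSON.parse(details.extraData); // encoding assumed to be JSON
		console.log(details.name, uiState);
	}
}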
@@ -139,6 +139,7 @@ export const getGraphOptions = (
	},
	scales: {
		x: {
			stacked: isStacked,
			grid: {
				display: true,
				color: getGridColor(),
@@ -165,6 +166,7 @@ export const getGraphOptions = (
			ticks: { color: getAxisLabelColor(currentTheme) },
		},
		y: {
			stacked: isStacked,
			display: true,
			grid: {
				display: true,
@@ -178,9 +180,6 @@
			},
		},
	},
	stacked: {
		display: isStacked === undefined ? false : 'auto',
	},
},
elements: {
	line: {
@@ -2,6 +2,14 @@ export const VIEW_TYPES = {
	OVERVIEW: 'OVERVIEW',
	JSON: 'JSON',
	CONTEXT: 'CONTEXT',
	INFRAMETRICS: 'INFRAMETRICS',
} as const;

export type VIEWS = typeof VIEW_TYPES[keyof typeof VIEW_TYPES];

export const RESOURCE_KEYS = {
	CLUSTER_NAME: 'k8s.cluster.name',
	POD_NAME: 'k8s.pod.name',
	NODE_NAME: 'k8s.node.name',
	HOST_NAME: 'host.name',
} as const;
@@ -9,6 +9,7 @@ import cx from 'classnames';
import { LogType } from 'components/Logs/LogStateIndicator/LogStateIndicator';
import { LOCALSTORAGE } from 'constants/localStorage';
import ContextView from 'container/LogDetailedView/ContextView/ContextView';
import InfraMetrics from 'container/LogDetailedView/InfraMetrics/InfraMetrics';
import JSONView from 'container/LogDetailedView/JsonView';
import Overview from 'container/LogDetailedView/Overview';
import {
@@ -22,6 +23,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useNotifications } from 'hooks/useNotifications';
import {
	BarChart2,
	Braces,
	Copy,
	Filter,
@@ -36,7 +38,7 @@ import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource, StringOperators } from 'types/common/queryBuilder';
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';

import { VIEW_TYPES, VIEWS } from './constants';
import { RESOURCE_KEYS, VIEW_TYPES, VIEWS } from './constants';
import { LogDetailProps } from './LogDetail.interfaces';
import QueryBuilderSearchWrapper from './QueryBuilderSearchWrapper';

@@ -192,6 +194,17 @@ function LogDetail({
					Context
				</div>
			</Radio.Button>
			<Radio.Button
				className={
					selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
				}
				value={VIEW_TYPES.INFRAMETRICS}
			>
				<div className="view-title">
					<BarChart2 size={14} />
					Metrics
				</div>
			</Radio.Button>
		</Radio.Group>

		{selectedView === VIEW_TYPES.JSON && (
@@ -246,6 +259,15 @@ function LogDetail({
				isEdit={isEdit}
			/>
		)}
		{selectedView === VIEW_TYPES.INFRAMETRICS && (
			<InfraMetrics
				clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
				podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
				nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
				hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
				logLineTimestamp={log.timestamp.toString()}
			/>
		)}
	</Drawer>
);
}
@@ -15,7 +15,7 @@ function AddToQueryHOC({
}: AddToQueryHOCProps): JSX.Element {
	const handleQueryAdd = (event: MouseEvent<HTMLDivElement>): void => {
		event.stopPropagation();
		onAddToQuery(fieldKey, fieldValue, OPERATORS.IN);
		onAddToQuery(fieldKey, fieldValue, OPERATORS['=']);
	};

	const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
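The one-line change above alters the filter that clicking a field produces: previously a single-element IN condition, now a direct equality. A sketch of the two shapes, following the query builder's key/op/value convention — the field and value here are invented:

type FilterItem = { key: string; op: string; value: string | string[] };

const before: FilterItem = { key: 'service.name', op: 'IN', value: ['frontend'] };
const after: FilterItem = { key: 'service.name', op: '=', value: 'frontend' };

console.log(before, after);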
@@ -22,26 +22,21 @@
	}

	&.INFO {
		background-color: var(--bg-slate-400);
		background-color: var(--bg-robin-500);
	}

	&.WARNING,
	&.WARN {
		background-color: var(--bg-amber-500);
	}

	&.ERROR {
		background-color: var(--bg-cherry-500);
	}

	&.TRACE {
		background-color: var(--bg-robin-300);
		background-color: var(--bg-forest-400);
	}

	&.DEBUG {
		background-color: var(--bg-forest-500);
		background-color: var(--bg-aqua-500);
	}

	&.FATAL {
		background-color: var(--bg-sakura-500);
	}
@@ -16,7 +16,7 @@ function WelcomeLeftContainer({
	<Container>
		<LeftContainer direction="vertical">
			<Space align="center">
				<Logo src="signoz-signup.svg" alt="logo" />
				<Logo src="/Logos/signoz-brand-logo.svg" alt="logo" />
				<Title style={{ fontSize: '46px', margin: 0 }}>SigNoz</Title>
			</Space>
			<Typography>{t('monitor_signup')}</Typography>
@@ -6,7 +6,6 @@ export const AUTH0_REDIRECT_PATH = '/redirect';

export const DEFAULT_AUTH0_APP_REDIRECTION_PATH = ROUTES.APPLICATION;

export const IS_SIDEBAR_COLLAPSED = 'isSideBarCollapsed';
export const INVITE_MEMBERS_HASH = '#invite-team-members';

export const SIGNOZ_UPGRADE_PLAN_URL =
@@ -1,8 +1,4 @@
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';

const userOS = getUserOperatingSystem();
export const GlobalShortcuts = {
	SidebarCollapse: '\\+meta',
	NavigateToServices: 's+shift',
	NavigateToTraces: 't+shift',
	NavigateToLogs: 'l+shift',
@@ -13,7 +9,6 @@ export const GlobalShortcuts = {
};

export const GlobalShortcutsName = {
	SidebarCollapse: `${userOS === UserOperatingSystem.MACOS ? 'cmd' : 'ctrl'}+\\`,
	NavigateToServices: 'shift+s',
	NavigateToTraces: 'shift+t',
	NavigateToLogs: 'shift+l',
@@ -24,7 +19,6 @@ export const GlobalShortcutsName = {
};

export const GlobalShortcutsDescription = {
	SidebarCollapse: 'Collapse the sidebar',
	NavigateToServices: 'Navigate to Services page',
	NavigateToTraces: 'Navigate to Traces page',
	NavigateToLogs: 'Navigate to logs page',
@@ -1,10 +1,15 @@
import { Color } from '@signozhq/design-tokens';
import Uplot from 'components/Uplot';
import { QueryParams } from 'constants/query';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import heatmapPlugin from 'lib/uPlotLib/plugins/heatmapPlugin';
import timelinePlugin from 'lib/uPlotLib/plugins/timelinePlugin';
import { useMemo, useRef } from 'react';
import { useDispatch } from 'react-redux';
import { UpdateTimeInterval } from 'store/actions';
import { AlertRuleTimelineGraphResponse } from 'types/api/alerts/def';
import uPlot, { AlignedData } from 'uplot';

@@ -41,11 +46,13 @@ function HorizontalTimelineGraph({
		return [timestamps, states];
	}, [data]);

	const urlQuery = useUrlQuery();
	const dispatch = useDispatch();

	const options: uPlot.Options = useMemo(
		() => ({
			width,
			height: 85,
			cursor: { show: false },

			axes: [
				{
@@ -66,6 +73,40 @@ function HorizontalTimelineGraph({
					label: 'States',
				},
			],
			hooks: {
				setSelect: [
					(self): void => {
						const selection = self.select;
						if (selection) {
							const startTime = self.posToVal(selection.left, 'x');
							const endTime = self.posToVal(selection.left + selection.width, 'x');

							const diff = endTime - startTime;

							if (diff > 0) {
								if (urlQuery.has(QueryParams.relativeTime)) {
									urlQuery.delete(QueryParams.relativeTime);
								}

								const startTimestamp = Math.floor(startTime * 1000);
								const endTimestamp = Math.floor(endTime * 1000);

								if (startTimestamp !== endTimestamp) {
									dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
								}

								urlQuery.set(QueryParams.startTime, startTimestamp.toString());
								urlQuery.set(QueryParams.endTime, endTimestamp.toString());

								history.push({
									search: urlQuery.toString(),
								});
							}
						}
					},
				],
			},

			plugins:
				transformedData?.length > 1
					? [
@@ -76,7 +117,7 @@ function HorizontalTimelineGraph({
						]
					: [],
		}),
		[width, isDarkMode, transformedData],
		[width, isDarkMode, transformedData.length, urlQuery, dispatch],
	);
	return <Uplot data={transformedData} options={options} />;
}
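A note on the setSelect hook added above: uPlot reports the drag selection in pixels, so the handler maps pixels to x-axis values with posToVal (seconds here) and then widens them to millisecond epochs for the URL and the time-range action. The same conversion, isolated as a hedged sketch with a minimal structural type for the uPlot instance:

// Returns [startMs, endMs] for a drag selection, or null when the
// selection is empty or inverted — mirroring the guard in the hook above.
function selectionToRange(self: {
	select: { left: number; width: number };
	posToVal(px: number, scale: string): number;
}): [number, number] | null {
	const { left, width } = self.select;
	const start = self.posToVal(left, 'x');
	const end = self.posToVal(left + width, 'x');
	if (end <= start) return null;
	return [Math.floor(start * 1000), Math.floor(end * 1000)];
}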
@@ -109,8 +109,8 @@
	}

	.alert-rule {
		&-value,
		&-created-at {
		&__value,
		&__created-at {
			color: var(--text-ink-400);
		}
	}
@@ -1,16 +1,20 @@
import './Table.styles.scss';

import { Table } from 'antd';
import { initialFilters } from 'constants/queryBuilder';
import {
	useGetAlertRuleDetailsTimelineTable,
	useTimelineTable,
} from 'pages/AlertDetails/hooks';
import { useMemo } from 'react';
import { useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import { timelineTableColumns } from './useTimelineTable';

function TimelineTable(): JSX.Element {
	const [filters, setFilters] = useState<TagFilter>(initialFilters);

	const {
		isLoading,
		isRefetching,
@@ -18,13 +22,14 @@ function TimelineTable(): JSX.Element {
		data,
		isValidRuleId,
		ruleId,
	} = useGetAlertRuleDetailsTimelineTable();
	} = useGetAlertRuleDetailsTimelineTable({ filters });

	const { timelineData, totalItems } = useMemo(() => {
	const { timelineData, totalItems, labels } = useMemo(() => {
		const response = data?.payload?.data;
		return {
			timelineData: response?.items,
			totalItems: response?.total,
			labels: response?.labels,
		};
	}, [data?.payload?.data]);

@@ -42,7 +47,11 @@ function TimelineTable(): JSX.Element {
	<div className="timeline-table">
		<Table
			rowKey={(row): string => `${row.fingerprint}-${row.value}-${row.unixMilli}`}
			columns={timelineTableColumns()}
			columns={timelineTableColumns({
				filters,
				labels: labels ?? {},
				setFilters,
			})}
			dataSource={timelineData}
			pagination={paginationConfig}
			size="middle"
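For reference, a sketch of the filters value that now flows into useGetAlertRuleDetailsTimelineTable once the user applies a label condition; the item shape is simplified for illustration (a real TagFilter item's key is a full attribute object, and ids are short uuids as generated in ClientSideQBSearch):

// Plain-object sketch (types elided): one equality condition on a label.
const filters = {
	op: 'AND',
	items: [
		{ id: 'a1b2c3d4', key: { key: 'severity' }, op: '=', value: 'critical' },
	],
};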
@@ -1,13 +1,84 @@
import { EllipsisOutlined } from '@ant-design/icons';
import { Color } from '@signozhq/design-tokens';
import { Button } from 'antd';
import { ColumnsType } from 'antd/es/table';
import ClientSideQBSearch, {
	AttributeKey,
} from 'components/ClientSideQBSearch/ClientSideQBSearch';
import { ConditionalAlertPopover } from 'container/AlertHistory/AlertPopover/AlertPopover';
import AlertLabels from 'pages/AlertDetails/AlertHeader/AlertLabels/AlertLabels';
import { transformKeyValuesToAttributeValuesMap } from 'container/QueryBuilder/filters/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { Search } from 'lucide-react';
import AlertLabels, {
	AlertLabelsProps,
} from 'pages/AlertDetails/AlertHeader/AlertLabels/AlertLabels';
import AlertState from 'pages/AlertDetails/AlertHeader/AlertState/AlertState';
import { useMemo } from 'react';
import { AlertRuleTimelineTableResponse } from 'types/api/alerts/def';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { formatEpochTimestamp } from 'utils/timeUtils';

export const timelineTableColumns = (): ColumnsType<AlertRuleTimelineTableResponse> => [
const transformLabelsToQbKeys = (
	labels: AlertRuleTimelineTableResponse['labels'],
): AttributeKey[] => Object.keys(labels).flatMap((key) => [{ key }]);

function LabelFilter({
	filters,
	setFilters,
	labels,
}: {
	setFilters: (filters: TagFilter) => void;
	filters: TagFilter;
	labels: AlertLabelsProps['labels'];
}): JSX.Element | null {
	const isDarkMode = useIsDarkMode();

	const { transformedKeys, attributesMap } = useMemo(
		() => ({
			transformedKeys: transformLabelsToQbKeys(labels || {}),
			attributesMap: transformKeyValuesToAttributeValuesMap(labels),
		}),
		[labels],
	);

	const handleSearch = (tagFilters: TagFilter): void => {
		const tagFiltersLength = tagFilters.items.length;

		if (
			(!tagFiltersLength && (!filters || !filters.items.length)) ||
			tagFiltersLength === filters?.items.length
		) {
			return;
		}
		setFilters(tagFilters);
	};

	return (
		<ClientSideQBSearch
			onChange={handleSearch}
			filters={filters}
			className="alert-history-label-search"
			attributeKeys={transformedKeys}
			attributeValuesMap={attributesMap}
			suffixIcon={
				<Search
					size={14}
					color={isDarkMode ? Color.TEXT_VANILLA_100 : Color.TEXT_INK_100}
				/>
			}
		/>
	);
}

export const timelineTableColumns = ({
	filters,
	labels,
	setFilters,
}: {
	filters: TagFilter;
	labels: AlertLabelsProps['labels'];
	setFilters: (filters: TagFilter) => void;
}): ColumnsType<AlertRuleTimelineTableResponse> => [
	{
		title: 'STATE',
		dataIndex: 'state',
@@ -20,7 +91,9 @@ export const timelineTableColumns = (): ColumnsType<AlertRuleTimelineTableRespon
		),
	},
	{
		title: 'LABELS',
		title: (
			<LabelFilter setFilters={setFilters} filters={filters} labels={labels} />
		),
		dataIndex: 'labels',
		render: (labels): JSX.Element => (
			<div className="alert-rule-labels">
@@ -16,12 +16,6 @@
			width: 100%;
		}
	}

	&.docked {
		.app-content {
			width: calc(100% - 240px);
		}
	}
}

.chat-support-gateway {
@@ -5,13 +5,11 @@ import './AppLayout.styles.scss';
|
||||
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Flex } from 'antd';
|
||||
import getLocalStorageKey from 'api/browser/localstorage/get';
|
||||
import getUserLatestVersion from 'api/user/getLatestVersion';
|
||||
import getUserVersion from 'api/user/getVersion';
|
||||
import cx from 'classnames';
|
||||
import ChatSupportGateway from 'components/ChatSupportGateway/ChatSupportGateway';
|
||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import { IS_SIDEBAR_COLLAPSED } from 'constants/app';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import ROUTES from 'constants/routes';
|
||||
import SideNav from 'container/SideNav';
|
||||
@@ -22,22 +20,13 @@ import useLicense from 'hooks/useLicense';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import history from 'lib/history';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import {
|
||||
ReactNode,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useLayoutEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { ReactNode, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { Dispatch } from 'redux';
|
||||
import { sideBarCollapse } from 'store/actions';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppActions from 'types/actions';
|
||||
import {
|
||||
@@ -59,10 +48,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
(state) => state.app,
|
||||
);
|
||||
|
||||
const [collapsed, setCollapsed] = useState<boolean>(
|
||||
getLocalStorageKey(IS_SIDEBAR_COLLAPSED) === 'true',
|
||||
);
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
@@ -117,14 +102,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
const latestCurrentCounter = useRef(0);
|
||||
const latestVersionCounter = useRef(0);
|
||||
|
||||
const onCollapse = useCallback(() => {
|
||||
setCollapsed((collapsed) => !collapsed);
|
||||
}, []);
|
||||
|
||||
useLayoutEffect(() => {
|
||||
dispatch(sideBarCollapse(collapsed));
|
||||
}, [collapsed, dispatch]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
getUserLatestVersionResponse.isFetched &&
|
||||
@@ -255,19 +232,16 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
const isDashboardListView = (): boolean => routeKey === 'ALL_DASHBOARD';
|
||||
const isAlertHistory = (): boolean => routeKey === 'ALERT_HISTORY';
|
||||
const isAlertOverview = (): boolean => routeKey === 'ALERT_OVERVIEW';
|
||||
const isDashboardView = (): boolean => {
|
||||
/**
|
||||
* need to match using regex here as the getRoute function will not work for
|
||||
* routes with id
|
||||
*/
|
||||
const regex = /^\/dashboard\/[a-zA-Z0-9_-]+$/;
|
||||
return regex.test(pathname);
|
||||
};
|
||||
const isPathMatch = (regex: RegExp): boolean => regex.test(pathname);
|
||||
|
||||
const isDashboardWidgetView = (): boolean => {
|
||||
const regex = /^\/dashboard\/[a-zA-Z0-9_-]+\/new$/;
|
||||
return regex.test(pathname);
|
||||
};
|
||||
const isDashboardView = (): boolean =>
|
||||
isPathMatch(/^\/dashboard\/[a-zA-Z0-9_-]+$/);
|
||||
|
||||
const isDashboardWidgetView = (): boolean =>
|
||||
isPathMatch(/^\/dashboard\/[a-zA-Z0-9_-]+\/new$/);
|
||||
|
||||
const isTraceDetailsView = (): boolean =>
|
||||
isPathMatch(/^\/trace\/[a-zA-Z0-9]+(\?.*)?$/);
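The hunk above replaces the per-route regex closures with a shared `isPathMatch` predicate. A minimal standalone sketch of the same pattern (the `pathname` parameter is made explicit here; in the component it comes from `useLocation`):

```typescript
// Shared predicate: test a pathname against a route pattern.
const isPathMatch = (pathname: string, regex: RegExp): boolean =>
  regex.test(pathname);

// Each route check collapses to a one-liner built on the predicate.
const isDashboardView = (pathname: string): boolean =>
  isPathMatch(pathname, /^\/dashboard\/[a-zA-Z0-9_-]+$/);

const isTraceDetailsView = (pathname: string): boolean =>
  isPathMatch(pathname, /^\/trace\/[a-zA-Z0-9]+(\?.*)?$/);

// isDashboardView('/dashboard/abc-123') === true
// isTraceDetailsView('/trace/abc123?spanId=xyz') === true
```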

useEffect(() => {
if (isDarkMode) {
@@ -279,23 +253,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
}
}, [isDarkMode]);

const isSideNavCollapsed = getLocalStorageKey(IS_SIDEBAR_COLLAPSED);

/**
* Note: Right now we don't have a page-level method to pass the sidebar collapse state.
* Since the use case for overriding is not widely needed, we are setting it here
* so that the workspace locked page will have an expanded sidebar regardless of how users
* have set it or what is stored in localStorage. This will not affect the localStorage config.
*/
const isWorkspaceLocked = pathname === ROUTES.WORKSPACE_LOCKED;

return (
<Layout
className={cx(
isDarkMode ? 'darkMode' : 'lightMode',
isSideNavCollapsed ? 'sidebarCollapsed' : '',
)}
>
<Layout className={cx(isDarkMode ? 'darkMode' : 'lightMode')}>
<Helmet>
<title>{pageTitle}</title>
</Helmet>
@@ -321,25 +280,11 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
</div>
)}

<Flex
className={cx(
'app-layout',
isDarkMode ? 'darkMode' : 'lightMode',
!collapsed && !renderFullScreen ? 'docked' : '',
)}
>
<Flex className={cx('app-layout', isDarkMode ? 'darkMode' : 'lightMode')}>
{isToDisplayLayout && !renderFullScreen && (
<SideNav
licenseData={licenseData}
isFetching={isFetching}
onCollapse={onCollapse}
collapsed={isWorkspaceLocked ? false : collapsed}
/>
<SideNav licenseData={licenseData} isFetching={isFetching} />
)}
<div
className={cx('app-content', collapsed ? 'collapsed' : '')}
data-overlayscrollbars-initialize
>
<div className="app-content" data-overlayscrollbars-initialize>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<LayoutContent data-overlayscrollbars-initialize>
<OverlayScrollbar>
@@ -356,6 +301,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
isMessagingQueues()
? 0
: '0 1rem',

...(isTraceDetailsView() ? { marginRight: 0 } : {}),
}}
>
{isToDisplayLayout && !renderFullScreen && <TopNav />}

@@ -50,6 +50,13 @@
align-items: center;
}
}

.billing-update-note {
text-align: left;
font-size: 13px;
color: var(--bg-vanilla-200);
margin-top: 16px;
}
}

.ant-skeleton.ant-skeleton-element.ant-skeleton-active {
@@ -75,5 +82,9 @@
}
}
}

.billing-update-note {
color: var(--bg-ink-200);
}
}
}

@@ -348,7 +348,12 @@ export default function BillingContainer(): JSX.Element {
const BillingUsageGraphCallback = useCallback(
() =>
!isLoading && !isFetchingBillingData ? (
<BillingUsageGraph data={apiResponse} billAmount={billAmount} />
<>
<BillingUsageGraph data={apiResponse} billAmount={billAmount} />
<div className="billing-update-note">
Note: Billing metrics are updated once every 24 hours.
</div>
</>
) : (
<Card className="empty-graph-card" bordered={false}>
<Spinner size="large" tip="Loading..." height="35vh" />

@@ -58,6 +58,21 @@ const calculateStartEndTime = (

export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
const { data, billAmount } = props;
// Added to fix an issue where a breakdown with a single day of data caused the bars to spread across multiple days
data?.details?.breakdown?.forEach((breakdown: any) => {
if (breakdown?.dayWiseBreakdown?.breakdown?.length === 1) {
const currentDay = breakdown.dayWiseBreakdown.breakdown[0];
const nextDay = {
...currentDay,
timestamp: currentDay.timestamp + 86400,
count: 0,
size: 0,
quantity: 0,
total: 0,
};
breakdown.dayWiseBreakdown.breakdown.push(nextDay);
}
});
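The `forEach` above mutates the API response in place: a one-entry day-wise breakdown gets a zeroed entry for the following day (timestamps are unix seconds, so the next day is +86400), keeping the chart's one-day bar width. The same padding as a standalone, non-mutating helper; the simplified `DayEntry` shape is an assumption for illustration:

```typescript
interface DayEntry {
  timestamp: number; // unix seconds
  count: number;
  size: number;
  quantity: number;
  total: number;
}

const SECONDS_PER_DAY = 86400;

// A single-day breakdown gets a zeroed entry for the following day so
// the chart keeps a one-day bar width instead of stretching one bar.
function padSingleDayBreakdown(breakdown: DayEntry[]): DayEntry[] {
  if (breakdown.length !== 1) {
    return breakdown;
  }
  const [day] = breakdown;
  return [
    day,
    {
      ...day,
      timestamp: day.timestamp + SECONDS_PER_DAY,
      count: 0,
      size: 0,
      quantity: 0,
      total: 0,
    },
  ];
}
```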
const graphCompatibleData = useMemo(
() => convertDataToMetricRangePayload(data),
[data],

@@ -55,6 +55,7 @@ function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {
onClick={(): void => {
onSelect(option.selection);
}}
data-testid={`alert-type-card-${option.selection}`}
>
{option.description}{' '}
<Typography.Link

@@ -65,7 +65,7 @@ export const logAlertDefaults: AlertDef = {
chQueries: {
A: {
name: 'A',
query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs_v2 \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
legend: '',
disabled: false,
},
@@ -95,7 +95,7 @@ export const traceAlertDefaults: AlertDef = {
chQueries: {
A: {
name: 'A',
query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\tstringTagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE stringTagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
legend: '',
disabled: false,
},

@@ -24,6 +24,9 @@ import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import ExportPanelContainer from 'container/ExportPanel/ExportPanelContainer';
import { useOptionsMenu } from 'container/OptionsMenu';
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
import { OptionsQuery } from 'container/OptionsMenu/types';
import { useGetSearchQueryParam } from 'hooks/queryBuilder/useGetSearchQueryParam';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useGetAllViews } from 'hooks/saveViews/useGetAllViews';
@@ -34,7 +37,7 @@ import useErrorNotification from 'hooks/useErrorNotification';
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
import { useNotifications } from 'hooks/useNotifications';
import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery';
import { cloneDeep } from 'lodash-es';
import { cloneDeep, isEqual } from 'lodash-es';
import {
Check,
ConciergeBell,
@@ -58,7 +61,9 @@ import { useSelector } from 'react-redux';
import { useHistory } from 'react-router-dom';
import { AppState } from 'store/reducers';
import { Dashboard } from 'types/api/dashboard/getAll';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { ViewProps } from 'types/api/saveViews/types';
import { DataSource, StringOperators } from 'types/common/queryBuilder';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
@@ -252,6 +257,46 @@ function ExplorerOptions({

const { handleExplorerTabChange } = useHandleExplorerTabChange();

const { options, handleOptionsChange } = useOptionsMenu({
storageKey: LOCALSTORAGE.TRACES_LIST_OPTIONS,
dataSource: DataSource.TRACES,
aggregateOperator: StringOperators.NOOP,
});

type ExtraData = {
selectColumns?: BaseAutocompleteData[];
};

const updateOrRestoreSelectColumns = (
key: string,
allViewsData: ViewProps[] | undefined,
options: OptionsQuery,
handleOptionsChange: (newQueryData: OptionsQuery) => void,
): void => {
const currentViewDetails = getViewDetailsUsingViewKey(key, allViewsData);
if (!currentViewDetails) {
return;
}

let extraData: ExtraData = {};
try {
extraData = JSON.parse(currentViewDetails?.extraData ?? '{}') as ExtraData;
} catch (error) {
console.error('Error parsing extraData:', error);
}

if (extraData.selectColumns?.length) {
handleOptionsChange({
...options,
selectColumns: extraData.selectColumns,
});
} else if (!isEqual(defaultTraceSelectedColumns, options.selectColumns)) {
handleOptionsChange({
...options,
selectColumns: defaultTraceSelectedColumns,
});
}
};
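`updateOrRestoreSelectColumns` hinges on a defensive JSON parse: a view saved before this change has no `selectColumns` in its `extraData`, so the function must fall back to the trace defaults. A small sketch of that parse-with-fallback in isolation (types simplified):

```typescript
// Parse an optional JSON blob without letting a malformed payload throw.
function parseExtraData<T>(raw: string | undefined, fallback: T): T {
  try {
    return { ...fallback, ...(JSON.parse(raw ?? '{}') as Partial<T>) };
  } catch (error) {
    console.error('Error parsing extraData:', error);
    return fallback;
  }
}

// Old views ('{"color":"#fff"}') yield no columns, so defaults are
// restored; newer views carry their own selectColumns and win.
const extra = parseExtraData<{ selectColumns?: string[] }>(
  '{"color":"#fff"}',
  {},
);
```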
const onMenuItemSelectHandler = useCallback(
({ key }: { key: string }): void => {
const currentViewDetails = getViewDetailsUsingViewKey(
@@ -321,6 +366,13 @@ function ExplorerOptions({

updatePreservedViewInLocalStorage(option);

updateOrRestoreSelectColumns(
option.key,
viewsData?.data?.data,
options,
handleOptionsChange,
);

if (ref.current) {
ref.current.blur();
}
@@ -360,14 +412,20 @@ function ExplorerOptions({
viewName: newViewName || '',
compositeQuery,
sourcePage: sourcepage,
extraData: JSON.stringify({ color }),
extraData: JSON.stringify({
color,
selectColumns: options.selectColumns,
}),
});

const onSaveHandler = (): void => {
saveNewViewHandler({
compositeQuery,
handlePopOverClose: hideSaveViewModal,
extraData: JSON.stringify({ color }),
extraData: JSON.stringify({
color,
selectColumns: options.selectColumns,
}),
notifications,
panelType: panelType || PANEL_TYPES.LIST,
redirectWithQueryBuilderData,

@@ -189,6 +189,7 @@ function BasicInfo({
checked={shouldBroadCastToAllChannels}
onChange={handleBroadcastToAllChannels}
disabled={noChannels || !!channels.loading}
data-testid="alert-broadcast-to-all-channels"
/>
</Tooltip>
</FormItemMedium>

@@ -63,6 +63,7 @@ function ChannelSelect({
mode="multiple"
style={{ width: '100%' }}
placeholder={t('placeholder_channel_select')}
data-testid="alert-channel-select"
value={currentValue}
onChange={(value): void => {
handleChange(value as string[]);

@@ -99,7 +99,7 @@ function QuerySection({
{
label: (
<Tooltip title="Query Builder">
<Button className="nav-btns">
<Button className="nav-btns" data-testid="query-builder-tab">
<Atom size={14} />
</Button>
</Tooltip>

@@ -103,6 +103,7 @@ function RuleOptions({
<Select.Option value="2">{t('option_allthetimes')}</Select.Option>
<Select.Option value="3">{t('option_onaverage')}</Select.Option>
<Select.Option value="4">{t('option_intotal')}</Select.Option>
<Select.Option value="5">{t('option_last')}</Select.Option>
</InlineSelect>
);

@@ -322,6 +323,45 @@ function RuleOptions({
<Typography.Text>{t('text_for')}</Typography.Text>
</Space>
</VerticalLine>

<VerticalLine>
<Space direction="horizontal" align="center">
<Form.Item noStyle name={['condition', 'requireMinPoints']}>
<Checkbox
checked={alertDef?.condition?.requireMinPoints}
onChange={(e): void => {
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
requireMinPoints: e.target.checked,
},
});
}}
/>
</Form.Item>
<Typography.Text>{t('text_require_min_points')}</Typography.Text>

<Form.Item noStyle name={['condition', 'requiredNumPoints']}>
<InputNumber
min={1}
value={alertDef?.condition?.requiredNumPoints}
onChange={(value): void => {
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
requiredNumPoints: Number(value) || 0,
},
});
}}
type="number"
onWheel={(e): void => e.currentTarget.blur()}
/>
</Form.Item>
<Typography.Text>{t('text_num_points')}</Typography.Text>
</Space>
</VerticalLine>
</Space>
</Collapse.Panel>
</Collapse>
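The two new form fields feed `condition.requireMinPoints` and `condition.requiredNumPoints`. An illustrative reading of what those fields mean for evaluation; the actual enforcement happens server-side in the rule engine, so this helper is an assumption, not SigNoz's implementation:

```typescript
interface AlertCondition {
  requireMinPoints?: boolean;
  requiredNumPoints?: number;
}

// Skip evaluation when fewer datapoints arrived than the rule requires
// (illustrative only; the rule engine enforces this server-side).
function hasEnoughPoints(
  condition: AlertCondition,
  receivedPoints: number,
): boolean {
  if (!condition.requireMinPoints) {
    return true;
  }
  return receivedPoints >= (condition.requiredNumPoints ?? 0);
}

// hasEnoughPoints({ requireMinPoints: true, requiredNumPoints: 5 }, 3) === false
```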

@@ -370,7 +370,10 @@ function FormAlertRules({
});

// invalidate rule in cache
ruleCache.invalidateQueries([REACT_QUERY_KEY.ALERT_RULE_DETAILS, ruleId]);
ruleCache.invalidateQueries([
REACT_QUERY_KEY.ALERT_RULE_DETAILS,
`${ruleId}`,
]);
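The coercion to `${ruleId}` matters because react-query compares query keys by deep equality, so a key registered with a string id is not invalidated by a numeric id. A sketch of the pitfall (key name reused from above, client setup illustrative):

```typescript
import { QueryClient } from 'react-query';

const queryClient = new QueryClient();
const ruleId = 42;

// Suppose the details query was registered with a string id taken from
// the URL: useQuery(['ALERT_RULE_DETAILS', '42'], fetchRule);

// No match: 42 !== '42' under deep key equality, nothing is invalidated.
queryClient.invalidateQueries(['ALERT_RULE_DETAILS', ruleId]);

// Match: coerce to the same type that was used at registration.
queryClient.invalidateQueries(['ALERT_RULE_DETAILS', `${ruleId}`]);
```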

// eslint-disable-next-line sonarjs/no-identical-functions
setTimeout(() => {

@@ -15,6 +15,13 @@
box-sizing: border-box;
margin: 16px 0;
border-radius: 3px;

.global-search {
.ant-input-group-addon {
border: none;
background-color: var(--bg-ink-300);
}
}
}

.height-widget {
@@ -55,3 +62,15 @@
}
}
}

.lightMode {
.full-view-container {
.graph-container {
.global-search {
.ant-input-group-addon {
background-color: var(--bg-vanilla-200);
}
}
}
}
}

@@ -1,7 +1,11 @@
import './WidgetFullView.styles.scss';

import { LoadingOutlined, SyncOutlined } from '@ant-design/icons';
import { Button, Spin } from 'antd';
import {
LoadingOutlined,
SearchOutlined,
SyncOutlined,
} from '@ant-design/icons';
import { Button, Input, Spin } from 'antd';
import cx from 'classnames';
import { ToggleGraphProps } from 'components/Graph/types';
import Spinner from 'components/Spinner';
@@ -140,7 +144,7 @@ function FullView({

const [graphsVisibilityStates, setGraphsVisibilityStates] = useState<
boolean[]
>(Array(response.data?.payload.data.result.length).fill(true));
>(Array(response.data?.payload?.data?.result?.length).fill(true));

useEffect(() => {
const {
@@ -172,6 +176,10 @@ function FullView({

const isListView = widget.panelTypes === PANEL_TYPES.LIST;

const isTablePanel = widget.panelTypes === PANEL_TYPES.TABLE;

const [searchTerm, setSearchTerm] = useState<string>('');

if (response.isLoading && widget.panelTypes !== PANEL_TYPES.LIST) {
return <Spinner height="100%" size="large" tip="Loading..." />;
}
@@ -216,6 +224,18 @@ function FullView({
}}
isGraphLegendToggleAvailable={canModifyChart}
>
{isTablePanel && (
<Input
addonBefore={<SearchOutlined size={14} />}
className="global-search"
placeholder="Search..."
allowClear
key={widget.id}
onChange={(e): void => {
setSearchTerm(e.target.value || '');
}}
/>
)}
<PanelWrapper
queryResponse={response}
widget={widget}
@@ -226,6 +246,7 @@ function FullView({
graphVisibility={graphsVisibilityStates}
onDragSelect={onDragSelect}
tableProcessedDataRef={tableProcessedDataRef}
searchTerm={searchTerm}
/>
</GraphContainer>
</div>

@@ -234,6 +234,8 @@ function WidgetGraphComponent({
});
};

const [searchTerm, setSearchTerm] = useState<string>('');

const loadingState =
(queryResponse.isLoading || queryResponse.status === 'idle') &&
widget.panelTypes !== PANEL_TYPES.LIST;
@@ -317,6 +319,7 @@ function WidgetGraphComponent({
isWarning={isWarning}
isFetchingResponse={isFetchingResponse}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={setSearchTerm}
/>
</div>
{queryResponse.isLoading && widget.panelTypes !== PANEL_TYPES.LIST && (
@@ -337,6 +340,7 @@ function WidgetGraphComponent({
onDragSelect={onDragSelect}
tableProcessedDataRef={tableProcessedDataRef}
customTooltipElement={customTooltipElement}
searchTerm={searchTerm}
/>
</div>
)}

@@ -11,6 +11,7 @@ import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { memo, useEffect, useRef, useState } from 'react';
import { useQueryClient } from 'react-query';
import { useDispatch, useSelector } from 'react-redux';
import { UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
@@ -48,6 +49,7 @@ function GridCardGraph({
AppState,
GlobalReducer
>((state) => state.globalTime);
const queryClient = useQueryClient();

const handleBackNavigation = (): void => {
const searchParams = new URLSearchParams(window.location.search);
@@ -136,6 +138,25 @@ function GridCardGraph({
};
});

// TODO [vikrantgupta25] remove this useEffect during the refactor, as it is prone to race conditions;
// it is added to handle the async communication between VariableItem.tsx and GridCard.tsx
useEffect(() => {
if (variablesToGetUpdated.length > 0) {
queryClient.cancelQueries([
maxTime,
minTime,
globalSelectedInterval,
variables,
widget?.query,
widget?.panelTypes,
widget.timePreferance,
widget.fillSpans,
requestData,
]);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variablesToGetUpdated]);
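`cancelQueries` aborts any in-flight request whose key still embeds the stale variable values, so a late response cannot overwrite the panel after the variables resolve. The same idea with a simplified key (names illustrative):

```typescript
import { QueryClient } from 'react-query';

const queryClient = new QueryClient();

// While dashboard variables are still resolving, cancel the request that
// was fired with the previous values; its key no longer matches what the
// panel should render once the new variables land.
async function cancelStalePanelQuery(
  panelId: string,
  staleVariables: Record<string, unknown>,
): Promise<void> {
  await queryClient.cancelQueries(['panel', panelId, staleVariables]);
}
```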

useEffect(() => {
if (!isEqual(updatedQuery, requestData.query)) {
setRequestData((prev) => ({

@@ -2,7 +2,7 @@
display: flex;
justify-content: space-between;
align-items: center;
height: 30px;
height: 36px;
width: 100%;
padding: 0.5rem;
box-sizing: border-box;
@@ -10,6 +10,22 @@
font-weight: 600;

cursor: move;

.ant-input-group-addon {
border: none;
background-color: var(--bg-ink-500);
}
.search-header-icons {
cursor: pointer;
}
}

.widget-header-title-container {
display: flex;
align-items: center;
gap: 8px;
overflow: hidden;
text-overflow: ellipsis;
}

.widget-header-title {
@@ -19,6 +35,7 @@
.widget-header-actions {
display: flex;
align-items: center;
gap: 8px;
}
.widget-header-more-options {
visibility: hidden;
@@ -30,6 +47,10 @@
padding: 8px;
}

.widget-header-more-options-visible {
visibility: visible;
}

.widget-header-hover {
visibility: visible;
}
@@ -37,3 +58,25 @@
.widget-api-actions {
padding-right: 0.25rem;
}

.lightMode {
.widget-header-container {
.ant-input-group-addon {
background-color: inherit;
}
}
}

.long-tooltip {
.ant-tooltip-content {
max-height: 500px;
overflow: auto;
}
&.ant-tooltip {
max-width: 500px;
}
}

.info-tooltip {
cursor: pointer;
}

@@ -6,12 +6,13 @@ import {
CopyOutlined,
DeleteOutlined,
EditFilled,
ExclamationCircleOutlined,
FullscreenOutlined,
InfoCircleOutlined,
MoreOutlined,
SearchOutlined,
WarningOutlined,
} from '@ant-design/icons';
import { Dropdown, MenuProps, Tooltip, Typography } from 'antd';
import { Dropdown, Input, MenuProps, Tooltip, Typography } from 'antd';
import Spinner from 'components/Spinner';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -20,8 +21,9 @@ import useComponentPermission from 'hooks/useComponentPermission';
import history from 'lib/history';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { isEmpty } from 'lodash-es';
import { CircleX, X } from 'lucide-react';
import { unparse } from 'papaparse';
import { ReactNode, useCallback, useMemo } from 'react';
import { ReactNode, useCallback, useMemo, useState } from 'react';
import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -51,6 +53,7 @@ interface IWidgetHeaderProps {
isWarning: boolean;
isFetchingResponse: boolean;
tableProcessedDataRef: React.MutableRefObject<RowData[]>;
setSearchTerm: React.Dispatch<React.SetStateAction<string>>;
}

function WidgetHeader({
@@ -67,6 +70,7 @@ function WidgetHeader({
isWarning,
isFetchingResponse,
tableProcessedDataRef,
setSearchTerm,
}: IWidgetHeaderProps): JSX.Element | null {
const onEditHandler = useCallback((): void => {
const widgetId = widget.id;
@@ -187,6 +191,10 @@ function WidgetHeader({

const updatedMenuList = useMemo(() => generateMenuList(actions), [actions]);

const [showGlobalSearch, setShowGlobalSearch] = useState(false);

const globalSearchAvailable = widget.panelTypes === PANEL_TYPES.TABLE;

const menu = useMemo(
() => ({
items: updatedMenuList,
@@ -201,46 +209,92 @@ function WidgetHeader({

return (
<div className="widget-header-container">
<Typography.Text
ellipsis
data-testid={title}
className="widget-header-title"
>
{title}
</Typography.Text>
<div className="widget-header-actions">
<div className="widget-api-actions">{threshold}</div>
{isFetchingResponse && !queryResponse.isError && (
<Spinner style={{ paddingRight: '0.25rem' }} />
)}
{queryResponse.isError && (
<Tooltip
title={errorMessage}
placement={errorTooltipPosition}
className="widget-api-actions"
>
<ExclamationCircleOutlined />
</Tooltip>
)}
{showGlobalSearch ? (
<Input
addonBefore={<SearchOutlined size={14} />}
placeholder="Search..."
bordered={false}
data-testid="widget-header-search-input"
autoFocus
addonAfter={
<X
size={14}
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
setShowGlobalSearch(false);
}}
className="search-header-icons"
/>
}
key={widget.id}
onChange={(e): void => {
setSearchTerm(e.target.value || '');
}}
/>
) : (
<>
<div className="widget-header-title-container">
<Typography.Text
ellipsis
data-testid={title}
className="widget-header-title"
>
{title}
</Typography.Text>
{widget.description && (
<Tooltip
title={widget.description}
overlayClassName="long-tooltip"
className="info-tooltip"
placement="right"
>
<InfoCircleOutlined />
</Tooltip>
)}
</div>
<div className="widget-header-actions">
<div className="widget-api-actions">{threshold}</div>
{isFetchingResponse && !queryResponse.isError && (
<Spinner style={{ paddingRight: '0.25rem' }} />
)}
{queryResponse.isError && (
<Tooltip
title={errorMessage}
placement={errorTooltipPosition}
className="widget-api-actions"
>
<CircleX size={20} />
</Tooltip>
)}

{isWarning && (
<Tooltip
title={WARNING_MESSAGE}
placement={errorTooltipPosition}
className="widget-api-actions"
>
<WarningOutlined />
</Tooltip>
)}
<Dropdown menu={menu} trigger={['hover']} placement="bottomRight">
<MoreOutlined
data-testid="widget-header-options"
className={`widget-header-more-options ${
parentHover ? 'widget-header-hover' : ''
}`}
/>
</Dropdown>
</div>
{isWarning && (
<Tooltip
title={WARNING_MESSAGE}
placement={errorTooltipPosition}
className="widget-api-actions"
>
<WarningOutlined />
</Tooltip>
)}
{globalSearchAvailable && (
<SearchOutlined
className="search-header-icons"
onClick={(): void => setShowGlobalSearch(true)}
data-testid="widget-header-search"
/>
)}
<Dropdown menu={menu} trigger={['hover']} placement="bottomRight">
<MoreOutlined
data-testid="widget-header-options"
className={`widget-header-more-options ${
parentHover ? 'widget-header-hover' : ''
} ${globalSearchAvailable ? 'widget-header-more-options-visible' : ''}`}
/>
</Dropdown>
</div>
</>
)}
</div>
);
}

@@ -33,7 +33,14 @@ export const Card = styled(CardComponent)<CardProps>`
}

.ant-card-body {
height: calc(100% - 30px);
${({ $panelType }): StyledCSS =>
$panelType === PANEL_TYPES.TABLE
? css`
height: 100%;
`
: css`
height: calc(100% - 30px);
`}
padding: 0;
}
`;

@@ -0,0 +1,5 @@
.long-text-tooltip {
max-width: 500px;
max-height: 500px;
overflow-y: auto;
}
@@ -1,10 +1,13 @@
import './GridTableComponent.styles.scss';

import { ExclamationCircleFilled } from '@ant-design/icons';
import { Space, Tooltip } from 'antd';
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import { Events } from 'constants/events';
import { QueryTable } from 'container/QueryTable';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep, get, isEmpty, set } from 'lodash-es';
import { cloneDeep, get, isEmpty } from 'lodash-es';
import LineClampedText from 'periscope/components/LineClampedText/LineClampedText';
import { memo, ReactNode, useCallback, useEffect, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { eventEmitter } from 'utils/getEventEmitter';
@@ -38,15 +41,13 @@ function GridTableComponent({
const createDataInCorrectFormat = useCallback(
(dataSource: RowData[]): RowData[] =>
dataSource.map((d) => {
const finalObject = {};
const finalObject: Record<string, number | string> = {};

// we use the order of the columns here to have similar download as the user view
// the [] access for the object is used because the titles can contain dot(.) as well
columns.forEach((k) => {
set(
finalObject,
get(k, 'title', '') as string,
get(d, get(k, 'dataIndex', ''), 'n/a'),
);
finalObject[`${get(k, 'title', '')}`] =
d[`${get(k, 'dataIndex', '')}`] || 'n/a';
});
return finalObject as RowData;
}),
@@ -86,6 +87,7 @@ function GridTableComponent({
applyColumnUnits,
originalDataSource,
]);

useEffect(() => {
if (tableProcessedDataRef) {
// eslint-disable-next-line no-param-reassign
@@ -117,7 +119,16 @@ function GridTableComponent({
}
>
<Space>
{text}
<LineClampedText
text={text}
lines={3}
tooltipProps={{
placement: 'right',
autoAdjustOverflow: true,
overlayClassName: 'long-text-tooltip',
}}
/>

{hasMultipleMatches && (
<Tooltip title={t('this_value_satisfies_multiple_thresholds')}>
<ExclamationCircleFilled className="value-graph-icon" />
@@ -128,7 +139,19 @@ function GridTableComponent({
);
}
}
return <div>{text}</div>;
return (
<div>
<LineClampedText
text={text}
lines={3}
tooltipProps={{
placement: 'right',
autoAdjustOverflow: true,
overlayClassName: 'long-text-tooltip',
}}
/>
</div>
);
},
}));

@@ -14,6 +14,7 @@ export type GridTableComponentProps = {
columnUnits?: ColumnUnit;
tableProcessedDataRef?: React.MutableRefObject<RowData[]>;
sticky?: TableProps<RowData>['sticky'];
searchTerm?: string;
} & Pick<LogsExplorerTableProps, 'data'> &
Omit<TableProps<RowData>, 'columns' | 'dataSource'>;

@@ -1,6 +1,6 @@
/* eslint-disable react/display-name */
import { PlusOutlined } from '@ant-design/icons';
import { Input, Typography } from 'antd';
import { Flex, Input, Typography } from 'antd';
import type { ColumnsType } from 'antd/es/table/interface';
import saveAlertApi from 'api/alerts/save';
import logEvent from 'api/common/logEvent';
@@ -34,12 +34,7 @@ import { GettableAlert } from 'types/api/alerts/get';
import AppReducer from 'types/reducer/app';

import DeleteAlert from './DeleteAlert';
import {
Button,
ButtonContainer,
ColumnButton,
SearchContainer,
} from './styles';
import { Button, ColumnButton, SearchContainer } from './styles';
import Status from './TableComponents/Status';
import ToggleAlertState from './ToggleAlertState';
import { alertActionLogEvent, filterAlerts } from './utils';
@@ -373,21 +368,25 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
onChange={handleSearch}
defaultValue={searchString}
/>
<ButtonContainer>
<Flex gap={12}>
{addNewAlert && (
<Button
type="primary"
onClick={onClickNewAlertHandler}
icon={<PlusOutlined />}
>
New Alert
</Button>
)}
<TextToolTip
{...{
text: `More details on how to create alerts`,
url:
'https://signoz.io/docs/alerts/?utm_source=product&utm_medium=list-alerts',
urlText: 'Learn More',
}}
/>

{addNewAlert && (
<Button onClick={onClickNewAlertHandler} icon={<PlusOutlined />}>
New Alert
</Button>
)}
</ButtonContainer>
</Flex>
</SearchContainer>
<DynamicColumnTable
tablesource={TableDataSource.Alert}

@@ -9,12 +9,6 @@ export const SearchContainer = styled.div`
gap: 2rem;
}
`;
export const ButtonContainer = styled.div`
&&& {
display: flex;
align-items: center;
}
`;

export const Button = styled(ButtonComponent)`
&&& {

@@ -64,9 +64,9 @@

.dashboard-icon {
display: inline-block;
margin-top: 4px;
margin-right: 4px;
line-height: 20px;
height: 14px;
width: 14px;
}

.dot {
@@ -75,6 +75,12 @@
border-radius: 50%;
}

.title-link {
display: flex;
align-items: center;
gap: 8px;
}

.title {
color: var(--bg-vanilla-100);
font-size: var(--font-size-sm);

@@ -91,6 +91,7 @@ function DashboardsList(): JSX.Element {
const {
data: dashboardListResponse,
isLoading: isDashboardListLoading,
isRefetching: isDashboardListRefetching,
error: dashboardFetchError,
refetch: refetchDashboardList,
} = useGetAllDashboard();
@@ -458,17 +459,19 @@ function DashboardsList(): JSX.Element {
placement="left"
overlayClassName="title-toolip"
>
<Typography.Text data-testid={`dashboard-title-${index}`}>
<Link to={getLink()} className="title">
<img
src={dashboard?.image || Base64Icons[0]}
style={{ height: '14px', width: '14px' }}
alt="dashboard-image"
className="dashboard-icon"
/>
<Link to={getLink()} className="title-link">
<img
src={dashboard?.image || Base64Icons[0]}
alt="dashboard-image"
className="dashboard-icon"
/>
<Typography.Text
data-testid={`dashboard-title-${index}`}
className="title"
>
{dashboard.name}
</Link>
</Typography.Text>
</Typography.Text>
</Link>
</Tooltip>
</div>

@@ -703,7 +706,9 @@ function DashboardsList(): JSX.Element {
</Flex>
</div>

{isDashboardListLoading || isFilteringDashboards ? (
{isDashboardListLoading ||
isFilteringDashboards ||
isDashboardListRefetching ? (
<div className="loading-dashboard-details">
<Skeleton.Input active size="large" className="skeleton-1" />
<Skeleton.Input active size="large" className="skeleton-1" />
@@ -902,7 +907,11 @@ function DashboardsList(): JSX.Element {
columns={columns}
dataSource={data}
showSorterTooltip
loading={isDashboardListLoading || isFilteringDashboards}
loading={
isDashboardListLoading ||
isFilteringDashboards ||
isDashboardListRefetching
}
showHeader={false}
pagination={paginationConfig}
/>

@@ -0,0 +1,34 @@
.empty-container {
display: flex;
justify-content: center;
align-items: center;
height: 100%;
}

.infra-metrics-container {
.views-tabs {
margin-bottom: 1rem;
}
}

.infra-metrics-card {
margin: 1rem 0;
height: 300px;
padding: 10px;

.ant-card-body {
padding: 0;
}

.chart-container {
width: 100%;
height: 100%;
}

.no-data-container {
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
}
}
@@ -0,0 +1,94 @@
import './InfraMetrics.styles.scss';

import { Empty, Radio } from 'antd';
import { RadioChangeEvent } from 'antd/lib';
import { History, Table } from 'lucide-react';
import { useState } from 'react';

import { VIEW_TYPES } from './constants';
import NodeMetrics from './NodeMetrics';
import PodMetrics from './PodMetrics';

interface MetricsDataProps {
podName: string;
nodeName: string;
hostName: string;
clusterName: string;
logLineTimestamp: string;
}

function InfraMetrics({
podName,
nodeName,
hostName,
clusterName,
logLineTimestamp,
}: MetricsDataProps): JSX.Element {
const [selectedView, setSelectedView] = useState<string>(() =>
podName ? VIEW_TYPES.POD : VIEW_TYPES.NODE,
);

const handleModeChange = (e: RadioChangeEvent): void => {
setSelectedView(e.target.value);
};

if (!podName && !nodeName && !hostName) {
return (
<div className="empty-container">
<Empty
image={Empty.PRESENTED_IMAGE_SIMPLE}
description="No data available. Please select a valid log line containing pod, node, or host attributes to view metrics."
/>
</div>
);
}

return (
<div className="infra-metrics-container">
<Radio.Group
className="views-tabs"
onChange={handleModeChange}
value={selectedView}
>
<Radio.Button
className={selectedView === VIEW_TYPES.NODE ? 'selected_view tab' : 'tab'}
value={VIEW_TYPES.NODE}
>
<div className="view-title">
<Table size={14} />
Node
</div>
</Radio.Button>
{podName && (
<Radio.Button
className={selectedView === VIEW_TYPES.POD ? 'selected_view tab' : 'tab'}
value={VIEW_TYPES.POD}
>
<div className="view-title">
<History size={14} />
Pod
</div>
</Radio.Button>
)}
</Radio.Group>
{/* TODO(Rahul): Make a common config-driven component for this and the other infra metrics components */}
{selectedView === VIEW_TYPES.NODE && (
<NodeMetrics
nodeName={nodeName}
clusterName={clusterName}
hostName={hostName}
logLineTimestamp={logLineTimestamp}
/>
)}
{selectedView === VIEW_TYPES.POD && podName && (
<PodMetrics
podName={podName}
clusterName={clusterName}
logLineTimestamp={logLineTimestamp}
/>
)}
</div>
);
}

export default InfraMetrics;
@@ -0,0 +1,140 @@
import { Card, Col, Row, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import Uplot from 'components/Uplot';
import { ENTITY_VERSION_V4 } from 'constants/app';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useMemo, useRef } from 'react';
import { useQueries, UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

import {
getHostQueryPayload,
getNodeQueryPayload,
hostWidgetInfo,
nodeWidgetInfo,
} from './constants';

function NodeMetrics({
nodeName,
clusterName,
hostName,
logLineTimestamp,
}: {
nodeName: string;
clusterName: string;
hostName: string;
logLineTimestamp: string;
}): JSX.Element {
const { start, end, verticalLineTimestamp } = useMemo(() => {
const logTimestamp = dayjs(logLineTimestamp);
const now = dayjs();
const startTime = logTimestamp.subtract(3, 'hour');

const endTime = logTimestamp.add(3, 'hour').isBefore(now)
? logTimestamp.add(3, 'hour')
: now;

return {
start: startTime.unix(),
end: endTime.unix(),
verticalLineTimestamp: logTimestamp.unix(),
};
}, [logLineTimestamp]);

const queryPayloads = useMemo(() => {
if (nodeName) {
return getNodeQueryPayload(clusterName, nodeName, start, end);
}
return getHostQueryPayload(hostName, start, end);
}, [nodeName, hostName, clusterName, start, end]);

const widgetInfo = nodeName ? nodeWidgetInfo : hostWidgetInfo;
const queries = useQueries(
queryPayloads.map((payload) => ({
queryKey: ['metrics', payload, ENTITY_VERSION_V4, 'NODE'],
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
enabled: !!payload,
})),
);

const isDarkMode = useIsDarkMode();
const graphRef = useRef<HTMLDivElement>(null);
const dimensions = useResizeObserver(graphRef);

const chartData = useMemo(
() => queries.map(({ data }) => getUPlotChartData(data?.payload)),
[queries],
);

const options = useMemo(
() =>
queries.map(({ data }, idx) =>
getUPlotChartOptions({
apiResponse: data?.payload,
isDarkMode,
dimensions,
yAxisUnit: widgetInfo[idx].yAxisUnit,
softMax: null,
softMin: null,
minTimeScale: start,
maxTimeScale: end,
verticalLineTimestamp,
}),
),
[
queries,
isDarkMode,
dimensions,
widgetInfo,
start,
verticalLineTimestamp,
end,
],
);

const renderCardContent = (
query: UseQueryResult<SuccessResponse<MetricRangePayloadProps>, unknown>,
idx: number,
): JSX.Element => {
if (query.isLoading) {
return <Skeleton />;
}

if (query.error) {
const errorMessage =
(query.error as Error)?.message || 'Something went wrong';
return <div>{errorMessage}</div>;
}
return (
<div
className={cx('chart-container', {
'no-data-container':
!query.isLoading && !query?.data?.payload?.data?.result?.length,
})}
>
<Uplot options={options[idx]} data={chartData[idx]} />
</div>
);
};
return (
<Row gutter={24}>
{queries.map((query, idx) => (
<Col span={12} key={widgetInfo[idx].title}>
<Typography.Text>{widgetInfo[idx].title}</Typography.Text>
<Card bordered className="infra-metrics-card" ref={graphRef}>
{renderCardContent(query, idx)}
</Card>
</Col>
))}
</Row>
);
}

export default NodeMetrics;
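`NodeMetrics` and `PodMetrics` share the same time-window derivation: six hours centered on the log line, clamped so the window never reaches past now. Extracted as a standalone helper it reads:

```typescript
import dayjs from 'dayjs';

// Window = [t - 3h, min(t + 3h, now)] in unix seconds, plus the log
// line's own timestamp for the vertical reference line on the charts.
function getMetricsWindow(
  logLineTimestamp: string,
): { start: number; end: number; verticalLineTimestamp: number } {
  const logTimestamp = dayjs(logLineTimestamp);
  const now = dayjs();
  const proposedEnd = logTimestamp.add(3, 'hour');
  return {
    start: logTimestamp.subtract(3, 'hour').unix(),
    end: (proposedEnd.isBefore(now) ? proposedEnd : now).unix(),
    verticalLineTimestamp: logTimestamp.unix(),
  };
}
```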

@@ -0,0 +1,121 @@
import { Card, Col, Row, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import Uplot from 'components/Uplot';
import { ENTITY_VERSION_V4 } from 'constants/app';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useMemo, useRef } from 'react';
import { useQueries, UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

import { getPodQueryPayload, podWidgetInfo } from './constants';

function PodMetrics({
podName,
clusterName,
logLineTimestamp,
}: {
podName: string;
clusterName: string;
logLineTimestamp: string;
}): JSX.Element {
const { start, end, verticalLineTimestamp } = useMemo(() => {
const logTimestamp = dayjs(logLineTimestamp);
const now = dayjs();
const startTime = logTimestamp.subtract(3, 'hour');

const endTime = logTimestamp.add(3, 'hour').isBefore(now)
? logTimestamp.add(3, 'hour')
: now;

return {
start: startTime.unix(),
end: endTime.unix(),
verticalLineTimestamp: logTimestamp.unix(),
};
}, [logLineTimestamp]);
const queryPayloads = useMemo(
() => getPodQueryPayload(clusterName, podName, start, end),
[clusterName, end, podName, start],
);
const queries = useQueries(
queryPayloads.map((payload) => ({
queryKey: ['metrics', payload, ENTITY_VERSION_V4, 'POD'],
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
enabled: !!payload,
})),
);

const isDarkMode = useIsDarkMode();
const graphRef = useRef<HTMLDivElement>(null);
const dimensions = useResizeObserver(graphRef);

const chartData = useMemo(
() => queries.map(({ data }) => getUPlotChartData(data?.payload)),
[queries],
);

const options = useMemo(
() =>
queries.map(({ data }, idx) =>
getUPlotChartOptions({
apiResponse: data?.payload,
isDarkMode,
dimensions,
yAxisUnit: podWidgetInfo[idx].yAxisUnit,
softMax: null,
softMin: null,
minTimeScale: start,
maxTimeScale: end,
verticalLineTimestamp,
}),
),
[queries, isDarkMode, dimensions, start, verticalLineTimestamp, end],
);

const renderCardContent = (
query: UseQueryResult<SuccessResponse<MetricRangePayloadProps>, unknown>,
idx: number,
): JSX.Element => {
if (query.isLoading) {
return <Skeleton />;
}

if (query.error) {
const errorMessage =
(query.error as Error)?.message || 'Something went wrong';
return <div>{errorMessage}</div>;
}
return (
<div
className={cx('chart-container', {
'no-data-container':
!query.isLoading && !query?.data?.payload?.data?.result?.length,
})}
>
<Uplot options={options[idx]} data={chartData[idx]} />
</div>
);
};

return (
<Row gutter={24}>
{queries.map((query, idx) => (
<Col span={12} key={podWidgetInfo[idx].title}>
<Typography.Text>{podWidgetInfo[idx].title}</Typography.Text>
<Card bordered className="infra-metrics-card" ref={graphRef}>
{renderCardContent(query, idx)}
</Card>
</Col>
))}
</Row>
);
}

export default PodMetrics;

frontend/src/container/LogDetailedView/InfraMetrics/constants.ts (new file, 3033 lines): diff suppressed because it is too large.

@@ -122,10 +122,10 @@ function TableView({
fieldValue: string,
) => (): void => {
handleClick(operator, fieldKey, fieldValue);
if (operator === OPERATORS.IN) {
if (operator === OPERATORS['=']) {
setIsFilterInLoading(true);
}
if (operator === OPERATORS.NIN) {
if (operator === OPERATORS['!=']) {
setIsFilterOutLoading(true);
}
};

@@ -139,7 +139,7 @@ export function TableViewActions(
<ArrowDownToDot size={14} style={{ transform: 'rotate(90deg)' }} />
)
}
onClick={onClickHandler(OPERATORS.IN, fieldFilterKey, fieldData.value)}
onClick={onClickHandler(OPERATORS['='], fieldFilterKey, fieldData.value)}
/>
</Tooltip>
<Tooltip title="Filter out value">
@@ -152,7 +152,11 @@
<ArrowUpFromDot size={14} style={{ transform: 'rotate(90deg)' }} />
)
}
onClick={onClickHandler(OPERATORS.NIN, fieldFilterKey, fieldData.value)}
onClick={onClickHandler(
OPERATORS['!='],
fieldFilterKey,
fieldData.value,
)}
/>
</Tooltip>
{!isOldLogsExplorerOrLiveLogsPage && (

@@ -1,6 +1,7 @@
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import ROUTES from 'constants/routes';
import { getOldLogsOperatorFromNew } from 'hooks/logs/useActiveLog';
import { getGeneratedFilterQueryString } from 'lib/getGeneratedFilterQueryString';
import getStep from 'lib/getStep';
import { getIdConditions } from 'pages/Logs/utils';
@@ -57,10 +58,11 @@ function LogDetailedView({

const handleAddToQuery = useCallback(
(fieldKey: string, fieldValue: string, operator: string) => {
const newOperator = getOldLogsOperatorFromNew(operator);
const updatedQueryString = getGeneratedFilterQueryString(
fieldKey,
fieldValue,
operator,
newOperator,
queryString,
);

@@ -71,10 +73,11 @@

const handleClickActionItem = useCallback(
(fieldKey: string, fieldValue: string, operator: string): void => {
const newOperator = getOldLogsOperatorFromNew(operator);
const updatedQueryString = getGeneratedFilterQueryString(
fieldKey,
fieldValue,
operator,
newOperator,
queryString,
);
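Both hunks above route the query-builder operator through `getOldLogsOperatorFromNew` before building the filter string. A plausible sketch of such a mapping, assuming it is a simple lookup; the real table lives in `hooks/logs/useActiveLog` and may differ:

```typescript
// Hypothetical new-to-old operator table; illustration only.
const NEW_TO_OLD_OPERATOR: Record<string, string> = {
  '=': 'IN',
  '!=': 'NIN',
};

function getOldLogsOperatorFromNewSketch(operator: string): string {
  // Fall back to the operator itself when no translation is needed.
  return NEW_TO_OLD_OPERATOR[operator] ?? operator;
}
```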
@@ -1,6 +1,5 @@
import './LogsExplorerQuerySection.styles.scss';

import { FeatureKeys } from 'constants/features';
import {
initialQueriesMap,
OPERATORS,
@@ -9,14 +8,12 @@ import {
import ExplorerOrderBy from 'container/ExplorerOrderBy';
import { QueryBuilder } from 'container/QueryBuilder';
import { OrderByFilterProps } from 'container/QueryBuilder/filters/OrderByFilter/OrderByFilter.interfaces';
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import useFeatureFlags from 'hooks/useFeatureFlag';
import {
prepareQueryWithDefaultTimestamp,
SELECTED_VIEWS,
@@ -89,26 +86,15 @@ function LogExplorerQuerySection({
[handleChangeQueryData],
);

const isSearchV2Enabled =
useFeatureFlags(FeatureKeys.QUERY_BUILDER_SEARCH_V2)?.active || false;

return (
<>
{selectedView === SELECTED_VIEWS.SEARCH && (
<div className="qb-search-view-container">
{isSearchV2Enabled ? (
<QueryBuilderSearchV2
query={query}
onChange={handleChangeTagFilters}
whereClauseConfig={filterConfigs?.filters}
/>
) : (
<QueryBuilderSearch
query={query}
onChange={handleChangeTagFilters}
whereClauseConfig={filterConfigs?.filters}
/>
)}
<QueryBuilderSearchV2
query={query}
onChange={handleChangeTagFilters}
whereClauseConfig={filterConfigs?.filters}
/>
</div>
)}

@@ -3,6 +3,7 @@ import { QueryData } from 'types/api/widgets/getQuery';
export type LogsExplorerChartProps = {
data: QueryData[];
isLoading: boolean;
isLogsExplorerViews?: boolean;
isLabelEnabled?: boolean;
className?: string;
};

@@ -16,12 +16,14 @@ import { UpdateTimeInterval } from 'store/actions';

import { LogsExplorerChartProps } from './LogsExplorerChart.interfaces';
import { CardStyled } from './LogsExplorerChart.styled';
import { getColorsForSeverityLabels } from './utils';

function LogsExplorerChart({
data,
isLoading,
isLabelEnabled = true,
className,
isLogsExplorerViews = false,
}: LogsExplorerChartProps): JSX.Element {
const dispatch = useDispatch();
const urlQuery = useUrlQuery();
@@ -29,15 +31,19 @@ function LogsExplorerChart({
const handleCreateDatasets: Required<GetChartDataProps>['createDataset'] = useCallback(
(element, index, allLabels) => ({
data: element,
backgroundColor: colors[index % colors.length] || themeColors.red,
borderColor: colors[index % colors.length] || themeColors.red,
backgroundColor: isLogsExplorerViews
? getColorsForSeverityLabels(allLabels[index], index)
: colors[index % colors.length] || themeColors.red,
borderColor: isLogsExplorerViews
? getColorsForSeverityLabels(allLabels[index], index)
: colors[index % colors.length] || themeColors.red,
...(isLabelEnabled
? {
label: allLabels[index],
}
: {}),
}),
[isLabelEnabled],
[isLabelEnabled, isLogsExplorerViews],
);

const onDragSelect = useCallback(
@@ -112,6 +118,7 @@ function LogsExplorerChart({
<Graph
name="logsExplorerChart"
data={graphData.data}
isStacked={isLogsExplorerViews}
type="bar"
animate
onDragSelect={onDragSelect}

frontend/src/container/LogsExplorerChart/utils.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
import { Color } from '@signozhq/design-tokens';
import { themeColors } from 'constants/theme';
import { colors } from 'lib/getRandomColor';

export function getColorsForSeverityLabels(
label: string,
index: number,
): string {
const lowerCaseLabel = label.toLowerCase();

if (lowerCaseLabel.includes(`{severity_text="trace"}`)) {
return Color.BG_FOREST_400;
}

if (lowerCaseLabel.includes(`{severity_text="debug"}`)) {
return Color.BG_AQUA_500;
}

if (
lowerCaseLabel.includes(`{severity_text="info"}`) ||
lowerCaseLabel.includes(`{severity_text=""}`)
) {
return Color.BG_ROBIN_500;
}

if (lowerCaseLabel.includes(`{severity_text="warn"}`)) {
return Color.BG_AMBER_500;
}

if (lowerCaseLabel.includes(`{severity_text="error"}`)) {
return Color.BG_CHERRY_500;
}

if (lowerCaseLabel.includes(`{severity_text="fatal"}`)) {
return Color.BG_SAKURA_500;
}

return colors[index % colors.length] || themeColors.red;
}
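The if-chain above could equally be table-driven; a behavior-preserving sketch under the assumption that at most one severity label appears per series label:

```typescript
import { Color } from '@signozhq/design-tokens';

// Ordered severity -> color pairs; '' covers unlabeled series like info.
const SEVERITY_COLORS: Array<[string, string]> = [
  ['trace', Color.BG_FOREST_400],
  ['debug', Color.BG_AQUA_500],
  ['info', Color.BG_ROBIN_500],
  ['', Color.BG_ROBIN_500],
  ['warn', Color.BG_AMBER_500],
  ['error', Color.BG_CHERRY_500],
  ['fatal', Color.BG_SAKURA_500],
];

function severityColor(label: string): string | undefined {
  const lower = label.toLowerCase();
  const match = SEVERITY_COLORS.find(([severity]) =>
    lower.includes(`{severity_text="${severity}"}`),
  );
  return match?.[1]; // undefined -> caller falls back to the palette
}
```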
|
||||
@@ -147,6 +147,13 @@
	}

	.logs-histogram {
		.ant-card-body {
			height: 140px;
			min-height: 140px;
			padding: 0 16px 22px 16px;
			font-family: 'Geist Mono';
		}

		margin-bottom: 0px;
	}
}
@@ -64,6 +64,7 @@ import { useHistory } from 'react-router-dom';
 import { AppState } from 'store/reducers';
 import { Dashboard } from 'types/api/dashboard/getAll';
 import { ILog } from 'types/api/logs/log';
+import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import {
 	IBuilderQuery,
 	OrderByPayload,
@@ -132,6 +133,9 @@ function LogsExplorerViews({
 	// State
 	const [page, setPage] = useState<number>(1);
 	const [logs, setLogs] = useState<ILog[]>([]);
+	const [lastLogLineTimestamp, setLastLogLineTimestamp] = useState<
+		number | string | null
+	>();
 	const [requestData, setRequestData] = useState<Query | null>(null);
 	const [showFormatMenuItems, setShowFormatMenuItems] = useState(false);
 	const [queryId, setQueryId] = useState<string>(v4());
@@ -188,6 +192,16 @@ function LogsExplorerViews({
 		const modifiedQueryData: IBuilderQuery = {
 			...listQuery,
 			aggregateOperator: LogsAggregatorOperator.COUNT,
+			groupBy: [
+				{
+					key: 'severity_text',
+					dataType: DataTypes.String,
+					type: '',
+					isColumn: true,
+					isJSON: false,
+					id: 'severity_text--string----true',
+				},
+			],
 		};

 		const modifiedQuery: Query = {
@@ -259,6 +273,14 @@ function LogsExplorerViews({
 				start: minTime,
 				end: maxTime,
 			}),
+			// send lastLogLineTimestamp only when the panel type is list, the orderBy column is timestamp, and the order is desc
+			lastLogLineTimestamp:
+				panelType === PANEL_TYPES.LIST &&
+				requestData?.builder?.queryData?.[0]?.orderBy?.[0]?.columnName ===
+					'timestamp' &&
+				requestData?.builder?.queryData?.[0]?.orderBy?.[0]?.order === 'desc'
+					? lastLogLineTimestamp
+					: undefined,
 		},
 		undefined,
 		listQueryKeyRef,
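The guard above only forwards the cursor when paginating a LIST panel sorted by timestamp descending; under any other ordering, the timestamp of the last rendered row says nothing about where the next page starts. The same condition as a standalone predicate (a sketch; the helper name is hypothetical and import paths are assumed):

import { PANEL_TYPES } from 'constants/queryBuilder';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

// Hypothetical helper: true only when the last log line's timestamp is a
// valid continuation cursor for the next page request.
function shouldSendLastLogTimestamp(
	panelType: PANEL_TYPES | null,
	requestData: Query | null,
): boolean {
	const orderBy = requestData?.builder?.queryData?.[0]?.orderBy?.[0];
	return (
		panelType === PANEL_TYPES.LIST &&
		orderBy?.columnName === 'timestamp' &&
		orderBy?.order === 'desc'
	);
}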
@@ -336,6 +358,10 @@ function LogsExplorerViews({
 				pageSize: nextPageSize,
 			});

+			// lastLogLineTimestamp starts as null while there are no logs;
+			// once we scroll to the end of the list, set it to the last log's timestamp.
+			setLastLogLineTimestamp(lastLog.timestamp);
+
 			setPage((prevPage) => prevPage + 1);

 			setRequestData(newRequestData);
@@ -528,6 +554,11 @@ function LogsExplorerViews({
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, [data]);

+	useEffect(() => {
+		// clear the lastLogLineTimestamp when the data changes
+		setLastLogLineTimestamp(null);
+	}, [data]);
+
 	useEffect(() => {
 		if (
 			requestData?.id !== stagedQuery?.id ||
@@ -661,6 +692,7 @@ function LogsExplorerViews({
 					className="logs-histogram"
 					isLoading={isFetchingListChartData || isLoadingListChartData}
 					data={chartData}
+					isLogsExplorerViews={panelType === PANEL_TYPES.LIST}
 				/>
 			)}
@@ -28,6 +28,20 @@ const lodsQueryServerRequest = (): void =>
 		),
 	);

+jest.mock('uplot', () => {
+	const paths = {
+		spline: jest.fn(),
+		bars: jest.fn(),
+	};
+	const uplotMock = jest.fn(() => ({
+		paths,
+	}));
+	return {
+		paths,
+		default: uplotMock,
+	};
+});
+
 // mocking the graph components in this test as this should be handled separately
 jest.mock(
 	'container/TimeSeriesView/TimeSeriesView',
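The factory above exposes the same paths stubs both as a named export and on constructed instances, so any code that reads uplot path builders or instantiates a chart gets inert jest.fn stubs instead of the real canvas-backed library, which jsdom cannot drive. A sketch of what the mock guarantees inside a test file where it is in scope:

import uplot from 'uplot';

// With the jest.mock above applied, the default export is a mock constructor.
test('uplot is stubbed out', () => {
	expect(jest.isMockFunction(uplot)).toBe(true);
});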
@@ -63,6 +63,7 @@
 		height: 40px;
 		justify-content: end;
 		padding: 0 8px;
+		margin: 12px 0 2px;
 	}
 }
@@ -14,10 +14,12 @@ import {
 	resourceAttributesToTagFilterItems,
 } from 'hooks/useResourceAttribute/utils';
 import useUrlQuery from 'hooks/useUrlQuery';
+import getStep from 'lib/getStep';
 import history from 'lib/history';
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
 import { useDispatch } from 'react-redux';
 import { useLocation, useParams } from 'react-router-dom';
+import store from 'store';
 import { UpdateTimeInterval } from 'store/actions';
 import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
 import { EQueryType } from 'types/common/dashboard';
@@ -123,6 +125,16 @@ function DBCall(): JSX.Element {
 		[servicename, tagFilterItems],
 	);

+	const stepInterval = useMemo(
+		() =>
+			getStep({
+				end: store.getState().globalTime.maxTime,
+				inputFormat: 'ns',
+				start: store.getState().globalTime.minTime,
+			}),
+		[],
+	);
+
 	const logEventCalledRef = useRef(false);

 	useEffect(() => {
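getStep's internals are not part of this diff; the hunks here and below only thread its result through to the trace-view links. The call shape is: derive one aggregation step from the current global time range held in the Redux store, with timestamps in nanoseconds. A condensed sketch:

import getStep from 'lib/getStep';
import store from 'store';

// Computed once per mount (the useMemo above has an empty dependency array),
// from the global min/max time in nanoseconds.
const stepInterval = getStep({
	start: store.getState().globalTime.minTime,
	end: store.getState().globalTime.maxTime,
	inputFormat: 'ns',
});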
@@ -158,6 +170,7 @@ function DBCall(): JSX.Element {
 						selectedTraceTags,
 						timestamp: selectedTimeStamp,
 						apmToTraceQuery,
+						stepInterval,
 					})}
 				>
 					View Traces
@@ -192,6 +205,7 @@ function DBCall(): JSX.Element {
 						selectedTraceTags,
 						timestamp: selectedTimeStamp,
 						apmToTraceQuery,
+						stepInterval,
 					})}
 				>
 					View Traces
@@ -16,10 +16,12 @@ import {
 	resourceAttributesToTagFilterItems,
 } from 'hooks/useResourceAttribute/utils';
 import useUrlQuery from 'hooks/useUrlQuery';
+import getStep from 'lib/getStep';
 import history from 'lib/history';
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
 import { useDispatch } from 'react-redux';
 import { useLocation, useParams } from 'react-router-dom';
+import store from 'store';
 import { UpdateTimeInterval } from 'store/actions';
 import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { EQueryType } from 'types/common/dashboard';
@@ -141,6 +143,15 @@ function External(): JSX.Element {
 		],
 	});

+	const stepInterval = useMemo(
+		() =>
+			getStep({
+				end: store.getState().globalTime.maxTime,
+				inputFormat: 'ns',
+				start: store.getState().globalTime.minTime,
+			}),
+		[],
+	);
 	const logEventCalledRef = useRef(false);
 	useEffect(() => {
 		if (!logEventCalledRef.current) {
@@ -222,6 +233,7 @@ function External(): JSX.Element {
 						selectedTraceTags,
 						timestamp: selectedTimeStamp,
 						apmToTraceQuery: errorApmToTraceQuery,
+						stepInterval,
 					})}
 				>
 					View Traces
@@ -257,6 +269,7 @@ function External(): JSX.Element {
 						selectedTraceTags,
 						timestamp: selectedTimeStamp,
 						apmToTraceQuery,
+						stepInterval,
 					})}
 				>
 					View Traces
@@ -295,6 +308,7 @@ function External(): JSX.Element {
 						selectedTraceTags,
 						timestamp: selectedTimeStamp,
 						apmToTraceQuery,
+						stepInterval,
 					})}
 				>
 					View Traces
@@ -330,6 +344,7 @@ function External(): JSX.Element {
 						selectedTraceTags,
 						timestamp: selectedTimeStamp,
 						apmToTraceQuery,
+						stepInterval,
 					})}
 				>
 					View Traces
Some files were not shown because too many files have changed in this diff.