Compare commits: v0.55.0-cl ... fix/remove (169 commits)
| Author | SHA1 | Date |
|---|---|---|
| | bae443904e | |
| | 9419f56e95 | |
| | 347868c18b | |
| | 17e20e7f41 | |
| | 2b0da82f94 | |
| | 911362cecf | |
| | 481f9620d3 | |
| | e5be431f18 | |
| | 503ed45a99 | |
| | 28818fbaac | |
| | c0e40614bf | |
| | 2d732ae4a9 | |
| | 8466e31e02 | |
| | efdaf7ee43 | |
| | 0dec94a5c6 | |
| | 204728ff60 | |
| | e51f4d986d | |
| | 337a941d0d | |
| | fc4b55cb34 | |
| | 96cb8053df | |
| | 5651d69485 | |
| | a6e492880d | |
| | 80b3c3e256 | |
| | 0806420dd7 | |
| | 18e240e3d1 | |
| | d0965a24c5 | |
| | 7ed689693f | |
| | 90ae55264a | |
| | bf4c792cdb | |
| | dd097821d1 | |
| | 701b8803ac | |
| | 2728ddd255 | |
| | 5187ed58a0 | |
| | 2180118094 | |
| | ecae842fa1 | |
| | 291b3ba357 | |
| | 78d1e19e60 | |
| | fa9e89bfe7 | |
| | 16f49a1d25 | |
| | c95c0f9a15 | |
| | 5588c7dd3f | |
| | 679b5db5a2 | |
| | 64feff3539 | |
| | 1720d616f6 | |
| | 155a2ea557 | |
| | d5c38ed0a4 | |
| | b70d50f2b3 | |
| | 728f699051 | |
| | 3bbbc759d3 | |
| | 2230ca1740 | |
| | 440fd4e02b | |
| | 78a924d378 | |
| | b03fadc2ec | |
| | 4b79d3b785 | |
| | a24fb5d84f | |
| | 137059ded6 | |
| | f1ce82ac25 | |
| | 4aeed392d7 | |
| | 4356ddae8c | |
| | 76e7de3aed | |
| | ae5e63cc64 | |
| | 5ef05891ce | |
| | c452e23b18 | |
| | 69aab87d72 | |
| | a60674cf1b | |
| | 022b9226a7 | |
| | 36e2404814 | |
| | 2eb3f6cb06 | |
| | 98cbdf570f | |
| | d380894c35 | |
| | ea0263cc73 | |
| | f38a1d9f1c | |
| | 9390a815a8 | |
| | 4f76e13dbe | |
| | 6a4643558c | |
| | a98c8db949 | |
| | 5ba9c9d48c | |
| | e1ca71dcea | |
| | 266ed58908 | |
| | 1411ae41c3 | |
| | bc8891d2f8 | |
| | 3b7455ac4c | |
| | 5a0a7c2c60 | |
| | 794d6fc0ca | |
| | 4c95df44d5 | |
| | 717545e14c | |
| | e4d1452f5f | |
| | 88ace79a64 | |
| | 9b42326f80 | |
| | 44a3469b9b | |
| | ef4b70f67b | |
| | 7a125e31ec | |
| | c7bd7566c5 | |
| | f4fbe62169 | |
| | 6e3141a4ce | |
| | fc8391c5aa | |
| | 87499d1ead | |
| | 5fa8686fcf | |
| | dc2db524c7 | |
| | b3545b767a | |
| | 55f653d92e | |
| | 35f8e133a9 | |
| | 58d6487f77 | |
| | 6685482ea6 | |
| | 708158f50f | |
| | 0feab5aa93 | |
| | b49ed913c7 | |
| | 419d2da363 | |
| | df2844ea74 | |
| | 5e5f0f167f | |
| | a6b05f0a3d | |
| | f69aaa2cfb | |
| | 3866f89d3e | |
| | f9ac41b865 | |
| | c5b5bfe540 | |
| | f3c01a5155 | |
| | 033b64a62a | |
| | 4aabfe7cf5 | |
| | 0218f701b2 | |
| | f6d3f95768 | |
| | cb1cd3555b | |
| | ced72f86a4 | |
| | 54d5666b92 | |
| | 4edc6dbeae | |
| | e203276678 | |
| | 8eb2cf144e | |
| | 2f7d208eb5 | |
| | 70fb5af19f | |
| | fc7a94fa66 | |
| | 0077714cb0 | |
| | 723c31f6c5 | |
| | 1024483e58 | |
| | cbcef2c880 | |
| | 0711c8855e | |
| | 72cbc1a9e7 | |
| | a9841755a7 | |
| | 03e6c33f82 | |
| | 3c5aa86ee2 | |
| | 06a89b21da | |
| | 8c891f0e87 | |
| | 49dd5f2ef7 | |
| | 83d01e7a0d | |
| | f8e97c9c5c | |
| | b78ade2cf2 | |
| | 1b59719891 | |
| | 540a2c6712 | |
| | 08f3b089f4 | |
| | 1d8e5b6c0f | |
| | 0dcded59e5 | |
| | bfb63ca8c4 | |
| | 71e24483dd | |
| | 317c41a166 | |
| | ed4613cb1b | |
| | 6c06fea1aa | |
| | 6bc2f9125c | |
| | 262beef8f9 | |
| | 43cc6dea92 | |
| | 6684640abe | |
| | 0a146910d6 | |
| | 690ed0f7f1 | |
| | 5bcf7de440 | |
| | 703983a5f9 | |
| | 766a2123c5 | |
| | a476c68f7e | |
| | fc15aa6f1c | |
| | 4192fd573d | |
| | ca13d80205 | |
| | 8d84ce8f06 | |
| | 09ea7b9eb5 | |
.github/ISSUE_TEMPLATE/request_dashboard.md (new file, 49 added lines, marked vendored)

@@ -0,0 +1,49 @@
---
name: Request Dashboard
about: Request a new dashboard for the SigNoz Dashboards repository
title: '[Dashboard Request] '
labels: 'dashboard-template'
assignees: ''

---

<!-- Use this template to request a new dashboard for the SigNoz Dashboards repository. Providing detailed information will help us understand your needs better and speed up the dashboard creation process. -->

## Dashboard Name

<!-- Provide the name for the requested dashboard. Be specific (e.g., "MySQL Monitoring Dashboard"). -->

## Expected Dashboard Sections and Panels

(Can be tweaked (add or remove panels/sections) according to available metrics)

### Section Name

<!-- Brief description of what this section should display (e.g., "Resource usage metrics for MySQL database"). -->

### Panel Name

<!-- Description of the panel (e.g., "Displays current CPU usage, memory usage, etc."). -->

<!-- - **Example:**
- **Section**: Resource Metrics
- **Panel**: CPU Usage - Displays the current CPU usage across all database instances.
- **Panel**: Memory Usage - Displays the total memory used by the MySQL process. -->

<!-- Repeat this format for any additional sections or panels. -->

## Expected Dashboard Variables

<!-- List any dashboard variables that should be included in the dashboard. Examples could be `deployment.environment`, `hostname`, `region`, etc. -->

## Additional Comments or Requirements

<!-- Include any other details, special requirements, or specific visualizations you'd like to request for this dashboard. -->

## References or Screenshots

<!-- Add any references or screenshots of requested dashboard if available. -->

## 📋 Notes

Please review the [CONTRIBUTING.md](https://github.com/SigNoz/dashboards/blob/main/CONTRIBUTING.md) for guidelines on dashboard structure, naming conventions, and how to submit a pull request.
@@ -30,6 +30,7 @@ Also, have a look at these [good first issues label](https://github.com/SigNoz/s
- [To run ClickHouse setup](#41-to-run-clickhouse-setup-recommended-for-local-development)
- [Contribute to SigNoz Helm Chart](#5-contribute-to-signoz-helm-chart-)
- [To run helm chart for local development](#51-to-run-helm-chart-for-local-development)
- [Contribute to Dashboards](#6-contribute-to-dashboards-)
- [Other Ways to Contribute](#other-ways-to-contribute)

# 1. General Instructions 📝

@@ -37,7 +38,7 @@ Also, have a look at these [good first issues label](https://github.com/SigNoz/s
## 1.1 For Creating Issue(s)
Before making any significant changes and before filing a new issue, please check [existing open](https://github.com/SigNoz/signoz/issues?q=is%3Aopen+is%3Aissue), or [recently closed](https://github.com/SigNoz/signoz/issues?q=is%3Aissue+is%3Aclosed) issues to make sure somebody else hasn't already reported the issue. Please try to include as much information as you can.

**Issue Types** - [Bug Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=) | [Feature Request](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=) | [Performance Issue Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=performance-issue-report.md&title=) | [Report a Security Vulnerability](https://github.com/SigNoz/signoz/security/policy)
**Issue Types** - [Bug Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=) | [Feature Request](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=) | [Performance Issue Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=performance-issue-report.md&title=) | [Request Dashboard](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=dashboard-template&projects=&template=request_dashboard.md&title=%5BDashboard+Request%5D+) | [Report a Security Vulnerability](https://github.com/SigNoz/signoz/security/policy)

#### Details like these are incredibly useful:

@@ -56,7 +57,7 @@ Before making any significant changes and before filing a new issue, please chec
Discussing your proposed changes ahead of time will make the contribution
process smooth for everyone 🙌.

**[`^top^`](#)**
**[`^top^`](#contributing-guidelines)**

<hr>

@@ -97,13 +98,14 @@ GitHub provides additional document on [forking a repository](https://help.githu
stability and quality of the component.

You can always reach out to `ankit@signoz.io` to understand more about the repo and product. We are very responsive over email and [SLACK](https://signoz.io/slack).
You can always reach out to `ankit@signoz.io` to understand more about the repo and product. We are very responsive over email and [slack community](https://signoz.io/slack).

### Pointers:
- If you find any **bugs** → please create an [**issue.**](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=)
- If you find anything **missing** in documentation → you can create an issue with the label **`documentation`**.
- If you want to build any **new feature** → please create an [issue with the label **`enhancement`**.](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=)
- If you want to **discuss** something about the product, start a new [**discussion**.](https://github.com/SigNoz/signoz/discussions)
- If you want to request a new **dashboard template** → please create an issue [here](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=dashboard-template&projects=&template=request_dashboard.md&title=%5BDashboard+Request%5D+).

<hr>

@@ -117,7 +119,7 @@ e.g. If you are submitting a fix for an issue in frontend, the PR name should be

- Feel free to ping us on [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) or [`#contributing-frontend`](https://signoz-community.slack.com/archives/C027134DM8B) on our slack community if you need any help on this :)

**[`^top^`](#)**
**[`^top^`](#contributing-guidelines)**

<hr>

@@ -127,14 +129,13 @@ e.g. If you are submitting a fix for an issue in frontend, the PR name should be

- [**Frontend**](#3-develop-frontend-) (Written in Typescript, React)
- [**Backend**](#4-contribute-to-backend-query-service-) (Query Service, written in Go)
- [**Dashboard Templates**](#6-contribute-to-dashboards-) (JSON dashboard templates built with SigNoz)

Depending upon your area of expertise & interest, you can choose one or more to contribute. Below are detailed instructions to contribute in each area.

**Please note:** If you want to work on an issue, please ask the maintainers to assign the issue to you before starting work on it. This would help us understand who is working on an issue and prevent duplicate work. 🙏🏻
**Please note:** If you want to work on an issue, please add a brief description of your solution on the issue before starting work on it.

⚠️ If you just raise a PR, without the corresponding issue being assigned to you - it may not be accepted.

**[`^top^`](#)**
**[`^top^`](#contributing-guidelines)**

<hr>

@@ -188,7 +189,7 @@ Also, have a look at [Frontend README.md](https://github.com/SigNoz/signoz/blob/
### Important Notes:
The Maintainers / Contributors who will change Line Numbers of `Frontend` & `Query-Section`, please update line numbers in [`/.scripts/commentLinesForSetup.sh`](https://github.com/SigNoz/signoz/blob/develop/.scripts/commentLinesForSetup.sh)

**[`^top^`](#)**
**[`^top^`](#contributing-guidelines)**

## 3.2 Contribute to Frontend without installing SigNoz backend

@@ -209,7 +210,7 @@ Please ping us in the [`#contributing`](https://signoz-community.slack.com/archi

**Frontend should now be accessible at** [`http://localhost:3301/services`](http://localhost:3301/services)

**[`^top^`](#)**
**[`^top^`](#contributing-guidelines)**

<hr>

@@ -309,7 +310,7 @@ Click the button below. A workspace with all required environments will be creat

> To use it on your forked repo, edit the 'Open in Gitpod' button URL to `https://gitpod.io/#https://github.com/<your-github-username>/signoz` -->

**[`^top^`](#)**
**[`^top^`](#contributing-guidelines)**

<hr>

@@ -365,10 +366,21 @@ curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-
| HOTROD_NAMESPACE=sample-application bash
```

**[`^top^`](#)**
**[`^top^`](#contributing-guidelines)**

---

# 6. Contribute to Dashboards 📈

**Need to Update: [https://github.com/SigNoz/dashboards](https://github.com/SigNoz/dashboards)**

To contribute a new dashboard template for any service, follow the contribution guidelines in the [Dashboard Contributing Guide](https://github.com/SigNoz/dashboards/blob/main/CONTRIBUTING.md). In brief:

1. Create a dashboard JSON file.
2. Add a README file explaining the dashboard, the metrics ingested, and the configurations needed.
3. Include screenshots of the dashboard in the `assets/` directory.
4. Submit a pull request for review.

## Other Ways to Contribute

There are many other ways to get involved with the community and to participate in this project:

@@ -379,7 +391,6 @@ There are many other ways to get involved with the community and to participate
- Help answer questions on forums such as Stack Overflow and [SigNoz Community Slack Channel](https://signoz.io/slack).
- Tell others about the project on Twitter, your blog, etc.

Again, Feel free to ping us on [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) or [`#contributing-frontend`](https://signoz-community.slack.com/archives/C027134DM8B) on our slack community if you need any help on this :)

Thank You!
Makefile (11 changed lines)

@@ -188,13 +188,4 @@ check-no-ee-references:
fi

test:
go test ./pkg/query-service/app/metrics/...
go test ./pkg/query-service/cache/...
go test ./pkg/query-service/app/...
go test ./pkg/query-service/app/querier/...
go test ./pkg/query-service/converter/...
go test ./pkg/query-service/formatter/...
go test ./pkg/query-service/tests/integration/...
go test ./pkg/query-service/rules/...
go test ./pkg/query-service/collectorsimulator/...
go test ./pkg/query-service/postprocess/...
go test ./pkg/query-service/...
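The consolidated `test` target just runs `go test ./pkg/query-service/...`, so any standard Go test file anywhere under that tree is picked up automatically. A minimal sketch of such a test (the package placement and names are hypothetical, shown only to illustrate what the target discovers):

```go
package converter_test

import "testing"

// TestHoursToMillis is a throwaway example of the kind of *_test.go file that
// `make test` (i.e. go test ./pkg/query-service/...) discovers automatically.
func TestHoursToMillis(t *testing.T) {
	cases := []struct {
		name  string
		hours int64
		want  int64
	}{
		{"one hour", 1, 3_600_000},
		{"one day", 24, 86_400_000},
	}
	for _, c := range cases {
		if got := c.hours * 60 * 60 * 1000; got != c.want {
			t.Errorf("%s: got %d, want %d", c.name, got, c.want)
		}
	}
}
```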
README.md (200 changed lines)

@@ -1,8 +1,11 @@
<p align="center">
<img src="https://res.cloudinary.com/dcv3epinx/image/upload/v1618904450/signoz-images/LogoGithub_sigfbu.svg" alt="SigNoz-logo" width="240" />
<h1 align="center" style="border-bottom: none">
<a href="https://signoz.io" target="_blank">
<img alt="SigNoz" src="https://github.com/user-attachments/assets/ef9a33f7-12d7-4c94-8908-0a02b22f0c18" width="100" height="100">
</a>
<br>SigNoz
</h1>

<p align="center">Monitor your applications and troubleshoot problems in your deployed applications, an open-source alternative to DataDog, New Relic, etc.</p>
</p>
<p align="center">All your logs, metrics, and traces in one place. Monitor your application, spot issues before they occur and troubleshoot downtime quickly with rich context. SigNoz is a cost-effective open-source alternative to Datadog and New Relic. Visit <a href="https://signoz.io" target="_blank">signoz.io</a> for the full documentation, tutorials, and guide.</p>

<p align="center">
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/query-service?label=Docker Downloads"> </a>
@@ -21,55 +24,115 @@
<a href="https://twitter.com/SigNozHq"><b>Twitter</b></a>
</h3>

##

SigNoz helps developers monitor applications and troubleshoot problems in their deployed applications. With SigNoz, you can:

👉 Visualise Metrics, Traces and Logs in a single pane of glass

👉 You can see metrics like p99 latency, error rates for your services, external API calls and individual end points.

👉 You can find the root cause of the problem by going to the exact traces which are causing the problem and see detailed flamegraphs of individual request traces.

👉 Run aggregates on trace data to get business relevant metrics

👉 Filter and query logs, build dashboards and alerts based on attributes in logs

👉 Record exceptions automatically in Python, Java, Ruby, and Javascript

👉 Easy to set alerts with DIY query builder
## Features


### Application Metrics
### Application Performance Monitoring


Use SigNoz APM to monitor your applications and services. It comes with out-of-box charts for key application metrics like p99 latency, error rate, Apdex and operations per second. You can also monitor the database and external calls made from your application. Read [more](https://signoz.io/application-performance-monitoring/).

You can [instrument](https://signoz.io/docs/instrumentation/) your application with OpenTelemetry to get started.
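As a rough illustration of what that instrumentation step involves, here is a minimal Go sketch that exports traces over OTLP/gRPC to a collector listening on localhost:4317; the service name and endpoint are assumptions for the example, not prescribed values:

```go
package main

import (
	"context"
	"log"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc"
	"go.opentelemetry.io/otel/sdk/resource"
	sdktrace "go.opentelemetry.io/otel/sdk/trace"
	semconv "go.opentelemetry.io/otel/semconv/v1.21.0"
)

func main() {
	ctx := context.Background()

	// Export spans to a local otel-collector over OTLP/gRPC (assumed endpoint).
	exp, err := otlptracegrpc.New(ctx,
		otlptracegrpc.WithEndpoint("localhost:4317"),
		otlptracegrpc.WithInsecure(),
	)
	if err != nil {
		log.Fatal(err)
	}

	tp := sdktrace.NewTracerProvider(
		sdktrace.WithBatcher(exp),
		sdktrace.WithResource(resource.NewWithAttributes(
			semconv.SchemaURL,
			semconv.ServiceNameKey.String("sample-app"), // hypothetical service name
		)),
	)
	defer func() { _ = tp.Shutdown(ctx) }()
	otel.SetTracerProvider(tp)

	// Create a span around a unit of work.
	_, span := otel.Tracer("sample-app").Start(ctx, "handle-request")
	span.End()
}
```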
### Distributed Tracing
<img width="2068" alt="distributed_tracing_2 2" src="https://user-images.githubusercontent.com/83692067/226536447-bae58321-6a22-4ed3-af80-e3e964cb3489.png">


<img width="2068" alt="distributed_tracing_1" src="https://user-images.githubusercontent.com/83692067/226536462-939745b6-4f9d-45a6-8016-814837e7f7b4.png">

### Logs Management

<img width="2068" alt="logs_management" src="https://user-images.githubusercontent.com/83692067/226536482-b8a5c4af-b69c-43d5-969c-338bd5eaf1a5.png">
SigNoz can be used as a centralized log management solution. We use ClickHouse (used by likes of Uber & Cloudflare) as a datastore, ⎯ an extremely fast and highly optimized storage for logs data. Instantly search through all your logs using quick filters and a powerful query builder.

### Infrastructure Monitoring
You can also create charts on your logs and monitor them with customized dashboards. Read [more](https://signoz.io/log-management/).

<img width="2068" alt="infrastructure_monitoring" src="https://user-images.githubusercontent.com/83692067/226536496-f38c4dbf-e03c-4158-8be0-32d4a61158c7.png">


### Exceptions Monitoring


### Distributed Tracing

Distributed Tracing is essential to troubleshoot issues in microservices applications. Powered by OpenTelemetry, distributed tracing in SigNoz can help you track user requests across services to help you identify performance bottlenecks.

See user requests in a detailed breakdown with the help of Flamegraphs and Gantt Charts. Click on any span to see the entire trace represented beautifully, which will help you make sense of where issues actually occurred in the flow of requests.

Read [more](https://signoz.io/distributed-tracing/).




### Metrics and Dashboards

Ingest metrics from your infrastructure or applications and create customized dashboards to monitor them. Create visualization that suits your needs with a variety of panel types like pie chart, time-series, bar chart, etc.

Create queries on your metrics data quickly with an easy-to-use metrics query builder. Add multiple queries and combine those queries with formulae to create really complex queries quickly.

Read [more](https://signoz.io/metrics-and-dashboards/).



### Alerts

<img width="2068" alt="alerts_management" src="https://user-images.githubusercontent.com/83692067/226536548-2c81e2e8-c12d-47e8-bad7-c6be79055def.png">
Use alerts in SigNoz to get notified when anything unusual happens in your application. You can set alerts on any type of telemetry signal (logs, metrics, traces), create thresholds and set up a notification channel to get notified. Advanced features like alert history and anomaly detection can help you create smarter alerts.

Alerts in SigNoz help you identify issues proactively so that you can address them before they reach your customers.

Read [more](https://signoz.io/alerts-management/).



### Exceptions Monitoring

Monitor exceptions automatically in Python, Java, Ruby, and Javascript. For other languages, just drop in a few lines of code and start monitoring exceptions.

See the detailed stack trace for all exceptions caught in your application. You can also log in custom attributes to add more context to your exceptions. For example, you can add attributes to identify users for which exceptions occurred.

Read [more](https://signoz.io/exceptions-monitoring/).


<br /><br />

## Why SigNoz?

SigNoz is a single tool for all your monitoring and observability needs. Here are a few reasons why you should choose SigNoz:

- Single tool for observability(logs, metrics, and traces)

- Built on top of [OpenTelemetry](https://opentelemetry.io/), the open-source standard which frees you from any type of vendor lock-in

- Correlated logs, metrics and traces for much richer context while debugging

- Uses ClickHouse (used by likes of Uber & Cloudflare) as datastore - an extremely fast and highly optimized storage for observability data

- DIY Query builder, PromQL, and ClickHouse queries to fulfill all your use-cases around querying observability data

- Open-Source - you can use open-source, our [cloud service](https://signoz.io/teams/) or a mix of both based on your use case


## Getting Started

### Create a SigNoz Cloud Account

SigNoz cloud is the easiest way to get started with SigNoz. Our cloud service is for those users who want to spend more time in getting insights for their application performance without worrying about maintenance.

[Get started for free](https://signoz.io/teams/)

### Deploy using Docker(self-hosted)

Please follow the steps listed [here](https://signoz.io/docs/install/docker/) to install using docker

The [troubleshooting instructions](https://signoz.io/docs/install/troubleshooting/) may be helpful if you face any issues.

<p>  </p>

### Deploy in Kubernetes using Helm(self-hosted)

Please follow the steps listed [here](https://signoz.io/docs/deployment/helm_chart) to install using helm charts

<br /><br />

We also offer managed services in your infra. Check our [pricing plans](https://signoz.io/pricing/) for all details.


## Join our Slack community

@@ -78,64 +141,22 @@ Come say Hi to us on [Slack](https://signoz.io/slack) 👋
<br /><br />


## Features:

- Unified UI for metrics, traces and logs. No need to switch from Prometheus to Jaeger to debug issues, or use a logs tool like Elastic separate from your metrics and traces stack.
- Application overview metrics like RPS, 50th/90th/99th Percentile latencies, and Error Rate
- Slowest endpoints in your application
- See exact request trace to figure out issues in downstream services, slow DB queries, call to 3rd party services like payment gateways, etc
- Filter traces by service name, operation, latency, error, tags/annotations.
- Run aggregates on trace data (events/spans) to get business relevant metrics. e.g. You can get error rate and 99th percentile latency of `customer_type: gold` or `deployment_version: v2` or `external_call: paypal`
- Native support for OpenTelemetry Logs, advanced log query builder, and automatic log collection from k8s cluster
- Lightning quick log analytics ([Logs Perf. Benchmark](https://signoz.io/blog/logs-performance-benchmark/))
- End-to-End visibility into infrastructure performance, ingest metrics from all kinds of host environments
- Easy to set alerts with DIY query builder

<br /><br />


## Why SigNoz?

Being developers, we found it annoying to rely on closed source SaaS vendors for every small feature we wanted. Closed source vendors often surprise you with huge month end bills without any transparency.

We wanted to make a self-hosted & open source version of tools like DataDog, NewRelic for companies that have privacy and security concerns about having customer data going to third party services.

Being open source also gives you complete control of your configuration, sampling, uptimes. You can also build modules over SigNoz to extend business specific capabilities

### Languages supported:

We support [OpenTelemetry](https://opentelemetry.io) as the library which you can use to instrument your applications. So any framework and language supported by OpenTelemetry is also supported by SigNoz. Some of the main supported languages are:
SigNoz supports all major programming languages for monitoring. Any framework and language supported by OpenTelemetry is supported by SigNoz. Find instructions for instrumenting different languages below:

- Java
- Python
- Node.js
- Go
- PHP
- .NET
- Ruby
- Elixir
- Rust
- [Java](https://signoz.io/docs/instrumentation/java/)
- [Python](https://signoz.io/docs/instrumentation/python/)
- [Node.js or Javascript](https://signoz.io/docs/instrumentation/javascript/)
- [Go](https://signoz.io/docs/instrumentation/golang/)
- [PHP](https://signoz.io/docs/instrumentation/php/)
- [.NET](https://signoz.io/docs/instrumentation/dotnet/)
- [Ruby](https://signoz.io/docs/instrumentation/ruby-on-rails/)
- [Elixir](https://signoz.io/docs/instrumentation/elixir/)
- [Rust](https://signoz.io/docs/instrumentation/rust/)
- [Swift](https://signoz.io/docs/instrumentation/swift/)


You can find the complete list of languages here - https://opentelemetry.io/docs/

<br /><br />


## Getting Started

### Deploy using Docker

Please follow the steps listed [here](https://signoz.io/docs/install/docker/) to install using docker

The [troubleshooting instructions](https://signoz.io/docs/install/troubleshooting/) may be helpful if you face any issues.

<p>  </p>


### Deploy in Kubernetes using Helm

Please follow the steps listed [here](https://signoz.io/docs/deployment/helm_chart) to install using helm charts
You can find our entire documentation [here](https://signoz.io/docs/introduction/).

<br /><br />

@@ -144,9 +165,11 @@ Please follow the steps listed [here](https://signoz.io/docs/deployment/helm_cha

### SigNoz vs Prometheus

Prometheus is good if you want to do just metrics. But if you want to have a seamless experience between metrics and traces, then current experience of stitching together Prometheus & Jaeger is not great.
Prometheus is good if you want to do just metrics. But if you want to have a seamless experience between metrics, logs and traces, then current experience of stitching together Prometheus & other tools is not great.

Our goal is to provide an integrated UI between metrics & traces - similar to what SaaS vendors like Datadog provides - and give advanced filtering and aggregation over traces, something which Jaeger currently lack.
SigNoz is a one-stop solution for metrics and other telemetry signals. And because you will use the same standard(OpenTelemetry) to collect all telemetry signals, you can also correlate these signals to troubleshoot quickly.

For example, if you see that there are issues with infrastructure metrics of your k8s cluster at a timestamp, you can jump to other signals like logs and traces to understand the issue quickly.

<p>  </p>

@@ -158,6 +181,7 @@ Moreover, SigNoz has few more advanced features wrt Jaeger:

- Jaegar UI doesn’t show any metrics on traces or on filtered traces
- Jaeger can’t get aggregates on filtered traces. For example, p99 latency of requests which have tag - customer_type='premium'. This can be done easily on SigNoz
- You can also go from traces to logs easily in SigNoz

<p>  </p>

@@ -133,7 +133,7 @@ services:
# - ./data/clickhouse-3/:/var/lib/clickhouse/

alertmanager:
image: signoz/alertmanager:0.23.5
image: signoz/alertmanager:0.23.7
volumes:
- ./data/alertmanager:/data
command:
@@ -146,11 +146,11 @@ services:
condition: on-failure

query-service:
image: signoz/query-service:0.49.1
image: signoz/query-service:0.56.0
command:
[
"-config=/root/config/prometheus.yml",
# "--prefer-delta=true"
"--use-logs-new-schema=true"
]
# ports:
# - "6060:6060" # pprof port
@@ -186,7 +186,7 @@ services:
<<: *db-depend

frontend:
image: signoz/frontend:0.48.0
image: signoz/frontend:0.56.0
deploy:
restart_policy:
condition: on-failure
@@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

otel-collector:
image: signoz/signoz-otel-collector:0.102.2
image: signoz/signoz-otel-collector:0.102.12
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -238,7 +238,7 @@ services:
- query-service

otel-collector-migrator:
image: signoz/signoz-schema-migrator:0.102.2
image: signoz/signoz-schema-migrator:0.102.10
deploy:
restart_policy:
condition: on-failure
@@ -131,8 +131,8 @@ processors:
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/signoz_traces
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
docker_multi_node_cluster: ${env:DOCKER_MULTI_NODE_CLUSTER}
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
@@ -142,8 +142,9 @@ exporters:
# logging: {}
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
docker_multi_node_cluster: ${env:DOCKER_MULTI_NODE_CLUSTER}
timeout: 10s
use_new_schema: true
extensions:
health_check:
endpoint: 0.0.0.0:13133
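The exporter settings above switch to the `${env:NAME}` placeholder form. As a rough sketch of what that kind of environment substitution does (an illustration only, not the collector's actual confmap code), the same idea in Go:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// expandEnvPlaceholders resolves ${env:NAME} placeholders the way a config
// loader might. Illustration of the syntax only; not the OpenTelemetry
// Collector's implementation.
func expandEnvPlaceholders(s string) string {
	return os.Expand(s, func(key string) string {
		if name, ok := strings.CutPrefix(key, "env:"); ok {
			return os.Getenv(name)
		}
		return "${" + key + "}" // leave non-env placeholders untouched
	})
}

func main() {
	os.Setenv("DOCKER_MULTI_NODE_CLUSTER", "false")
	fmt.Println(expandEnvPlaceholders("docker_multi_node_cluster: ${env:DOCKER_MULTI_NODE_CLUSTER}"))
}
```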
@@ -1,5 +1,8 @@
version: "2.4"

include:
- test-app-docker-compose.yaml

services:
zookeeper-1:
image: bitnami/zookeeper:3.7.1
@@ -54,7 +57,7 @@ services:

alertmanager:
container_name: signoz-alertmanager
image: signoz/alertmanager:0.23.5
image: signoz/alertmanager:0.23.7
volumes:
- ./data/alertmanager:/data
depends_on:
@@ -66,7 +69,7 @@ services:
- --storage.path=/data

otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.10}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -81,7 +84,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:0.102.2
image: signoz/signoz-otel-collector:0.102.12
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -128,29 +131,3 @@ services:
depends_on:
- otel-collector
restart: on-failure

hotrod:
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
logging:
options:
max-size: 50m
max-file: "3"
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

load-hotrod:
image: "signoz/locust:1.2.3"
container_name: load-hotrod
hostname: load-hotrod
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../common/locust-scripts:/locust

@@ -25,7 +25,7 @@ services:
command:
[
"-config=/root/config/prometheus.yml",
# "--prefer-delta=true"
"--use-logs-new-schema=true"
]
ports:
- "6060:6060"
@@ -1,7 +1,6 @@
version: "2.4"

x-clickhouse-defaults: &clickhouse-defaults
restart: on-failure
# addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
depends_on:
@@ -35,7 +34,7 @@ x-db-depend: &db-depend
depends_on:
clickhouse:
condition: service_healthy
otel-collector-migrator:
otel-collector-migrator-sync:
condition: service_completed_successfully
# clickhouse-2:
# condition: service_healthy
@@ -43,9 +42,11 @@ x-db-depend: &db-depend
# condition: service_healthy

services:

zookeeper-1:
image: bitnami/zookeeper:3.7.1
container_name: signoz-zookeeper-1
hostname: zookeeper-1
user: root
ports:
- "2181:2181"
@@ -55,7 +56,6 @@ services:
- ./data/zookeeper-1:/bitnami/zookeeper
environment:
- ZOO_SERVER_ID=1
- ZOO_SERVERS=0.0.0.0:2888:3888
# - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
- ALLOW_ANONYMOUS_LOGIN=yes
- ZOO_AUTOPURGE_INTERVAL=1
@@ -63,6 +63,7 @@ services:
# zookeeper-2:
# image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-2
# hostname: zookeeper-2
# user: root
# ports:
# - "2182:2181"
@@ -79,6 +80,7 @@ services:
# zookeeper-3:
# image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-3
# hostname: zookeeper-3
# user: root
# ports:
# - "2183:2181"
@@ -103,9 +105,11 @@ services:
volumes:
- ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
- ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
- ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
- ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
# - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
- ./data/clickhouse/:/var/lib/clickhouse/
- ./user_scripts:/var/lib/clickhouse/user_scripts/

# clickhouse-2:
# <<: *clickhouse-defaults
@@ -118,9 +122,12 @@ services:
# volumes:
# - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
# - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
# - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
# - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
# - ./data/clickhouse-2/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/


# clickhouse-3:
# <<: *clickhouse-defaults
@@ -133,12 +140,14 @@ services:
# volumes:
# - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
# - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
# - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
# - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
# - ./data/clickhouse-3/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/

alertmanager:
image: signoz/alertmanager:0.23.5
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.7}
container_name: signoz-alertmanager
volumes:
- ./data/alertmanager:/data
@@ -153,12 +162,12 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

query-service:
image: signoz/query-service:latest
image: signoz/query-service:${DOCKER_TAG:-0.56.0}
container_name: signoz-query-service
command:
[
"-config=/root/config/prometheus.yml",
# "--prefer-delta=true"
"--use-logs-new-schema=true"
]
# ports:
# - "6060:6060" # pprof port
@@ -191,11 +200,25 @@ services:
retries: 3
<<: *db-depend

otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
container_name: otel-migrator
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.56.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
- alertmanager
- query-service
ports:
- "3301:3301"
volumes:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

otel-collector-migrator-sync:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.10}
container_name: otel-migrator-sync
command:
- "sync"
- "--dsn=tcp://clickhouse:9000"
- "--up="
depends_on:
clickhouse:
condition: service_healthy
@@ -204,8 +227,25 @@ services:
# clickhouse-3:
# condition: service_healthy

otel-collector-migrator-async:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.10}
container_name: otel-migrator-async
command:
- "async"
- "--dsn=tcp://clickhouse:9000"
- "--up="
depends_on:
clickhouse:
condition: service_healthy
otel-collector-migrator-sync:
condition: service_completed_successfully
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy

otel-collector:
image: signoz/signoz-otel-collector:0.102.2
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.12}
container_name: signoz-otel-collector
command:
[
@@ -240,7 +280,7 @@ services:
depends_on:
clickhouse:
condition: service_healthy
otel-collector-migrator:
otel-collector-migrator-sync:
condition: service_completed_successfully
query-service:
condition: service_healthy
@@ -255,29 +295,3 @@ services:
depends_on:
- otel-collector
restart: on-failure

hotrod:
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
logging:
options:
max-size: 50m
max-file: "3"
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

load-hotrod:
image: "signoz/locust:1.2.3"
container_name: load-hotrod
hostname: load-hotrod
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../common/locust-scripts:/locust
@@ -1,5 +1,8 @@
version: "2.4"

include:
- test-app-docker-compose.yaml

x-clickhouse-defaults: &clickhouse-defaults
restart: on-failure
# addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
@@ -149,7 +152,7 @@ services:
# - ./user_scripts:/var/lib/clickhouse/user_scripts/

alertmanager:
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5}
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.7}
container_name: signoz-alertmanager
volumes:
- ./data/alertmanager:/data
@@ -164,13 +167,13 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

query-service:
image: signoz/query-service:${DOCKER_TAG:-0.49.1}
image: signoz/query-service:${DOCKER_TAG:-0.56.0}
container_name: signoz-query-service
command:
[
"-config=/root/config/prometheus.yml",
"-gateway-url=https://api.staging.signoz.cloud"
# "--prefer-delta=true"
"-gateway-url=https://api.staging.signoz.cloud",
"--use-logs-new-schema=true"
]
# ports:
# - "6060:6060" # pprof port
@@ -204,7 +207,7 @@ services:
<<: *db-depend

frontend:
image: signoz/frontend:${DOCKER_TAG:-0.49.1}
image: signoz/frontend:${DOCKER_TAG:-0.56.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -216,7 +219,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.10}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -230,7 +233,7 @@ services:


otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.12}
container_name: signoz-otel-collector
command:
[
@@ -280,29 +283,3 @@ services:
depends_on:
- otel-collector
restart: on-failure

hotrod:
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
logging:
options:
max-size: 50m
max-file: "3"
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

load-hotrod:
image: "signoz/locust:1.2.3"
container_name: load-hotrod
hostname: load-hotrod
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../common/locust-scripts:/locust
@@ -1,307 +1,3 @@
|
||||
version: "2.4"
|
||||
|
||||
x-clickhouse-defaults: &clickhouse-defaults
|
||||
restart: on-failure
|
||||
# addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
tty: true
|
||||
depends_on:
|
||||
- zookeeper-1
|
||||
# - zookeeper-2
|
||||
# - zookeeper-3
|
||||
logging:
|
||||
options:
|
||||
max-size: 50m
|
||||
max-file: "3"
|
||||
healthcheck:
|
||||
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"0.0.0.0:8123/ping"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
ulimits:
|
||||
nproc: 65535
|
||||
nofile:
|
||||
soft: 262144
|
||||
hard: 262144
|
||||
|
||||
x-db-depend: &db-depend
|
||||
depends_on:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
otel-collector-migrator:
|
||||
condition: service_completed_successfully
|
||||
# clickhouse-2:
|
||||
# condition: service_healthy
|
||||
# clickhouse-3:
|
||||
# condition: service_healthy
|
||||
|
||||
services:
|
||||
|
||||
zookeeper-1:
|
||||
image: bitnami/zookeeper:3.7.1
|
||||
container_name: signoz-zookeeper-1
|
||||
hostname: zookeeper-1
|
||||
user: root
|
||||
ports:
|
||||
- "2181:2181"
|
||||
- "2888:2888"
|
||||
- "3888:3888"
|
||||
volumes:
|
||||
- ./data/zookeeper-1:/bitnami/zookeeper
|
||||
environment:
|
||||
- ZOO_SERVER_ID=1
|
||||
# - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
|
||||
- ALLOW_ANONYMOUS_LOGIN=yes
|
||||
- ZOO_AUTOPURGE_INTERVAL=1
|
||||
|
||||
# zookeeper-2:
|
||||
# image: bitnami/zookeeper:3.7.0
|
||||
# container_name: signoz-zookeeper-2
|
||||
# hostname: zookeeper-2
|
||||
# user: root
|
||||
# ports:
|
||||
# - "2182:2181"
|
||||
# - "2889:2888"
|
||||
# - "3889:3888"
|
||||
# volumes:
|
||||
# - ./data/zookeeper-2:/bitnami/zookeeper
|
||||
# environment:
|
||||
# - ZOO_SERVER_ID=2
|
||||
# - ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888
|
||||
# - ALLOW_ANONYMOUS_LOGIN=yes
|
||||
# - ZOO_AUTOPURGE_INTERVAL=1
|
||||
|
||||
# zookeeper-3:
|
||||
# image: bitnami/zookeeper:3.7.0
|
||||
# container_name: signoz-zookeeper-3
|
||||
# hostname: zookeeper-3
|
||||
# user: root
|
||||
# ports:
|
||||
# - "2183:2181"
|
||||
# - "2890:2888"
|
||||
# - "3890:3888"
|
||||
# volumes:
|
||||
# - ./data/zookeeper-3:/bitnami/zookeeper
|
||||
# environment:
|
||||
# - ZOO_SERVER_ID=3
|
||||
# - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
|
||||
# - ALLOW_ANONYMOUS_LOGIN=yes
|
||||
# - ZOO_AUTOPURGE_INTERVAL=1
|
||||
|
||||
clickhouse:
|
||||
<<: *clickhouse-defaults
|
||||
container_name: signoz-clickhouse
|
||||
hostname: clickhouse
|
||||
ports:
|
||||
- "9000:9000"
|
||||
- "8123:8123"
|
||||
- "9181:9181"
|
||||
volumes:
|
||||
- ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
|
||||
- ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
|
||||
- ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
|
||||
- ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
|
||||
# - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
- ./data/clickhouse/:/var/lib/clickhouse/
|
||||
- ./user_scripts:/var/lib/clickhouse/user_scripts/
|
||||
|
||||
# clickhouse-2:
|
||||
# <<: *clickhouse-defaults
|
||||
# container_name: signoz-clickhouse-2
|
||||
# hostname: clickhouse-2
|
||||
# ports:
|
||||
# - "9001:9000"
|
||||
# - "8124:8123"
|
||||
# - "9182:9181"
|
||||
# volumes:
|
||||
# - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
|
||||
# - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
|
||||
# - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
|
||||
# - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
|
||||
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
# - ./data/clickhouse-2/:/var/lib/clickhouse/
|
||||
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
|
||||
|
||||
|
||||
# clickhouse-3:
|
||||
# <<: *clickhouse-defaults
|
||||
# container_name: signoz-clickhouse-3
|
||||
# hostname: clickhouse-3
|
||||
# ports:
|
||||
# - "9002:9000"
|
||||
# - "8125:8123"
|
||||
# - "9183:9181"
|
||||
# volumes:
|
||||
# - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
|
||||
# - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
|
||||
# - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
|
||||
# - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
|
||||
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
# - ./data/clickhouse-3/:/var/lib/clickhouse/
|
||||
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
|
||||
|
||||
alertmanager:
|
||||
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5}
|
||||
container_name: signoz-alertmanager
|
||||
volumes:
|
||||
- ./data/alertmanager:/data
|
||||
depends_on:
|
||||
query-service:
|
||||
condition: service_healthy
|
||||
restart: on-failure
|
||||
command:
|
||||
- --queryService.url=http://query-service:8085
|
||||
- --storage.path=/data
|
||||
|
||||
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
||||
|
||||
query-service:
|
||||
image: signoz/query-service:${DOCKER_TAG:-0.49.1}
|
||||
container_name: signoz-query-service
|
||||
command:
|
||||
[
|
||||
"-config=/root/config/prometheus.yml"
|
||||
# "--prefer-delta=true"
|
||||
]
|
||||
# ports:
|
||||
# - "6060:6060" # pprof port
|
||||
# - "8080:8080" # query-service port
|
||||
volumes:
|
||||
- ./prometheus.yml:/root/config/prometheus.yml
|
||||
- ../dashboards:/root/config/dashboards
|
||||
- ./data/signoz/:/var/lib/signoz/
|
||||
environment:
|
||||
- ClickHouseUrl=tcp://clickhouse:9000
|
||||
- ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
|
||||
- SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db
|
||||
- DASHBOARDS_PATH=/root/config/dashboards
|
||||
- STORAGE=clickhouse
|
||||
- GODEBUG=netdns=go
|
||||
- TELEMETRY_ENABLED=true
|
||||
- DEPLOYMENT_TYPE=docker-standalone-amd
|
||||
restart: on-failure
|
||||
healthcheck:
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"wget",
|
||||
"--spider",
|
||||
"-q",
|
||||
"localhost:8080/api/v1/health"
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
<<: *db-depend
|
||||
|
||||
frontend:
|
||||
image: signoz/frontend:${DOCKER_TAG:-0.49.1}
|
||||
container_name: signoz-frontend
|
||||
restart: on-failure
|
||||
depends_on:
|
||||
- alertmanager
|
||||
- query-service
|
||||
ports:
|
||||
- "3301:3301"
|
||||
volumes:
|
||||
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
||||
|
||||
otel-collector-migrator:
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
|
||||
container_name: otel-migrator
|
||||
command:
|
||||
- "--dsn=tcp://clickhouse:9000"
|
||||
depends_on:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
# clickhouse-2:
|
||||
# condition: service_healthy
|
||||
# clickhouse-3:
|
||||
# condition: service_healthy
|
||||
|
||||
|
||||
otel-collector:
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
[
|
||||
"--config=/etc/otel-collector-config.yaml",
|
||||
"--manager-config=/etc/manager-config.yaml",
|
||||
"--copy-path=/var/tmp/collector-config.yaml",
|
||||
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
|
||||
]
|
||||
user: root # required for reading docker container logs
|
||||
volumes:
|
||||
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
|
||||
- ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
|
||||
- /var/lib/docker/containers:/var/lib/docker/containers:ro
|
||||
- /:/hostfs:ro
|
||||
environment:
|
||||
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
|
||||
- DOCKER_MULTI_NODE_CLUSTER=false
|
||||
- LOW_CARDINAL_EXCEPTION_GROUPING=false
|
||||
ports:
|
||||
# - "1777:1777" # pprof extension
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
# - "8888:8888" # OtelCollector internal metrics
|
||||
# - "8889:8889" # signoz spanmetrics exposed by the agent
|
||||
# - "9411:9411" # Zipkin port
|
||||
# - "13133:13133" # health check extension
|
||||
# - "14250:14250" # Jaeger gRPC
|
||||
# - "14268:14268" # Jaeger thrift HTTP
|
||||
# - "55678:55678" # OpenCensus receiver
|
||||
# - "55679:55679" # zPages extension
|
||||
restart: on-failure
|
||||
depends_on:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
otel-collector-migrator:
|
||||
condition: service_completed_successfully
|
||||
query-service:
|
||||
condition: service_healthy
|
||||
|
||||
logspout:
|
||||
image: "gliderlabs/logspout:v3.2.14"
|
||||
container_name: signoz-logspout
|
||||
volumes:
|
||||
- /etc/hostname:/etc/host_hostname:ro
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
command: syslog+tcp://otel-collector:2255
|
||||
depends_on:
|
||||
- otel-collector
|
||||
restart: on-failure
|
||||
|
||||
hotrod:
|
||||
image: jaegertracing/example-hotrod:1.30
|
||||
container_name: hotrod
|
||||
logging:
|
||||
options:
|
||||
max-size: 50m
|
||||
max-file: "3"
|
||||
command: [ "all" ]
|
||||
environment:
|
||||
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
|
||||
|
||||
load-hotrod:
|
||||
image: "signoz/locust:1.2.3"
|
||||
container_name: load-hotrod
|
||||
hostname: load-hotrod
|
||||
environment:
|
||||
ATTACKED_HOST: http://hotrod:8080
|
||||
LOCUST_MODE: standalone
|
||||
NO_PROXY: standalone
|
||||
TASK_DELAY_FROM: 5
|
||||
TASK_DELAY_TO: 30
|
||||
QUIET_MODE: "${QUIET_MODE:-false}"
|
||||
LOCUST_OPTS: "--headless -u 10 -r 1"
|
||||
volumes:
|
||||
- ../common/locust-scripts:/locust
|
||||
include:
|
||||
- test-app-docker-compose.yaml
|
||||
- docker-compose-minimal.yaml
|
||||
|
||||
@@ -142,8 +142,8 @@ extensions:
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/signoz_traces
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
docker_multi_node_cluster: ${env:DOCKER_MULTI_NODE_CLUSTER}
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
@@ -152,8 +152,9 @@ exporters:
endpoint: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
docker_multi_node_cluster: ${env:DOCKER_MULTI_NODE_CLUSTER}
timeout: 10s
use_new_schema: true
# logging: {}

service:
deploy/docker/clickhouse-setup/test-app-docker-compose.yaml (new file, 26 added lines)

@@ -0,0 +1,26 @@
services:
hotrod:
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
logging:
options:
max-size: 50m
max-file: "3"
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

load-hotrod:
image: "signoz/locust:1.2.3"
container_name: load-hotrod
hostname: load-hotrod
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../common/locust-scripts:/locust
@@ -2,6 +2,9 @@ package anomaly

import (
"context"

querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
)

type DailyProvider struct {
@@ -24,9 +27,18 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
opt(dp)
}

dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
Reader: dp.reader,
Cache: dp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: dp.fluxInterval,
FeatureLookup: dp.ff,
})

return dp
}

func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
return nil, nil
req.Seasonality = SeasonalityDaily
return p.getAnomalies(ctx, req)
}

@@ -2,6 +2,9 @@ package anomaly

import (
"context"

querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
)

type HourlyProvider struct {
@@ -24,9 +27,18 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
opt(hp)
}

hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
Reader: hp.reader,
Cache: hp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: hp.fluxInterval,
FeatureLookup: hp.ff,
})

return hp
}

func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
return nil, nil
req.Seasonality = SeasonalityHourly
return p.getAnomalies(ctx, req)
}
@@ -16,6 +16,17 @@ const (
SeasonalityWeekly Seasonality = "weekly"
)

func (s Seasonality) String() string {
return string(s)
}

var (
oneWeekOffset = 24 * 7 * time.Hour.Milliseconds()
oneDayOffset = 24 * time.Hour.Milliseconds()
oneHourOffset = time.Hour.Milliseconds()
fiveMinOffset = 5 * time.Minute.Milliseconds()
)

func (s Seasonality) IsValid() bool {
switch s {
case SeasonalityHourly, SeasonalityDaily, SeasonalityWeekly:
@@ -35,7 +46,7 @@ type GetAnomaliesResponse struct {
}

// anomalyParams is the params for anomaly detection
// prediction = avg(past_period_query) + avg(current_season_query) - avg(past_season_query)
// prediction = avg(past_period_query) + avg(current_season_query) - mean(past_season_query, past2_season_query, past3_season_query)
//
// ^ ^
// | |
@@ -49,9 +60,9 @@ type anomalyQueryParams struct {
// and to detect anomalies
CurrentPeriodQuery *v3.QueryRangeParamsV3
// PastPeriodQuery is the query range params for past seasonal period
// Example: For weekly seasonality, (now-1w-4h-5m, now-1w)
// : For daily seasonality, (now-1d-2h-5m, now-1d)
// : For hourly seasonality, (now-1h-30m-5m, now-1h)
// Example: For weekly seasonality, (now-1w-5m, now-1w)
// : For daily seasonality, (now-1d-5m, now-1d)
// : For hourly seasonality, (now-1h-5m, now-1h)
PastPeriodQuery *v3.QueryRangeParamsV3
// CurrentSeasonQuery is the query range params for current period (seasonal)
// Example: For weekly seasonality, this is the query range params for the (now-1w-5m, now)
@@ -63,16 +74,17 @@ type anomalyQueryParams struct {
// : For daily seasonality, this is the query range params for the (now-2d-5m, now-1d)
// : For hourly seasonality, this is the query range params for the (now-2h-5m, now-1h)
PastSeasonQuery *v3.QueryRangeParamsV3
}

func copyCompositeQuery(req *v3.QueryRangeParamsV3) *v3.CompositeQuery {
deepCopyCompositeQuery := *req.CompositeQuery
deepCopyCompositeQuery.BuilderQueries = make(map[string]*v3.BuilderQuery)
for k, v := range req.CompositeQuery.BuilderQueries {
query := *v
deepCopyCompositeQuery.BuilderQueries[k] = &query
}
return &deepCopyCompositeQuery
// Past2SeasonQuery is the query range params for past 2 seasonal period to the current season
// Example: For weekly seasonality, this is the query range params for the (now-3w-5m, now-2w)
// : For daily seasonality, this is the query range params for the (now-3d-5m, now-2d)
// : For hourly seasonality, this is the query range params for the (now-3h-5m, now-2h)
Past2SeasonQuery *v3.QueryRangeParamsV3
// Past3SeasonQuery is the query range params for past 3 seasonal period to the current season
// Example: For weekly seasonality, this is the query range params for the (now-4w-5m, now-3w)
// : For daily seasonality, this is the query range params for the (now-4d-5m, now-3d)
// : For hourly seasonality, this is the query range params for the (now-4h-5m, now-3h)
Past3SeasonQuery *v3.QueryRangeParamsV3
}

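For orientation, here is a small standalone sketch (not part of the change set) of how the five query windows documented above line up for daily seasonality, using the same millisecond offsets that prepareAnomalyQueryParams applies further down in this diff; the variable names are illustrative only.

```go
// Standalone sketch: deriving the anomaly query windows for daily
// seasonality from a requested [start, end) range, mirroring the offsets
// used by prepareAnomalyQueryParams below.
package main

import (
	"fmt"
	"time"
)

func main() {
	var (
		oneDayOffset  = 24 * time.Hour.Milliseconds()
		fiveMinOffset = 5 * time.Minute.Milliseconds()
	)

	end := time.Now().UnixMilli()
	start := end - 15*time.Minute.Milliseconds()

	// PastPeriodQuery: the same window, one day back, padded by 5 minutes.
	fmt.Println("past period:", start-oneDayOffset-fiveMinOffset, end-oneDayOffset)
	// CurrentSeasonQuery: the last full day up to now.
	fmt.Println("current season:", start-oneDayOffset, end)
	// PastSeasonQuery, Past2SeasonQuery, Past3SeasonQuery: the three previous days.
	fmt.Println("past season:", start-2*oneDayOffset, start-1*oneDayOffset)
	fmt.Println("past 2 season:", start-3*oneDayOffset, start-2*oneDayOffset)
	fmt.Println("past 3 season:", start-4*oneDayOffset, start-3*oneDayOffset)
}
```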
func updateStepInterval(req *v3.QueryRangeParamsV3) {
|
||||
@@ -95,7 +107,7 @@ func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonali
|
||||
currentPeriodQuery := &v3.QueryRangeParamsV3{
|
||||
Start: start,
|
||||
End: end,
|
||||
CompositeQuery: req.CompositeQuery,
|
||||
CompositeQuery: req.CompositeQuery.Clone(),
|
||||
Variables: make(map[string]interface{}, 0),
|
||||
NoCache: false,
|
||||
}
|
||||
@@ -104,24 +116,24 @@ func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonali
|
||||
var pastPeriodStart, pastPeriodEnd int64
|
||||
|
||||
switch seasonality {
|
||||
// for one week period, we fetch the data from the past week with 4 hours offset
|
||||
// for one week period, we fetch the data from the past week with 5 min offset
|
||||
case SeasonalityWeekly:
|
||||
pastPeriodStart = start - 166*time.Hour.Milliseconds() - 4*time.Hour.Milliseconds()
|
||||
pastPeriodEnd = end - 166*time.Hour.Milliseconds()
|
||||
// for one day period, we fetch the data from the past day with 2 hours offset
|
||||
pastPeriodStart = start - oneWeekOffset - fiveMinOffset
|
||||
pastPeriodEnd = end - oneWeekOffset
|
||||
// for one day period, we fetch the data from the past day with 5 min offset
|
||||
case SeasonalityDaily:
|
||||
pastPeriodStart = start - 23*time.Hour.Milliseconds() - 2*time.Hour.Milliseconds()
|
||||
pastPeriodEnd = end - 23*time.Hour.Milliseconds()
|
||||
// for one hour period, we fetch the data from the past hour with 30 minutes offset
|
||||
pastPeriodStart = start - oneDayOffset - fiveMinOffset
|
||||
pastPeriodEnd = end - oneDayOffset
|
||||
// for one hour period, we fetch the data from the past hour with 5 min offset
|
||||
case SeasonalityHourly:
|
||||
pastPeriodStart = start - 1*time.Hour.Milliseconds() - 30*time.Minute.Milliseconds()
|
||||
pastPeriodEnd = end - 1*time.Hour.Milliseconds()
|
||||
pastPeriodStart = start - oneHourOffset - fiveMinOffset
|
||||
pastPeriodEnd = end - oneHourOffset
|
||||
}
|
||||
|
||||
pastPeriodQuery := &v3.QueryRangeParamsV3{
|
||||
Start: pastPeriodStart,
|
||||
End: pastPeriodEnd,
|
||||
CompositeQuery: copyCompositeQuery(req),
|
||||
CompositeQuery: req.CompositeQuery.Clone(),
|
||||
Variables: make(map[string]interface{}, 0),
|
||||
NoCache: false,
|
||||
}
|
||||
@@ -131,20 +143,20 @@ func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonali
|
||||
var currentGrowthPeriodStart, currentGrowthPeriodEnd int64
|
||||
switch seasonality {
|
||||
case SeasonalityWeekly:
|
||||
currentGrowthPeriodStart = start - 7*24*time.Hour.Milliseconds()
|
||||
currentGrowthPeriodStart = start - oneWeekOffset
|
||||
currentGrowthPeriodEnd = end
|
||||
case SeasonalityDaily:
|
||||
currentGrowthPeriodStart = start - 23*time.Hour.Milliseconds()
|
||||
currentGrowthPeriodStart = start - oneDayOffset
|
||||
currentGrowthPeriodEnd = end
|
||||
case SeasonalityHourly:
|
||||
currentGrowthPeriodStart = start - 1*time.Hour.Milliseconds()
|
||||
currentGrowthPeriodStart = start - oneHourOffset
|
||||
currentGrowthPeriodEnd = end
|
||||
}
|
||||
|
||||
currentGrowthQuery := &v3.QueryRangeParamsV3{
|
||||
Start: currentGrowthPeriodStart,
|
||||
End: currentGrowthPeriodEnd,
|
||||
CompositeQuery: copyCompositeQuery(req),
|
||||
CompositeQuery: req.CompositeQuery.Clone(),
|
||||
Variables: make(map[string]interface{}, 0),
|
||||
NoCache: false,
|
||||
}
|
||||
@@ -153,30 +165,76 @@ func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonali
|
||||
var pastGrowthPeriodStart, pastGrowthPeriodEnd int64
|
||||
switch seasonality {
|
||||
case SeasonalityWeekly:
|
||||
pastGrowthPeriodStart = start - 14*24*time.Hour.Milliseconds()
|
||||
pastGrowthPeriodEnd = start - 7*24*time.Hour.Milliseconds()
|
||||
pastGrowthPeriodStart = start - 2*oneWeekOffset
|
||||
pastGrowthPeriodEnd = start - 1*oneWeekOffset
|
||||
case SeasonalityDaily:
|
||||
pastGrowthPeriodStart = start - 2*time.Hour.Milliseconds()
|
||||
pastGrowthPeriodEnd = start - 1*time.Hour.Milliseconds()
|
||||
pastGrowthPeriodStart = start - 2*oneDayOffset
|
||||
pastGrowthPeriodEnd = start - 1*oneDayOffset
|
||||
case SeasonalityHourly:
|
||||
pastGrowthPeriodStart = start - 2*time.Hour.Milliseconds()
|
||||
pastGrowthPeriodEnd = start - 1*time.Hour.Milliseconds()
|
||||
pastGrowthPeriodStart = start - 2*oneHourOffset
|
||||
pastGrowthPeriodEnd = start - 1*oneHourOffset
|
||||
}
|
||||
|
||||
pastGrowthQuery := &v3.QueryRangeParamsV3{
|
||||
Start: pastGrowthPeriodStart,
|
||||
End: pastGrowthPeriodEnd,
|
||||
CompositeQuery: copyCompositeQuery(req),
|
||||
CompositeQuery: req.CompositeQuery.Clone(),
|
||||
Variables: make(map[string]interface{}, 0),
|
||||
NoCache: false,
|
||||
}
|
||||
updateStepInterval(pastGrowthQuery)
|
||||
|
||||
var past2GrowthPeriodStart, past2GrowthPeriodEnd int64
|
||||
switch seasonality {
|
||||
case SeasonalityWeekly:
|
||||
past2GrowthPeriodStart = start - 3*oneWeekOffset
|
||||
past2GrowthPeriodEnd = start - 2*oneWeekOffset
|
||||
case SeasonalityDaily:
|
||||
past2GrowthPeriodStart = start - 3*oneDayOffset
|
||||
past2GrowthPeriodEnd = start - 2*oneDayOffset
|
||||
case SeasonalityHourly:
|
||||
past2GrowthPeriodStart = start - 3*oneHourOffset
|
||||
past2GrowthPeriodEnd = start - 2*oneHourOffset
|
||||
}
|
||||
|
||||
past2GrowthQuery := &v3.QueryRangeParamsV3{
|
||||
Start: past2GrowthPeriodStart,
|
||||
End: past2GrowthPeriodEnd,
|
||||
CompositeQuery: req.CompositeQuery.Clone(),
|
||||
Variables: make(map[string]interface{}, 0),
|
||||
NoCache: false,
|
||||
}
|
||||
updateStepInterval(past2GrowthQuery)
|
||||
|
||||
var past3GrowthPeriodStart, past3GrowthPeriodEnd int64
|
||||
switch seasonality {
|
||||
case SeasonalityWeekly:
|
||||
past3GrowthPeriodStart = start - 4*oneWeekOffset
|
||||
past3GrowthPeriodEnd = start - 3*oneWeekOffset
|
||||
case SeasonalityDaily:
|
||||
past3GrowthPeriodStart = start - 4*oneDayOffset
|
||||
past3GrowthPeriodEnd = start - 3*oneDayOffset
|
||||
case SeasonalityHourly:
|
||||
past3GrowthPeriodStart = start - 4*oneHourOffset
|
||||
past3GrowthPeriodEnd = start - 3*oneHourOffset
|
||||
}
|
||||
|
||||
past3GrowthQuery := &v3.QueryRangeParamsV3{
|
||||
Start: past3GrowthPeriodStart,
|
||||
End: past3GrowthPeriodEnd,
|
||||
CompositeQuery: req.CompositeQuery.Clone(),
|
||||
Variables: make(map[string]interface{}, 0),
|
||||
NoCache: false,
|
||||
}
|
||||
updateStepInterval(past3GrowthQuery)
|
||||
|
||||
return &anomalyQueryParams{
|
||||
CurrentPeriodQuery: currentPeriodQuery,
|
||||
PastPeriodQuery: pastPeriodQuery,
|
||||
CurrentSeasonQuery: currentGrowthQuery,
|
||||
PastSeasonQuery: pastGrowthQuery,
|
||||
Past2SeasonQuery: past2GrowthQuery,
|
||||
Past3SeasonQuery: past3GrowthQuery,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -185,4 +243,6 @@ type anomalyQueryResults struct {
|
||||
PastPeriodResults []*v3.Result
|
||||
CurrentSeasonResults []*v3.Result
|
||||
PastSeasonResults []*v3.Result
|
||||
Past2SeasonResults []*v3.Result
|
||||
Past3SeasonResults []*v3.Result
|
||||
}
|
||||
|
||||
@@ -3,14 +3,21 @@ package anomaly
|
||||
import (
|
||||
"context"
|
||||
"math"
|
||||
"time"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/cache"
|
||||
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/postprocess"
|
||||
"go.signoz.io/signoz/pkg/query-service/utils/labels"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
var (
|
||||
// TODO(srikanthccv): make this configurable?
|
||||
movingAvgWindowSize = 7
|
||||
)
|
||||
|
||||
// BaseProvider is an interface that includes common methods for all provider types
|
||||
type BaseProvider interface {
|
||||
GetBaseSeasonalProvider() *BaseSeasonalProvider
|
||||
@@ -46,6 +53,7 @@ func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[
|
||||
type BaseSeasonalProvider struct {
|
||||
querierV2 interfaces.Querier
|
||||
reader interfaces.Reader
|
||||
fluxInterval time.Duration
|
||||
cache cache.Cache
|
||||
keyGenerator cache.KeyGenerator
|
||||
ff interfaces.FeatureLookup
|
||||
@@ -53,28 +61,74 @@ type BaseSeasonalProvider struct {
|
||||
|
||||
func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
|
||||
if !req.Seasonality.IsValid() {
|
||||
req.Seasonality = SeasonalityWeekly
|
||||
req.Seasonality = SeasonalityDaily
|
||||
}
|
||||
return prepareAnomalyQueryParams(req.Params, req.Seasonality)
|
||||
}
|
||||
|
||||
func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
|
||||
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery, nil)
|
||||
zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
|
||||
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery, nil)
|
||||
currentPeriodResults, err = postprocess.PostProcessResult(currentPeriodResults, params.CurrentPeriodQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery, nil)
|
||||
zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
|
||||
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery, nil)
|
||||
pastPeriodResults, err = postprocess.PostProcessResult(pastPeriodResults, params.PastPeriodQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
|
||||
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
currentSeasonResults, err = postprocess.PostProcessResult(currentSeasonResults, params.CurrentSeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
|
||||
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pastSeasonResults, err = postprocess.PostProcessResult(pastSeasonResults, params.PastSeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
|
||||
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past2SeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
past2SeasonResults, err = postprocess.PostProcessResult(past2SeasonResults, params.Past2SeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
|
||||
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past3SeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
past3SeasonResults, err = postprocess.PostProcessResult(past3SeasonResults, params.Past3SeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -84,10 +138,18 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
|
||||
PastPeriodResults: pastPeriodResults,
|
||||
CurrentSeasonResults: currentSeasonResults,
|
||||
PastSeasonResults: pastSeasonResults,
|
||||
Past2SeasonResults: past2SeasonResults,
|
||||
Past3SeasonResults: past3SeasonResults,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// getMatchingSeries gets the matching series from the query result
|
||||
// for the given series
|
||||
func (p *BaseSeasonalProvider) getMatchingSeries(queryResult *v3.Result, series *v3.Series) *v3.Series {
|
||||
if queryResult == nil || len(queryResult.Series) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, curr := range queryResult.Series {
|
||||
currLabels := labels.FromMap(curr.Labels)
|
||||
seriesLabels := labels.FromMap(series.Labels)
|
||||
@@ -99,6 +161,9 @@ func (p *BaseSeasonalProvider) getMatchingSeries(queryResult *v3.Result, series
|
||||
}
|
||||
|
||||
func (p *BaseSeasonalProvider) getAvg(series *v3.Series) float64 {
|
||||
if series == nil || len(series.Points) == 0 {
|
||||
return 0
|
||||
}
|
||||
var sum float64
|
||||
for _, smpl := range series.Points {
|
||||
sum += smpl.Value
|
||||
@@ -107,6 +172,9 @@ func (p *BaseSeasonalProvider) getAvg(series *v3.Series) float64 {
|
||||
}
|
||||
|
||||
func (p *BaseSeasonalProvider) getStdDev(series *v3.Series) float64 {
|
||||
if series == nil || len(series.Points) == 0 {
|
||||
return 0
|
||||
}
|
||||
avg := p.getAvg(series)
|
||||
var sum float64
|
||||
for _, smpl := range series.Points {
|
||||
@@ -115,15 +183,65 @@ func (p *BaseSeasonalProvider) getStdDev(series *v3.Series) float64 {
|
||||
return math.Sqrt(sum / float64(len(series.Points)))
|
||||
}
|
||||
|
||||
func (p *BaseSeasonalProvider) getPredictedSeries(series, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) *v3.Series {
// getMovingAvg gets the moving average for the given series
// for the given window size and start index
func (p *BaseSeasonalProvider) getMovingAvg(series *v3.Series, movingAvgWindowSize, startIdx int) float64 {
if series == nil || len(series.Points) == 0 {
return 0
}
if startIdx >= len(series.Points)-movingAvgWindowSize {
startIdx = int(math.Max(0, float64(len(series.Points)-movingAvgWindowSize)))
}
var sum float64
points := series.Points[startIdx:]
for i := 0; i < movingAvgWindowSize && i < len(points); i++ {
sum += points[i].Value
}
avg := sum / float64(movingAvgWindowSize)
return avg
}

func (p *BaseSeasonalProvider) getMean(floats ...float64) float64 {
if len(floats) == 0 {
return 0
}
var sum float64
for _, f := range floats {
sum += f
}
return sum / float64(len(floats))
}

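The helper above clamps the start index so the averaging window never runs off the end of the series. A minimal standalone sketch of that behaviour over a plain float slice (a hypothetical helper, not the repository's types):

```go
// Standalone sketch: the same clamped moving-average logic as getMovingAvg
// above, applied to a plain []float64.
package main

import (
	"fmt"
	"math"
)

func movingAvg(points []float64, window, startIdx int) float64 {
	if len(points) == 0 {
		return 0
	}
	// Clamp the start index so a full window is always taken near the tail.
	if startIdx >= len(points)-window {
		startIdx = int(math.Max(0, float64(len(points)-window)))
	}
	var sum float64
	tail := points[startIdx:]
	for i := 0; i < window && i < len(tail); i++ {
		sum += tail[i]
	}
	return sum / float64(window)
}

func main() {
	pts := []float64{10, 10, 10, 40, 40, 40, 40, 40, 40, 40}
	fmt.Println(movingAvg(pts, 7, 0)) // average of the first 7 points
	fmt.Println(movingAvg(pts, 7, 9)) // start clamped back to index 3, i.e. the last 7 points
}
```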
func (p *BaseSeasonalProvider) getPredictedSeries(
series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series,
) *v3.Series {
predictedSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}

for _, curr := range series.Points {
predictedValue := p.getAvg(prevSeries) + p.getAvg(currentSeasonSeries) - p.getAvg(pastSeasonSeries)
// for each point in the series, get the predicted value
// the predicted value is the moving average (with window size = 7) of the previous period series
// plus the average of the current season series
// minus the mean of the past season series, past2 season series and past3 season series
for idx, curr := range series.Points {
predictedValue :=
p.getMovingAvg(prevSeries, movingAvgWindowSize, idx) +
p.getAvg(currentSeasonSeries) -
p.getMean(p.getAvg(pastSeasonSeries), p.getAvg(past2SeasonSeries), p.getAvg(past3SeasonSeries))

if predictedValue < 0 {
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
}

zap.L().Info("predictedSeries",
zap.Float64("movingAvg", p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)),
zap.Float64("avg", p.getAvg(currentSeasonSeries)),
zap.Float64("mean", p.getMean(p.getAvg(pastSeasonSeries), p.getAvg(past2SeasonSeries), p.getAvg(past3SeasonSeries))),
zap.Any("labels", series.Labels),
zap.Float64("predictedValue", predictedValue),
)
predictedSeries.Points = append(predictedSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: predictedValue,
@@ -133,33 +251,80 @@ func (p *BaseSeasonalProvider) getPredictedSeries(series, prevSeries, currentSea
return predictedSeries
}

func (p *BaseSeasonalProvider) getExpectedValue(_, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) float64 {
prevSeriesAvg := p.getAvg(prevSeries)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
zap.L().Debug("getExpectedValue",
zap.Float64("prevSeriesAvg", prevSeriesAvg),
zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg),
zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg),
zap.Float64("expectedValue", prevSeriesAvg+currentSeasonSeriesAvg-pastSeasonSeriesAvg),
)
return prevSeriesAvg + currentSeasonSeriesAvg - pastSeasonSeriesAvg
// getBounds gets the upper and lower bounds for the given series
// for the given z score threshold
// moving avg of the previous period series + z score threshold * std dev of the series
// moving avg of the previous period series - z score threshold * std dev of the series
func (p *BaseSeasonalProvider) getBounds(
series, predictedSeries *v3.Series,
zScoreThreshold float64,
) (*v3.Series, *v3.Series) {
upperBoundSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}

lowerBoundSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}

for idx, curr := range series.Points {
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
upperBoundSeries.Points = append(upperBoundSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: upperBound,
})
lowerBoundSeries.Points = append(lowerBoundSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: math.Max(lowerBound, 0),
})
}

return upperBoundSeries, lowerBoundSeries
}

func (p *BaseSeasonalProvider) getScore(series, prevSeries, weekSeries, weekPrevSeries *v3.Series, value float64) float64 {
expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries)
// getExpectedValue gets the expected value for the given series
// for the given index
// prevSeriesAvg + currentSeasonSeriesAvg - mean of past season series, past2 season series and past3 season series
func (p *BaseSeasonalProvider) getExpectedValue(
_, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, idx int,
) float64 {
prevSeriesAvg := p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
return prevSeriesAvg + currentSeasonSeriesAvg - p.getMean(pastSeasonSeriesAvg, past2SeasonSeriesAvg, past3SeasonSeriesAvg)
}

// getScore gets the anomaly score for the given series
// for the given index
// (value - expectedValue) / std dev of the series
func (p *BaseSeasonalProvider) getScore(
series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
) float64 {
expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
return (value - expectedValue) / p.getStdDev(weekSeries)
}

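Taken together, the expected value, score, and bounds reduce to simple arithmetic. A standalone sketch with assumed numbers (the default z-score threshold of 3 is the fallback used by the handler later in this diff):

```go
// Standalone arithmetic sketch (illustrative values only) of the expected
// value and anomaly score from getExpectedValue/getScore above, and of the
// band produced by getBounds with a z-score threshold of 3.
package main

import "fmt"

func main() {
	prevMovingAvg := 120.0                     // moving avg of the previous period series
	currentSeasonAvg := 100.0                  // avg of the current season series
	pastSeasonMean := (95.0 + 90.0 + 85.0) / 3 // mean of the past, past2, and past3 season avgs

	expected := prevMovingAvg + currentSeasonAvg - pastSeasonMean // 130
	stdDev := 10.0
	observed := 170.0

	score := (observed - expected) / stdDev // 4 standard deviations above expectation
	z := 3.0
	// getBounds centres the band on the moving average of the predicted series,
	// which for this single-point sketch is just the expected value.
	upper := expected + z*stdDev // 160
	lower := expected - z*stdDev // 100

	// A score of 4 exceeds the threshold of 3, so this point would be flagged.
	fmt.Println(expected, score, upper, lower)
}
```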
func (p *BaseSeasonalProvider) getAnomalyScores(series, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) *v3.Series {
|
||||
// getAnomalyScores gets the anomaly scores for the given series
|
||||
// for the given index
|
||||
// (value - expectedValue) / std dev of the series
|
||||
func (p *BaseSeasonalProvider) getAnomalyScores(
|
||||
series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series,
|
||||
) *v3.Series {
|
||||
anomalyScoreSeries := &v3.Series{
|
||||
Labels: series.Labels,
|
||||
LabelsArray: series.LabelsArray,
|
||||
Points: []v3.Point{},
|
||||
}
|
||||
|
||||
for _, curr := range series.Points {
|
||||
anomalyScore := p.getScore(series, prevSeries, currentSeasonSeries, pastSeasonSeries, curr.Value)
|
||||
for idx, curr := range series.Points {
|
||||
anomalyScore := p.getScore(series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries, curr.Value, idx)
|
||||
anomalyScoreSeries.Points = append(anomalyScoreSeries.Points, v3.Point{
|
||||
Timestamp: curr.Timestamp,
|
||||
Value: anomalyScore,
|
||||
@@ -169,7 +334,7 @@ func (p *BaseSeasonalProvider) getAnomalyScores(series, prevSeries, currentSeaso
|
||||
return anomalyScoreSeries
|
||||
}
|
||||
|
||||
func (p *BaseSeasonalProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
anomalyParams := p.getQueryParams(req)
|
||||
anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
|
||||
if err != nil {
|
||||
@@ -196,7 +361,32 @@ func (p *BaseSeasonalProvider) GetAnomalies(ctx context.Context, req *GetAnomali
|
||||
pastSeasonResultsMap[result.QueryName] = result
|
||||
}
|
||||
|
||||
past2SeasonResultsMap := make(map[string]*v3.Result)
|
||||
for _, result := range anomalyQueryResults.Past2SeasonResults {
|
||||
past2SeasonResultsMap[result.QueryName] = result
|
||||
}
|
||||
|
||||
past3SeasonResultsMap := make(map[string]*v3.Result)
|
||||
for _, result := range anomalyQueryResults.Past3SeasonResults {
|
||||
past3SeasonResultsMap[result.QueryName] = result
|
||||
}
|
||||
|
||||
for _, result := range currentPeriodResultsMap {
|
||||
funcs := req.Params.CompositeQuery.BuilderQueries[result.QueryName].Functions
|
||||
|
||||
var zScoreThreshold float64
|
||||
for _, f := range funcs {
|
||||
if f.Name == v3.FunctionNameAnomaly {
|
||||
value, ok := f.NamedArgs["z_score_threshold"]
|
||||
if ok {
|
||||
zScoreThreshold = value.(float64)
|
||||
} else {
|
||||
zScoreThreshold = 3
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
pastPeriodResult, ok := pastPeriodResultsMap[result.QueryName]
|
||||
if !ok {
|
||||
continue
|
||||
@@ -209,21 +399,68 @@ func (p *BaseSeasonalProvider) GetAnomalies(ctx context.Context, req *GetAnomali
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
past2SeasonResult, ok := past2SeasonResultsMap[result.QueryName]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
past3SeasonResult, ok := past3SeasonResultsMap[result.QueryName]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, series := range result.Series {
|
||||
stdDev := p.getStdDev(series)
|
||||
zap.L().Info("stdDev", zap.Float64("stdDev", stdDev), zap.Any("labels", series.Labels))
|
||||
|
||||
pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
|
||||
currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
|
||||
pastSeasonSeries := p.getMatchingSeries(pastSeasonResult, series)
|
||||
past2SeasonSeries := p.getMatchingSeries(past2SeasonResult, series)
|
||||
past3SeasonSeries := p.getMatchingSeries(past3SeasonResult, series)
|
||||
|
||||
predictedSeries := p.getPredictedSeries(series, pastPeriodSeries, currentSeasonSeries, pastSeasonSeries)
|
||||
prevSeriesAvg := p.getAvg(pastPeriodSeries)
|
||||
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
|
||||
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
|
||||
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
|
||||
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
|
||||
zap.L().Info("getAvg", zap.Float64("prevSeriesAvg", prevSeriesAvg), zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg), zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg), zap.Float64("past2SeasonSeriesAvg", past2SeasonSeriesAvg), zap.Float64("past3SeasonSeriesAvg", past3SeasonSeriesAvg), zap.Any("labels", series.Labels))
|
||||
|
||||
predictedSeries := p.getPredictedSeries(
|
||||
series,
|
||||
pastPeriodSeries,
|
||||
currentSeasonSeries,
|
||||
pastSeasonSeries,
|
||||
past2SeasonSeries,
|
||||
past3SeasonSeries,
|
||||
)
|
||||
result.PredictedSeries = append(result.PredictedSeries, predictedSeries)
|
||||
|
||||
anomalyScoreSeries := p.getAnomalyScores(series, pastPeriodSeries, currentSeasonSeries, pastSeasonSeries)
|
||||
upperBoundSeries, lowerBoundSeries := p.getBounds(
|
||||
series,
|
||||
predictedSeries,
|
||||
zScoreThreshold,
|
||||
)
|
||||
result.UpperBoundSeries = append(result.UpperBoundSeries, upperBoundSeries)
|
||||
result.LowerBoundSeries = append(result.LowerBoundSeries, lowerBoundSeries)
|
||||
|
||||
anomalyScoreSeries := p.getAnomalyScores(
|
||||
series,
|
||||
pastPeriodSeries,
|
||||
currentSeasonSeries,
|
||||
pastSeasonSeries,
|
||||
past2SeasonSeries,
|
||||
past3SeasonSeries,
|
||||
)
|
||||
result.AnomalyScores = append(result.AnomalyScores, anomalyScoreSeries)
|
||||
}
|
||||
}
|
||||
|
||||
results := make([]*v3.Result, 0, len(currentPeriodResultsMap))
|
||||
for _, result := range currentPeriodResultsMap {
|
||||
results = append(results, result)
|
||||
}
|
||||
|
||||
return &GetAnomaliesResponse{
|
||||
Results: anomalyQueryResults.CurrentPeriodResults,
|
||||
Results: results,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -2,6 +2,9 @@ package anomaly
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
|
||||
)
|
||||
|
||||
type WeeklyProvider struct {
|
||||
@@ -23,9 +26,18 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
|
||||
opt(wp)
|
||||
}
|
||||
|
||||
wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
|
||||
Reader: wp.reader,
|
||||
Cache: wp.cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FluxInterval: wp.fluxInterval,
|
||||
FeatureLookup: wp.ff,
|
||||
})
|
||||
|
||||
return wp
|
||||
}
|
||||
|
||||
func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
return nil, nil
|
||||
req.Seasonality = SeasonalityWeekly
|
||||
return p.getAnomalies(ctx, req)
|
||||
}
|
||||
|
||||
@@ -177,6 +177,8 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
|
||||
am.ViewAccess(ah.listLicensesV2)).
|
||||
Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
|
||||
|
||||
// Gateway
|
||||
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.AdminAccess(ah.ServeGatewayHTTP))
|
||||
|
||||
|
||||
@@ -9,7 +9,15 @@ import (
|
||||
|
||||
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
|
||||
ctx := req.Context()
|
||||
if !strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+gateway.AllowedPrefix) {
|
||||
validPath := false
|
||||
for _, allowedPrefix := range gateway.AllowedPrefix {
|
||||
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
|
||||
validPath = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !validPath {
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
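The handler change above only forwards gateway requests whose path starts with one of the allowed prefixes and returns 404 otherwise. A minimal standalone sketch of that check, with the route prefix and allowed prefixes hard-coded to the values that appear elsewhere in this diff (the sample sub-paths are hypothetical):

```go
// Standalone sketch of the prefix validation performed by ServeGatewayHTTP.
package main

import (
	"fmt"
	"strings"
)

func main() {
	routePrefix := "/api/gateway"
	allowedPrefixes := []string{"/v1/workspaces/me", "/v2/profiles/me"}

	for _, path := range []string{
		"/api/gateway/v1/workspaces/me/keys", // allowed
		"/api/gateway/v2/profiles/me",        // allowed
		"/api/gateway/v1/workspaces/other",   // rejected with 404
	} {
		valid := false
		for _, prefix := range allowedPrefixes {
			if strings.HasPrefix(path, routePrefix+prefix) {
				valid = true
				break
			}
		}
		fmt.Println(path, valid)
	}
}
```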
129
ee/query-service/app/api/queryrange.go
Normal file
@@ -0,0 +1,129 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"go.signoz.io/signoz/ee/query-service/anomaly"
|
||||
baseapp "go.signoz.io/signoz/pkg/query-service/app"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
bodyBytes, _ := io.ReadAll(r.Body)
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
|
||||
queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)
|
||||
|
||||
if apiErrorObj != nil {
|
||||
zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
|
||||
RespondError(w, apiErrorObj, nil)
|
||||
return
|
||||
}
|
||||
queryRangeParams.Version = "v4"
|
||||
|
||||
// add temporality for each metric
|
||||
temporalityErr := aH.PopulateTemporality(r.Context(), queryRangeParams)
|
||||
if temporalityErr != nil {
|
||||
zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
anomalyQueryExists := false
|
||||
anomalyQuery := &v3.BuilderQuery{}
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
|
||||
for _, fn := range query.Functions {
|
||||
if fn.Name == v3.FunctionNameAnomaly {
|
||||
anomalyQueryExists = true
|
||||
anomalyQuery = query
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if anomalyQueryExists {
|
||||
// ensure all queries have metric data source, and there should be only one anomaly query
|
||||
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
|
||||
// What is query.QueryName == query.Expression doing here?
|
||||
// In the current implementation, the way to recognize if a query is a formula is by
|
||||
// checking if the expression is the same as the query name. if the expression is different
|
||||
// then it is a formula. otherwise, it is simple builder query.
|
||||
if query.DataSource != v3.DataSourceMetrics && query.QueryName == query.Expression {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("all queries must have metric data source")}, nil)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// get the threshold, and seasonality from the anomaly query
|
||||
var seasonality anomaly.Seasonality
|
||||
for _, fn := range anomalyQuery.Functions {
|
||||
if fn.Name == v3.FunctionNameAnomaly {
|
||||
seasonalityStr, ok := fn.NamedArgs["seasonality"].(string)
|
||||
if !ok {
|
||||
seasonalityStr = "daily"
|
||||
}
|
||||
if seasonalityStr == "weekly" {
|
||||
seasonality = anomaly.SeasonalityWeekly
|
||||
} else if seasonalityStr == "daily" {
|
||||
seasonality = anomaly.SeasonalityDaily
|
||||
} else {
|
||||
seasonality = anomaly.SeasonalityHourly
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
var provider anomaly.Provider
|
||||
switch seasonality {
|
||||
case anomaly.SeasonalityWeekly:
|
||||
provider = anomaly.NewWeeklyProvider(
|
||||
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
|
||||
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
|
||||
)
|
||||
case anomaly.SeasonalityDaily:
|
||||
provider = anomaly.NewDailyProvider(
|
||||
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
|
||||
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
|
||||
)
|
||||
case anomaly.SeasonalityHourly:
|
||||
provider = anomaly.NewHourlyProvider(
|
||||
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
|
||||
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
|
||||
)
|
||||
default:
|
||||
provider = anomaly.NewDailyProvider(
|
||||
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
|
||||
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
|
||||
)
|
||||
}
|
||||
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
resp := v3.QueryRangeResponse{
|
||||
Result: anomalies.Results,
|
||||
ResultType: "anomaly",
|
||||
}
|
||||
aH.Respond(w, resp)
|
||||
} else {
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
aH.QueryRangeV4(w, r)
|
||||
}
|
||||
}
|
||||
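queryRangeV4 above treats a request as an anomaly query when any builder query carries the anomaly function, optionally reading seasonality and z_score_threshold from its named args. A standalone sketch of that detection with small stand-in types; the literal "anomaly" stands in for v3.FunctionNameAnomaly, and the field names mirror only what the handler references, so treat them as assumptions:

```go
// Standalone sketch: detecting an anomaly query on a builder query, as
// queryRangeV4 does, using local stand-in types for the v3 query model.
package main

import "fmt"

type function struct {
	Name      string
	NamedArgs map[string]interface{}
}

type builderQuery struct {
	QueryName  string
	Expression string
	Functions  []function
}

func main() {
	queries := map[string]builderQuery{
		"A": {
			QueryName:  "A",
			Expression: "A", // QueryName == Expression, so this is a plain builder query, not a formula
			Functions: []function{{
				Name:      "anomaly",
				NamedArgs: map[string]interface{}{"seasonality": "daily", "z_score_threshold": 3.0},
			}},
		},
	}

	for _, q := range queries {
		for _, fn := range q.Functions {
			if fn.Name == "anomaly" {
				fmt.Println("anomaly query:", q.QueryName, fn.NamedArgs["seasonality"], fn.NamedArgs["z_score_threshold"])
			}
		}
	}
}
```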
@@ -170,6 +170,14 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
var c cache.Cache
|
||||
if serverOptions.CacheConfigPath != "" {
|
||||
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
c = cache.NewCache(cacheOpts)
|
||||
}
|
||||
|
||||
<-readerReady
|
||||
rm, err := makeRulesManager(serverOptions.PromConfigPath,
|
||||
@@ -177,6 +185,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
serverOptions.RuleRepoURL,
|
||||
localDB,
|
||||
reader,
|
||||
c,
|
||||
serverOptions.DisableRules,
|
||||
lm,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
@@ -237,15 +246,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
telemetry.GetInstance().SetReader(reader)
|
||||
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
|
||||
|
||||
var c cache.Cache
|
||||
if serverOptions.CacheConfigPath != "" {
|
||||
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
c = cache.NewCache(cacheOpts)
|
||||
}
|
||||
|
||||
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
|
||||
|
||||
if err != nil {
|
||||
@@ -364,6 +364,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
|
||||
apiHandler.RegisterLogsRoutes(r, am)
|
||||
apiHandler.RegisterIntegrationRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV3Routes(r, am)
|
||||
apiHandler.RegisterInfraMetricsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV4Routes(r, am)
|
||||
apiHandler.RegisterWebSocketPaths(r, am)
|
||||
apiHandler.RegisterMessagingQueuesRoutes(r, am)
|
||||
@@ -732,6 +733,7 @@ func makeRulesManager(
|
||||
ruleRepoURL string,
|
||||
db *sqlx.DB,
|
||||
ch baseint.Reader,
|
||||
cache cache.Cache,
|
||||
disableRules bool,
|
||||
fm baseint.FeatureLookup,
|
||||
useLogsNewSchema bool) (*baserules.Manager, error) {
|
||||
@@ -756,10 +758,11 @@ func makeRulesManager(
|
||||
RepoURL: ruleRepoURL,
|
||||
DBConn: db,
|
||||
Context: context.Background(),
|
||||
Logger: nil,
|
||||
Logger: zap.L(),
|
||||
DisableRules: disableRules,
|
||||
FeatureFlags: fm,
|
||||
Reader: ch,
|
||||
Cache: cache,
|
||||
EvalDelay: baseconst.GetEvalDelay(),
|
||||
|
||||
PrepareTaskFunc: rules.PrepareTaskFunc,
|
||||
|
||||
@@ -8,9 +8,9 @@ import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
var (
|
||||
RoutePrefix string = "/api/gateway"
|
||||
AllowedPrefix string = "/v1/workspaces/me"
|
||||
AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me"}
|
||||
)
|
||||
|
||||
type proxy struct {
|
||||
|
||||
@@ -20,6 +20,8 @@ import (
|
||||
"google.golang.org/grpc"
|
||||
"google.golang.org/grpc/credentials/insecure"
|
||||
|
||||
prommodel "github.com/prometheus/common/model"
|
||||
|
||||
zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
|
||||
zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"
|
||||
|
||||
@@ -77,6 +79,10 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
|
||||
return logger
|
||||
}
|
||||
|
||||
func init() {
|
||||
prommodel.NameValidationScheme = prommodel.UTF8Validation
|
||||
}
|
||||
|
||||
func main() {
|
||||
var promConfigPath, skipTopLvlOpsPath string
|
||||
|
||||
|
||||
@@ -13,7 +13,6 @@ const Onboarding = "ONBOARDING"
|
||||
const ChatSupport = "CHAT_SUPPORT"
|
||||
const Gateway = "GATEWAY"
|
||||
const PremiumSupport = "PREMIUM_SUPPORT"
|
||||
const QueryBuilderSearchV2 = "QUERY_BUILDER_SEARCH_V2"
|
||||
|
||||
var BasicPlan = basemodel.FeatureSet{
|
||||
basemodel.Feature{
|
||||
@@ -129,7 +128,7 @@ var BasicPlan = basemodel.FeatureSet{
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: QueryBuilderSearchV2,
|
||||
Name: basemodel.AnomalyDetection,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
@@ -244,8 +243,8 @@ var ProPlan = basemodel.FeatureSet{
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: QueryBuilderSearchV2,
|
||||
Active: false,
|
||||
Name: basemodel.AnomalyDetection,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
@@ -373,7 +372,7 @@ var EnterprisePlan = basemodel.FeatureSet{
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: QueryBuilderSearchV2,
|
||||
Name: basemodel.AnomalyDetection,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
|
||||
393
ee/query-service/rules/anomaly.go
Normal file
@@ -0,0 +1,393 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"go.signoz.io/signoz/ee/query-service/anomaly"
|
||||
"go.signoz.io/signoz/pkg/query-service/cache"
|
||||
"go.signoz.io/signoz/pkg/query-service/common"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
|
||||
querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
|
||||
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/utils/labels"
|
||||
"go.signoz.io/signoz/pkg/query-service/utils/times"
|
||||
"go.signoz.io/signoz/pkg/query-service/utils/timestamp"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/formatter"
|
||||
|
||||
baserules "go.signoz.io/signoz/pkg/query-service/rules"
|
||||
|
||||
yaml "gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
const (
|
||||
RuleTypeAnomaly = "anomaly_rule"
|
||||
)
|
||||
|
||||
type AnomalyRule struct {
|
||||
*baserules.BaseRule
|
||||
|
||||
mtx sync.Mutex
|
||||
|
||||
reader interfaces.Reader
|
||||
|
||||
// querierV2 is used for alerts created after the introduction of new metrics query builder
|
||||
querierV2 interfaces.Querier
|
||||
|
||||
provider anomaly.Provider
|
||||
|
||||
seasonality anomaly.Seasonality
|
||||
}
|
||||
|
||||
func NewAnomalyRule(
|
||||
id string,
|
||||
p *baserules.PostableRule,
|
||||
featureFlags interfaces.FeatureLookup,
|
||||
reader interfaces.Reader,
|
||||
cache cache.Cache,
|
||||
opts ...baserules.RuleOption,
|
||||
) (*AnomalyRule, error) {
|
||||
|
||||
zap.L().Info("creating new AnomalyRule", zap.String("id", id), zap.Any("opts", opts))
|
||||
|
||||
baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
t := AnomalyRule{
|
||||
BaseRule: baseRule,
|
||||
}
|
||||
|
||||
switch strings.ToLower(p.RuleCondition.Seasonality) {
|
||||
case "hourly":
|
||||
t.seasonality = anomaly.SeasonalityHourly
|
||||
case "daily":
|
||||
t.seasonality = anomaly.SeasonalityDaily
|
||||
case "weekly":
|
||||
t.seasonality = anomaly.SeasonalityWeekly
|
||||
default:
|
||||
t.seasonality = anomaly.SeasonalityDaily
|
||||
}
|
||||
|
||||
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))
|
||||
|
||||
querierOptsV2 := querierV2.QuerierOptions{
|
||||
Reader: reader,
|
||||
Cache: cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FeatureLookup: featureFlags,
|
||||
}
|
||||
|
||||
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
|
||||
t.reader = reader
|
||||
if t.seasonality == anomaly.SeasonalityHourly {
|
||||
t.provider = anomaly.NewHourlyProvider(
|
||||
anomaly.WithCache[*anomaly.HourlyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.HourlyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
|
||||
)
|
||||
} else if t.seasonality == anomaly.SeasonalityDaily {
|
||||
t.provider = anomaly.NewDailyProvider(
|
||||
anomaly.WithCache[*anomaly.DailyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.DailyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
|
||||
)
|
||||
} else if t.seasonality == anomaly.SeasonalityWeekly {
|
||||
t.provider = anomaly.NewWeeklyProvider(
|
||||
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
|
||||
)
|
||||
}
|
||||
return &t, nil
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) Type() baserules.RuleType {
|
||||
return RuleTypeAnomaly
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) prepareQueryRange(ts time.Time) (*v3.QueryRangeParamsV3, error) {
|
||||
|
||||
zap.L().Info("prepareQueryRange", zap.Int64("ts", ts.UnixMilli()), zap.Int64("evalWindow", r.EvalWindow().Milliseconds()), zap.Int64("evalDelay", r.EvalDelay().Milliseconds()))
|
||||
|
||||
start := ts.Add(-time.Duration(r.EvalWindow())).UnixMilli()
|
||||
end := ts.UnixMilli()
|
||||
|
||||
if r.EvalDelay() > 0 {
|
||||
start = start - int64(r.EvalDelay().Milliseconds())
|
||||
end = end - int64(r.EvalDelay().Milliseconds())
|
||||
}
|
||||
// round to minute otherwise we could potentially miss data
|
||||
start = start - (start % (60 * 1000))
|
||||
end = end - (end % (60 * 1000))
|
||||
|
||||
compositeQuery := r.Condition().CompositeQuery
|
||||
|
||||
if compositeQuery.PanelType != v3.PanelTypeGraph {
|
||||
compositeQuery.PanelType = v3.PanelTypeGraph
|
||||
}
|
||||
|
||||
// default mode
|
||||
return &v3.QueryRangeParamsV3{
|
||||
Start: start,
|
||||
End: end,
|
||||
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
|
||||
CompositeQuery: compositeQuery,
|
||||
Variables: make(map[string]interface{}, 0),
|
||||
NoCache: false,
|
||||
}, nil
|
||||
}
|
||||
|
||||
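prepareQueryRange above derives the evaluation window from the rule's eval window and eval delay, then rounds both ends down to the minute so a partial minute is never queried. A small standalone sketch of that arithmetic with assumed values:

```go
// Standalone sketch (illustrative values) of the window arithmetic in
// prepareQueryRange: subtract the eval window, apply the eval delay, then
// round both ends down to the start of the minute.
package main

import (
	"fmt"
	"time"
)

func main() {
	ts := time.Date(2024, 9, 2, 10, 7, 42, 0, time.UTC)
	evalWindow := 5 * time.Minute
	evalDelay := 2 * time.Minute

	start := ts.Add(-evalWindow).UnixMilli()
	end := ts.UnixMilli()

	start -= evalDelay.Milliseconds()
	end -= evalDelay.Milliseconds()

	// round to minute otherwise we could potentially miss data
	start = start - (start % (60 * 1000))
	end = end - (end % (60 * 1000))

	fmt.Println(time.UnixMilli(start).UTC(), time.UnixMilli(end).UTC())
	// prints 2024-09-02 10:00:00 +0000 UTC 2024-09-02 10:05:00 +0000 UTC
}
```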
func (r *AnomalyRule) GetSelectedQuery() string {
|
||||
return r.Condition().GetSelectedQueryName()
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (baserules.Vector, error) {
|
||||
|
||||
params, err := r.prepareQueryRange(ts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = r.PopulateTemporality(ctx, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("internal error while setting temporality")
|
||||
}
|
||||
|
||||
anomalies, err := r.provider.GetAnomalies(ctx, &anomaly.GetAnomaliesRequest{
|
||||
Params: params,
|
||||
Seasonality: r.seasonality,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var queryResult *v3.Result
|
||||
for _, result := range anomalies.Results {
|
||||
if result.QueryName == r.GetSelectedQuery() {
|
||||
queryResult = result
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var resultVector baserules.Vector
|
||||
|
||||
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
|
||||
zap.L().Info("anomaly scores", zap.String("scores", string(scoresJSON)))
|
||||
|
||||
for _, series := range queryResult.AnomalyScores {
|
||||
smpl, shouldAlert := r.ShouldAlert(*series)
|
||||
if shouldAlert {
|
||||
resultVector = append(resultVector, smpl)
|
||||
}
|
||||
}
|
||||
return resultVector, nil
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) {
|
||||
|
||||
prevState := r.State()
|
||||
|
||||
valueFormatter := formatter.FromUnit(r.Unit())
|
||||
res, err := r.buildAndRunQuery(ctx, ts)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
r.mtx.Lock()
|
||||
defer r.mtx.Unlock()
|
||||
|
||||
resultFPs := map[uint64]struct{}{}
|
||||
var alerts = make(map[uint64]*baserules.Alert, len(res))
|
||||
|
||||
for _, smpl := range res {
|
||||
l := make(map[string]string, len(smpl.Metric))
|
||||
for _, lbl := range smpl.Metric {
|
||||
l[lbl.Name] = lbl.Value
|
||||
}
|
||||
|
||||
value := valueFormatter.Format(smpl.V, r.Unit())
|
||||
threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
|
||||
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))
|
||||
|
||||
tmplData := baserules.AlertTemplateData(l, value, threshold)
|
||||
// Inject some convenience variables that are easier to remember for users
|
||||
// who are not used to Go's templating system.
|
||||
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
|
||||
|
||||
// utility function to apply go template on labels and annotations
|
||||
expand := func(text string) string {
|
||||
|
||||
tmpl := baserules.NewTemplateExpander(
|
||||
ctx,
|
||||
defs+text,
|
||||
"__alert_"+r.Name(),
|
||||
tmplData,
|
||||
times.Time(timestamp.FromTime(ts)),
|
||||
nil,
|
||||
)
|
||||
result, err := tmpl.Expand()
|
||||
if err != nil {
|
||||
result = fmt.Sprintf("<error expanding template: %s>", err)
|
||||
zap.L().Error("Expanding alert template failed", zap.Error(err), zap.Any("data", tmplData))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel)
|
||||
resultLabels := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel).Labels()
|
||||
|
||||
for name, value := range r.Labels().Map() {
|
||||
lb.Set(name, expand(value))
|
||||
}
|
||||
|
||||
lb.Set(labels.AlertNameLabel, r.Name())
|
||||
lb.Set(labels.AlertRuleIdLabel, r.ID())
|
||||
lb.Set(labels.RuleSourceLabel, r.GeneratorURL())
|
||||
|
||||
annotations := make(labels.Labels, 0, len(r.Annotations().Map()))
|
||||
for name, value := range r.Annotations().Map() {
|
||||
annotations = append(annotations, labels.Label{Name: name, Value: expand(value)})
|
||||
}
|
||||
if smpl.IsMissing {
|
||||
lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
|
||||
}
|
||||
|
||||
lbs := lb.Labels()
|
||||
h := lbs.Hash()
|
||||
resultFPs[h] = struct{}{}
|
||||
|
||||
if _, ok := alerts[h]; ok {
|
||||
zap.L().Error("the alert query returns duplicate records", zap.String("ruleid", r.ID()), zap.Any("alert", alerts[h]))
|
||||
err = fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
alerts[h] = &baserules.Alert{
|
||||
Labels: lbs,
|
||||
QueryResultLables: resultLabels,
|
||||
Annotations: annotations,
|
||||
ActiveAt: ts,
|
||||
State: model.StatePending,
|
||||
Value: smpl.V,
|
||||
GeneratorURL: r.GeneratorURL(),
|
||||
Receivers: r.PreferredChannels(),
|
||||
Missing: smpl.IsMissing,
|
||||
}
|
||||
}
|
||||
|
||||
zap.L().Info("number of alerts found", zap.String("name", r.Name()), zap.Int("count", len(alerts)))
|
||||
|
||||
// alerts[h] is ready, add or update active list now
|
||||
for h, a := range alerts {
|
||||
// Check whether we already have alerting state for the identifying label set.
|
||||
// Update the last value and annotations if so, create a new alert entry otherwise.
|
||||
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
|
||||
|
||||
alert.Value = a.Value
|
||||
alert.Annotations = a.Annotations
|
||||
alert.Receivers = r.PreferredChannels()
|
||||
continue
|
||||
}
|
||||
|
||||
r.Active[h] = a
|
||||
}
|
||||
|
||||
itemsToAdd := []model.RuleStateHistory{}
|
||||
|
||||
// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
|
||||
for fp, a := range r.Active {
|
||||
labelsJSON, err := json.Marshal(a.QueryResultLables)
|
||||
if err != nil {
|
||||
zap.L().Error("error marshaling labels", zap.Error(err), zap.Any("labels", a.Labels))
|
||||
}
|
||||
if _, ok := resultFPs[fp]; !ok {
|
||||
// If the alert was previously firing, keep it around for a given
|
||||
// retention time so it is reported as resolved to the AlertManager.
|
||||
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > baserules.ResolvedRetention) {
|
||||
delete(r.Active, fp)
|
||||
}
|
||||
if a.State != model.StateInactive {
|
||||
a.State = model.StateInactive
|
||||
a.ResolvedAt = ts
|
||||
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
|
||||
RuleID: r.ID(),
|
||||
RuleName: r.Name(),
|
||||
State: model.StateInactive,
|
||||
StateChanged: true,
|
||||
UnixMilli: ts.UnixMilli(),
|
||||
Labels: model.LabelsString(labelsJSON),
|
||||
Fingerprint: a.QueryResultLables.Hash(),
|
||||
Value: a.Value,
|
||||
})
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
|
||||
a.State = model.StateFiring
|
||||
a.FiredAt = ts
|
||||
state := model.StateFiring
|
||||
if a.Missing {
|
||||
state = model.StateNoData
|
||||
}
|
||||
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
|
||||
RuleID: r.ID(),
|
||||
RuleName: r.Name(),
|
||||
State: state,
|
||||
StateChanged: true,
|
||||
UnixMilli: ts.UnixMilli(),
|
||||
Labels: model.LabelsString(labelsJSON),
|
||||
Fingerprint: a.QueryResultLables.Hash(),
|
||||
Value: a.Value,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
currentState := r.State()
|
||||
|
||||
overallStateChanged := currentState != prevState
|
||||
for idx, item := range itemsToAdd {
|
||||
item.OverallStateChanged = overallStateChanged
|
||||
item.OverallState = currentState
|
||||
itemsToAdd[idx] = item
|
||||
}
|
||||
|
||||
r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)
|
||||
|
||||
return len(r.Active), nil
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) String() string {
|
||||
|
||||
ar := baserules.PostableRule{
|
||||
AlertName: r.Name(),
|
||||
RuleCondition: r.Condition(),
|
||||
EvalWindow: baserules.Duration(r.EvalWindow()),
|
||||
Labels: r.Labels().Map(),
|
||||
Annotations: r.Annotations().Map(),
|
||||
PreferredChannels: r.PreferredChannels(),
|
||||
}
|
||||
|
||||
byt, err := yaml.Marshal(ar)
|
||||
if err != nil {
|
||||
return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
|
||||
}
|
||||
|
||||
return string(byt)
|
||||
}
|
||||
@@ -53,8 +53,27 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
// create promql rule task for evaluation
|
||||
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
|
||||
|
||||
} else if opts.Rule.RuleType == baserules.RuleTypeAnomaly {
|
||||
// create anomaly rule
|
||||
ar, err := NewAnomalyRule(
|
||||
ruleId,
|
||||
opts.Rule,
|
||||
opts.FF,
|
||||
opts.Reader,
|
||||
opts.Cache,
|
||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||
)
|
||||
if err != nil {
|
||||
return task, err
|
||||
}
|
||||
|
||||
rules = append(rules, ar)
|
||||
|
||||
// create anomaly rule task for evalution
|
||||
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
|
||||
|
||||
} else {
|
||||
return nil, fmt.Errorf("unsupported rule type. Supported types: %s, %s", baserules.RuleTypeProm, baserules.RuleTypeThreshold)
|
||||
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, baserules.RuleTypeProm, baserules.RuleTypeThreshold)
|
||||
}
|
||||
|
||||
return task, nil
|
||||
|
||||
@@ -68,7 +68,7 @@
    "css-loader": "5.0.0",
    "css-minimizer-webpack-plugin": "5.0.1",
    "dayjs": "^1.10.7",
    "dompurify": "3.0.0",
    "dompurify": "3.1.3",
    "dotenv": "8.2.0",
    "event-source-polyfill": "1.0.31",
    "eventemitter3": "5.0.1",
@@ -239,6 +239,7 @@
    "debug": "4.3.4",
    "semver": "7.5.4",
    "xml2js": "0.5.0",
    "phin": "^3.7.1"
    "phin": "^3.7.1",
    "body-parser": "1.20.3"
  }
}
BIN  frontend/public/Images/signoz-hero-image.webp  (new file, binary file not shown, 62 KiB)
@@ -1,11 +1,7 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_2048_2251)">
|
||||
<path opacity="0.9" d="M8.02226 15.9866C3.56539 15.9866 -6.10352e-05 12.4896 -6.10352e-05 8.11832C-6.10352e-05 3.79075 3.56539 0.25 8.02226 0.25H13.0584C14.7075 0.25 15.9999 1.56139 15.9999 3.13506V8.11832C15.9999 12.4896 12.4345 15.9866 8.02226 15.9866Z" fill="#F25733"/>
|
||||
<path d="M7.95919 4.71207C4.63025 4.71207 2.75514 7.46868 2.67693 7.58603C2.48413 7.87508 2.48413 8.24888 2.67707 8.53816C2.75514 8.65528 4.63025 11.4119 7.95919 11.4119C11.2881 11.4119 13.1633 8.65528 13.2414 8.53792C13.4342 8.24888 13.4342 7.87508 13.2413 7.58582C13.1632 7.46868 11.2881 4.71207 7.95919 4.71207ZM3.13771 8.23088C3.06925 8.12832 3.06925 7.99571 3.13771 7.89307C3.20059 7.79867 4.53564 5.83764 6.92256 5.36723C5.84092 5.78476 5.07127 6.83485 5.07127 8.062C5.07127 9.28912 5.84092 10.3392 6.92256 10.7567C4.53564 10.2863 3.20059 8.32528 3.13771 8.23088ZM6.62838 8.062C6.62838 8.21488 6.50443 8.3388 6.35151 8.3388C6.19859 8.3388 6.07465 8.21488 6.07465 8.062C6.07465 7.02287 6.92003 6.17748 7.95916 6.17748C8.11207 6.17748 8.23599 6.30141 8.23599 6.45434C8.23599 6.60727 8.11207 6.73119 7.95916 6.73119C7.22535 6.73119 6.62838 7.32815 6.62838 8.062ZM7.95919 8.73504C7.58803 8.73504 7.2861 8.43312 7.2861 8.062C7.2861 7.69085 7.58803 7.3889 7.95919 7.3889C8.33039 7.3889 8.63231 7.69083 8.63231 8.062C8.63231 8.43312 8.33039 8.73504 7.95919 8.73504ZM12.7806 8.23088C12.7178 8.32528 11.3827 10.2863 8.99583 10.7567C10.0775 10.3392 10.8471 9.28912 10.8471 8.062C10.8471 6.83487 10.0775 5.78477 8.99583 5.36724C11.3827 5.83768 12.7178 7.7987 12.7806 7.89307C12.8491 7.99571 12.8491 8.12832 12.7806 8.23088Z" fill="#F9F2F9"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_2048_2251">
|
||||
<rect width="16" height="16" fill="white"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="100" height="100" fill="none"><rect width="100" height="100" fill="url(#a)" rx="20"/><g fill="#fff" fill-rule="evenodd" clip-rule="evenodd" filter="url(#b)"><path d="M11 49.941v-.003l.002-.005.003-.014.007-.035a8.37 8.37 0 0 1 .105-.42c.073-.263.184-.624.348-1.072.328-.896.866-2.135 1.73-3.617 1.732-2.97 4.753-6.883 9.95-10.955 5.223-4.092 10.295-6.293 14.08-7.471a35.328 35.328 0 0 1 4.585-1.114 23.628 23.628 0 0 1 1.687-.223 9.17 9.17 0 0 1 .108-.009l.034-.002h.011l.007-.001s.002 0 .133 2.217c.13 2.218.132 2.218.132 2.218h-.002l-.053.004a19.098 19.098 0 0 0-1.326.178c-.809.136-1.937.37-3.302.763l-.127.043c-2.745.94-6.666 2.775-11.249 6.362-4.572 3.577-7.142 6.95-8.563 9.393-.711 1.222-1.137 2.215-1.383 2.889a9.995 9.995 0 0 0-.29.933c.008.037.022.095.044.173.046.166.123.423.246.76.246.674.672 1.667 1.383 2.89 1.421 2.441 3.991 5.815 8.563 9.392 4.584 3.587 8.504 5.423 11.25 6.362l.126.043c1.365.393 2.493.627 3.302.763a19.098 19.098 0 0 0 1.326.178l.053.004h.002s-.002 0-.133 2.218C43.66 75 43.657 75 43.657 75h-.007l-.011-.001-.034-.002a9.17 9.17 0 0 1-.478-.046 23.628 23.628 0 0 1-1.317-.186 35.328 35.328 0 0 1-4.584-1.114c-3.786-1.178-8.858-3.38-14.081-7.471-5.197-4.072-8.218-7.985-9.95-10.955-.864-1.482-1.402-2.72-1.73-3.617-.164-.448-.275-.81-.348-1.072a8.37 8.37 0 0 1-.105-.42l-.007-.035-.003-.014-.002-.005v-.121Zm78 0v-.003l-.002-.005-.002-.014-.008-.035a8.532 8.532 0 0 0-.105-.42 14.049 14.049 0 0 0-.348-1.072c-.328-.896-.866-2.135-1.73-3.617-1.732-2.97-4.753-6.883-9.95-10.955-5.223-4.092-10.295-6.293-14.08-7.471a35.328 35.328 0 0 0-4.585-1.114 23.628 23.628 0 0 0-1.687-.223 9.17 9.17 0 0 0-.108-.009l-.034-.002h-.011L56.343 25s-.002 0-.133 2.217c-.13 2.218-.132 2.218-.132 2.218h.002l.053.004a19.098 19.098 0 0 1 1.326.178c.809.136 1.937.37 3.302.763l.127.043c2.745.94 6.666 2.775 11.249 6.362 4.572 3.577 7.141 6.95 8.563 9.393.711 1.222 1.137 2.215 1.383 2.889a9.995 9.995 0 0 1 .29.933 9.995 9.995 0 0 1-.29.934c-.246.673-.672 1.666-1.383 2.888-1.422 2.442-3.991 5.816-8.563 9.393-4.584 3.587-8.504 5.423-11.25 6.362l-.126.043a30.108 30.108 0 0 1-3.302.763 19.098 19.098 0 0 1-1.326.178l-.053.004h-.002s.002 0 .133 2.218C56.34 75 56.343 75 56.343 75h.007l.011-.001.034-.002a9.17 9.17 0 0 0 .478-.046c.314-.034.758-.092 1.317-.186a35.328 35.328 0 0 0 4.584-1.114c3.786-1.178 8.858-3.38 14.081-7.471 5.197-4.072 8.218-7.985 9.95-10.955.864-1.482 1.402-2.72 1.73-3.617.164-.448.275-.81.348-1.072a8.532 8.532 0 0 0 .105-.42l.008-.035.002-.014.001-.005.001-.003v-.118Z"/><path d="M68.342 49.998c0 9.846-7.924 17.827-17.7 17.827-9.775 0-17.7-7.981-17.7-17.827 0-9.846 7.925-17.827 17.7-17.827 9.776 0 17.7 7.981 17.7 17.827ZM46.218 39.97s-2.127 2.508-2.766 4.457c-.412 1.257-.553 3.343-.553 3.343h-5.531s0-1.672 1.106-4.457c1.106-2.786 2.212-3.343 2.212-3.343h5.532Zm-2.766 15.6c.639 1.949 2.766 4.457 2.766 4.457h-5.532s-1.106-.557-2.212-3.343c-1.106-2.785-1.106-4.457-1.106-4.457h5.53s.142 2.086.554 3.343Z"/></g><defs><radialGradient id="a" cx="0" cy="0" r="1" gradientTransform="matrix(40.99997 42 -42 40.99997 50 50)" gradientUnits="userSpaceOnUse"><stop offset=".33" stop-color="#F76526"/><stop offset="1" stop-color="#F43030"/></radialGradient><filter id="b" width="90" height="62" x="5" y="23" color-interpolation-filters="sRGB" filterUnits="userSpaceOnUse"><feFlood flood-opacity="0" result="BackgroundImageFix"/><feColorMatrix in="SourceAlpha" result="hardAlpha" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/><feOffset dy="4"/><feGaussianBlur 
stdDeviation="3"/><feComposite in2="hardAlpha" operator="out"/><feColorMatrix values="0 0 0 0 0.368384 0 0 0 0 0.0623777 0 0 0 0 0.0623777 0 0 0 0.25 0"/><feBlend in2="BackgroundImageFix" result="effect1_dropShadow_3909_18731"/><feBlend in="SourceGraphic" in2="effect1_dropShadow_3909_18731" result="shape"/><feColorMatrix in="SourceAlpha" result="hardAlpha" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/><feOffset dy="4"/><feGaussianBlur stdDeviation="3"/><feComposite in2="hardAlpha" k2="-1" k3="1" operator="arithmetic"/><feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.06 0"/><feBlend in2="shape" result="effect2_innerShadow_3909_18731"/></filter></defs></svg>
Before Width: | Height: | Size: 1.8 KiB After Width: | Height: | Size: 4.1 KiB |
1  frontend/public/css/uPlot.min.css  (vendored, new file)
@@ -0,0 +1 @@
|
||||
.uplot, .uplot *, .uplot *::before, .uplot *::after {box-sizing: border-box;}.uplot {font-family: system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";line-height: 1.5;width: min-content;}.u-title {text-align: center;font-size: 18px;font-weight: bold;}.u-wrap {position: relative;user-select: none;}.u-over, .u-under {position: absolute;}.u-under {overflow: hidden;}.uplot canvas {display: block;position: relative;width: 100%;height: 100%;}.u-axis {position: absolute;}.u-legend {font-size: 14px;margin: auto;text-align: center;}.u-inline {display: block;}.u-inline * {display: inline-block;}.u-inline tr {margin-right: 16px;}.u-legend th {font-weight: 600;}.u-legend th > * {vertical-align: middle;display: inline-block;}.u-legend .u-marker {width: 1em;height: 1em;margin-right: 4px;background-clip: padding-box !important;}.u-inline.u-live th::after {content: ":";vertical-align: middle;}.u-inline:not(.u-live) .u-value {display: none;}.u-series > * {padding: 4px;}.u-series th {cursor: pointer;}.u-legend .u-off > * {opacity: 0.3;}.u-select {background: rgba(0,0,0,0.07);position: absolute;pointer-events: none;}.u-cursor-x, .u-cursor-y {position: absolute;left: 0;top: 0;pointer-events: none;will-change: transform;}.u-hz .u-cursor-x, .u-vt .u-cursor-y {height: 100%;border-right: 1px dashed #607D8B;}.u-hz .u-cursor-y, .u-vt .u-cursor-x {width: 100%;border-bottom: 1px dashed #607D8B;}.u-cursor-pt {position: absolute;top: 0;left: 0;border-radius: 50%;border: 0 solid;pointer-events: none;will-change: transform;/*this has to be !important since we set inline "background" shorthand */background-clip: padding-box !important;}.u-axis.u-off, .u-select.u-off, .u-cursor-x.u-off, .u-cursor-y.u-off, .u-cursor-pt.u-off {display: none;}
|
||||
Binary file not shown. (Before: 2.2 KiB, After: 13 KiB)
BIN  frontend/public/fonts/FiraCode-VariableFont_wght.ttf  (new file, binary file not shown)
BIN  frontend/public/fonts/Inter-VariableFont_opsz,wght.ttf  (new file, binary file not shown)
BIN  frontend/public/fonts/SpaceMono-Regular.ttf  (new file, binary file not shown)
BIN  frontend/public/fonts/WorkSans-VariableFont_wght.ttf  (new file, binary file not shown)
@@ -53,8 +53,10 @@
  "option_atleastonce": "at least once",
  "option_onaverage": "on average",
  "option_intotal": "in total",
  "option_last": "last",
  "option_above": "above",
  "option_below": "below",
  "option_above_below": "above/below",
  "option_equal": "is equal to",
  "option_notequal": "not equal to",
  "button_query": "Query",
@@ -109,6 +111,8 @@
  "choose_alert_type": "Choose a type for the alert",
  "metric_based_alert": "Metric based Alert",
  "metric_based_alert_desc": "Send a notification when a condition occurs in the metric data.",
  "anomaly_based_alert": "Anomaly based Alert",
  "anomaly_based_alert_desc": "Send a notification when a condition occurs in the metric data.",
  "log_based_alert": "Log-based Alert",
  "log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
  "traces_based_alert": "Trace-based Alert",
@@ -117,6 +121,8 @@
  "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
  "field_unit": "Threshold unit",
  "text_alert_on_absent": "Send a notification if data is missing for",
  "text_require_min_points": "Run alert evaluation only when there are minimum of",
  "text_num_points": "data points in each result group",
  "text_alert_frequency": "Run alert every",
  "text_for": "minutes",
  "selected_query_placeholder": "Select query"
@@ -40,8 +40,10 @@
  "option_atleastonce": "at least once",
  "option_onaverage": "on average",
  "option_intotal": "in total",
  "option_last": "last",
  "option_above": "above",
  "option_below": "below",
  "option_above_below": "above/below",
  "option_equal": "is equal to",
  "option_notequal": "not equal to",
  "button_query": "Query",

@@ -1,3 +1,3 @@
{
  "rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support."
  "rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
}
@@ -13,9 +13,12 @@
  "button_no": "No",
  "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
  "remove_label_success": "Labels cleared",
  "alert_form_step1": "Step 1 - Define the metric",
  "alert_form_step2": "Step 2 - Define Alert Conditions",
  "alert_form_step3": "Step 3 - Alert Configuration",
  "alert_form_step1": "Choose a detection method",
  "alert_form_step2": "Define the metric",
  "alert_form_step3": "Define Alert Conditions",
  "alert_form_step4": "Alert Configuration",
  "threshold_alert_desc": "An alert is triggered whenever a metric deviates from an expected threshold.",
  "anomaly_detection_alert_desc": "An alert is triggered whenever a metric deviates from an expected pattern.",
  "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
  "confirm_save_title": "Save Changes",
  "confirm_save_content_part1": "Your alert built with",
@@ -35,6 +38,7 @@
  "button_cancelchanges": "Cancel",
  "button_discard": "Discard",
  "text_condition1": "Send a notification when",
  "text_condition1_anomaly": "Send notification when the observed value for",
  "text_condition2": "the threshold",
  "text_condition3": "during the last",
  "option_1min": "1 min",
@@ -53,8 +57,10 @@
  "option_atleastonce": "at least once",
  "option_onaverage": "on average",
  "option_intotal": "in total",
  "option_last": "last",
  "option_above": "above",
  "option_below": "below",
  "option_above_below": "above/below",
  "option_equal": "is equal to",
  "option_notequal": "not equal to",
  "button_query": "Query",
@@ -108,7 +114,9 @@
  "user_tooltip_more_help": "More details on how to create alerts",
  "choose_alert_type": "Choose a type for the alert",
  "metric_based_alert": "Metric based Alert",
  "anomaly_based_alert": "Anomaly based Alert",
  "metric_based_alert_desc": "Send a notification when a condition occurs in the metric data.",
  "anomaly_based_alert_desc": "Send a notification when a condition occurs in the metric data.",
  "log_based_alert": "Log-based Alert",
  "log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
  "traces_based_alert": "Trace-based Alert",
@@ -117,6 +125,8 @@
  "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
  "field_unit": "Threshold unit",
  "text_alert_on_absent": "Send a notification if data is missing for",
  "text_require_min_points": "Run alert evaluation only when there are minimum of",
  "text_num_points": "data points in each result group",
  "text_alert_frequency": "Run alert every",
  "text_for": "minutes",
  "selected_query_placeholder": "Select query"
@@ -40,8 +40,10 @@
  "option_atleastonce": "at least once",
  "option_onaverage": "on average",
  "option_intotal": "in total",
  "option_last": "last",
  "option_above": "above",
  "option_below": "below",
  "option_above_below": "above/below",
  "option_equal": "is equal to",
  "option_notequal": "not equal to",
  "button_query": "Query",

@@ -1,3 +1,3 @@
{
  "rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support."
  "rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
}
File diff suppressed because one or more lines are too long
|
Before Width: | Height: | Size: 10 KiB |
@@ -12,6 +12,7 @@ import useAnalytics from 'hooks/analytics/useAnalytics';
|
||||
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
|
||||
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
|
||||
import { THEME_MODE } from 'hooks/useDarkMode/constant';
|
||||
import useFeatureFlags from 'hooks/useFeatureFlag';
|
||||
import useGetFeatureFlag from 'hooks/useGetFeatureFlag';
|
||||
import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
|
||||
import { NotificationProvider } from 'hooks/useNotifications';
|
||||
@@ -58,23 +59,13 @@ function App(): JSX.Element {
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const featureResponse = useGetFeatureFlag((allFlags) => {
|
||||
const isOnboardingEnabled =
|
||||
allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
|
||||
false;
|
||||
|
||||
const isChatSupportEnabled =
|
||||
allFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)?.active ||
|
||||
false;
|
||||
useFeatureFlags(FeatureKeys.CHAT_SUPPORT)?.active || false;
|
||||
|
||||
const isPremiumSupportEnabled =
|
||||
allFlags.find((flag) => flag.name === FeatureKeys.PREMIUM_SUPPORT)?.active ||
|
||||
false;
|
||||
|
||||
const showAddCreditCardModal =
|
||||
!isPremiumSupportEnabled &&
|
||||
!licenseData?.payload?.trialConvertedToSubscription;
|
||||
useFeatureFlags(FeatureKeys.PREMIUM_SUPPORT)?.active || false;
|
||||
|
||||
const featureResponse = useGetFeatureFlag((allFlags) => {
|
||||
dispatch({
|
||||
type: UPDATE_FEATURE_FLAG_RESPONSE,
|
||||
payload: {
|
||||
@@ -83,6 +74,10 @@ function App(): JSX.Element {
|
||||
},
|
||||
});
|
||||
|
||||
const isOnboardingEnabled =
|
||||
allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
|
||||
false;
|
||||
|
||||
if (!isOnboardingEnabled || !isCloudUserVal) {
|
||||
const newRoutes = routes.filter(
|
||||
(route) => route?.path !== ROUTES.GET_STARTED,
|
||||
@@ -90,16 +85,6 @@ function App(): JSX.Element {
|
||||
|
||||
setRoutes(newRoutes);
|
||||
}
|
||||
|
||||
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
window.Intercom('boot', {
|
||||
app_id: process.env.INTERCOM_APP_ID,
|
||||
email: user?.email || '',
|
||||
name: user?.name || '',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const isOnBasicPlan =
|
||||
@@ -201,6 +186,26 @@ function App(): JSX.Element {
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [pathname]);
|
||||
|
||||
useEffect(() => {
|
||||
const showAddCreditCardModal =
|
||||
!isPremiumSupportEnabled &&
|
||||
!licenseData?.payload?.trialConvertedToSubscription;
|
||||
|
||||
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
|
||||
window.Intercom('boot', {
|
||||
app_id: process.env.INTERCOM_APP_ID,
|
||||
email: user?.email || '',
|
||||
name: user?.name || '',
|
||||
});
|
||||
}
|
||||
}, [
|
||||
isLoggedInState,
|
||||
isChatSupportEnabled,
|
||||
user,
|
||||
licenseData,
|
||||
isPremiumSupportEnabled,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (user && user?.email && user?.userId && user?.name) {
|
||||
try {
|
||||
@@ -227,6 +232,10 @@ function App(): JSX.Element {
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [user]);
|
||||
|
||||
useEffect(() => {
|
||||
console.info('We are hiring! https://jobs.gem.com/signoz');
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
|
||||
@@ -7,6 +7,7 @@ const create = async (
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
	const response = await axios.post('/rules', {
		...props.data,
		version: 'v4',
	});

	return {
@@ -0,0 +1,5 @@
.client-side-qb-search {
	.ant-select-selection-search {
		width: max-content !important;
	}
}
@@ -0,0 +1,654 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
|
||||
import './ClientSideQBSearch.styles.scss';
|
||||
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Select, Tag, Tooltip } from 'antd';
|
||||
import {
|
||||
OPERATORS,
|
||||
QUERY_BUILDER_OPERATORS_BY_TYPES,
|
||||
QUERY_BUILDER_SEARCH_VALUES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { CustomTagProps } from 'container/QueryBuilder/filters/QueryBuilderSearch';
|
||||
import { selectStyle } from 'container/QueryBuilder/filters/QueryBuilderSearch/config';
|
||||
import { PLACEHOLDER } from 'container/QueryBuilder/filters/QueryBuilderSearch/constant';
|
||||
import { TypographyText } from 'container/QueryBuilder/filters/QueryBuilderSearch/style';
|
||||
import {
|
||||
checkCommaInValue,
|
||||
getOperatorFromValue,
|
||||
getOperatorValue,
|
||||
getTagToken,
|
||||
isInNInOperator,
|
||||
} from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
|
||||
import {
|
||||
DropdownState,
|
||||
ITag,
|
||||
Option,
|
||||
} from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
|
||||
import Suggestions from 'container/QueryBuilder/filters/QueryBuilderSearchV2/Suggestions';
|
||||
import { WhereClauseConfig } from 'hooks/queryBuilder/useAutoComplete';
|
||||
import { validationMapper } from 'hooks/queryBuilder/useIsValidTag';
|
||||
import { operatorTypeMapper } from 'hooks/queryBuilder/useOperatorType';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { isArray, isEmpty, isEqual, isObject } from 'lodash-es';
|
||||
import { ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import type { BaseSelectRef } from 'rc-select';
|
||||
import {
|
||||
KeyboardEvent,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
TagFilter,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
export interface AttributeKey {
|
||||
key: string;
|
||||
}
|
||||
|
||||
export interface AttributeValuesMap {
|
||||
[key: string]: AttributeValue;
|
||||
}
|
||||
|
||||
interface ClientSideQBSearchProps {
|
||||
filters: TagFilter;
|
||||
onChange: (value: TagFilter) => void;
|
||||
whereClauseConfig?: WhereClauseConfig;
|
||||
placeholder?: string;
|
||||
className?: string;
|
||||
suffixIcon?: React.ReactNode;
|
||||
attributeValuesMap?: AttributeValuesMap;
|
||||
attributeKeys: AttributeKey[];
|
||||
}
|
||||
|
||||
interface AttributeValue {
|
||||
stringAttributeValues: string[] | [];
|
||||
numberAttributeValues: number[] | [];
|
||||
boolAttributeValues: boolean[] | [];
|
||||
}
|
||||
|
||||
function ClientSideQBSearch(
|
||||
props: ClientSideQBSearchProps,
|
||||
): React.ReactElement {
|
||||
const {
|
||||
onChange,
|
||||
placeholder,
|
||||
className,
|
||||
suffixIcon,
|
||||
whereClauseConfig,
|
||||
attributeValuesMap,
|
||||
attributeKeys,
|
||||
filters,
|
||||
} = props;
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const selectRef = useRef<BaseSelectRef>(null);
|
||||
|
||||
const [isOpen, setIsOpen] = useState<boolean>(false);
|
||||
|
||||
// create the tags from the initial query here, this should only be computed on the first load as post that tags and query will be always in sync.
|
||||
const [tags, setTags] = useState<ITag[]>(filters.items as ITag[]);
|
||||
|
||||
// this will maintain the current state of in process filter item
|
||||
const [currentFilterItem, setCurrentFilterItem] = useState<ITag | undefined>();
|
||||
|
||||
const [currentState, setCurrentState] = useState<DropdownState>(
|
||||
DropdownState.ATTRIBUTE_KEY,
|
||||
);
|
||||
|
||||
// to maintain the current running state until the tokenization happens for the tag
|
||||
const [searchValue, setSearchValue] = useState<string>('');
|
||||
|
||||
const [dropdownOptions, setDropdownOptions] = useState<Option[]>([]);
|
||||
|
||||
const attributeValues = useMemo(() => {
|
||||
if (currentFilterItem?.key?.key) {
|
||||
return attributeValuesMap?.[currentFilterItem.key.key];
|
||||
}
|
||||
return {
|
||||
stringAttributeValues: [],
|
||||
numberAttributeValues: [],
|
||||
boolAttributeValues: [],
|
||||
};
|
||||
}, [attributeValuesMap, currentFilterItem?.key?.key]);
|
||||
|
||||
const handleDropdownSelect = useCallback(
|
||||
(value: string) => {
|
||||
let parsedValue: BaseAutocompleteData | string;
|
||||
|
||||
try {
|
||||
parsedValue = JSON.parse(value);
|
||||
} catch {
|
||||
parsedValue = value;
|
||||
}
|
||||
if (currentState === DropdownState.ATTRIBUTE_KEY) {
|
||||
setCurrentFilterItem((prev) => ({
|
||||
...prev,
|
||||
key: parsedValue as BaseAutocompleteData,
|
||||
op: '',
|
||||
value: '',
|
||||
}));
|
||||
setCurrentState(DropdownState.OPERATOR);
|
||||
setSearchValue((parsedValue as BaseAutocompleteData)?.key);
|
||||
} else if (currentState === DropdownState.OPERATOR) {
|
||||
if (value === OPERATORS.EXISTS || value === OPERATORS.NOT_EXISTS) {
|
||||
setTags((prev) => [
|
||||
...prev,
|
||||
{
|
||||
key: currentFilterItem?.key,
|
||||
op: value,
|
||||
value: '',
|
||||
} as ITag,
|
||||
]);
|
||||
setCurrentFilterItem(undefined);
|
||||
setSearchValue('');
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
} else {
|
||||
setCurrentFilterItem((prev) => ({
|
||||
key: prev?.key as BaseAutocompleteData,
|
||||
op: value as string,
|
||||
value: '',
|
||||
}));
|
||||
setCurrentState(DropdownState.ATTRIBUTE_VALUE);
|
||||
setSearchValue(`${currentFilterItem?.key?.key} ${value}`);
|
||||
}
|
||||
} else if (currentState === DropdownState.ATTRIBUTE_VALUE) {
|
||||
const operatorType =
|
||||
operatorTypeMapper[currentFilterItem?.op || ''] || 'NOT_VALID';
|
||||
const isMulti = operatorType === QUERY_BUILDER_SEARCH_VALUES.MULTIPLY;
|
||||
|
||||
if (isMulti) {
|
||||
const { tagKey, tagOperator, tagValue } = getTagToken(searchValue);
|
||||
// this condition takes care of adding the IN/NIN multi values when pressed enter on an already existing value.
|
||||
// not the best interaction but in sync with what we have today!
|
||||
if (tagValue.includes(String(value))) {
|
||||
setSearchValue('');
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
setCurrentFilterItem(undefined);
|
||||
setTags((prev) => [
|
||||
...prev,
|
||||
{
|
||||
key: currentFilterItem?.key,
|
||||
op: currentFilterItem?.op,
|
||||
value: tagValue,
|
||||
} as ITag,
|
||||
]);
|
||||
return;
|
||||
}
|
||||
// this is for adding subsequent comma seperated values
|
||||
const newSearch = [...tagValue];
|
||||
newSearch[newSearch.length === 0 ? 0 : newSearch.length - 1] = value;
|
||||
const newSearchValue = newSearch.join(',');
|
||||
setSearchValue(`${tagKey} ${tagOperator} ${newSearchValue},`);
|
||||
} else {
|
||||
setSearchValue('');
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
setCurrentFilterItem(undefined);
|
||||
setTags((prev) => [
|
||||
...prev,
|
||||
{
|
||||
key: currentFilterItem?.key,
|
||||
op: currentFilterItem?.op,
|
||||
value,
|
||||
} as ITag,
|
||||
]);
|
||||
}
|
||||
}
|
||||
},
|
||||
[currentFilterItem?.key, currentFilterItem?.op, currentState, searchValue],
|
||||
);
|
||||
|
||||
const handleSearch = useCallback((value: string) => {
|
||||
setSearchValue(value);
|
||||
}, []);
|
||||
|
||||
const onInputKeyDownHandler = useCallback(
|
||||
(event: KeyboardEvent<Element>): void => {
|
||||
if (event.key === 'Backspace' && !searchValue) {
|
||||
event.stopPropagation();
|
||||
setTags((prev) => prev.slice(0, -1));
|
||||
}
|
||||
},
|
||||
[searchValue],
|
||||
);
|
||||
|
||||
const handleOnBlur = useCallback((): void => {
|
||||
if (searchValue) {
|
||||
const operatorType =
|
||||
operatorTypeMapper[currentFilterItem?.op || ''] || 'NOT_VALID';
|
||||
// if key is added and operator is not present then convert to body CONTAINS key
|
||||
if (
|
||||
currentFilterItem?.key &&
|
||||
isEmpty(currentFilterItem?.op) &&
|
||||
whereClauseConfig?.customKey === 'body' &&
|
||||
whereClauseConfig?.customOp === OPERATORS.CONTAINS
|
||||
) {
|
||||
setTags((prev) => [
|
||||
...prev,
|
||||
{
|
||||
key: {
|
||||
key: 'body',
|
||||
dataType: DataTypes.String,
|
||||
type: '',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: 'body--string----true',
|
||||
},
|
||||
op: OPERATORS.CONTAINS,
|
||||
value: currentFilterItem?.key?.key,
|
||||
},
|
||||
]);
|
||||
setCurrentFilterItem(undefined);
|
||||
setSearchValue('');
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
} else if (
|
||||
currentFilterItem?.op === OPERATORS.EXISTS ||
|
||||
currentFilterItem?.op === OPERATORS.NOT_EXISTS
|
||||
) {
|
||||
// is exists and not exists operator is present then convert directly to tag! no need of value here
|
||||
setTags((prev) => [
|
||||
...prev,
|
||||
{
|
||||
key: currentFilterItem?.key,
|
||||
op: currentFilterItem?.op,
|
||||
value: '',
|
||||
},
|
||||
]);
|
||||
setCurrentFilterItem(undefined);
|
||||
setSearchValue('');
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
} else if (
|
||||
// if the current state is in sync with the kind of operator used then convert into a tag
|
||||
validationMapper[operatorType]?.(
|
||||
isArray(currentFilterItem?.value)
|
||||
? currentFilterItem?.value.length || 0
|
||||
: 1,
|
||||
)
|
||||
) {
|
||||
setTags((prev) => [
|
||||
...prev,
|
||||
{
|
||||
key: currentFilterItem?.key as BaseAutocompleteData,
|
||||
op: currentFilterItem?.op as string,
|
||||
value: currentFilterItem?.value || '',
|
||||
},
|
||||
]);
|
||||
setCurrentFilterItem(undefined);
|
||||
setSearchValue('');
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
}
|
||||
}
|
||||
}, [
|
||||
currentFilterItem?.key,
|
||||
currentFilterItem?.op,
|
||||
currentFilterItem?.value,
|
||||
searchValue,
|
||||
whereClauseConfig?.customKey,
|
||||
whereClauseConfig?.customOp,
|
||||
]);
|
||||
|
||||
// this useEffect takes care of tokenisation based on the search state
|
||||
useEffect(() => {
|
||||
// if there is no search value reset to the default state
|
||||
if (!searchValue) {
|
||||
setCurrentFilterItem(undefined);
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
}
|
||||
|
||||
// split the current search value based on delimiters
|
||||
const { tagKey, tagOperator, tagValue } = getTagToken(searchValue);
|
||||
|
||||
if (
|
||||
// Case 1 - if key is defined but the search text doesn't match with the set key,
|
||||
// can happen when user selects from dropdown and then deletes a few characters
|
||||
currentFilterItem?.key &&
|
||||
currentFilterItem?.key?.key !== tagKey.split(' ')[0]
|
||||
) {
|
||||
setCurrentFilterItem(undefined);
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
} else if (tagOperator && isEmpty(currentFilterItem?.op)) {
|
||||
// Case 2 -> key is set and now typing for the operator
|
||||
if (
|
||||
tagOperator === OPERATORS.EXISTS ||
|
||||
tagOperator === OPERATORS.NOT_EXISTS
|
||||
) {
|
||||
setTags((prev) => [
|
||||
...prev,
|
||||
{
|
||||
key: currentFilterItem?.key,
|
||||
op: tagOperator,
|
||||
value: '',
|
||||
} as ITag,
|
||||
]);
|
||||
setCurrentFilterItem(undefined);
|
||||
setSearchValue('');
|
||||
setCurrentState(DropdownState.ATTRIBUTE_KEY);
|
||||
} else {
|
||||
setCurrentFilterItem((prev) => ({
|
||||
key: prev?.key as BaseAutocompleteData,
|
||||
op: tagOperator,
|
||||
value: '',
|
||||
}));
|
||||
|
||||
setCurrentState(DropdownState.ATTRIBUTE_VALUE);
|
||||
}
|
||||
} else if (
|
||||
// Case 3 -> selected operator from dropdown and then erased a part of it
|
||||
!isEmpty(currentFilterItem?.op) &&
|
||||
tagOperator !== currentFilterItem?.op
|
||||
) {
|
||||
setCurrentFilterItem((prev) => ({
|
||||
key: prev?.key as BaseAutocompleteData,
|
||||
op: '',
|
||||
value: '',
|
||||
}));
|
||||
setCurrentState(DropdownState.OPERATOR);
|
||||
} else if (currentState === DropdownState.ATTRIBUTE_VALUE) {
|
||||
// Case 4 -> the final value state where we set the current filter values and the tokenisation happens on either
|
||||
// dropdown click or blur event
|
||||
const currentValue = {
|
||||
key: currentFilterItem?.key as BaseAutocompleteData,
|
||||
op: currentFilterItem?.op as string,
|
||||
value: tagValue,
|
||||
};
|
||||
if (!isEqual(currentValue, currentFilterItem)) {
|
||||
setCurrentFilterItem((prev) => ({
|
||||
key: prev?.key as BaseAutocompleteData,
|
||||
op: prev?.op as string,
|
||||
value: tagValue,
|
||||
}));
|
||||
}
|
||||
}
|
||||
}, [
|
||||
currentFilterItem,
|
||||
currentFilterItem?.key,
|
||||
currentFilterItem?.op,
|
||||
searchValue,
|
||||
currentState,
|
||||
]);
|
||||
|
||||
// the useEffect takes care of setting the dropdown values correctly on change of the current state
|
||||
useEffect(() => {
|
||||
if (currentState === DropdownState.ATTRIBUTE_KEY) {
|
||||
const filteredAttributeKeys = attributeKeys.filter((key) =>
|
||||
key.key.startsWith(searchValue),
|
||||
);
|
||||
setDropdownOptions(
|
||||
filteredAttributeKeys?.map(
|
||||
(key) =>
|
||||
({
|
||||
label: key.key,
|
||||
value: key,
|
||||
} as Option),
|
||||
) || [],
|
||||
);
|
||||
}
|
||||
if (currentState === DropdownState.OPERATOR) {
|
||||
const keyOperator = searchValue.split(' ');
|
||||
const partialOperator = keyOperator?.[1];
|
||||
const strippedKey = keyOperator?.[0];
|
||||
|
||||
let operatorOptions;
|
||||
if (currentFilterItem?.key?.dataType) {
|
||||
operatorOptions = QUERY_BUILDER_OPERATORS_BY_TYPES[
|
||||
currentFilterItem.key
|
||||
.dataType as keyof typeof QUERY_BUILDER_OPERATORS_BY_TYPES
|
||||
].map((operator) => ({
|
||||
label: operator,
|
||||
value: operator,
|
||||
}));
|
||||
|
||||
if (partialOperator) {
|
||||
operatorOptions = operatorOptions.filter((op) =>
|
||||
op.label.startsWith(partialOperator.toLocaleUpperCase()),
|
||||
);
|
||||
}
|
||||
setDropdownOptions(operatorOptions);
|
||||
} else if (strippedKey.endsWith('[*]') && strippedKey.startsWith('body.')) {
|
||||
operatorOptions = [OPERATORS.HAS, OPERATORS.NHAS].map((operator) => ({
|
||||
label: operator,
|
||||
value: operator,
|
||||
}));
|
||||
setDropdownOptions(operatorOptions);
|
||||
} else {
|
||||
operatorOptions = QUERY_BUILDER_OPERATORS_BY_TYPES.universal.map(
|
||||
(operator) => ({
|
||||
label: operator,
|
||||
value: operator,
|
||||
}),
|
||||
);
|
||||
|
||||
if (partialOperator) {
|
||||
operatorOptions = operatorOptions.filter((op) =>
|
||||
op.label.startsWith(partialOperator.toLocaleUpperCase()),
|
||||
);
|
||||
}
|
||||
setDropdownOptions(operatorOptions);
|
||||
}
|
||||
}
|
||||
|
||||
if (currentState === DropdownState.ATTRIBUTE_VALUE) {
|
||||
const values: Array<string | number | boolean> = [];
|
||||
const { tagValue } = getTagToken(searchValue);
|
||||
if (isArray(tagValue)) {
|
||||
if (!isEmpty(tagValue[tagValue.length - 1]))
|
||||
values.push(tagValue[tagValue.length - 1]);
|
||||
} else if (!isEmpty(tagValue)) values.push(tagValue);
|
||||
|
||||
const currentAttributeValues =
|
||||
attributeValues?.stringAttributeValues ||
|
||||
attributeValues?.numberAttributeValues ||
|
||||
attributeValues?.boolAttributeValues ||
|
||||
[];
|
||||
|
||||
values.push(...currentAttributeValues);
|
||||
|
||||
if (attributeValuesMap) {
|
||||
setDropdownOptions(
|
||||
values.map(
|
||||
(val) =>
|
||||
({
|
||||
label: checkCommaInValue(String(val)),
|
||||
value: val,
|
||||
} as Option),
|
||||
),
|
||||
);
|
||||
} else {
|
||||
// If attributeValuesMap is not provided, don't set dropdown options
|
||||
setDropdownOptions([]);
|
||||
}
|
||||
}
|
||||
}, [
|
||||
attributeValues,
|
||||
currentFilterItem?.key?.dataType,
|
||||
currentState,
|
||||
attributeKeys,
|
||||
searchValue,
|
||||
attributeValuesMap,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
const filterTags: IBuilderQuery['filters'] = {
|
||||
op: 'AND',
|
||||
items: [],
|
||||
};
|
||||
tags.forEach((tag) => {
|
||||
const computedTagValue =
|
||||
tag.value &&
|
||||
Array.isArray(tag.value) &&
|
||||
tag.value[tag.value.length - 1] === ''
|
||||
? tag.value?.slice(0, -1)
|
||||
: tag.value ?? '';
|
||||
filterTags.items.push({
|
||||
id: tag.id || uuid().slice(0, 8),
|
||||
key: tag.key,
|
||||
op: getOperatorValue(tag.op),
|
||||
value: computedTagValue,
|
||||
});
|
||||
});
|
||||
|
||||
if (!isEqual(filters, filterTags)) {
|
||||
onChange(filterTags);
|
||||
setTags(
|
||||
filterTags.items.map((tag) => ({
|
||||
...tag,
|
||||
op: getOperatorFromValue(tag.op),
|
||||
})) as ITag[],
|
||||
);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [tags]);
|
||||
|
||||
const queryTags = useMemo(
|
||||
() => tags.map((tag) => `${tag.key.key} ${tag.op} ${tag.value}`),
|
||||
[tags],
|
||||
);
|
||||
|
||||
const onTagRender = ({
|
||||
value,
|
||||
closable,
|
||||
onClose,
|
||||
}: CustomTagProps): React.ReactElement => {
|
||||
const { tagOperator } = getTagToken(value);
|
||||
const isInNin = isInNInOperator(tagOperator);
|
||||
const chipValue = isInNin
|
||||
? value?.trim()?.replace(/,\s*$/, '')
|
||||
: value?.trim();
|
||||
|
||||
const indexInQueryTags = queryTags.findIndex((qTag) => isEqual(qTag, value));
|
||||
const tagDetails = tags[indexInQueryTags];
|
||||
|
||||
const onCloseHandler = (): void => {
|
||||
onClose();
|
||||
setSearchValue('');
|
||||
setTags((prev) => prev.filter((t) => !isEqual(t, tagDetails)));
|
||||
};
|
||||
|
||||
const tagEditHandler = (value: string): void => {
|
||||
setCurrentFilterItem(tagDetails);
|
||||
setSearchValue(value);
|
||||
setCurrentState(DropdownState.ATTRIBUTE_VALUE);
|
||||
setTags((prev) => prev.filter((t) => !isEqual(t, tagDetails)));
|
||||
};
|
||||
|
||||
const isDisabled = !!searchValue;
|
||||
|
||||
return (
|
||||
<span className="qb-search-bar-tokenised-tags">
|
||||
<Tag
|
||||
closable={!searchValue && closable}
|
||||
onClose={onCloseHandler}
|
||||
className={tagDetails?.key?.type || ''}
|
||||
>
|
||||
<Tooltip title={chipValue}>
|
||||
<TypographyText
|
||||
ellipsis
|
||||
$isInNin={isInNin}
|
||||
disabled={isDisabled}
|
||||
$isEnabled={!!searchValue}
|
||||
onClick={(): void => {
|
||||
if (!isDisabled) tagEditHandler(value);
|
||||
}}
|
||||
>
|
||||
{chipValue}
|
||||
</TypographyText>
|
||||
</Tooltip>
|
||||
</Tag>
|
||||
</span>
|
||||
);
|
||||
};
|
||||
|
||||
const suffixIconContent = useMemo(() => {
|
||||
if (suffixIcon) {
|
||||
return suffixIcon;
|
||||
}
|
||||
return isOpen ? (
|
||||
<ChevronUp
|
||||
size={14}
|
||||
color={isDarkMode ? Color.TEXT_VANILLA_100 : Color.TEXT_INK_100}
|
||||
/>
|
||||
) : (
|
||||
<ChevronDown
|
||||
size={14}
|
||||
color={isDarkMode ? Color.TEXT_VANILLA_100 : Color.TEXT_INK_100}
|
||||
/>
|
||||
);
|
||||
}, [isDarkMode, isOpen, suffixIcon]);
|
||||
|
||||
return (
|
||||
<div className="query-builder-search-v2 ">
|
||||
<Select
|
||||
ref={selectRef}
|
||||
getPopupContainer={popupContainer}
|
||||
virtual={false}
|
||||
showSearch
|
||||
tagRender={onTagRender}
|
||||
transitionName=""
|
||||
choiceTransitionName=""
|
||||
filterOption={false}
|
||||
open={isOpen}
|
||||
suffixIcon={suffixIconContent}
|
||||
onDropdownVisibleChange={setIsOpen}
|
||||
autoClearSearchValue={false}
|
||||
mode="multiple"
|
||||
placeholder={placeholder}
|
||||
value={queryTags}
|
||||
searchValue={searchValue}
|
||||
className={className}
|
||||
rootClassName="query-builder-search client-side-qb-search"
|
||||
disabled={!attributeKeys.length}
|
||||
style={selectStyle}
|
||||
onSearch={handleSearch}
|
||||
onSelect={handleDropdownSelect}
|
||||
onInputKeyDown={onInputKeyDownHandler}
|
||||
notFoundContent={null}
|
||||
showAction={['focus']}
|
||||
onBlur={handleOnBlur}
|
||||
>
|
||||
{dropdownOptions.map((option) => {
|
||||
let val = option.value;
|
||||
try {
|
||||
if (isObject(option.value)) {
|
||||
val = JSON.stringify(option.value);
|
||||
} else {
|
||||
val = option.value;
|
||||
}
|
||||
} catch {
|
||||
val = option.value;
|
||||
}
|
||||
return (
|
||||
<Select.Option key={isObject(val) ? `select-option` : val} value={val}>
|
||||
<Suggestions
|
||||
label={option.label}
|
||||
value={option.value}
|
||||
option={currentState}
|
||||
searchValue={searchValue}
|
||||
/>
|
||||
</Select.Option>
|
||||
);
|
||||
})}
|
||||
</Select>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
ClientSideQBSearch.defaultProps = {
|
||||
placeholder: PLACEHOLDER,
|
||||
className: '',
|
||||
suffixIcon: null,
|
||||
whereClauseConfig: {},
|
||||
attributeValuesMap: {},
|
||||
};
|
||||
|
||||
export default ClientSideQBSearch;
|
||||
@@ -12,6 +12,20 @@ beforeAll(() => {
	matchMedia();
});

jest.mock('uplot', () => {
	const paths = {
		spline: jest.fn(),
		bars: jest.fn(),
	};
	const uplotMock = jest.fn(() => ({
		paths,
	}));
	return {
		paths,
		default: uplotMock,
	};
});

jest.mock('react-dnd', () => ({
	useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
	useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
@@ -22,7 +22,13 @@ export type GetViewDetailsUsingViewKey = (
|
||||
viewKey: string,
|
||||
data: ViewProps[] | undefined,
|
||||
) =>
|
||||
| { query: Query; name: string; uuid: string; panelType: PANEL_TYPES }
|
||||
| {
|
||||
query: Query;
|
||||
name: string;
|
||||
uuid: string;
|
||||
panelType: PANEL_TYPES;
|
||||
extraData?: string;
|
||||
}
|
||||
| undefined;
|
||||
|
||||
export interface IsQueryUpdatedInViewProps {
|
||||
|
||||
@@ -29,9 +29,9 @@ export const getViewDetailsUsingViewKey: GetViewDetailsUsingViewKey = (
) => {
	const selectedView = data?.find((view) => view.uuid === viewKey);
	if (selectedView) {
		const { compositeQuery, name, uuid } = selectedView;
		const { compositeQuery, name, uuid, extraData } = selectedView;
		const query = mapQueryDataFromApi(compositeQuery);
		return { query, name, uuid, panelType: compositeQuery.panelType };
		return { query, name, uuid, panelType: compositeQuery.panelType, extraData };
	}
	return undefined;
};
@@ -2,6 +2,14 @@ export const VIEW_TYPES = {
	OVERVIEW: 'OVERVIEW',
	JSON: 'JSON',
	CONTEXT: 'CONTEXT',
	INFRAMETRICS: 'INFRAMETRICS',
} as const;

export type VIEWS = typeof VIEW_TYPES[keyof typeof VIEW_TYPES];

export const RESOURCE_KEYS = {
	CLUSTER_NAME: 'k8s.cluster.name',
	POD_NAME: 'k8s.pod.name',
	NODE_NAME: 'k8s.node.name',
	HOST_NAME: 'host.name',
} as const;
@@ -9,6 +9,7 @@ import cx from 'classnames';
|
||||
import { LogType } from 'components/Logs/LogStateIndicator/LogStateIndicator';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import ContextView from 'container/LogDetailedView/ContextView/ContextView';
|
||||
import InfraMetrics from 'container/LogDetailedView/InfraMetrics/InfraMetrics';
|
||||
import JSONView from 'container/LogDetailedView/JsonView';
|
||||
import Overview from 'container/LogDetailedView/Overview';
|
||||
import {
|
||||
@@ -22,6 +23,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import {
|
||||
BarChart2,
|
||||
Braces,
|
||||
Copy,
|
||||
Filter,
|
||||
@@ -36,7 +38,7 @@ import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource, StringOperators } from 'types/common/queryBuilder';
|
||||
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
|
||||
|
||||
import { VIEW_TYPES, VIEWS } from './constants';
|
||||
import { RESOURCE_KEYS, VIEW_TYPES, VIEWS } from './constants';
|
||||
import { LogDetailProps } from './LogDetail.interfaces';
|
||||
import QueryBuilderSearchWrapper from './QueryBuilderSearchWrapper';
|
||||
|
||||
@@ -127,6 +129,7 @@ function LogDetail({
|
||||
return (
|
||||
<Drawer
|
||||
width="60%"
|
||||
maskStyle={{ background: 'none' }}
|
||||
title={
|
||||
<>
|
||||
<Divider type="vertical" className={cx('log-type-indicator', LogType)} />
|
||||
@@ -192,6 +195,17 @@ function LogDetail({
|
||||
Context
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.INFRAMETRICS}
|
||||
>
|
||||
<div className="view-title">
|
||||
<BarChart2 size={14} />
|
||||
Metrics
|
||||
</div>
|
||||
</Radio.Button>
|
||||
</Radio.Group>
|
||||
|
||||
{selectedView === VIEW_TYPES.JSON && (
|
||||
@@ -246,6 +260,15 @@ function LogDetail({
|
||||
isEdit={isEdit}
|
||||
/>
|
||||
)}
|
||||
{selectedView === VIEW_TYPES.INFRAMETRICS && (
|
||||
<InfraMetrics
|
||||
clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
|
||||
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
|
||||
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
|
||||
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
|
||||
logLineTimestamp={log.timestamp.toString()}
|
||||
/>
|
||||
)}
|
||||
</Drawer>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -195,21 +195,20 @@ function ListLogView({
|
||||
return (
|
||||
<>
|
||||
<Container
|
||||
$isActiveLog={isHighlighted}
|
||||
$isActiveLog={
|
||||
isHighlighted ||
|
||||
activeLog?.id === logData.id ||
|
||||
activeContextLog?.id === logData.id
|
||||
}
|
||||
$isDarkMode={isDarkMode}
|
||||
$logType={logType}
|
||||
onMouseEnter={handleMouseEnter}
|
||||
onMouseLeave={handleMouseLeave}
|
||||
onClick={handleDetailedView}
|
||||
fontSize={fontSize}
|
||||
>
|
||||
<div className="log-line">
|
||||
<LogStateIndicator
|
||||
type={logType}
|
||||
isActive={
|
||||
activeLog?.id === logData.id || activeContextLog?.id === logData.id
|
||||
}
|
||||
fontSize={fontSize}
|
||||
/>
|
||||
<LogStateIndicator type={logType} fontSize={fontSize} />
|
||||
<div>
|
||||
<LogContainer fontSize={fontSize}>
|
||||
<LogGeneralField
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Card, Typography } from 'antd';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import styled from 'styled-components';
|
||||
import { getActiveLogBackground } from 'utils/logs';
|
||||
|
||||
interface LogTextProps {
|
||||
linesPerRow?: number;
|
||||
@@ -15,6 +15,7 @@ interface LogContainerProps {
|
||||
export const Container = styled(Card)<{
|
||||
$isActiveLog: boolean;
|
||||
$isDarkMode: boolean;
|
||||
$logType: string;
|
||||
fontSize: FontSize;
|
||||
}>`
|
||||
width: 100% !important;
|
||||
@@ -41,13 +42,8 @@ export const Container = styled(Card)<{
|
||||
? `padding:0.3rem 0.6rem;`
|
||||
: ``}
|
||||
|
||||
${({ $isActiveLog, $isDarkMode }): string =>
|
||||
$isActiveLog
|
||||
? `background-color: ${
|
||||
$isDarkMode ? Color.BG_SLATE_500 : Color.BG_VANILLA_300
|
||||
} !important`
|
||||
: ''}
|
||||
}
|
||||
${({ $isActiveLog, $isDarkMode, $logType }): string =>
|
||||
getActiveLogBackground($isActiveLog, $isDarkMode, $logType)}
|
||||
`;
|
||||
|
||||
export const Text = styled(Typography.Text)`
|
||||
|
||||
@@ -22,34 +22,23 @@
|
||||
}
|
||||
|
||||
&.INFO {
|
||||
background-color: var(--bg-slate-400);
|
||||
background-color: var(--bg-robin-500);
|
||||
}
|
||||
|
||||
&.WARNING,
|
||||
&.WARN {
|
||||
background-color: var(--bg-amber-500);
|
||||
}
|
||||
|
||||
&.ERROR {
|
||||
background-color: var(--bg-cherry-500);
|
||||
}
|
||||
|
||||
&.TRACE {
|
||||
background-color: var(--bg-robin-300);
|
||||
background-color: var(--bg-forest-400);
|
||||
}
|
||||
|
||||
&.DEBUG {
|
||||
background-color: var(--bg-forest-500);
|
||||
background-color: var(--bg-aqua-500);
|
||||
}
|
||||
|
||||
&.FATAL {
|
||||
background-color: var(--bg-sakura-500);
|
||||
}
|
||||
}
|
||||
|
||||
&.isActive {
|
||||
.line {
|
||||
background-color: var(--bg-robin-400, #7190f9);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,14 +17,6 @@ describe('LogStateIndicator', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('renders correctly when isActive is true', () => {
|
||||
const { container } = render(
|
||||
<LogStateIndicator type="INFO" isActive fontSize={FontSize.MEDIUM} />,
|
||||
);
|
||||
const indicator = container.firstChild as HTMLElement;
|
||||
expect(indicator.classList.contains('isActive')).toBe(true);
|
||||
});
|
||||
|
||||
it('renders correctly with different types', () => {
|
||||
const { container: containerInfo } = render(
|
||||
<LogStateIndicator type="INFO" fontSize={FontSize.MEDIUM} />,
|
||||
|
||||
@@ -44,22 +44,16 @@ export const LogType = {
|
||||
|
||||
function LogStateIndicator({
|
||||
type,
|
||||
isActive,
|
||||
fontSize,
|
||||
}: {
|
||||
type: string;
|
||||
fontSize: FontSize;
|
||||
isActive?: boolean;
|
||||
}): JSX.Element {
|
||||
return (
|
||||
<div className={cx('log-state-indicator', isActive ? 'isActive' : '')}>
|
||||
<div className="log-state-indicator">
|
||||
<div className={cx('line', type, fontSize)}> </div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
LogStateIndicator.defaultProps = {
|
||||
isActive: false,
|
||||
};
|
||||
|
||||
export default LogStateIndicator;
|
||||
|
||||
@@ -162,20 +162,15 @@ function RawLogView({
|
||||
$isDarkMode={isDarkMode}
|
||||
$isReadOnly={isReadOnly}
|
||||
$isHightlightedLog={isHighlighted}
|
||||
$isActiveLog={isActiveLog}
|
||||
$isActiveLog={
|
||||
activeLog?.id === data.id || activeContextLog?.id === data.id || isActiveLog
|
||||
}
|
||||
$logType={logType}
|
||||
onMouseEnter={handleMouseEnter}
|
||||
onMouseLeave={handleMouseLeave}
|
||||
fontSize={fontSize}
|
||||
>
|
||||
<LogStateIndicator
|
||||
type={logType}
|
||||
isActive={
|
||||
activeLog?.id === data.id ||
|
||||
activeContextLog?.id === data.id ||
|
||||
isActiveLog
|
||||
}
|
||||
fontSize={fontSize}
|
||||
/>
|
||||
<LogStateIndicator type={logType} fontSize={fontSize} />
|
||||
|
||||
<RawLogContent
|
||||
$isReadOnly={isReadOnly}
|
||||
|
||||
@@ -13,6 +13,7 @@ export const RawLogViewContainer = styled(Row)<{
|
||||
$isReadOnly?: boolean;
|
||||
$isActiveLog?: boolean;
|
||||
$isHightlightedLog: boolean;
|
||||
$logType: string;
|
||||
fontSize: FontSize;
|
||||
}>`
|
||||
position: relative;
|
||||
@@ -34,11 +35,12 @@ export const RawLogViewContainer = styled(Row)<{
|
||||
: `margin: 2px 0;`}
|
||||
}
|
||||
|
||||
${({ $isActiveLog }): string => getActiveLogBackground($isActiveLog)}
|
||||
${({ $isActiveLog, $logType }): string =>
|
||||
getActiveLogBackground($isActiveLog, true, $logType)}
|
||||
|
||||
${({ $isReadOnly, $isActiveLog, $isDarkMode }): string =>
|
||||
${({ $isReadOnly, $isActiveLog, $isDarkMode, $logType }): string =>
|
||||
$isActiveLog
|
||||
? getActiveLogBackground($isActiveLog, $isDarkMode)
|
||||
? getActiveLogBackground($isActiveLog, $isDarkMode, $logType)
|
||||
: getDefaultLogBackground($isReadOnly, $isDarkMode)}
|
||||
|
||||
${({ $isHightlightedLog, $isDarkMode }): string =>
|
||||
|
||||
@@ -35,8 +35,6 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
||||
linesPerRow,
|
||||
fontSize,
|
||||
appendTo = 'center',
|
||||
activeContextLog,
|
||||
activeLog,
|
||||
isListViewPanel,
|
||||
} = props;
|
||||
|
||||
@@ -90,9 +88,6 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
||||
<div className="table-timestamp">
|
||||
<LogStateIndicator
|
||||
type={getLogIndicatorTypeForTable(item)}
|
||||
isActive={
|
||||
activeLog?.id === item.id || activeContextLog?.id === item.id
|
||||
}
|
||||
fontSize={fontSize}
|
||||
/>
|
||||
<Typography.Paragraph ellipsis className={cx('text', fontSize)}>
|
||||
@@ -130,16 +125,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
||||
},
|
||||
...(appendTo === 'end' ? fieldColumns : []),
|
||||
];
|
||||
}, [
|
||||
fields,
|
||||
isListViewPanel,
|
||||
appendTo,
|
||||
isDarkMode,
|
||||
linesPerRow,
|
||||
activeLog?.id,
|
||||
activeContextLog?.id,
|
||||
fontSize,
|
||||
]);
|
||||
}, [fields, isListViewPanel, appendTo, isDarkMode, linesPerRow, fontSize]);
|
||||
|
||||
return { columns, dataSource: flattenLogData };
|
||||
};
|
||||
|
||||
@@ -107,6 +107,7 @@ function DynamicColumnTable({
|
||||
className="dynamicColumnTable-button filter-btn"
|
||||
size="middle"
|
||||
icon={<SlidersHorizontal size={14} />}
|
||||
data-testid="additional-filters-button"
|
||||
/>
|
||||
</Dropdown>
|
||||
)}
|
||||
|
||||
@@ -16,7 +16,7 @@ function WelcomeLeftContainer({
|
||||
<Container>
|
||||
<LeftContainer direction="vertical">
|
||||
<Space align="center">
|
||||
<Logo src="signoz-signup.svg" alt="logo" />
|
||||
<Logo src="/Logos/signoz-brand-logo.svg" alt="logo" />
|
||||
<Title style={{ fontSize: '46px', margin: 0 }}>SigNoz</Title>
|
||||
</Space>
|
||||
<Typography>{t('monitor_signup')}</Typography>
|
||||
|
||||
@@ -2,6 +2,7 @@ import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
export const ALERTS_DATA_SOURCE_MAP: Record<AlertTypes, DataSource> = {
|
||||
[AlertTypes.ANOMALY_BASED_ALERT]: DataSource.METRICS,
|
||||
[AlertTypes.METRICS_BASED_ALERT]: DataSource.METRICS,
|
||||
[AlertTypes.LOGS_BASED_ALERT]: DataSource.LOGS,
|
||||
[AlertTypes.TRACES_BASED_ALERT]: DataSource.TRACES,
|
||||
|
||||
@@ -6,7 +6,6 @@ export const AUTH0_REDIRECT_PATH = '/redirect';
|
||||
|
||||
export const DEFAULT_AUTH0_APP_REDIRECTION_PATH = ROUTES.APPLICATION;
|
||||
|
||||
export const IS_SIDEBAR_COLLAPSED = 'isSideBarCollapsed';
|
||||
export const INVITE_MEMBERS_HASH = '#invite-team-members';
|
||||
|
||||
export const SIGNOZ_UPGRADE_PLAN_URL =
|
||||
|
||||
@@ -22,4 +22,5 @@ export enum FeatureKeys {
|
||||
GATEWAY = 'GATEWAY',
|
||||
PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
|
||||
QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
|
||||
ANOMALY_DETECTION = 'ANOMALY_DETECTION',
|
||||
}
|
||||
|
||||
@@ -36,4 +36,5 @@ export enum QueryParams {
|
||||
topic = 'topic',
|
||||
partition = 'partition',
|
||||
selectedTimelineQuery = 'selectedTimelineQuery',
|
||||
ruleType = 'ruleType',
|
||||
}
|
||||
|
||||
@@ -67,6 +67,10 @@ export const metricQueryFunctionOptions: SelectOption<string, string>[] = [
|
||||
value: QueryFunctionsTypes.TIME_SHIFT,
|
||||
label: 'Time Shift',
|
||||
},
|
||||
{
|
||||
value: QueryFunctionsTypes.TIME_SHIFT,
|
||||
label: 'Time Shift',
|
||||
},
|
||||
];
|
||||
|
||||
export const logsQueryFunctionOptions: SelectOption<string, string>[] = [
|
||||
@@ -80,10 +84,15 @@ interface QueryFunctionConfigType {
|
||||
showInput: boolean;
|
||||
inputType?: string;
|
||||
placeholder?: string;
|
||||
disabled?: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
export const queryFunctionsTypesConfig: QueryFunctionConfigType = {
|
||||
anomaly: {
|
||||
showInput: false,
|
||||
disabled: true,
|
||||
},
|
||||
cutOffMin: {
|
||||
showInput: true,
|
||||
inputType: 'text',
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';
|
||||
|
||||
const userOS = getUserOperatingSystem();
|
||||
export const GlobalShortcuts = {
|
||||
SidebarCollapse: '\\+meta',
|
||||
NavigateToServices: 's+shift',
|
||||
NavigateToTraces: 't+shift',
|
||||
NavigateToLogs: 'l+shift',
|
||||
@@ -13,7 +9,6 @@ export const GlobalShortcuts = {
|
||||
};
|
||||
|
||||
export const GlobalShortcutsName = {
|
||||
SidebarCollapse: `${userOS === UserOperatingSystem.MACOS ? 'cmd' : 'ctrl'}+\\`,
|
||||
NavigateToServices: 'shift+s',
|
||||
NavigateToTraces: 'shift+t',
|
||||
NavigateToLogs: 'shift+l',
|
||||
@@ -24,7 +19,6 @@ export const GlobalShortcutsName = {
|
||||
};
|
||||
|
||||
export const GlobalShortcutsDescription = {
|
||||
SidebarCollapse: 'Collpase the sidebar',
|
||||
NavigateToServices: 'Navigate to Services page',
|
||||
NavigateToTraces: 'Navigate to Traces page',
|
||||
NavigateToLogs: 'Navigate to logs page',
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import Uplot from 'components/Uplot';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useResizeObserver } from 'hooks/useDimensions';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import history from 'lib/history';
|
||||
import heatmapPlugin from 'lib/uPlotLib/plugins/heatmapPlugin';
|
||||
import timelinePlugin from 'lib/uPlotLib/plugins/timelinePlugin';
|
||||
import { useMemo, useRef } from 'react';
|
||||
import { useDispatch } from 'react-redux';
|
||||
import { UpdateTimeInterval } from 'store/actions';
|
||||
import { AlertRuleTimelineGraphResponse } from 'types/api/alerts/def';
|
||||
import uPlot, { AlignedData } from 'uplot';
|
||||
|
||||
@@ -41,11 +46,13 @@ function HorizontalTimelineGraph({
|
||||
return [timestamps, states];
|
||||
}, [data]);
|
||||
|
||||
const urlQuery = useUrlQuery();
|
||||
const dispatch = useDispatch();
|
||||
|
||||
const options: uPlot.Options = useMemo(
|
||||
() => ({
|
||||
width,
|
||||
height: 85,
|
||||
cursor: { show: false },
|
||||
|
||||
axes: [
|
||||
{
|
||||
@@ -66,6 +73,40 @@ function HorizontalTimelineGraph({
|
||||
label: 'States',
|
||||
},
|
||||
],
|
||||
hooks: {
|
||||
setSelect: [
|
||||
(self): void => {
|
||||
const selection = self.select;
|
||||
if (selection) {
|
||||
const startTime = self.posToVal(selection.left, 'x');
|
||||
const endTime = self.posToVal(selection.left + selection.width, 'x');
|
||||
|
||||
const diff = endTime - startTime;
|
||||
|
||||
if (diff > 0) {
|
||||
if (urlQuery.has(QueryParams.relativeTime)) {
|
||||
urlQuery.delete(QueryParams.relativeTime);
|
||||
}
|
||||
|
||||
const startTimestamp = Math.floor(startTime * 1000);
|
||||
const endTimestamp = Math.floor(endTime * 1000);
|
||||
|
||||
if (startTimestamp !== endTimestamp) {
|
||||
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
|
||||
}
|
||||
|
||||
urlQuery.set(QueryParams.startTime, startTimestamp.toString());
|
||||
urlQuery.set(QueryParams.endTime, endTimestamp.toString());
|
||||
|
||||
history.push({
|
||||
search: urlQuery.toString(),
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
plugins:
|
||||
transformedData?.length > 1
|
||||
? [
|
||||
@@ -76,7 +117,7 @@ function HorizontalTimelineGraph({
|
||||
]
|
||||
: [],
|
||||
}),
|
||||
[width, isDarkMode, transformedData],
|
||||
[width, isDarkMode, transformedData.length, urlQuery, dispatch],
|
||||
);
|
||||
return <Uplot data={transformedData} options={options} />;
|
||||
}
|
||||
|
||||
@@ -109,8 +109,8 @@
|
||||
}
|
||||
|
||||
.alert-rule {
|
||||
&-value,
|
||||
&-created-at {
|
||||
&__value,
|
||||
&__created-at {
|
||||
color: var(--text-ink-400);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
import './Table.styles.scss';
|
||||
|
||||
import { Table } from 'antd';
|
||||
import { initialFilters } from 'constants/queryBuilder';
|
||||
import {
|
||||
useGetAlertRuleDetailsTimelineTable,
|
||||
useTimelineTable,
|
||||
} from 'pages/AlertDetails/hooks';
|
||||
import { useMemo } from 'react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { timelineTableColumns } from './useTimelineTable';
|
||||
|
||||
function TimelineTable(): JSX.Element {
|
||||
const [filters, setFilters] = useState<TagFilter>(initialFilters);
|
||||
|
||||
const {
|
||||
isLoading,
|
||||
isRefetching,
|
||||
@@ -18,13 +22,14 @@ function TimelineTable(): JSX.Element {
|
||||
data,
|
||||
isValidRuleId,
|
||||
ruleId,
|
||||
} = useGetAlertRuleDetailsTimelineTable();
|
||||
} = useGetAlertRuleDetailsTimelineTable({ filters });
|
||||
|
||||
const { timelineData, totalItems } = useMemo(() => {
|
||||
const { timelineData, totalItems, labels } = useMemo(() => {
|
||||
const response = data?.payload?.data;
|
||||
return {
|
||||
timelineData: response?.items,
|
||||
totalItems: response?.total,
|
||||
labels: response?.labels,
|
||||
};
|
||||
}, [data?.payload?.data]);
|
||||
|
||||
@@ -42,7 +47,11 @@ function TimelineTable(): JSX.Element {
|
||||
<div className="timeline-table">
|
||||
<Table
|
||||
rowKey={(row): string => `${row.fingerprint}-${row.value}-${row.unixMilli}`}
|
||||
columns={timelineTableColumns()}
|
||||
columns={timelineTableColumns({
|
||||
filters,
|
||||
labels: labels ?? {},
|
||||
setFilters,
|
||||
})}
|
||||
dataSource={timelineData}
|
||||
pagination={paginationConfig}
|
||||
size="middle"
|
||||
|
||||
@@ -1,13 +1,84 @@
|
||||
import { EllipsisOutlined } from '@ant-design/icons';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Button } from 'antd';
|
||||
import { ColumnsType } from 'antd/es/table';
|
||||
import ClientSideQBSearch, {
|
||||
AttributeKey,
|
||||
} from 'components/ClientSideQBSearch/ClientSideQBSearch';
|
||||
import { ConditionalAlertPopover } from 'container/AlertHistory/AlertPopover/AlertPopover';
|
||||
import AlertLabels from 'pages/AlertDetails/AlertHeader/AlertLabels/AlertLabels';
|
||||
import { transformKeyValuesToAttributeValuesMap } from 'container/QueryBuilder/filters/utils';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { Search } from 'lucide-react';
|
||||
import AlertLabels, {
|
||||
AlertLabelsProps,
|
||||
} from 'pages/AlertDetails/AlertHeader/AlertLabels/AlertLabels';
|
||||
import AlertState from 'pages/AlertDetails/AlertHeader/AlertState/AlertState';
|
||||
import { useMemo } from 'react';
|
||||
import { AlertRuleTimelineTableResponse } from 'types/api/alerts/def';
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { formatEpochTimestamp } from 'utils/timeUtils';
|
||||
|
||||
export const timelineTableColumns = (): ColumnsType<AlertRuleTimelineTableResponse> => [
|
||||
const transformLabelsToQbKeys = (
|
||||
labels: AlertRuleTimelineTableResponse['labels'],
|
||||
): AttributeKey[] => Object.keys(labels).flatMap((key) => [{ key }]);
|
||||
|
||||
function LabelFilter({
|
||||
filters,
|
||||
setFilters,
|
||||
labels,
|
||||
}: {
|
||||
setFilters: (filters: TagFilter) => void;
|
||||
filters: TagFilter;
|
||||
labels: AlertLabelsProps['labels'];
|
||||
}): JSX.Element | null {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const { transformedKeys, attributesMap } = useMemo(
|
||||
() => ({
|
||||
transformedKeys: transformLabelsToQbKeys(labels || {}),
|
||||
attributesMap: transformKeyValuesToAttributeValuesMap(labels),
|
||||
}),
|
||||
[labels],
|
||||
);
|
||||
|
||||
const handleSearch = (tagFilters: TagFilter): void => {
|
||||
const tagFiltersLength = tagFilters.items.length;
|
||||
|
||||
if (
|
||||
(!tagFiltersLength && (!filters || !filters.items.length)) ||
|
||||
tagFiltersLength === filters?.items.length
|
||||
) {
|
||||
return;
|
||||
}
|
||||
setFilters(tagFilters);
|
||||
};
|
||||
|
||||
return (
|
||||
<ClientSideQBSearch
|
||||
onChange={handleSearch}
|
||||
filters={filters}
|
||||
className="alert-history-label-search"
|
||||
attributeKeys={transformedKeys}
|
||||
attributeValuesMap={attributesMap}
|
||||
suffixIcon={
|
||||
<Search
|
||||
size={14}
|
||||
color={isDarkMode ? Color.TEXT_VANILLA_100 : Color.TEXT_INK_100}
|
||||
/>
|
||||
}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export const timelineTableColumns = ({
|
||||
filters,
|
||||
labels,
|
||||
setFilters,
|
||||
}: {
|
||||
filters: TagFilter;
|
||||
labels: AlertLabelsProps['labels'];
|
||||
setFilters: (filters: TagFilter) => void;
|
||||
}): ColumnsType<AlertRuleTimelineTableResponse> => [
|
||||
{
|
||||
title: 'STATE',
|
||||
dataIndex: 'state',
|
||||
@@ -20,7 +91,9 @@ export const timelineTableColumns = (): ColumnsType<AlertRuleTimelineTableRespon
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'LABELS',
|
||||
title: (
|
||||
<LabelFilter setFilters={setFilters} filters={filters} labels={labels} />
|
||||
),
|
||||
dataIndex: 'labels',
|
||||
render: (labels): JSX.Element => (
|
||||
<div className="alert-rule-labels">
|
||||
|
||||
@@ -0,0 +1,180 @@
|
||||
.anomaly-alert-evaluation-view {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
gap: 8px;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
|
||||
.anomaly-alert-evaluation-view-chart-section {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
|
||||
&.has-multi-series-data {
|
||||
width: calc(100% - 240px);
|
||||
}
|
||||
|
||||
.anomaly-alert-evaluation-view-no-data-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
}
|
||||
}
|
||||
|
||||
.anomaly-alert-evaluation-view-series-selection {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
width: 240px;
|
||||
padding: 0px 8px;
|
||||
height: 100%;
|
||||
|
||||
.anomaly-alert-evaluation-view-series-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
height: 100%;
|
||||
|
||||
.anomaly-alert-evaluation-view-series-list-search {
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.anomaly-alert-evaluation-view-series-list-title {
|
||||
margin-top: 12px;
|
||||
font-size: 13px !important;
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
.anomaly-alert-evaluation-view-series-list-items {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
height: 100%;
|
||||
overflow-y: auto;
|
||||
|
||||
.anomaly-alert-evaluation-view-series-list-item {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
gap: 8px;
|
||||
|
||||
.anomaly-alert-evaluation-view-series-list-item-color {
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
border-radius: 50%;
|
||||
|
||||
display: inline-flex;
|
||||
margin-right: 8px;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 0.1rem;
|
||||
}
|
||||
&::-webkit-scrollbar-corner {
|
||||
background: transparent;
|
||||
}
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: rgb(136, 136, 136);
|
||||
border-radius: 0.625rem;
|
||||
}
|
||||
&::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.uplot {
|
||||
.u-title {
|
||||
text-align: center;
|
||||
font-size: 18px;
|
||||
font-weight: 400;
|
||||
display: flex;
|
||||
height: 40px;
|
||||
font-size: 13px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.u-legend {
|
||||
display: flex;
|
||||
margin-top: 16px;
|
||||
|
||||
tbody {
|
||||
width: 100%;
|
||||
|
||||
.u-series {
|
||||
display: inline-flex;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.uplot-tooltip {
|
||||
background-color: rgba(0, 0, 0, 0.9);
|
||||
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
|
||||
color: #ddd;
|
||||
font-size: 13px;
|
||||
line-height: 1.4;
|
||||
padding: 8px 12px;
|
||||
pointer-events: none;
|
||||
position: absolute;
|
||||
z-index: 100;
|
||||
max-height: 500px;
|
||||
width: 280px;
|
||||
overflow-y: auto;
|
||||
display: none; /* Hide tooltip by default */
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 0.3rem;
|
||||
}
|
||||
&::-webkit-scrollbar-corner {
|
||||
background: transparent;
|
||||
}
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: rgb(136, 136, 136);
|
||||
border-radius: 0.625rem;
|
||||
}
|
||||
&::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
}
|
||||
|
||||
.uplot-tooltip-title {
|
||||
font-weight: bold;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
.uplot-tooltip-series {
|
||||
display: flex;
|
||||
gap: 4px;
|
||||
padding: 4px 0px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.uplot-tooltip-series-name {
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.uplot-tooltip-band {
|
||||
font-style: italic;
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.uplot-tooltip-marker {
|
||||
display: inline-block;
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
border-radius: 50%;
|
||||
margin-right: 8px;
|
||||
vertical-align: middle;
|
||||
}
|
||||
@@ -0,0 +1,363 @@
|
||||
import 'uplot/dist/uPlot.min.css';
|
||||
import './AnomalyAlertEvaluationView.styles.scss';
|
||||
|
||||
import { Checkbox, Typography } from 'antd';
|
||||
import Search from 'antd/es/input/Search';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import useDebouncedFn from 'hooks/useDebouncedFunction';
|
||||
import { useResizeObserver } from 'hooks/useDimensions';
|
||||
import getAxes from 'lib/uPlotLib/utils/getAxes';
|
||||
import { getUplotChartDataForAnomalyDetection } from 'lib/uPlotLib/utils/getUplotChartData';
|
||||
import { getYAxisScaleForAnomalyDetection } from 'lib/uPlotLib/utils/getYAxisScale';
|
||||
import { LineChart } from 'lucide-react';
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import uPlot from 'uplot';
|
||||
|
||||
import tooltipPlugin from './tooltipPlugin';
|
||||
|
||||
function UplotChart({
|
||||
data,
|
||||
options,
|
||||
chartRef,
|
||||
}: {
|
||||
data: any;
|
||||
options: any;
|
||||
chartRef: any;
|
||||
}): JSX.Element {
|
||||
const plotInstance = useRef(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (plotInstance.current) {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
plotInstance.current.destroy();
|
||||
}
|
||||
|
||||
if (data && data.length > 0) {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
// eslint-disable-next-line new-cap
|
||||
plotInstance.current = new uPlot(options, data, chartRef.current);
|
||||
}
|
||||
|
||||
return (): void => {
|
||||
if (plotInstance.current) {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
plotInstance.current.destroy();
|
||||
}
|
||||
};
|
||||
}, [data, options, chartRef]);
|
||||
|
||||
return <div ref={chartRef} />;
|
||||
}
|
||||
|
||||
function AnomalyAlertEvaluationView({
|
||||
data,
|
||||
yAxisUnit,
|
||||
}: {
|
||||
data: any;
|
||||
yAxisUnit: string;
|
||||
}): JSX.Element {
|
||||
const { spline } = uPlot.paths;
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
const _spline = spline ? spline() : undefined;
|
||||
const chartRef = useRef<HTMLDivElement>(null);
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const [seriesData, setSeriesData] = useState<any>({});
|
||||
const [selectedSeries, setSelectedSeries] = useState<string | null>(null);
|
||||
|
||||
const [filteredSeriesKeys, setFilteredSeriesKeys] = useState<string[]>([]);
|
||||
const [allSeries, setAllSeries] = useState<string[]>([]);
|
||||
|
||||
const graphRef = useRef<HTMLDivElement>(null);
|
||||
const dimensions = useResizeObserver(graphRef);
|
||||
|
||||
useEffect(() => {
|
||||
const chartData = getUplotChartDataForAnomalyDetection(data, isDarkMode);
|
||||
setSeriesData(chartData);
|
||||
|
||||
setAllSeries(Object.keys(chartData));
|
||||
|
||||
setFilteredSeriesKeys(Object.keys(chartData));
|
||||
}, [data, isDarkMode]);
|
||||
|
||||
useEffect(() => {
|
||||
const seriesKeys = Object.keys(seriesData);
|
||||
if (seriesKeys.length === 1) {
|
||||
setSelectedSeries(seriesKeys[0]); // Automatically select if only one series
|
||||
} else {
|
||||
setSelectedSeries(null); // Default to "Show All" if multiple series
|
||||
}
|
||||
}, [seriesData]);
|
||||
|
||||
const handleSeriesChange = (series: string | null): void => {
|
||||
setSelectedSeries(series);
|
||||
};
|
||||
|
||||
const bandsPlugin = {
|
||||
hooks: {
|
||||
draw: [
|
||||
(u: any): void => {
|
||||
if (!selectedSeries) return;
|
||||
|
||||
const { ctx } = u;
|
||||
const upperBandIdx = 3;
|
||||
const lowerBandIdx = 4;
|
||||
|
||||
const xData = u.data[0];
|
||||
const yUpperData = u.data[upperBandIdx];
|
||||
const yLowerData = u.data[lowerBandIdx];
|
||||
|
||||
const strokeStyle =
|
||||
u.series[1]?.stroke || seriesData[selectedSeries].color;
|
||||
const fillStyle =
|
||||
typeof strokeStyle === 'string'
|
||||
? strokeStyle.replace(')', ', 0.1)')
|
||||
: 'rgba(255, 255, 255, 0.1)';
|
||||
|
||||
ctx.beginPath();
|
||||
const firstX = u.valToPos(xData[0], 'x', true);
|
||||
const firstUpperY = u.valToPos(yUpperData[0], 'y', true);
|
||||
ctx.moveTo(firstX, firstUpperY);
|
||||
|
||||
for (let i = 0; i < xData.length; i++) {
|
||||
const x = u.valToPos(xData[i], 'x', true);
|
||||
const y = u.valToPos(yUpperData[i], 'y', true);
|
||||
ctx.lineTo(x, y);
|
||||
}
|
||||
|
||||
for (let i = xData.length - 1; i >= 0; i--) {
|
||||
const x = u.valToPos(xData[i], 'x', true);
|
||||
const y = u.valToPos(yLowerData[i], 'y', true);
|
||||
ctx.lineTo(x, y);
|
||||
}
|
||||
|
||||
ctx.closePath();
|
||||
ctx.fillStyle = fillStyle;
|
||||
ctx.fill();
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
const initialData = allSeries.length
|
||||
? [
|
||||
seriesData[allSeries[0]].data[0], // Shared X-axis
|
||||
...allSeries.map((key) => seriesData[key].data[1]), // Map through Y-axis data for all series
|
||||
]
|
||||
: [];
|
||||
|
||||
const options = {
|
||||
width: dimensions.width,
|
||||
height: dimensions.height - 36,
|
||||
plugins: [bandsPlugin, tooltipPlugin(isDarkMode)],
|
||||
focus: {
|
||||
alpha: 0.3,
|
||||
},
|
||||
legend: {
|
||||
show: true,
|
||||
live: false,
|
||||
isolate: true,
|
||||
},
|
||||
cursor: {
|
||||
lock: false,
|
||||
focus: {
|
||||
prox: 1e6,
|
||||
bias: 1,
|
||||
},
|
||||
points: {
|
||||
size: (
|
||||
u: { series: { [x: string]: { points: { size: number } } } },
|
||||
seriesIdx: string | number,
|
||||
): number => u.series[seriesIdx].points.size * 3,
|
||||
width: (u: any, seriesIdx: any, size: number): number => size / 4,
|
||||
stroke: (
|
||||
u: {
|
||||
series: {
|
||||
[x: string]: { points: { stroke: (arg0: any, arg1: any) => any } };
|
||||
};
|
||||
},
|
||||
seriesIdx: string | number,
|
||||
): string => `${u.series[seriesIdx].points.stroke(u, seriesIdx)}90`,
|
||||
fill: (): string => '#fff',
|
||||
},
|
||||
},
|
||||
series: [
|
||||
{
|
||||
label: 'Time',
|
||||
},
|
||||
...(selectedSeries
|
||||
? [
|
||||
{
|
||||
label: `Main Series`,
|
||||
stroke: seriesData[selectedSeries].color,
|
||||
width: 2,
|
||||
show: true,
|
||||
paths: _spline,
|
||||
spanGaps: true,
|
||||
},
|
||||
{
|
||||
label: `Predicted Value`,
|
||||
stroke: seriesData[selectedSeries].color,
|
||||
width: 1,
|
||||
dash: [2, 2],
|
||||
show: true,
|
||||
paths: _spline,
|
||||
spanGaps: true,
|
||||
},
|
||||
{
|
||||
label: `Upper Band`,
|
||||
stroke: 'transparent',
|
||||
show: true,
|
||||
paths: _spline,
|
||||
spanGaps: true,
|
||||
points: {
|
||||
show: false,
|
||||
size: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
label: `Lower Band`,
|
||||
stroke: 'transparent',
|
||||
show: true,
|
||||
paths: _spline,
|
||||
spanGaps: true,
|
||||
points: {
|
||||
show: false,
|
||||
size: 1,
|
||||
},
|
||||
},
|
||||
]
|
||||
: allSeries.map((seriesKey) => ({
|
||||
label: seriesKey,
|
||||
stroke: seriesData[seriesKey].color,
|
||||
width: 2,
|
||||
show: true,
|
||||
paths: _spline,
|
||||
spanGaps: true,
|
||||
}))),
|
||||
],
|
||||
scales: {
|
||||
x: {
|
||||
time: true,
|
||||
spanGaps: true,
|
||||
},
|
||||
y: {
|
||||
...getYAxisScaleForAnomalyDetection({
|
||||
seriesData,
|
||||
selectedSeries,
|
||||
initialData,
|
||||
yAxisUnit,
|
||||
}),
|
||||
},
|
||||
},
|
||||
grid: {
|
||||
show: true,
|
||||
},
|
||||
axes: getAxes(isDarkMode, yAxisUnit),
|
||||
};
|
||||
|
||||
const handleSearch = (searchText: string): void => {
|
||||
if (!searchText || searchText.length === 0) {
|
||||
setFilteredSeriesKeys(allSeries);
|
||||
return;
|
||||
}
|
||||
|
||||
const filteredSeries = allSeries.filter((series) =>
|
||||
series.toLowerCase().includes(searchText.toLowerCase()),
|
||||
);
|
||||
|
||||
setFilteredSeriesKeys(filteredSeries);
|
||||
};
|
||||
|
||||
const handleSearchValueChange = useDebouncedFn((event): void => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
const value = event?.target?.value || '';
|
||||
|
||||
handleSearch(value);
|
||||
}, 300);
|
||||
|
||||
return (
|
||||
<div className="anomaly-alert-evaluation-view">
|
||||
<div
|
||||
className={`anomaly-alert-evaluation-view-chart-section ${
|
||||
allSeries.length > 1 ? 'has-multi-series-data' : ''
|
||||
}`}
|
||||
ref={graphRef}
|
||||
>
|
||||
{allSeries.length > 0 ? (
|
||||
<UplotChart
|
||||
data={selectedSeries ? seriesData[selectedSeries].data : initialData}
|
||||
options={options}
|
||||
chartRef={chartRef}
|
||||
/>
|
||||
) : (
|
||||
<div className="anomaly-alert-evaluation-view-no-data-container">
|
||||
<LineChart size={48} strokeWidth={0.5} />
|
||||
|
||||
<Typography>No Data</Typography>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{allSeries.length > 1 && (
|
||||
<div className="anomaly-alert-evaluation-view-series-selection">
|
||||
{allSeries.length > 1 && (
|
||||
<div className="anomaly-alert-evaluation-view-series-list">
|
||||
<Search
|
||||
className="anomaly-alert-evaluation-view-series-list-search"
|
||||
placeholder="Search a series"
|
||||
allowClear
|
||||
onChange={handleSearchValueChange}
|
||||
/>
|
||||
|
||||
<div className="anomaly-alert-evaluation-view-series-list-items">
|
||||
{filteredSeriesKeys.length > 0 && (
|
||||
<Checkbox
|
||||
className="anomaly-alert-evaluation-view-series-list-item"
|
||||
type="checkbox"
|
||||
name="series"
|
||||
value="all"
|
||||
checked={selectedSeries === null}
|
||||
onChange={(): void => handleSeriesChange(null)}
|
||||
>
|
||||
Show All
|
||||
</Checkbox>
|
||||
)}
|
||||
|
||||
{filteredSeriesKeys.map((seriesKey) => (
|
||||
<div key={seriesKey}>
|
||||
<Checkbox
|
||||
className="anomaly-alert-evaluation-view-series-list-item"
|
||||
key={seriesKey}
|
||||
type="checkbox"
|
||||
name="series"
|
||||
value={seriesKey}
|
||||
checked={selectedSeries === seriesKey}
|
||||
onChange={(): void => handleSeriesChange(seriesKey)}
|
||||
>
|
||||
<div
|
||||
className="anomaly-alert-evaluation-view-series-list-item-color"
|
||||
style={{ backgroundColor: seriesData[seriesKey].color }}
|
||||
/>
|
||||
|
||||
{seriesKey}
|
||||
</Checkbox>
|
||||
</div>
|
||||
))}
|
||||
|
||||
{filteredSeriesKeys.length === 0 && (
|
||||
<Typography>No series found</Typography>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default AnomalyAlertEvaluationView;
|
||||
@@ -0,0 +1,3 @@
|
||||
import AnomalyAlertEvaluationView from './AnomalyAlertEvaluationView';
|
||||
|
||||
export default AnomalyAlertEvaluationView;
|
||||
@@ -0,0 +1,148 @@
|
||||
import { themeColors } from 'constants/theme';
|
||||
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
|
||||
|
||||
const tooltipPlugin = (
|
||||
isDarkMode: boolean,
|
||||
): { hooks: { init: (u: any) => void } } => {
|
||||
let tooltip: HTMLDivElement;
|
||||
const tooltipLeftOffset = 10;
|
||||
const tooltipTopOffset = 10;
|
||||
let isMouseOverPlot = false;
|
||||
|
||||
function formatValue(value: string | number | Date): string | number | Date {
|
||||
if (typeof value === 'string' && !Number.isNaN(parseFloat(value))) {
|
||||
return parseFloat(value).toFixed(3);
|
||||
}
|
||||
if (typeof value === 'number') {
|
||||
return value.toFixed(3);
|
||||
}
|
||||
if (value instanceof Date) {
|
||||
return value.toLocaleString();
|
||||
}
|
||||
if (value == null) {
|
||||
return 'N/A';
|
||||
}
|
||||
|
||||
return String(value);
|
||||
}
|
||||
|
||||
function updateTooltip(u: any, left: number, top: number): void {
|
||||
const idx = u.posToIdx(left);
|
||||
const xVal = u.data[0][idx];
|
||||
|
||||
if (xVal == null) {
|
||||
tooltip.style.display = 'none';
|
||||
return;
|
||||
}
|
||||
|
||||
const xDate = new Date(xVal * 1000);
|
||||
const formattedXDate = formatValue(xDate);
|
||||
|
||||
let tooltipContent = `<div class="uplot-tooltip-title">Time: ${formattedXDate}</div>`;
|
||||
|
||||
let mainValue;
|
||||
let upperBand;
|
||||
let lowerBand;
|
||||
|
||||
let color = null;
|
||||
|
||||
// Loop through all series (excluding the x-axis series)
|
||||
for (let i = 1; i < u.series.length; i++) {
|
||||
const series = u.series[i];
|
||||
|
||||
const yVal = u.data[i][idx];
|
||||
const formattedYVal = formatValue(yVal);
|
||||
|
||||
color = generateColor(
|
||||
series.label,
|
||||
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
|
||||
);
|
||||
|
||||
// Create the round marker for the series
|
||||
const marker = `<span class="uplot-tooltip-marker" style="background-color: ${color};"></span>`;
|
||||
|
||||
if (series.label.toLowerCase().includes('upper band')) {
|
||||
upperBand = formattedYVal;
|
||||
} else if (series.label.toLowerCase().includes('lower band')) {
|
||||
lowerBand = formattedYVal;
|
||||
} else if (series.label.toLowerCase().includes('main series')) {
|
||||
mainValue = formattedYVal;
|
||||
} else {
|
||||
tooltipContent += `
|
||||
<div class="uplot-tooltip-series">
|
||||
${marker}
|
||||
<span class="uplot-tooltip-series-name">${series.label}:</span>
|
||||
<span class="uplot-tooltip-series-value">${formattedYVal}</span>
|
||||
</div>`;
|
||||
}
|
||||
}
|
||||
|
||||
// Add main value, upper band, and lower band to the tooltip
|
||||
if (mainValue !== undefined) {
|
||||
const marker = `<span class="uplot-tooltip-marker"></span>`;
|
||||
tooltipContent += `
|
||||
<div class="uplot-tooltip-series">
|
||||
${marker}
|
||||
<span class="uplot-tooltip-series-name">Main Series:</span>
|
||||
<span class="uplot-tooltip-series-value">${mainValue}</span>
|
||||
</div>`;
|
||||
}
|
||||
if (upperBand !== undefined) {
|
||||
const marker = `<span class="uplot-tooltip-marker"></span>`;
|
||||
tooltipContent += `
|
||||
<div class="uplot-tooltip-series">
|
||||
${marker}
|
||||
<span class="uplot-tooltip-series-name">Upper Band:</span>
|
||||
<span class="uplot-tooltip-series-value">${upperBand}</span>
|
||||
</div>`;
|
||||
}
|
||||
if (lowerBand !== undefined) {
|
||||
const marker = `<span class="uplot-tooltip-marker"></span>`;
|
||||
tooltipContent += `
|
||||
<div class="uplot-tooltip-series">
|
||||
${marker}
|
||||
<span class="uplot-tooltip-series-name">Lower Band:</span>
|
||||
<span class="uplot-tooltip-series-value">${lowerBand}</span>
|
||||
</div>`;
|
||||
}
|
||||
|
||||
tooltip.innerHTML = tooltipContent;
|
||||
tooltip.style.display = 'block';
|
||||
tooltip.style.left = `${left + tooltipLeftOffset}px`;
|
||||
tooltip.style.top = `${top + tooltipTopOffset}px`;
|
||||
}
|
||||
|
||||
function init(u: any): void {
|
||||
tooltip = document.createElement('div');
|
||||
tooltip.className = 'uplot-tooltip';
|
||||
tooltip.style.display = 'none';
|
||||
u.over.appendChild(tooltip);
|
||||
|
||||
// Add event listeners
|
||||
u.over.addEventListener('mouseenter', () => {
|
||||
isMouseOverPlot = true;
|
||||
});
|
||||
|
||||
u.over.addEventListener('mouseleave', () => {
|
||||
isMouseOverPlot = false;
|
||||
tooltip.style.display = 'none';
|
||||
});
|
||||
|
||||
u.over.addEventListener('mousemove', (e: MouseEvent) => {
|
||||
if (isMouseOverPlot) {
|
||||
const rect = u.over.getBoundingClientRect();
|
||||
const left = e.clientX - rect.left;
|
||||
const top = e.clientY - rect.top;
|
||||
updateTooltip(u, left, top);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
hooks: {
|
||||
init,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
export default tooltipPlugin;
|
||||
@@ -16,12 +16,6 @@
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
&.docked {
|
||||
.app-content {
|
||||
width: calc(100% - 240px);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.chat-support-gateway {
|
||||
|
||||
@@ -5,13 +5,11 @@ import './AppLayout.styles.scss';
|
||||
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Flex } from 'antd';
|
||||
import getLocalStorageKey from 'api/browser/localstorage/get';
|
||||
import getUserLatestVersion from 'api/user/getLatestVersion';
|
||||
import getUserVersion from 'api/user/getVersion';
|
||||
import cx from 'classnames';
|
||||
import ChatSupportGateway from 'components/ChatSupportGateway/ChatSupportGateway';
|
||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import { IS_SIDEBAR_COLLAPSED } from 'constants/app';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import ROUTES from 'constants/routes';
|
||||
import SideNav from 'container/SideNav';
|
||||
@@ -22,22 +20,13 @@ import useLicense from 'hooks/useLicense';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import history from 'lib/history';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import {
|
||||
ReactNode,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useLayoutEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { ReactNode, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { Dispatch } from 'redux';
|
||||
import { sideBarCollapse } from 'store/actions';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppActions from 'types/actions';
|
||||
import {
|
||||
@@ -59,10 +48,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
(state) => state.app,
|
||||
);
|
||||
|
||||
const [collapsed, setCollapsed] = useState<boolean>(
|
||||
getLocalStorageKey(IS_SIDEBAR_COLLAPSED) === 'true',
|
||||
);
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
@@ -117,14 +102,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
const latestCurrentCounter = useRef(0);
|
||||
const latestVersionCounter = useRef(0);
|
||||
|
||||
const onCollapse = useCallback(() => {
|
||||
setCollapsed((collapsed) => !collapsed);
|
||||
}, []);
|
||||
|
||||
useLayoutEffect(() => {
|
||||
dispatch(sideBarCollapse(collapsed));
|
||||
}, [collapsed, dispatch]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
getUserLatestVersionResponse.isFetched &&
|
||||
@@ -234,6 +211,13 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
}
|
||||
}, [licenseData, isFetching]);
|
||||
|
||||
useEffect(() => {
|
||||
// after logging out hide the trial expiry banner
|
||||
if (!isLoggedIn) {
|
||||
setShowTrialExpiryBanner(false);
|
||||
}
|
||||
}, [isLoggedIn]);
|
||||
|
||||
const handleUpgrade = (): void => {
|
||||
if (role === 'ADMIN') {
|
||||
history.push(ROUTES.BILLING);
|
||||
@@ -255,19 +239,16 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
const isDashboardListView = (): boolean => routeKey === 'ALL_DASHBOARD';
|
||||
const isAlertHistory = (): boolean => routeKey === 'ALERT_HISTORY';
|
||||
const isAlertOverview = (): boolean => routeKey === 'ALERT_OVERVIEW';
|
||||
const isDashboardView = (): boolean => {
|
||||
/**
|
||||
* need to match using regex here as the getRoute function will not work for
|
||||
* routes with id
|
||||
*/
|
||||
const regex = /^\/dashboard\/[a-zA-Z0-9_-]+$/;
|
||||
return regex.test(pathname);
|
||||
};
|
||||
const isPathMatch = (regex: RegExp): boolean => regex.test(pathname);
|
||||
|
||||
const isDashboardWidgetView = (): boolean => {
|
||||
const regex = /^\/dashboard\/[a-zA-Z0-9_-]+\/new$/;
|
||||
return regex.test(pathname);
|
||||
};
|
||||
const isDashboardView = (): boolean =>
|
||||
isPathMatch(/^\/dashboard\/[a-zA-Z0-9_-]+$/);
|
||||
|
||||
const isDashboardWidgetView = (): boolean =>
|
||||
isPathMatch(/^\/dashboard\/[a-zA-Z0-9_-]+\/new$/);
|
||||
|
||||
const isTraceDetailsView = (): boolean =>
|
||||
isPathMatch(/^\/trace\/[a-zA-Z0-9]+(\?.*)?$/);
|
||||
|
||||
useEffect(() => {
|
||||
if (isDarkMode) {
|
||||
@@ -279,23 +260,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
}
|
||||
}, [isDarkMode]);
|
||||
|
||||
const isSideNavCollapsed = getLocalStorageKey(IS_SIDEBAR_COLLAPSED);
|
||||
|
||||
/**
|
||||
* Note: Right now we don't have a page-level method to pass the sidebar collapse state.
|
||||
* Since the use case for overriding is not widely needed, we are setting it here
|
||||
* so that the workspace locked page will have an expanded sidebar regardless of how users
|
||||
* have set it or what is stored in localStorage. This will not affect the localStorage config.
|
||||
*/
|
||||
const isWorkspaceLocked = pathname === ROUTES.WORKSPACE_LOCKED;
|
||||
|
||||
return (
|
||||
<Layout
|
||||
className={cx(
|
||||
isDarkMode ? 'darkMode' : 'lightMode',
|
||||
isSideNavCollapsed ? 'sidebarCollapsed' : '',
|
||||
)}
|
||||
>
|
||||
<Layout className={cx(isDarkMode ? 'darkMode' : 'lightMode')}>
|
||||
<Helmet>
|
||||
<title>{pageTitle}</title>
|
||||
</Helmet>
|
||||
@@ -321,25 +287,11 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
</div>
|
||||
)}
|
||||
|
||||
<Flex
|
||||
className={cx(
|
||||
'app-layout',
|
||||
isDarkMode ? 'darkMode' : 'lightMode',
|
||||
!collapsed && !renderFullScreen ? 'docked' : '',
|
||||
)}
|
||||
>
|
||||
<Flex className={cx('app-layout', isDarkMode ? 'darkMode' : 'lightMode')}>
|
||||
{isToDisplayLayout && !renderFullScreen && (
|
||||
<SideNav
|
||||
licenseData={licenseData}
|
||||
isFetching={isFetching}
|
||||
onCollapse={onCollapse}
|
||||
collapsed={isWorkspaceLocked ? false : collapsed}
|
||||
/>
|
||||
<SideNav licenseData={licenseData} isFetching={isFetching} />
|
||||
)}
|
||||
<div
|
||||
className={cx('app-content', collapsed ? 'collapsed' : '')}
|
||||
data-overlayscrollbars-initialize
|
||||
>
|
||||
<div className="app-content" data-overlayscrollbars-initialize>
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<LayoutContent data-overlayscrollbars-initialize>
|
||||
<OverlayScrollbar>
|
||||
@@ -356,6 +308,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
isMessagingQueues()
|
||||
? 0
|
||||
: '0 1rem',
|
||||
|
||||
...(isTraceDetailsView() ? { marginRight: 0 } : {}),
|
||||
}}
|
||||
>
|
||||
{isToDisplayLayout && !renderFullScreen && <TopNav />}
|
||||
|
||||
@@ -50,6 +50,13 @@
|
||||
align-items: center;
|
||||
}
|
||||
}
|
||||
|
||||
.billing-update-note {
|
||||
text-align: left;
|
||||
font-size: 13px;
|
||||
color: var(--bg-vanilla-200);
|
||||
margin-top: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
.ant-skeleton.ant-skeleton-element.ant-skeleton-active {
|
||||
@@ -75,5 +82,9 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.billing-update-note {
|
||||
color: var(--bg-ink-200);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -348,7 +348,12 @@ export default function BillingContainer(): JSX.Element {
|
||||
const BillingUsageGraphCallback = useCallback(
|
||||
() =>
|
||||
!isLoading && !isFetchingBillingData ? (
|
||||
<>
|
||||
<BillingUsageGraph data={apiResponse} billAmount={billAmount} />
|
||||
<div className="billing-update-note">
|
||||
Note: Billing metrics are updated once every 24 hours.
|
||||
</div>
|
||||
</>
|
||||
) : (
|
||||
<Card className="empty-graph-card" bordered={false}>
|
||||
<Spinner size="large" tip="Loading..." height="35vh" />
|
||||
|
||||
@@ -58,6 +58,21 @@ const calculateStartEndTime = (
|
||||
|
||||
export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
|
||||
const { data, billAmount } = props;
|
||||
// Added this to fix the issue where breakdown with one day data are causing the bars to spread across multiple days
|
||||
data?.details?.breakdown?.forEach((breakdown: any) => {
|
||||
if (breakdown?.dayWiseBreakdown?.breakdown?.length === 1) {
|
||||
const currentDay = breakdown.dayWiseBreakdown.breakdown[0];
|
||||
const nextDay = {
|
||||
...currentDay,
|
||||
timestamp: currentDay.timestamp + 86400,
|
||||
count: 0,
|
||||
size: 0,
|
||||
quantity: 0,
|
||||
total: 0,
|
||||
};
|
||||
breakdown.dayWiseBreakdown.breakdown.push(nextDay);
|
||||
}
|
||||
});
|
||||
const graphCompatibleData = useMemo(
|
||||
() => convertDataToMetricRangePayload(data),
|
||||
[data],
|
||||
|
||||
@@ -3,7 +3,11 @@ import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||
|
||||
import { OptionType } from './types';
|
||||
|
||||
export const getOptionList = (t: TFunction): OptionType[] => [
|
||||
export const getOptionList = (
|
||||
t: TFunction,
|
||||
isAnomalyDetectionEnabled: boolean,
|
||||
): OptionType[] => {
|
||||
const optionList: OptionType[] = [
|
||||
{
|
||||
title: t('metric_based_alert'),
|
||||
selection: AlertTypes.METRICS_BASED_ALERT,
|
||||
@@ -24,4 +28,16 @@ export const getOptionList = (t: TFunction): OptionType[] => [
|
||||
selection: AlertTypes.EXCEPTIONS_BASED_ALERT,
|
||||
description: t('exceptions_based_alert_desc'),
|
||||
},
|
||||
];
|
||||
];
|
||||
|
||||
if (isAnomalyDetectionEnabled) {
|
||||
optionList.unshift({
|
||||
title: t('anomaly_based_alert'),
|
||||
selection: AlertTypes.ANOMALY_BASED_ALERT,
|
||||
description: t('anomaly_based_alert_desc'),
|
||||
isBeta: true,
|
||||
});
|
||||
}
|
||||
|
||||
return optionList;
|
||||
};
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import { Row, Typography } from 'antd';
|
||||
import { Row, Tag, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import useFeatureFlags from 'hooks/useFeatureFlag';
|
||||
import { useMemo } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||
@@ -12,11 +14,18 @@ import { OptionType } from './types';
|
||||
function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {
|
||||
const { t } = useTranslation(['alerts']);
|
||||
|
||||
const optionList = getOptionList(t);
|
||||
const isAnomalyDetectionEnabled =
|
||||
useFeatureFlags(FeatureKeys.ANOMALY_DETECTION)?.active || false;
|
||||
|
||||
const optionList = getOptionList(t, isAnomalyDetectionEnabled);
|
||||
|
||||
function handleRedirection(option: AlertTypes): void {
|
||||
let url = '';
|
||||
switch (option) {
|
||||
case AlertTypes.ANOMALY_BASED_ALERT:
|
||||
url =
|
||||
'https://signoz.io/docs/alerts-management/anomaly-based-alerts/?utm_source=product&utm_medium=alert-source-selection-page#examples';
|
||||
break;
|
||||
case AlertTypes.METRICS_BASED_ALERT:
|
||||
url =
|
||||
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-source-selection-page#examples';
|
||||
@@ -52,9 +61,17 @@ function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {
|
||||
<AlertTypeCard
|
||||
key={option.selection}
|
||||
title={option.title}
|
||||
extra={
|
||||
option.isBeta ? (
|
||||
<Tag bordered={false} color="geekblue">
|
||||
Beta
|
||||
</Tag>
|
||||
) : undefined
|
||||
}
|
||||
onClick={(): void => {
|
||||
onSelect(option.selection);
|
||||
}}
|
||||
data-testid={`alert-type-card-${option.selection}`}
|
||||
>
|
||||
{option.description}{' '}
|
||||
<Typography.Link
|
||||
|
||||
@@ -4,4 +4,5 @@ export interface OptionType {
|
||||
title: string;
|
||||
selection: AlertTypes;
|
||||
description: string;
|
||||
isBeta?: boolean;
|
||||
}
|
||||
|
||||
@@ -4,12 +4,15 @@ import {
|
||||
initialQueryPromQLData,
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { AlertDetectionTypes } from 'container/FormAlertRules';
|
||||
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||
import {
|
||||
AlertDef,
|
||||
defaultAlgorithm,
|
||||
defaultCompareOp,
|
||||
defaultEvalWindow,
|
||||
defaultMatchType,
|
||||
defaultSeasonality,
|
||||
} from 'types/api/alerts/def';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
|
||||
@@ -46,6 +49,51 @@ export const alertDefaults: AlertDef = {
|
||||
},
|
||||
op: defaultCompareOp,
|
||||
matchType: defaultMatchType,
|
||||
algorithm: defaultAlgorithm,
|
||||
seasonality: defaultSeasonality,
|
||||
},
|
||||
labels: {
|
||||
severity: 'warning',
|
||||
},
|
||||
annotations: defaultAnnotations,
|
||||
evalWindow: defaultEvalWindow,
|
||||
};
|
||||
|
||||
export const anamolyAlertDefaults: AlertDef = {
|
||||
alertType: AlertTypes.METRICS_BASED_ALERT,
|
||||
version: ENTITY_VERSION_V4,
|
||||
ruleType: AlertDetectionTypes.ANOMALY_DETECTION_ALERT,
|
||||
condition: {
|
||||
compositeQuery: {
|
||||
builderQueries: {
|
||||
A: {
|
||||
...initialQueryBuilderFormValuesMap.metrics,
|
||||
functions: [
|
||||
{
|
||||
name: 'anomaly',
|
||||
args: [],
|
||||
namedArgs: { z_score_threshold: 3 },
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
promQueries: { A: initialQueryPromQLData },
|
||||
chQueries: {
|
||||
A: {
|
||||
name: 'A',
|
||||
query: ``,
|
||||
legend: '',
|
||||
disabled: false,
|
||||
},
|
||||
},
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
unit: undefined,
|
||||
},
|
||||
op: defaultCompareOp,
|
||||
matchType: defaultMatchType,
|
||||
algorithm: defaultAlgorithm,
|
||||
seasonality: defaultSeasonality,
|
||||
},
|
||||
labels: {
|
||||
severity: 'warning',
|
||||
@@ -65,7 +113,7 @@ export const logAlertDefaults: AlertDef = {
|
||||
chQueries: {
|
||||
A: {
|
||||
name: 'A',
|
||||
query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
|
||||
query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs_v2 \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
|
||||
legend: '',
|
||||
disabled: false,
|
||||
},
|
||||
@@ -145,6 +193,7 @@ export const exceptionAlertDefaults: AlertDef = {
|
||||
};
|
||||
|
||||
export const ALERTS_VALUES_MAP: Record<AlertTypes, AlertDef> = {
|
||||
[AlertTypes.ANOMALY_BASED_ALERT]: anamolyAlertDefaults,
|
||||
[AlertTypes.METRICS_BASED_ALERT]: alertDefaults,
|
||||
[AlertTypes.LOGS_BASED_ALERT]: logAlertDefaults,
|
||||
[AlertTypes.TRACES_BASED_ALERT]: traceAlertDefaults,
|
||||
|
||||
@@ -2,7 +2,7 @@ import { Form, Row } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import FormAlertRules from 'container/FormAlertRules';
|
||||
import FormAlertRules, { AlertDetectionTypes } from 'container/FormAlertRules';
|
||||
import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam';
|
||||
import history from 'lib/history';
|
||||
import { useEffect, useState } from 'react';
|
||||
@@ -13,6 +13,7 @@ import { AlertDef } from 'types/api/alerts/def';
|
||||
import { ALERT_TYPE_VS_SOURCE_MAPPING } from './config';
|
||||
import {
|
||||
alertDefaults,
|
||||
anamolyAlertDefaults,
|
||||
exceptionAlertDefaults,
|
||||
logAlertDefaults,
|
||||
traceAlertDefaults,
|
||||
@@ -24,8 +25,12 @@ function CreateRules(): JSX.Element {
|
||||
|
||||
const location = useLocation();
|
||||
const queryParams = new URLSearchParams(location.search);
|
||||
const alertTypeFromURL = queryParams.get(QueryParams.ruleType);
|
||||
const version = queryParams.get('version');
|
||||
const alertTypeFromParams = queryParams.get(QueryParams.alertType);
|
||||
const alertTypeFromParams =
|
||||
alertTypeFromURL === AlertDetectionTypes.ANOMALY_DETECTION_ALERT
|
||||
? AlertTypes.ANOMALY_BASED_ALERT
|
||||
: queryParams.get(QueryParams.alertType);
|
||||
|
||||
const compositeQuery = useGetCompositeQueryParam();
|
||||
function getAlertTypeFromDataSource(): AlertTypes | null {
|
||||
@@ -45,6 +50,7 @@ function CreateRules(): JSX.Element {
|
||||
|
||||
const onSelectType = (typ: AlertTypes): void => {
|
||||
setAlertType(typ);
|
||||
|
||||
switch (typ) {
|
||||
case AlertTypes.LOGS_BASED_ALERT:
|
||||
setInitValues(logAlertDefaults);
|
||||
@@ -55,13 +61,40 @@ function CreateRules(): JSX.Element {
|
||||
case AlertTypes.EXCEPTIONS_BASED_ALERT:
|
||||
setInitValues(exceptionAlertDefaults);
|
||||
break;
|
||||
case AlertTypes.ANOMALY_BASED_ALERT:
|
||||
setInitValues({
|
||||
...anamolyAlertDefaults,
|
||||
version: version || ENTITY_VERSION_V4,
|
||||
ruleType: AlertDetectionTypes.ANOMALY_DETECTION_ALERT,
|
||||
});
|
||||
break;
|
||||
default:
|
||||
setInitValues({
|
||||
...alertDefaults,
|
||||
version: version || ENTITY_VERSION_V4,
|
||||
ruleType: AlertDetectionTypes.THRESHOLD_ALERT,
|
||||
});
|
||||
}
|
||||
queryParams.set(QueryParams.alertType, typ);
|
||||
|
||||
queryParams.set(
|
||||
QueryParams.alertType,
|
||||
typ === AlertTypes.ANOMALY_BASED_ALERT
|
||||
? AlertTypes.METRICS_BASED_ALERT
|
||||
: typ,
|
||||
);
|
||||
|
||||
if (
|
||||
typ === AlertTypes.ANOMALY_BASED_ALERT ||
|
||||
alertTypeFromURL === AlertDetectionTypes.ANOMALY_DETECTION_ALERT
|
||||
) {
|
||||
queryParams.set(
|
||||
QueryParams.ruleType,
|
||||
AlertDetectionTypes.ANOMALY_DETECTION_ALERT,
|
||||
);
|
||||
} else {
|
||||
queryParams.set(QueryParams.ruleType, AlertDetectionTypes.THRESHOLD_ALERT);
|
||||
}
|
||||
|
||||
const generatedUrl = `${location.pathname}?${queryParams.toString()}`;
|
||||
history.replace(generatedUrl);
|
||||
};
|
||||
|
||||
@@ -7,7 +7,6 @@ function EditRules({ initialValue, ruleId }: EditRulesProps): JSX.Element {
|
||||
const [formInstance] = Form.useForm();
|
||||
|
||||
return (
|
||||
<div style={{ marginTop: '1rem' }}>
|
||||
<FormAlertRules
|
||||
alertType={
|
||||
initialValue.alertType
|
||||
@@ -18,7 +17,6 @@ function EditRules({ initialValue, ruleId }: EditRulesProps): JSX.Element {
|
||||
initialValue={initialValue}
|
||||
ruleId={ruleId}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
padding: 10px 12px;
|
||||
padding: 10px 10px;
|
||||
border-radius: 50px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: rgba(22, 24, 29, 0.6);
|
||||
@@ -33,6 +33,7 @@
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: var(--bg-slate-500);
|
||||
cursor: pointer;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
.hidden {
|
||||
|
||||
@@ -24,6 +24,9 @@ import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import ROUTES from 'constants/routes';
|
||||
import ExportPanelContainer from 'container/ExportPanel/ExportPanelContainer';
|
||||
import { useOptionsMenu } from 'container/OptionsMenu';
|
||||
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
|
||||
import { OptionsQuery } from 'container/OptionsMenu/types';
|
||||
import { useGetSearchQueryParam } from 'hooks/queryBuilder/useGetSearchQueryParam';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useGetAllViews } from 'hooks/saveViews/useGetAllViews';
|
||||
@@ -34,7 +37,7 @@ import useErrorNotification from 'hooks/useErrorNotification';
|
||||
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { cloneDeep, isEqual } from 'lodash-es';
|
||||
import {
|
||||
Check,
|
||||
ConciergeBell,
|
||||
@@ -42,7 +45,6 @@ import {
|
||||
PanelBottomClose,
|
||||
Plus,
|
||||
X,
|
||||
XCircle,
|
||||
} from 'lucide-react';
|
||||
import {
|
||||
CSSProperties,
|
||||
@@ -58,7 +60,9 @@ import { useSelector } from 'react-redux';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { ViewProps } from 'types/api/saveViews/types';
|
||||
import { DataSource, StringOperators } from 'types/common/queryBuilder';
|
||||
import AppReducer from 'types/reducer/app';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
@@ -252,6 +256,46 @@ function ExplorerOptions({
|
||||
|
||||
const { handleExplorerTabChange } = useHandleExplorerTabChange();
|
||||
|
||||
const { options, handleOptionsChange } = useOptionsMenu({
|
||||
storageKey: LOCALSTORAGE.TRACES_LIST_OPTIONS,
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateOperator: StringOperators.NOOP,
|
||||
});
|
||||
|
||||
type ExtraData = {
|
||||
selectColumns?: BaseAutocompleteData[];
|
||||
};
|
||||
|
||||
const updateOrRestoreSelectColumns = (
|
||||
key: string,
|
||||
allViewsData: ViewProps[] | undefined,
|
||||
options: OptionsQuery,
|
||||
handleOptionsChange: (newQueryData: OptionsQuery) => void,
|
||||
): void => {
|
||||
const currentViewDetails = getViewDetailsUsingViewKey(key, allViewsData);
|
||||
if (!currentViewDetails) {
|
||||
return;
|
||||
}
|
||||
|
||||
let extraData: ExtraData = {};
|
||||
try {
|
||||
extraData = JSON.parse(currentViewDetails?.extraData ?? '{}') as ExtraData;
|
||||
} catch (error) {
|
||||
console.error('Error parsing extraData:', error);
|
||||
}
|
||||
|
||||
if (extraData.selectColumns?.length) {
|
||||
handleOptionsChange({
|
||||
...options,
|
||||
selectColumns: extraData.selectColumns,
|
||||
});
|
||||
} else if (!isEqual(defaultTraceSelectedColumns, options.selectColumns)) {
|
||||
handleOptionsChange({
|
||||
...options,
|
||||
selectColumns: defaultTraceSelectedColumns,
|
||||
});
|
||||
}
|
||||
};
|
||||
const onMenuItemSelectHandler = useCallback(
|
||||
({ key }: { key: string }): void => {
|
||||
const currentViewDetails = getViewDetailsUsingViewKey(
|
||||
@@ -321,6 +365,13 @@ function ExplorerOptions({
|
||||
|
||||
updatePreservedViewInLocalStorage(option);
|
||||
|
||||
updateOrRestoreSelectColumns(
|
||||
option.key,
|
||||
viewsData?.data?.data,
|
||||
options,
|
||||
handleOptionsChange,
|
||||
);
|
||||
|
||||
if (ref.current) {
|
||||
ref.current.blur();
|
||||
}
|
||||
@@ -360,14 +411,20 @@ function ExplorerOptions({
|
||||
viewName: newViewName || '',
|
||||
compositeQuery,
|
||||
sourcePage: sourcepage,
|
||||
extraData: JSON.stringify({ color }),
|
||||
extraData: JSON.stringify({
|
||||
color,
|
||||
selectColumns: options.selectColumns,
|
||||
}),
|
||||
});
|
||||
|
||||
const onSaveHandler = (): void => {
|
||||
saveNewViewHandler({
|
||||
compositeQuery,
|
||||
handlePopOverClose: hideSaveViewModal,
|
||||
extraData: JSON.stringify({ color }),
|
||||
extraData: JSON.stringify({
|
||||
color,
|
||||
selectColumns: options.selectColumns,
|
||||
}),
|
||||
notifications,
|
||||
panelType: panelType || PANEL_TYPES.LIST,
|
||||
redirectWithQueryBuilderData,
|
||||
@@ -457,7 +514,11 @@ function ExplorerOptions({
|
||||
|
||||
return (
|
||||
<div className="explorer-options-container">
|
||||
{isQueryUpdated && !isExplorerOptionHidden && (
|
||||
{
|
||||
// if a viewName is selected and the explorer options are not hidden then
|
||||
// always show the clear option
|
||||
}
|
||||
{!isExplorerOptionHidden && viewName && (
|
||||
<div
|
||||
className={cx(
|
||||
isEditDeleteSupported ? '' : 'hide-update',
|
||||
@@ -471,6 +532,11 @@ function ExplorerOptions({
|
||||
icon={<X size={14} />}
|
||||
/>
|
||||
</Tooltip>
|
||||
{
|
||||
// only show the update view option when the query is updated
|
||||
}
|
||||
{isQueryUpdated && (
|
||||
<>
|
||||
<Divider
|
||||
type="vertical"
|
||||
className={isEditDeleteSupported ? '' : 'hidden'}
|
||||
@@ -483,6 +549,8 @@ function ExplorerOptions({
|
||||
icon={<Disc3 size={14} />}
|
||||
/>
|
||||
</Tooltip>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{!isExplorerOptionHidden && (
|
||||
@@ -506,10 +574,7 @@ function ExplorerOptions({
|
||||
}}
|
||||
dropdownStyle={dropdownStyle}
|
||||
className="views-dropdown"
|
||||
allowClear={{
|
||||
clearIcon: <XCircle size={16} style={{ marginTop: '-3px' }} />,
|
||||
}}
|
||||
onClear={handleClearSelect}
|
||||
allowClear={false}
|
||||
ref={ref}
|
||||
>
|
||||
{viewsData?.data?.data?.map((view) => {
|
||||
@@ -604,8 +669,8 @@ function ExplorerOptions({
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<ExplorerOptionsHideArea
|
||||
viewName={viewName}
|
||||
isExplorerOptionHidden={isExplorerOptionHidden}
|
||||
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
|
||||
sourcepage={sourcepage}
|
||||
@@ -614,7 +679,6 @@ function ExplorerOptions({
|
||||
onUpdateQueryHandler={onUpdateQueryHandler}
|
||||
isEditDeleteSupported={isEditDeleteSupported}
|
||||
/>
|
||||
|
||||
<Modal
|
||||
className="save-view-modal"
|
||||
title={<span className="title">Save this view</span>}
|
||||
@@ -647,7 +711,6 @@ function ExplorerOptions({
|
||||
/>
|
||||
</div>
|
||||
</Modal>
|
||||
|
||||
<Modal
|
||||
footer={null}
|
||||
onOk={onCancel(false)}
|
||||
|
||||
@@ -10,6 +10,7 @@ import { DataSource } from 'types/common/queryBuilder';
|
||||
import { setExplorerToolBarVisibility } from './utils';
|
||||
|
||||
interface DroppableAreaProps {
|
||||
viewName: string;
|
||||
isQueryUpdated: boolean;
|
||||
isExplorerOptionHidden?: boolean;
|
||||
sourcepage: DataSource;
|
||||
@@ -20,6 +21,7 @@ interface DroppableAreaProps {
|
||||
}
|
||||
|
||||
function ExplorerOptionsHideArea({
|
||||
viewName,
|
||||
isQueryUpdated,
|
||||
isExplorerOptionHidden,
|
||||
sourcepage,
|
||||
@@ -39,7 +41,7 @@ function ExplorerOptionsHideArea({
|
||||
<div className="explorer-option-droppable-container">
|
||||
{isExplorerOptionHidden && (
|
||||
<>
|
||||
{isQueryUpdated && (
|
||||
{viewName && (
|
||||
<div className="explorer-actions-btn">
|
||||
<Tooltip title="Clear this view">
|
||||
<Button
|
||||
@@ -49,7 +51,7 @@ function ExplorerOptionsHideArea({
|
||||
icon={<X size={14} color={Color.BG_INK_500} />}
|
||||
/>
|
||||
</Tooltip>
|
||||
{isEditDeleteSupported && (
|
||||
{isEditDeleteSupported && isQueryUpdated && (
|
||||
<Tooltip title="Update this View">
|
||||
<Button
|
||||
onClick={onUpdateQueryHandler}
|
||||
|
||||
@@ -96,7 +96,7 @@ function BasicInfo({
|
||||
|
||||
return (
|
||||
<>
|
||||
<StepHeading> {t('alert_form_step3')} </StepHeading>
|
||||
<StepHeading> {t('alert_form_step4')} </StepHeading>
|
||||
<FormContainer>
|
||||
<Form.Item
|
||||
label={t('field_severity')}
|
||||
@@ -189,6 +189,7 @@ function BasicInfo({
|
||||
checked={shouldBroadCastToAllChannels}
|
||||
onChange={handleBroadcastToAllChannels}
|
||||
disabled={noChannels || !!channels.loading}
|
||||
data-testid="alert-broadcast-to-all-channels"
|
||||
/>
|
||||
</Tooltip>
|
||||
</FormItemMedium>
|
||||
|
||||
@@ -63,6 +63,7 @@ function ChannelSelect({
|
||||
mode="multiple"
|
||||
style={{ width: '100%' }}
|
||||
placeholder={t('placeholder_channel_select')}
|
||||
data-testid="alert-channel-select"
|
||||
value={currentValue}
|
||||
onChange={(value): void => {
|
||||
handleChange(value as string[]);
|
||||
|
||||
@@ -0,0 +1,26 @@
.alert-chart-container {
height: 57vh;
width: 100%;

.threshold-alert-uplot-chart-container {
height: calc(100% - 24px);
}

.ant-card-body {
padding: 12px;
}

.anomaly-alert-evaluation-view-loading-container {
height: 100%;
display: flex;
justify-content: center;
align-items: center;
}

.anomaly-alert-evaluation-view-error-container {
height: 100%;
display: flex;
justify-content: center;
align-items: center;
}
}
@@ -1,8 +1,12 @@
import './ChartPreview.styles.scss';

import { InfoCircleOutlined } from '@ant-design/icons';
import Spinner from 'components/Spinner';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import AnomalyAlertEvaluationView from 'container/AnomalyAlertEvaluationView';
import GridPanelSwitch from 'container/GridPanelSwitch';
import { getFormatNameByOptionId } from 'container/NewWidget/RightContainer/alertFomatCategories';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
@@ -14,6 +18,7 @@ import {
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import useFeatureFlags from 'hooks/useFeatureFlag';
import useUrlQuery from 'hooks/useUrlQuery';
import GetMinMax from 'lib/getMinMax';
import getTimeString from 'lib/getTimeString';
@@ -34,6 +39,7 @@ import { getGraphType } from 'utils/getGraphType';
import { getSortedSeriesData } from 'utils/getSortedSeriesData';
import { getTimeRange } from 'utils/getTimeRange';

import { AlertDetectionTypes } from '..';
import { ChartContainer, FailedMessageContainer } from './styles';
import { getThresholdLabel } from './utils';

@@ -141,6 +147,7 @@ function ChartPreview({
selectedInterval,
minTime,
maxTime,
alertDef?.ruleType,
],
retry: false,
enabled: canQuery,
@@ -163,8 +170,6 @@ function ChartPreview({
queryResponse.data.payload.data.result = sortedSeriesData;
}

const chartData = getUPlotChartData(queryResponse?.data?.payload);

const containerDimensions = useResizeObserver(graphRef);

const isDarkMode = useIsDarkMode();
@@ -202,7 +207,10 @@ function ChartPreview({
id: 'alert_legend_widget',
yAxisUnit,
apiResponse: queryResponse?.data?.payload,
dimensions: containerDimensions,
dimensions: {
height: containerDimensions?.height ? containerDimensions.height - 48 : 0,
width: containerDimensions?.width,
},
minTimeScale,
maxTimeScale,
isDarkMode,
@@ -245,22 +253,34 @@ function ChartPreview({
],
);

const chartData = getUPlotChartData(queryResponse?.data?.payload);

const isAnomalyDetectionAlert =
alertDef?.ruleType === AlertDetectionTypes.ANOMALY_DETECTION_ALERT;

const chartDataAvailable =
chartData && !queryResponse.isError && !queryResponse.isLoading;

const isAnomalyDetectionEnabled =
useFeatureFlags(FeatureKeys.ANOMALY_DETECTION)?.active || false;

return (
<div className="alert-chart-container" ref={graphRef}>
<ChartContainer>
{headline}

<div ref={graphRef} style={{ height: '100%' }}>
<div className="threshold-alert-uplot-chart-container">
{queryResponse.isLoading && (
<Spinner size="large" tip="Loading..." height="100%" />
)}
{(queryResponse?.isError || queryResponse?.error) && (
<FailedMessageContainer color="red" title="Failed to refresh the chart">
<InfoCircleOutlined />{' '}
<InfoCircleOutlined />
{queryResponse.error.message || t('preview_chart_unexpected_error')}
</FailedMessageContainer>
)}

{chartData && !queryResponse.isError && !queryResponse.isLoading && (
{chartDataAvailable && !isAnomalyDetectionAlert && (
<GridPanelSwitch
options={options}
panelType={graphType}
@@ -273,8 +293,19 @@ function ChartPreview({
yAxisUnit={yAxisUnit}
/>
)}

{chartDataAvailable &&
isAnomalyDetectionAlert &&
isAnomalyDetectionEnabled &&
queryResponse?.data?.payload?.data?.resultType === 'anomaly' && (
<AnomalyAlertEvaluationView
data={queryResponse?.data?.payload}
yAxisUnit={yAxisUnit}
/>
)}
</div>
</ChartContainer>
</div>
);
}

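Reviewer note: the preview now renders one of two panels. A minimal sketch of the selection logic, assuming the same flags computed in ChartPreview above (the helper itself is hypothetical and not part of this diff):

```tsx
// Hypothetical helper restating the JSX conditions added to ChartPreview.
type PreviewKind = 'threshold-chart' | 'anomaly-evaluation' | 'none';

function pickPreview(
	chartDataAvailable: boolean,
	isAnomalyDetectionAlert: boolean,
	isAnomalyDetectionEnabled: boolean,
	resultType?: string,
): PreviewKind {
	// No data, still loading, or errored: neither panel is shown.
	if (!chartDataAvailable) return 'none';
	// Threshold rules keep the existing GridPanelSwitch preview.
	if (!isAnomalyDetectionAlert) return 'threshold-chart';
	// Anomaly rules also need the ANOMALY_DETECTION feature flag and an
	// 'anomaly' resultType in the query-range payload.
	return isAnomalyDetectionEnabled && resultType === 'anomaly'
		? 'anomaly-evaluation'
		: 'none';
}
```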
@@ -21,6 +21,70 @@
}
}

.steps-container {
width: 80%;
}

.qb-chart-preview-container {
margin-bottom: 1rem;
display: flex;
flex-direction: row;
gap: 1rem;
}

.overview-header {
margin-bottom: 1rem;
display: flex;
justify-content: space-between;
align-items: center;

.alert-type-container {
display: flex;
flex-direction: row;
align-items: center;
gap: 8px;

.alert-type-title {
display: flex;
flex-direction: row;
align-items: center;
gap: 8px;
}

.ant-typography {
margin: 0;
}
}
}

.chart-preview-container {
position: relative;
display: flex;
flex-direction: row;
gap: 1rem;

.ant-card {
flex: 1;
}
}

.detection-method-container {
margin: 24px 0;

.ant-tabs-nav {
margin-bottom: 0;

.ant-tabs-tab {
padding: 12px 0;
}
}

.detection-method-description {
padding: 8px 0;
font-size: 12px;
}
}

.info-help-btns {
display: grid;
grid-template-columns: auto auto;

@@ -99,7 +99,7 @@ function QuerySection({
{
label: (
<Tooltip title="Query Builder">
<Button className="nav-btns">
<Button className="nav-btns" data-testid="query-builder-tab">
<Atom size={14} />
</Button>
</Tooltip>
@@ -222,7 +222,7 @@ function QuerySection({
};
return (
<>
<StepHeading> {t('alert_form_step1')}</StepHeading>
<StepHeading> {t('alert_form_step2')}</StepHeading>
<FormContainer>
<div>{renderTabs(alertType)}</div>
{renderQuerySection(currentTab)}

@@ -0,0 +1,6 @@
.rule-definition {
display: flex;
flex-wrap: wrap;
gap: 8px;
align-items: center;
}
@@ -1,3 +1,5 @@
import './RuleOptions.styles.scss';

import {
Checkbox,
Collapse,
@@ -18,14 +20,17 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useTranslation } from 'react-i18next';
import {
AlertDef,
defaultAlgorithm,
defaultCompareOp,
defaultEvalWindow,
defaultFrequency,
defaultMatchType,
defaultSeasonality,
} from 'types/api/alerts/def';
import { EQueryType } from 'types/common/dashboard';
import { popupContainer } from 'utils/selectPopupContainer';

import { AlertDetectionTypes } from '.';
import {
FormContainer,
InlineSelect,
@@ -43,6 +48,8 @@ function RuleOptions({
const { t } = useTranslation('alerts');
const { currentQuery } = useQueryBuilder();

const { ruleType } = alertDef;

const handleMatchOptChange = (value: string | unknown): void => {
const m = (value as string) || alertDef.condition?.matchType;
setAlertDef({
@@ -86,8 +93,19 @@ function RuleOptions({
>
<Select.Option value="1">{t('option_above')}</Select.Option>
<Select.Option value="2">{t('option_below')}</Select.Option>

{/* hide equal and not equal in case of anomaly based alert */}

{ruleType !== 'anomaly_rule' && (
<>
<Select.Option value="3">{t('option_equal')}</Select.Option>
<Select.Option value="4">{t('option_notequal')}</Select.Option>
</>
)}

{ruleType === 'anomaly_rule' && (
<Select.Option value="5">{t('option_above_below')}</Select.Option>
)}
</InlineSelect>
);

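For quick reference, the compare-operator option values used in this select map roughly as follows; the English labels are assumptions read off the i18n keys, not the actual translation strings:

```tsx
// Assumed meaning of the compare-operator option values in RuleOptions.
// '3' and '4' are hidden for anomaly rules; '5' is only offered for them.
const compareOpLabels: Record<string, string> = {
	'1': 'above', // t('option_above')
	'2': 'below', // t('option_below')
	'3': 'equal to', // t('option_equal') – threshold rules only
	'4': 'not equal to', // t('option_notequal') – threshold rules only
	'5': 'above or below', // t('option_above_below') – anomaly rules only
};
```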
@@ -101,8 +119,14 @@ function RuleOptions({
>
<Select.Option value="1">{t('option_atleastonce')}</Select.Option>
<Select.Option value="2">{t('option_allthetimes')}</Select.Option>

{ruleType !== 'anomaly_rule' && (
<>
<Select.Option value="3">{t('option_onaverage')}</Select.Option>
<Select.Option value="4">{t('option_intotal')}</Select.Option>
<Select.Option value="5">{t('option_last')}</Select.Option>
</>
)}
</InlineSelect>
);

@@ -114,6 +138,37 @@ function RuleOptions({
});
};

const onChangeAlgorithm = (value: string | unknown): void => {
const alg = (value as string) || alertDef.condition.algorithm;
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
algorithm: alg,
},
});
};

const onChangeSeasonality = (value: string | unknown): void => {
const seasonality = (value as string) || alertDef.condition.seasonality;
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
seasonality,
},
});
};

const onChangeDeviation = (value: number): void => {
const target = value || alertDef.condition.target || 3;

setAlertDef({
...alertDef,
condition: { ...alertDef.condition, target: Number(target) },
});
};

const renderEvalWindows = (): JSX.Element => (
<InlineSelect
getPopupContainer={popupContainer}
@@ -145,6 +200,54 @@ function RuleOptions({
</InlineSelect>
);

const renderAlgorithms = (): JSX.Element => (
<InlineSelect
getPopupContainer={popupContainer}
defaultValue={defaultAlgorithm}
style={{ minWidth: '120px' }}
value={alertDef.condition.algorithm}
onChange={onChangeAlgorithm}
>
<Select.Option value="standard">Standard</Select.Option>
</InlineSelect>
);

const renderDeviationOpts = (): JSX.Element => (
<InlineSelect
getPopupContainer={popupContainer}
defaultValue={3}
style={{ minWidth: '120px' }}
value={alertDef.condition.target}
onChange={(value: number | unknown): void => {
if (typeof value === 'number') {
onChangeDeviation(value);
}
}}
>
<Select.Option value={1}>1</Select.Option>
<Select.Option value={2}>2</Select.Option>
<Select.Option value={3}>3</Select.Option>
<Select.Option value={4}>4</Select.Option>
<Select.Option value={5}>5</Select.Option>
<Select.Option value={6}>6</Select.Option>
<Select.Option value={7}>7</Select.Option>
</InlineSelect>
);

const renderSeasonality = (): JSX.Element => (
<InlineSelect
getPopupContainer={popupContainer}
defaultValue={defaultSeasonality}
style={{ minWidth: '120px' }}
value={alertDef.condition.seasonality}
onChange={onChangeSeasonality}
>
<Select.Option value="hourly">Hourly</Select.Option>
<Select.Option value="daily">Daily</Select.Option>
<Select.Option value="weekly">Weekly</Select.Option>
</InlineSelect>
);

const renderThresholdRuleOpts = (): JSX.Element => (
<Form.Item>
<Typography.Text>
@@ -215,6 +318,32 @@ function RuleOptions({
});
};

const renderAnomalyRuleOpts = (): JSX.Element => (
<Form.Item>
<Typography.Text className="rule-definition">
{t('text_condition1_anomaly')}
<InlineSelect
getPopupContainer={popupContainer}
allowClear
showSearch
options={queryOptions}
placeholder={t('selected_query_placeholder')}
value={alertDef.condition.selectedQueryName}
onChange={onChangeSelectedQueryName}
/>
{t('text_condition3')} {renderEvalWindows()}
<Typography.Text>is</Typography.Text>
{renderDeviationOpts()}
<Typography.Text>deviations</Typography.Text>
{renderCompareOps()}
<Typography.Text>the predicted data</Typography.Text>
{renderMatchOpts()}
using the {renderAlgorithms()} algorithm with {renderSeasonality()}{' '}
seasonality
</Typography.Text>
</Form.Item>
);

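Taken together, the selects in renderAnomalyRuleOpts populate the rule's condition. An illustrative value, assuming the defaults visible above (the concrete numbers and strings are only an example, not asserted defaults):

```tsx
// Example condition built by the anomaly rule options above.
const exampleAnomalyCondition = {
	selectedQueryName: 'A', // query the rule evaluates
	target: 3, // deviations from the predicted data (renderDeviationOpts)
	op: '5', // above or below (renderCompareOps)
	matchType: '1', // at least once (renderMatchOpts)
	algorithm: 'standard', // renderAlgorithms
	seasonality: 'hourly', // renderSeasonality
};
```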
const renderFrequency = (): JSX.Element => (
<InlineSelect
getPopupContainer={popupContainer}
@@ -244,13 +373,21 @@ function RuleOptions({

return (
<>
<StepHeading>{t('alert_form_step2')}</StepHeading>
<StepHeading>{t('alert_form_step3')}</StepHeading>
<FormContainer>
{queryCategory === EQueryType.PROM
? renderPromRuleOptions()
: renderThresholdRuleOpts()}
{queryCategory === EQueryType.PROM && renderPromRuleOptions()}
{queryCategory !== EQueryType.PROM &&
ruleType === AlertDetectionTypes.ANOMALY_DETECTION_ALERT && (
<>{renderAnomalyRuleOpts()}</>
)}

{queryCategory !== EQueryType.PROM &&
ruleType === AlertDetectionTypes.THRESHOLD_ALERT &&
renderThresholdRuleOpts()}

<Space direction="vertical" size="large">
{queryCategory !== EQueryType.PROM &&
ruleType !== AlertDetectionTypes.ANOMALY_DETECTION_ALERT && (
<Space direction="horizontal" align="center">
<Form.Item noStyle name={['condition', 'target']}>
<InputNumber
@@ -274,6 +411,8 @@ function RuleOptions({
/>
</Form.Item>
</Space>
)}

<Collapse>
<Collapse.Panel header={t('More options')} key="1">
<Space direction="vertical" size="large">
@@ -322,6 +461,45 @@ function RuleOptions({
<Typography.Text>{t('text_for')}</Typography.Text>
</Space>
</VerticalLine>

<VerticalLine>
<Space direction="horizontal" align="center">
<Form.Item noStyle name={['condition', 'requireMinPoints']}>
<Checkbox
checked={alertDef?.condition?.requireMinPoints}
onChange={(e): void => {
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
requireMinPoints: e.target.checked,
},
});
}}
/>
</Form.Item>
<Typography.Text>{t('text_require_min_points')}</Typography.Text>

<Form.Item noStyle name={['condition', 'requiredNumPoints']}>
<InputNumber
min={1}
value={alertDef?.condition?.requiredNumPoints}
onChange={(value): void => {
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
requiredNumPoints: Number(value) || 0,
},
});
}}
type="number"
onWheel={(e): void => e.currentTarget.blur()}
/>
</Form.Item>
<Typography.Text>{t('text_num_points')}</Typography.Text>
</Space>
</VerticalLine>
</Space>
</Collapse.Panel>
</Collapse>

@@ -3,7 +3,6 @@ import './FormAlertRules.styles.scss';
import { ExclamationCircleOutlined, SaveOutlined } from '@ant-design/icons';
import {
Button,
Col,
FormInstance,
Modal,
SelectProps,
@@ -13,8 +12,6 @@ import {
import saveAlertApi from 'api/alerts/save';
import testAlertApi from 'api/alerts/testAlert';
import logEvent from 'api/common/logEvent';
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
import { alertHelpMessage } from 'components/LaunchChatSupport/util';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
@@ -26,17 +23,23 @@ import PlotTag from 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag';
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { MESSAGE, useIsFeatureDisabled } from 'hooks/useFeatureFlag';
import useFeatureFlag, {
MESSAGE,
useIsFeatureDisabled,
} from 'hooks/useFeatureFlag';
import { useNotifications } from 'hooks/useNotifications';
import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { mapQueryDataToApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi';
import { isEqual } from 'lodash-es';
import { BellDot, ExternalLink } from 'lucide-react';
import Tabs2 from 'periscope/components/Tabs2';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useQueryClient } from 'react-query';
import { useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
import { AppState } from 'store/reducers';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import {
@@ -44,7 +47,11 @@ import {
defaultEvalWindow,
defaultMatchType,
} from 'types/api/alerts/def';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import {
IBuilderQuery,
Query,
QueryFunctionProps,
} from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { GlobalReducer } from 'types/reducer/globalTime';

@@ -56,13 +63,16 @@ import {
ActionButton,
ButtonContainer,
MainFormContainer,
PanelContainer,
StepContainer,
StyledLeftContainer,
StepHeading,
} from './styles';
import UserGuide from './UserGuide';
import { getSelectedQueryOptions } from './utils';

export enum AlertDetectionTypes {
THRESHOLD_ALERT = 'threshold_rule',
ANOMALY_DETECTION_ALERT = 'anomaly_rule',
}

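Because the rule type is also carried in the URL (see the effects further down), a link can preselect the detection method. An illustrative example; the path is an assumption, only the query parameter and enum value come from the diff:

```tsx
// Open the alert form with anomaly detection preselected.
// FormAlertRules reads this back via urlQuery.get(QueryParams.ruleType).
const newAnomalyAlertUrl = `/alerts/new?ruleType=${AlertDetectionTypes.ANOMALY_DETECTION_ALERT}`;
// => '/alerts/new?ruleType=anomaly_rule'
```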
// eslint-disable-next-line sonarjs/cognitive-complexity
function FormAlertRules({
alertType,
@@ -79,6 +89,8 @@ function FormAlertRules({
>((state) => state.globalTime);

const urlQuery = useUrlQuery();
const location = useLocation();
const queryParams = new URLSearchParams(location.search);

// In case of alert the panel types should always be "Graph" only
const panelType = PANEL_TYPES.TIME_SERIES;
@@ -86,6 +98,7 @@ function FormAlertRules({
const {
currentQuery,
stagedQuery,
handleSetQueryData,
handleRunQuery,
handleSetConfig,
initialDataSource,
@@ -108,6 +121,10 @@ function FormAlertRules({
const [alertDef, setAlertDef] = useState<AlertDef>(initialValue);
const [yAxisUnit, setYAxisUnit] = useState<string>(currentQuery.unit || '');

const alertTypeFromURL = urlQuery.get(QueryParams.ruleType);

const [detectionMethod, setDetectionMethod] = useState<string | null>(null);

useEffect(() => {
if (!isEqual(currentQuery.unit, yAxisUnit)) {
setYAxisUnit(currentQuery.unit || '');
@@ -138,6 +155,89 @@ function FormAlertRules({

useShareBuilderUrl(sq);

const handleDetectionMethodChange = (value: string): void => {
setAlertDef((def) => ({
...def,
ruleType: value,
}));

logEvent(`Alert: Detection method changed`, {
detectionMethod: value,
});

setDetectionMethod(value);
};

const updateFunctions = (data: IBuilderQuery): QueryFunctionProps[] => {
const anomalyFunction = {
name: 'anomaly',
args: [],
namedArgs: { z_score_threshold: alertDef.condition.target || 3 },
};
const functions = data.functions || [];

if (alertDef.ruleType === AlertDetectionTypes.ANOMALY_DETECTION_ALERT) {
// Add anomaly if not already present
if (!functions.some((func) => func.name === 'anomaly')) {
functions.push(anomalyFunction);
} else {
const anomalyFuncIndex = functions.findIndex(
(func) => func.name === 'anomaly',
);

if (anomalyFuncIndex !== -1) {
const anomalyFunc = {
...functions[anomalyFuncIndex],
namedArgs: { z_score_threshold: alertDef.condition.target || 3 },
};
functions.splice(anomalyFuncIndex, 1);
functions.push(anomalyFunc);
}
}
} else {
// Remove anomaly if present
const index = functions.findIndex((func) => func.name === 'anomaly');
if (index !== -1) {
functions.splice(index, 1);
}
}

return functions;
};

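A sketch of what updateFunctions returns for a single builder query when the anomaly method is selected and condition.target is 2; switching back to a threshold rule removes the entry again. The values are illustrative, the shape mirrors the object literal built above:

```tsx
// Functions array produced for an anomaly rule.
const functionsForAnomalyRule = [
	// ...functions already configured on the query are kept in place...
	{ name: 'anomaly', args: [], namedArgs: { z_score_threshold: 2 } },
];
```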
useEffect(() => {
const ruleType =
detectionMethod === AlertDetectionTypes.ANOMALY_DETECTION_ALERT
? AlertDetectionTypes.ANOMALY_DETECTION_ALERT
: AlertDetectionTypes.THRESHOLD_ALERT;

queryParams.set(QueryParams.ruleType, ruleType);

const generatedUrl = `${location.pathname}?${queryParams.toString()}`;

history.replace(generatedUrl);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [detectionMethod]);

const updateFunctionsBasedOnAlertType = (): void => {
for (let index = 0; index < currentQuery.builder.queryData.length; index++) {
const queryData = currentQuery.builder.queryData[index];

const updatedFunctions = updateFunctions(queryData);
queryData.functions = updatedFunctions;
handleSetQueryData(index, queryData);
}
};

useEffect(() => {
updateFunctionsBasedOnAlertType();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
detectionMethod,
alertDef.condition.target,
currentQuery.builder.queryData.length,
]);

useEffect(() => {
const broadcastToSpecificChannels =
(initialValue &&
@@ -145,10 +245,22 @@ function FormAlertRules({
initialValue.preferredChannels.length > 0) ||
isNewRule;

let ruleType = AlertDetectionTypes.THRESHOLD_ALERT;

if (initialValue.ruleType) {
ruleType = initialValue.ruleType as AlertDetectionTypes;
} else if (alertTypeFromURL === AlertDetectionTypes.ANOMALY_DETECTION_ALERT) {
ruleType = AlertDetectionTypes.ANOMALY_DETECTION_ALERT;
}

setAlertDef({
...initialValue,
broadcastToAll: !broadcastToSpecificChannels,
ruleType,
});

setDetectionMethod(ruleType);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [initialValue, isNewRule]);

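The initialisation effect above resolves the detection method in a fixed order. A hypothetical pure-function restatement of that order, assuming the same enum (not part of the diff):

```tsx
// Saved ruleType wins, then the ?ruleType= URL parameter, then the default.
function resolveInitialRuleType(
	savedRuleType: string | undefined,
	ruleTypeFromURL: string | null,
): AlertDetectionTypes {
	if (savedRuleType) return savedRuleType as AlertDetectionTypes;
	if (ruleTypeFromURL === AlertDetectionTypes.ANOMALY_DETECTION_ALERT) {
		return AlertDetectionTypes.ANOMALY_DETECTION_ALERT;
	}
	return AlertDetectionTypes.THRESHOLD_ALERT;
}
```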
useEffect(() => {
@@ -269,7 +381,11 @@ function FormAlertRules({
return false;
}

if (alertDef.condition?.target !== 0 && !alertDef.condition?.target) {
if (
alertDef.ruleType !== AlertDetectionTypes.ANOMALY_DETECTION_ALERT &&
alertDef.condition?.target !== 0 &&
!alertDef.condition?.target
) {
notifications.error({
message: 'Error',
description: t('target_missing'),
@@ -300,12 +416,15 @@ function FormAlertRules({
const postableAlert: AlertDef = {
...alertDef,
preferredChannels: alertDef.broadcastToAll ? [] : alertDef.preferredChannels,
alertType,
alertType:
alertType === AlertTypes.ANOMALY_BASED_ALERT
? AlertTypes.METRICS_BASED_ALERT
: alertType,
source: window?.location.toString(),
ruleType:
currentQuery.queryType === EQueryType.PROM
? 'promql_rule'
: 'threshold_rule',
: alertDef.ruleType,
condition: {
...alertDef.condition,
compositeQuery: {
@@ -322,6 +441,12 @@ function FormAlertRules({
},
},
};

if (alertDef.ruleType === AlertDetectionTypes.ANOMALY_DETECTION_ALERT) {
postableAlert.condition.algorithm = alertDef.condition.algorithm;
postableAlert.condition.seasonality = alertDef.condition.seasonality;
}

return postableAlert;
};

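preparePostData therefore serialises an anomaly rule roughly as below; the field values are illustrative and only the mappings visible in the diff are asserted:

```tsx
// Partial postable alert for an anomaly rule (illustrative sketch).
const examplePostableAlert = {
	alert: 'Request rate anomaly', // example rule name
	// The UI-only ANOMALY_BASED_ALERT type is persisted as a metrics alert.
	alertType: AlertTypes.METRICS_BASED_ALERT,
	// ruleType comes from alertDef instead of being hard-coded to 'threshold_rule'.
	ruleType: AlertDetectionTypes.ANOMALY_DETECTION_ALERT, // 'anomaly_rule'
	condition: {
		target: 3,
		algorithm: 'standard', // copied only for anomaly rules
		seasonality: 'daily', // copied only for anomaly rules
		// compositeQuery: { ... } built from the current query
	},
};
```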
@@ -370,7 +495,10 @@ function FormAlertRules({
});

// invalidate rule in cache
ruleCache.invalidateQueries([REACT_QUERY_KEY.ALERT_RULE_DETAILS, ruleId]);
ruleCache.invalidateQueries([
REACT_QUERY_KEY.ALERT_RULE_DETAILS,
`${ruleId}`,
]);

// eslint-disable-next-line sonarjs/no-identical-functions
setTimeout(() => {
@@ -415,6 +543,7 @@ function FormAlertRules({
queryType: currentQuery.queryType,
alertId: postableAlert?.id,
alertName: postableAlert?.alert,
ruleType: postableAlert?.ruleType,
});
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
@@ -499,6 +628,7 @@ function FormAlertRules({
queryType: currentQuery.queryType,
status: statusResponse.status,
statusMessage: statusResponse.message,
ruleType: postableAlert.ruleType,
});
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [t, isFormValid, memoizedPreparePostData, notifications]);
@@ -582,56 +712,65 @@ function FormAlertRules({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

function handleRedirection(option: AlertTypes): void {
let url = '';
switch (option) {
case AlertTypes.METRICS_BASED_ALERT:
url =
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-creation-page#examples';
break;
case AlertTypes.LOGS_BASED_ALERT:
url =
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-creation-page#examples';
break;
case AlertTypes.TRACES_BASED_ALERT:
url =
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-creation-page#examples';
break;
case AlertTypes.EXCEPTIONS_BASED_ALERT:
url =
'https://signoz.io/docs/alerts-management/exceptions-based-alerts/?utm_source=product&utm_medium=alert-creation-page#examples';
break;
default:
break;
}
logEvent('Alert: Check example alert clicked', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
isNewRule: !ruleId || ruleId === 0,
ruleId,
queryType: currentQuery.queryType,
link: url,
});
window.open(url, '_blank');
}
const tabs = [
{
value: AlertDetectionTypes.THRESHOLD_ALERT,
label: 'Threshold Alert',
},
{
value: AlertDetectionTypes.ANOMALY_DETECTION_ALERT,
label: 'Anomaly Detection Alert',
isBeta: true,
},
];

const isAnomalyDetectionEnabled =
useFeatureFlag(FeatureKeys.ANOMALY_DETECTION)?.active || false;

return (
<>
{Element}

<PanelContainer id="top">
<StyledLeftContainer flex="5 1 600px" md={18}>
<div id="top">
<div className="overview-header">
<div className="alert-type-container">
{isNewRule && (
<Typography.Title level={5} className="alert-type-title">
<BellDot size={14} />

{alertDef.alertType === AlertTypes.ANOMALY_BASED_ALERT &&
'Anomaly Detection Alert'}
{alertDef.alertType === AlertTypes.METRICS_BASED_ALERT &&
'Metrics Based Alert'}
{alertDef.alertType === AlertTypes.LOGS_BASED_ALERT &&
'Logs Based Alert'}
{alertDef.alertType === AlertTypes.TRACES_BASED_ALERT &&
'Traces Based Alert'}
{alertDef.alertType === AlertTypes.EXCEPTIONS_BASED_ALERT &&
'Exceptions Based Alert'}
</Typography.Title>
)}
</div>

<Button className="periscope-btn" icon={<ExternalLink size={14} />}>
Alert Setup Guide
</Button>
</div>

<MainFormContainer
initialValues={initialValue}
layout="vertical"
form={formInstance}
className="main-container"
>
<div className="chart-preview-container">
{currentQuery.queryType === EQueryType.QUERY_BUILDER &&
renderQBChartPreview()}
{currentQuery.queryType === EQueryType.PROM &&
renderPromAndChQueryChartPreview()}
{currentQuery.queryType === EQueryType.CLICKHOUSE &&
renderPromAndChQueryChartPreview()}
</div>

<StepContainer>
<BuilderUnitsFilter
@@ -640,6 +779,27 @@ function FormAlertRules({
/>
</StepContainer>

<div className="steps-container">
{alertDef.alertType === AlertTypes.METRICS_BASED_ALERT &&
isAnomalyDetectionEnabled && (
<div className="detection-method-container">
<StepHeading> {t('alert_form_step1')}</StepHeading>

<Tabs2
key={detectionMethod}
tabs={tabs}
initialSelectedTab={detectionMethod || ''}
onSelectTab={handleDetectionMethodChange}
/>

<div className="detection-method-description">
{detectionMethod === AlertDetectionTypes.ANOMALY_DETECTION_ALERT
? t('anomaly_detection_alert_desc')
: t('threshold_alert_desc')}
</div>
</div>
)}

<QuerySection
queryCategory={currentQuery.queryType}
setQueryCategory={onQueryCategoryChange}
@@ -659,6 +819,7 @@ function FormAlertRules({
/>

{renderBasicInfo()}
</div>
<ButtonContainer>
<Tooltip title={isAlertAvailableToSave ? MESSAGE.ALERT : ''}>
<ActionButton
@@ -700,38 +861,7 @@ function FormAlertRules({
</ActionButton>
</ButtonContainer>
</MainFormContainer>
</StyledLeftContainer>
<Col flex="1 1 300px">
<UserGuide queryType={currentQuery.queryType} />
<div className="info-help-btns">
<Button
style={{ height: 32 }}
onClick={(): void =>
handleRedirection(alertDef?.alertType as AlertTypes)
}
className="doc-redirection-btn"
>
Check an example alert
</Button>
<LaunchChatSupport
attributes={{
alert: alertDef?.alert,
alertType: alertDef?.alertType,
id: ruleId,
ruleType: alertDef?.ruleType,
state: (alertDef as any)?.state,
panelType,
screen: isRuleCreated ? 'Edit Alert' : 'New Alert',
}}
className="facing-issue-btn"
eventName="Alert: Facing Issues in alert"
buttonText="Need help with this alert?"
message={alertHelpMessage(alertDef, ruleId)}
onHoverText="Click here to get help with this alert"
/>
</div>
</Col>
</PanelContainer>
</>
);
}

@@ -1,13 +1,9 @@
import { Button, Card, Col, Form, Input, Row, Select, Typography } from 'antd';
import { Button, Card, Col, Form, Input, Select, Typography } from 'antd';
import styled from 'styled-components';

const { TextArea } = Input;
const { Item } = Form;

export const PanelContainer = styled(Row)`
flex-wrap: nowrap;
`;

export const StyledLeftContainer = styled(Col)`
&&& {
margin-right: 1rem;

Some files were not shown because too many files have changed in this diff.