Compare commits


4 Commits

Prashant Shahi · 4192fd573d · 2024-07-31 20:29:09 +05:30
  chore(signoz): 📌 pin versions: SigNoz 0.51.0, SigNoz OtelCollector 0.102.3
  Signed-off-by: Prashant Shahi <prashant@signoz.io>

Prashant Shahi · ca13d80205 · 2024-07-31 20:27:51 +05:30
  Merge branch 'main' into release/v0.51.x

Prashant Shahi · 8d84ce8f06 · 2024-07-17 20:16:19 +05:30
  Merge pull request #5509 from SigNoz/release/v0.50.x (Release/v0.50.x)

Prashant Shahi · 09ea7b9eb5 · 2024-07-17 19:01:03 +05:30
  chore(signoz): 📌 pin versions: SigNoz 0.50.0
  Signed-off-by: Prashant Shahi <prashant@signoz.io>
810 changed files with 9818 additions and 51725 deletions

.github/CODEOWNERS

@@ -5,6 +5,6 @@
 /frontend/ @YounixM
 /frontend/src/container/MetricsApplication @srikanthccv
 /frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
-/deploy/ @SigNoz/devops
-/sample-apps/ @SigNoz/devops
-.github @SigNoz/devops
+/deploy/ @prashant-shahi
+/sample-apps/ @prashant-shahi
+.github @prashant-shahi


@@ -1,49 +0,0 @@
---
name: Request Dashboard
about: Request a new dashboard for the SigNoz Dashboards repository
title: '[Dashboard Request] '
labels: 'dashboard-template'
assignees: ''
---
<!-- Use this template to request a new dashboard for the SigNoz Dashboards repository. Providing detailed information will help us understand your needs better and speed up the dashboard creation process. -->
## Dashboard Name
<!-- Provide the name for the requested dashboard. Be specific (e.g., "MySQL Monitoring Dashboard"). -->
## Expected Dashboard Sections and Panels
(Can be tweaked (add or remove panels/sections) according to available metrics)
### Section Name
<!-- Brief description of what this section should display (e.g., "Resource usage metrics for MySQL database"). -->
### Panel Name
<!-- Description of the panel (e.g., "Displays current CPU usage, memory usage, etc."). -->
<!-- - **Example:**
- **Section**: Resource Metrics
- **Panel**: CPU Usage - Displays the current CPU usage across all database instances.
- **Panel**: Memory Usage - Displays the total memory used by the MySQL process. -->
<!-- Repeat this format for any additional sections or panels. -->
## Expected Dashboard Variables
<!-- List any dashboard variables that should be included in the dashboard. Examples could be `deployment.environment`, `hostname`, `region`, etc. -->
## Additional Comments or Requirements
<!-- Include any other details, special requirements, or specific visualizations you'd like to request for this dashboard. -->
## References or Screenshots
<!-- Add any references or screenshots of requested dashboard if available. -->
## 📋 Notes
Please review the [CONTRIBUTING.md](https://github.com/SigNoz/dashboards/blob/main/CONTRIBUTING.md) for guidelines on dashboard structure, naming conventions, and how to submit a pull request.


@@ -8,13 +8,6 @@ on:
     - release/v*
 jobs:
-  check-no-ee-references:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Run check
-        run: make check-no-ee-references
   build-frontend:
     runs-on: ubuntu-latest
     steps:
@@ -43,6 +36,7 @@ jobs:
         run: |
           echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
           echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
+          echo 'CLARITY_PROJECT_ID="${{ secrets.CLARITY_PROJECT_ID }}"' >> frontend/.env
       - name: Install dependencies
         run: cd frontend && yarn install
       - name: Run ESLint


@@ -9,6 +9,7 @@ on:
     - v*
 jobs:
   image-build-and-push-query-service:
     runs-on: ubuntu-latest
     steps:
@@ -150,6 +151,7 @@ jobs:
         run: |
           echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
           echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
+          echo 'CLARITY_PROJECT_ID="${{ secrets.CLARITY_PROJECT_ID }}"' >> frontend/.env
           echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
           echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
           echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env

.gitignore

@@ -67,6 +67,3 @@ e2e/.auth
 # go
 vendor/
 **/main/**
-
-# git-town
-.git-branches.toml


@@ -0,0 +1,7 @@
#!/bin/sh
# Comments out the query-service & frontend sections of deploy/docker/clickhouse-setup/docker-compose.yaml
# Update the line numbers below when deploy/docker/clickhouse-setup/docker-compose.yaml changes.
# Docs Ref.: https://github.com/SigNoz/signoz/blob/main/CONTRIBUTING.md#contribute-to-frontend-with-docker-installation-of-signoz
sed -i 38,62's/.*/# &/' .././deploy/docker/clickhouse-setup/docker-compose.yaml


@@ -30,7 +30,6 @@ Also, have a look at these [good first issues label](https://github.com/SigNoz/s
   - [To run ClickHouse setup](#41-to-run-clickhouse-setup-recommended-for-local-development)
 - [Contribute to SigNoz Helm Chart](#5-contribute-to-signoz-helm-chart-)
   - [To run helm chart for local development](#51-to-run-helm-chart-for-local-development)
-- [Contribute to Dashboards](#6-contribute-to-dashboards-)
 - [Other Ways to Contribute](#other-ways-to-contribute)

 # 1. General Instructions 📝
@@ -38,7 +37,7 @@ Also, have a look at these [good first issues label](https://github.com/SigNoz/s
 ## 1.1 For Creating Issue(s)

 Before making any significant changes and before filing a new issue, please check [existing open](https://github.com/SigNoz/signoz/issues?q=is%3Aopen+is%3Aissue), or [recently closed](https://github.com/SigNoz/signoz/issues?q=is%3Aissue+is%3Aclosed) issues to make sure somebody else hasn't already reported the issue. Please try to include as much information as you can.

-**Issue Types** - [Bug Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=) | [Feature Request](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=) | [Performance Issue Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=performance-issue-report.md&title=) | [Request Dashboard](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=dashboard-template&projects=&template=request_dashboard.md&title=%5BDashboard+Request%5D+) | [Report a Security Vulnerability](https://github.com/SigNoz/signoz/security/policy)
+**Issue Types** - [Bug Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=) | [Feature Request](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=) | [Performance Issue Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=performance-issue-report.md&title=) | [Report a Security Vulnerability](https://github.com/SigNoz/signoz/security/policy)

 #### Details like these are incredibly useful:
@@ -57,7 +56,7 @@ Before making any significant changes and before filing a new issue, please chec
 Discussing your proposed changes ahead of time will make the contribution
 process smooth for everyone 🙌.

-**[`^top^`](#contributing-guidelines)**
+**[`^top^`](#)**

 <hr>
@@ -98,14 +97,13 @@ GitHub provides additional document on [forking a repository](https://help.githu
 stability and quality of the component.

-You can always reach out to `ankit@signoz.io` to understand more about the repo and product. We are very responsive over email and [slack community](https://signoz.io/slack).
+You can always reach out to `ankit@signoz.io` to understand more about the repo and product. We are very responsive over email and [SLACK](https://signoz.io/slack).

 ### Pointers:
 - If you find any **bugs** → please create an [**issue.**](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=)
 - If you find anything **missing** in documentation → you can create an issue with the label **`documentation`**.
 - If you want to build any **new feature** → please create an [issue with the label **`enhancement`**.](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=)
 - If you want to **discuss** something about the product, start a new [**discussion**.](https://github.com/SigNoz/signoz/discussions)
-- If you want to request a new **dashboard template** → please create an issue [here](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=dashboard-template&projects=&template=request_dashboard.md&title=%5BDashboard+Request%5D+).

 <hr>
@@ -119,7 +117,7 @@ e.g. If you are submitting a fix for an issue in frontend, the PR name should be
 - Feel free to ping us on [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) or [`#contributing-frontend`](https://signoz-community.slack.com/archives/C027134DM8B) on our slack community if you need any help on this :)

-**[`^top^`](#contributing-guidelines)**
+**[`^top^`](#)**

 <hr>
@@ -129,13 +127,14 @@ e.g. If you are submitting a fix for an issue in frontend, the PR name should be
 - [**Frontend**](#3-develop-frontend-) (Written in Typescript, React)
 - [**Backend**](#4-contribute-to-backend-query-service-) (Query Service, written in Go)
-- [**Dashboard Templates**](#6-contribute-to-dashboards-) (JSON dashboard templates built with SigNoz)

 Depending upon your area of expertise & interest, you can choose one or more to contribute. Below are detailed instructions to contribute in each area.

-**Please note:** If you want to work on an issue, please add a brief description of your solution on the issue before starting work on it.
-**[`^top^`](#contributing-guidelines)**
+**Please note:** If you want to work on an issue, please ask the maintainers to assign the issue to you before starting work on it. This would help us understand who is working on an issue and prevent duplicate work. 🙏🏻
+⚠️ If you just raise a PR, without the corresponding issue being assigned to you - it may not be accepted.
+**[`^top^`](#)**

 <hr>
@@ -189,7 +188,7 @@ Also, have a look at [Frontend README.md](https://github.com/SigNoz/signoz/blob/
 ### Important Notes:
 The Maintainers / Contributors who will change Line Numbers of `Frontend` & `Query-Section`, please update line numbers in [`/.scripts/commentLinesForSetup.sh`](https://github.com/SigNoz/signoz/blob/develop/.scripts/commentLinesForSetup.sh)

-**[`^top^`](#contributing-guidelines)**
+**[`^top^`](#)**

 ## 3.2 Contribute to Frontend without installing SigNoz backend
@@ -210,7 +209,7 @@ Please ping us in the [`#contributing`](https://signoz-community.slack.com/archi
 **Frontend should now be accessible at** [`http://localhost:3301/services`](http://localhost:3301/services)

-**[`^top^`](#contributing-guidelines)**
+**[`^top^`](#)**

 <hr>
@@ -310,7 +309,7 @@ Click the button below. A workspace with all required environments will be creat
 > To use it on your forked repo, edit the 'Open in Gitpod' button URL to `https://gitpod.io/#https://github.com/<your-github-username>/signoz` -->

-**[`^top^`](#contributing-guidelines)**
+**[`^top^`](#)**

 <hr>
@@ -366,21 +365,10 @@ curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-
   | HOTROD_NAMESPACE=sample-application bash
 ```

-**[`^top^`](#contributing-guidelines)**
+**[`^top^`](#)**

 ---

-# 6. Contribute to Dashboards 📈
-
-**Need to Update: [https://github.com/SigNoz/dashboards](https://github.com/SigNoz/dashboards)**
-
-To contribute a new dashboard template for any service, follow the contribution guidelines in the [Dashboard Contributing Guide](https://github.com/SigNoz/dashboards/blob/main/CONTRIBUTING.md). In brief:
-
-1. Create a dashboard JSON file.
-2. Add a README file explaining the dashboard, the metrics ingested, and the configurations needed.
-3. Include screenshots of the dashboard in the `assets/` directory.
-4. Submit a pull request for review.

 ## Other Ways to Contribute

 There are many other ways to get involved with the community and to participate in this project:
@@ -391,6 +379,7 @@ There are many other ways to get involved with the community and to participate
 - Help answer questions on forums such as Stack Overflow and [SigNoz Community Slack Channel](https://signoz.io/slack).
 - Tell others about the project on Twitter, your blog, etc.

 Again, Feel free to ping us on [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) or [`#contributing-frontend`](https://signoz-community.slack.com/archives/C027134DM8B) on our slack community if you need any help on this :)

 Thank You!


@@ -178,15 +178,6 @@ clear-swarm-ch:
 	@docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \
 		sh -c "cd /pwd && rm -rf clickhouse*/* zookeeper-*/*"
-check-no-ee-references:
-	@echo "Checking for 'ee' package references in 'pkg' directory..."
-	@if grep -R --include="*.go" '.*/ee/.*' pkg/; then \
-		echo "Error: Found references to 'ee' packages in 'pkg' directory"; \
-		exit 1; \
-	else \
-		echo "No references to 'ee' packages found in 'pkg' directory"; \
-	fi
 test:
 	go test ./pkg/query-service/app/metrics/...
 	go test ./pkg/query-service/cache/...
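The removed `check-no-ee-references` target is a plain grep guard: the build fails if any Go file under `pkg/` mentions an `/ee/` import path. As a rough illustration only, the same rule could be expressed in Go; everything here besides the rule itself (file walking, messages) is my own sketch, not part of the repository:

```go
// Hedged sketch: an equivalent of `make check-no-ee-references` in Go.
// Only the rule (no "/ee/" references inside pkg/) comes from the Makefile.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	var offenders []string
	err := filepath.WalkDir("pkg", func(path string, d os.DirEntry, err error) error {
		if err != nil || d.IsDir() || !strings.HasSuffix(path, ".go") {
			return err
		}
		data, readErr := os.ReadFile(path)
		if readErr != nil {
			return readErr
		}
		if strings.Contains(string(data), "/ee/") {
			offenders = append(offenders, path)
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if len(offenders) > 0 {
		fmt.Println("Error: Found references to 'ee' packages in 'pkg' directory:")
		for _, f := range offenders {
			fmt.Println("  " + f)
		}
		os.Exit(1)
	}
	fmt.Println("No references to 'ee' packages found in 'pkg' directory")
}
```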


@@ -23,9 +23,6 @@
         [1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114
     -->
     <level>information</level>
-    <formatting>
-        <type>json</type>
-    </formatting>
     <log>/var/log/clickhouse-server/clickhouse-server.log</log>
     <errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>
     <!-- Rotation policy
@@ -652,12 +649,12 @@
          See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/#creating-replicated-tables
       -->
+    <!--
     <macros>
         <shard>01</shard>
         <replica>example01-01-1</replica>
     </macros>
+    -->
     <!-- Reloading interval for embedded dictionaries, in seconds. Default: 3600. -->


@@ -146,11 +146,11 @@ services:
         condition: on-failure
   query-service:
-    image: signoz/query-service:0.55.0
+    image: signoz/query-service:0.51.0
     command:
       [
         "-config=/root/config/prometheus.yml",
-        "--use-logs-new-schema=true"
+        # "--prefer-delta=true"
       ]
     # ports:
     #   - "6060:6060"     # pprof port
@@ -186,7 +186,7 @@ services:
     <<: *db-depend
   frontend:
-    image: signoz/frontend:0.55.0
+    image: signoz/frontend:0.48.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -199,7 +199,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
   otel-collector:
-    image: signoz/signoz-otel-collector:0.102.10
+    image: signoz/signoz-otel-collector:0.102.3
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -211,7 +211,6 @@ services:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
       - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
       - /var/lib/docker/containers:/var/lib/docker/containers:ro
-      - /:/hostfs:ro
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}},dockerswarm.service.name={{.Service.Name}},dockerswarm.task.name={{.Task.Name}}
       - DOCKER_MULTI_NODE_CLUSTER=false
@@ -238,7 +237,7 @@ services:
       - query-service
   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:0.102.10
+    image: signoz/signoz-schema-migrator:0.102.3
     deploy:
       restart_policy:
         condition: on-failure


@@ -36,7 +36,6 @@ receivers:
   #     endpoint: 0.0.0.0:6832
   hostmetrics:
     collection_interval: 30s
-    root_path: /hostfs
     scrapers:
       cpu: {}
       load: {}
@@ -144,7 +143,6 @@ exporters:
     dsn: tcp://clickhouse:9000/signoz_logs
     docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
     timeout: 10s
-    use_new_schema: true
 extensions:
   health_check:
     endpoint: 0.0.0.0:13133
@@ -155,8 +153,6 @@ extensions:
 service:
   telemetry:
-    logs:
-      encoding: json
     metrics:
       address: 0.0.0.0:8888
   extensions: [health_check, zpages, pprof]


@@ -23,9 +23,6 @@
         [1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114
     -->
     <level>information</level>
-    <formatting>
-        <type>json</type>
-    </formatting>
     <log>/var/log/clickhouse-server/clickhouse-server.log</log>
     <errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>
     <!-- Rotation policy
@@ -652,12 +649,12 @@
          See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/#creating-replicated-tables
       -->
+    <!--
     <macros>
         <shard>01</shard>
         <replica>example01-01-1</replica>
     </macros>
+    -->
     <!-- Reloading interval for embedded dictionaries, in seconds. Default: 3600. -->


@@ -66,7 +66,7 @@ services:
       - --storage.path=/data
   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.10}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: signoz-otel-collector
-    image: signoz/signoz-otel-collector:0.102.10
+    image: signoz/signoz-otel-collector:0.102.3
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -93,8 +93,6 @@ services:
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
       - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
-      - /var/lib/docker/containers:/var/lib/docker/containers:ro
-      - /:/hostfs:ro
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
     ports:


@@ -25,7 +25,7 @@ services:
     command:
       [
         "-config=/root/config/prometheus.yml",
-        "--use-logs-new-schema=true"
+        # "--prefer-delta=true"
       ]
     ports:
       - "6060:6060"


@@ -164,13 +164,13 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.55.0}
+    image: signoz/query-service:${DOCKER_TAG:-0.51.0}
     container_name: signoz-query-service
     command:
       [
         "-config=/root/config/prometheus.yml",
-        "-gateway-url=https://api.staging.signoz.cloud",
-        "--use-logs-new-schema=true"
+        "-gateway-url=https://api.staging.signoz.cloud"
+        # "--prefer-delta=true"
       ]
     # ports:
     #   - "6060:6060"     # pprof port
@@ -204,7 +204,7 @@ services:
     <<: *db-depend
   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.55.0}
+    image: signoz/frontend:${DOCKER_TAG:-0.51.0}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -216,7 +216,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.10}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -230,7 +230,7 @@ services:
   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.10}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.3}
     container_name: signoz-otel-collector
     command:
       [
@@ -244,7 +244,6 @@ services:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
       - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
       - /var/lib/docker/containers:/var/lib/docker/containers:ro
-      - /:/hostfs:ro
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
       - DOCKER_MULTI_NODE_CLUSTER=false


@@ -164,12 +164,12 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.55.0}
+    image: signoz/query-service:${DOCKER_TAG:-0.51.0}
     container_name: signoz-query-service
     command:
       [
-        "-config=/root/config/prometheus.yml",
-        "--use-logs-new-schema=true"
+        "-config=/root/config/prometheus.yml"
+        # "--prefer-delta=true"
       ]
     # ports:
     #   - "6060:6060"     # pprof port
@@ -203,7 +203,7 @@ services:
     <<: *db-depend
   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.55.0}
+    image: signoz/frontend:${DOCKER_TAG:-0.51.0}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -215,7 +215,7 @@ services:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.10}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:
   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.10}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.3}
     container_name: signoz-otel-collector
     command:
       [
@@ -243,7 +243,6 @@ services:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
       - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
       - /var/lib/docker/containers:/var/lib/docker/containers:ro
-      - /:/hostfs:ro
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
       - DOCKER_MULTI_NODE_CLUSTER=false


@@ -36,7 +36,6 @@ receivers:
   #     endpoint: 0.0.0.0:6832
   hostmetrics:
     collection_interval: 30s
-    root_path: /hostfs
     scrapers:
       cpu: {}
       load: {}
@@ -154,13 +153,10 @@ exporters:
     dsn: tcp://clickhouse:9000/signoz_logs
     docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
     timeout: 10s
-    use_new_schema: true
   # logging: {}
 service:
   telemetry:
-    logs:
-      encoding: json
     metrics:
       address: 0.0.0.0:8888
 extensions:


@@ -1,8 +1,3 @@
-map $http_upgrade $connection_upgrade {
-    default upgrade;
-    '' close;
-}
-
 server {
     listen 3301;
     server_name _;
@@ -47,14 +42,6 @@ server {
         proxy_read_timeout 600s;
     }
-    location /ws {
-        proxy_pass http://query-service:8080/ws;
-        proxy_http_version 1.1;
-        proxy_set_header Upgrade "websocket";
-        proxy_set_header Connection "upgrade";
-        proxy_read_timeout 86400;
-    }
-
     # redirect server error pages to the static page /50x.html
     #
     error_page 500 502 503 504 /50x.html;
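For context on the removed `location /ws` block: WebSocket proxying needs the `Upgrade`/`Connection` handshake forwarded to the backend plus a long read timeout. A minimal Go sketch of the same idea, assuming a reachable `query-service:8080` backend; the addresses and handler wiring are illustrative, not part of this change:

```go
// Hedged sketch of what the removed nginx `location /ws` block did,
// expressed as a Go reverse proxy.
package main

import (
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
)

func main() {
	target, err := url.Parse("http://query-service:8080")
	if err != nil {
		log.Fatal(err)
	}
	// httputil.ReverseProxy forwards the Upgrade/Connection handshake and,
	// on a 101 response, streams the WebSocket connection in both directions.
	proxy := httputil.NewSingleHostReverseProxy(target)
	http.Handle("/ws", proxy)
	log.Fatal(http.ListenAndServe(":3301", nil))
}
```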


@@ -1,44 +0,0 @@
package anomaly
import (
"context"
querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
)
type DailyProvider struct {
BaseSeasonalProvider
}
var _ BaseProvider = (*DailyProvider)(nil)
func (dp *DailyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
return &dp.BaseSeasonalProvider
}
// NewDailyProvider uses the same generic option type
func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvider {
dp := &DailyProvider{
BaseSeasonalProvider: BaseSeasonalProvider{},
}
for _, opt := range opts {
opt(dp)
}
dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
Reader: dp.reader,
Cache: dp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: dp.fluxInterval,
FeatureLookup: dp.ff,
})
return dp
}
func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityDaily
return p.getAnomalies(ctx, req)
}
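The `GenericProviderOption[*DailyProvider]` functions referenced here are defined on the base seasonal provider later in this compare. As a usage sketch only (the `anomaly` import path and the reader/cache/feature-lookup wiring are assumptions, not shown in the diff), construction would look like:

```go
// Usage sketch for the generic option pattern.
package main

import (
	"go.signoz.io/signoz/ee/query-service/anomaly" // assumed location of the removed package
	"go.signoz.io/signoz/pkg/query-service/cache"
	"go.signoz.io/signoz/pkg/query-service/interfaces"
)

func newDaily(reader interfaces.Reader, c cache.Cache, ff interfaces.FeatureLookup) *anomaly.DailyProvider {
	// Each WithX helper mutates the embedded BaseSeasonalProvider, so the
	// same option constructors work for daily, hourly, and weekly providers.
	return anomaly.NewDailyProvider(
		anomaly.WithReader[*anomaly.DailyProvider](reader),
		anomaly.WithCache[*anomaly.DailyProvider](c),
		anomaly.WithFeatureLookup[*anomaly.DailyProvider](ff),
	)
}
```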

View File

@@ -1,44 +0,0 @@
package anomaly
import (
"context"
querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
)
type HourlyProvider struct {
BaseSeasonalProvider
}
var _ BaseProvider = (*HourlyProvider)(nil)
func (hp *HourlyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
return &hp.BaseSeasonalProvider
}
// NewHourlyProvider now uses the generic option type
func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyProvider {
hp := &HourlyProvider{
BaseSeasonalProvider: BaseSeasonalProvider{},
}
for _, opt := range opts {
opt(hp)
}
hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
Reader: hp.reader,
Cache: hp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: hp.fluxInterval,
FeatureLookup: hp.ff,
})
return hp
}
func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityHourly
return p.getAnomalies(ctx, req)
}


@@ -1,248 +0,0 @@
package anomaly
import (
"math"
"time"
"go.signoz.io/signoz/pkg/query-service/common"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
type Seasonality string
const (
SeasonalityHourly Seasonality = "hourly"
SeasonalityDaily Seasonality = "daily"
SeasonalityWeekly Seasonality = "weekly"
)
func (s Seasonality) String() string {
return string(s)
}
var (
oneWeekOffset = 24 * 7 * time.Hour.Milliseconds()
oneDayOffset = 24 * time.Hour.Milliseconds()
oneHourOffset = time.Hour.Milliseconds()
fiveMinOffset = 5 * time.Minute.Milliseconds()
)
func (s Seasonality) IsValid() bool {
switch s {
case SeasonalityHourly, SeasonalityDaily, SeasonalityWeekly:
return true
default:
return false
}
}
type GetAnomaliesRequest struct {
Params *v3.QueryRangeParamsV3
Seasonality Seasonality
}
type GetAnomaliesResponse struct {
Results []*v3.Result
}
// anomalyParams is the params for anomaly detection
// prediction = avg(past_period_query) + avg(current_season_query) - mean(past_season_query, past2_season_query, past3_season_query)
//                      ^                              ^
//                      |                              |
//      (rounded value for past period)        (seasonal growth)
//
// score = abs(value - prediction) / stddev(current_season_query)
type anomalyQueryParams struct {
// CurrentPeriodQuery is the query range params for period user is looking at or eval window
// Example: (now-5m, now), (now-30m, now), (now-1h, now)
// The results obtained from this query are used to compare with predicted values
// and to detect anomalies
CurrentPeriodQuery *v3.QueryRangeParamsV3
// PastPeriodQuery is the query range params for past seasonal period
// Example: For weekly seasonality, (now-1w-5m, now-1w)
// : For daily seasonality, (now-1d-5m, now-1d)
// : For hourly seasonality, (now-1h-5m, now-1h)
PastPeriodQuery *v3.QueryRangeParamsV3
// CurrentSeasonQuery is the query range params for current period (seasonal)
// Example: For weekly seasonality, this is the query range params for the (now-1w-5m, now)
// : For daily seasonality, this is the query range params for the (now-1d-5m, now)
// : For hourly seasonality, this is the query range params for the (now-1h-5m, now)
CurrentSeasonQuery *v3.QueryRangeParamsV3
// PastSeasonQuery is the query range params for past seasonal period to the current season
// Example: For weekly seasonality, this is the query range params for the (now-2w-5m, now-1w)
// : For daily seasonality, this is the query range params for the (now-2d-5m, now-1d)
// : For hourly seasonality, this is the query range params for the (now-2h-5m, now-1h)
PastSeasonQuery *v3.QueryRangeParamsV3
// Past2SeasonQuery is the query range params for past 2 seasonal period to the current season
// Example: For weekly seasonality, this is the query range params for the (now-3w-5m, now-2w)
// : For daily seasonality, this is the query range params for the (now-3d-5m, now-2d)
// : For hourly seasonality, this is the query range params for the (now-3h-5m, now-2h)
Past2SeasonQuery *v3.QueryRangeParamsV3
// Past3SeasonQuery is the query range params for past 3 seasonal period to the current season
// Example: For weekly seasonality, this is the query range params for the (now-4w-5m, now-3w)
// : For daily seasonality, this is the query range params for the (now-4d-5m, now-3d)
// : For hourly seasonality, this is the query range params for the (now-4h-5m, now-3h)
Past3SeasonQuery *v3.QueryRangeParamsV3
}
func updateStepInterval(req *v3.QueryRangeParamsV3) {
start := req.Start
end := req.End
req.Step = int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60))
for _, q := range req.CompositeQuery.BuilderQueries {
// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
q.StepInterval = minStep
}
}
}
func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonality) *anomalyQueryParams {
start := req.Start
end := req.End
currentPeriodQuery := &v3.QueryRangeParamsV3{
Start: start,
End: end,
CompositeQuery: req.CompositeQuery.Clone(),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(currentPeriodQuery)
var pastPeriodStart, pastPeriodEnd int64
switch seasonality {
// for one week period, we fetch the data from the past week with 5 min offset
case SeasonalityWeekly:
pastPeriodStart = start - oneWeekOffset - fiveMinOffset
pastPeriodEnd = end - oneWeekOffset
// for one day period, we fetch the data from the past day with 5 min offset
case SeasonalityDaily:
pastPeriodStart = start - oneDayOffset - fiveMinOffset
pastPeriodEnd = end - oneDayOffset
// for one hour period, we fetch the data from the past hour with 5 min offset
case SeasonalityHourly:
pastPeriodStart = start - oneHourOffset - fiveMinOffset
pastPeriodEnd = end - oneHourOffset
}
pastPeriodQuery := &v3.QueryRangeParamsV3{
Start: pastPeriodStart,
End: pastPeriodEnd,
CompositeQuery: req.CompositeQuery.Clone(),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(pastPeriodQuery)
// seasonality growth trend
var currentGrowthPeriodStart, currentGrowthPeriodEnd int64
switch seasonality {
case SeasonalityWeekly:
currentGrowthPeriodStart = start - oneWeekOffset
currentGrowthPeriodEnd = end
case SeasonalityDaily:
currentGrowthPeriodStart = start - oneDayOffset
currentGrowthPeriodEnd = end
case SeasonalityHourly:
currentGrowthPeriodStart = start - oneHourOffset
currentGrowthPeriodEnd = end
}
currentGrowthQuery := &v3.QueryRangeParamsV3{
Start: currentGrowthPeriodStart,
End: currentGrowthPeriodEnd,
CompositeQuery: req.CompositeQuery.Clone(),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(currentGrowthQuery)
var pastGrowthPeriodStart, pastGrowthPeriodEnd int64
switch seasonality {
case SeasonalityWeekly:
pastGrowthPeriodStart = start - 2*oneWeekOffset
pastGrowthPeriodEnd = start - 1*oneWeekOffset
case SeasonalityDaily:
pastGrowthPeriodStart = start - 2*oneDayOffset
pastGrowthPeriodEnd = start - 1*oneDayOffset
case SeasonalityHourly:
pastGrowthPeriodStart = start - 2*oneHourOffset
pastGrowthPeriodEnd = start - 1*oneHourOffset
}
pastGrowthQuery := &v3.QueryRangeParamsV3{
Start: pastGrowthPeriodStart,
End: pastGrowthPeriodEnd,
CompositeQuery: req.CompositeQuery.Clone(),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(pastGrowthQuery)
var past2GrowthPeriodStart, past2GrowthPeriodEnd int64
switch seasonality {
case SeasonalityWeekly:
past2GrowthPeriodStart = start - 3*oneWeekOffset
past2GrowthPeriodEnd = start - 2*oneWeekOffset
case SeasonalityDaily:
past2GrowthPeriodStart = start - 3*oneDayOffset
past2GrowthPeriodEnd = start - 2*oneDayOffset
case SeasonalityHourly:
past2GrowthPeriodStart = start - 3*oneHourOffset
past2GrowthPeriodEnd = start - 2*oneHourOffset
}
past2GrowthQuery := &v3.QueryRangeParamsV3{
Start: past2GrowthPeriodStart,
End: past2GrowthPeriodEnd,
CompositeQuery: req.CompositeQuery.Clone(),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(past2GrowthQuery)
var past3GrowthPeriodStart, past3GrowthPeriodEnd int64
switch seasonality {
case SeasonalityWeekly:
past3GrowthPeriodStart = start - 4*oneWeekOffset
past3GrowthPeriodEnd = start - 3*oneWeekOffset
case SeasonalityDaily:
past3GrowthPeriodStart = start - 4*oneDayOffset
past3GrowthPeriodEnd = start - 3*oneDayOffset
case SeasonalityHourly:
past3GrowthPeriodStart = start - 4*oneHourOffset
past3GrowthPeriodEnd = start - 3*oneHourOffset
}
past3GrowthQuery := &v3.QueryRangeParamsV3{
Start: past3GrowthPeriodStart,
End: past3GrowthPeriodEnd,
CompositeQuery: req.CompositeQuery.Clone(),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(past3GrowthQuery)
return &anomalyQueryParams{
CurrentPeriodQuery: currentPeriodQuery,
PastPeriodQuery: pastPeriodQuery,
CurrentSeasonQuery: currentGrowthQuery,
PastSeasonQuery: pastGrowthQuery,
Past2SeasonQuery: past2GrowthQuery,
Past3SeasonQuery: past3GrowthQuery,
}
}
type anomalyQueryResults struct {
CurrentPeriodResults []*v3.Result
PastPeriodResults []*v3.Result
CurrentSeasonResults []*v3.Result
PastSeasonResults []*v3.Result
Past2SeasonResults []*v3.Result
Past3SeasonResults []*v3.Result
}
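To make the prediction/score formula from the comments above concrete, here is a toy calculation with made-up numbers (none of the values below come from the diff):

```go
// Toy walkthrough of the anomaly formula:
// prediction = avg(past period) + avg(current season) - mean(past 3 seasons' avgs)
// score      = |value - prediction| / stddev(current season)
package main

import (
	"fmt"
	"math"
)

func main() {
	pastPeriodAvg := 100.0                           // avg(past_period_query)
	currentSeasonAvg := 130.0                        // avg(current_season_query)
	pastSeasonAvgs := []float64{120.0, 110.0, 100.0} // past, past2, past3 season averages

	var sum float64
	for _, v := range pastSeasonAvgs {
		sum += v
	}
	mean := sum / float64(len(pastSeasonAvgs)) // 110

	prediction := pastPeriodAvg + currentSeasonAvg - mean // 100 + 130 - 110 = 120

	value, stddev := 150.0, 10.0
	score := math.Abs(value-prediction) / stddev // |150 - 120| / 10 = 3.0
	fmt.Printf("prediction=%.1f score=%.1f\n", prediction, score)
}
```

With the default z-score threshold of 3 seen later in this compare, a score of 3.0 sits right at the boundary of what would be flagged.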


@@ -1,9 +0,0 @@
package anomaly
import (
"context"
)
type Provider interface {
GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
}


@@ -1,466 +0,0 @@
package anomaly
import (
"context"
"math"
"time"
"go.signoz.io/signoz/pkg/query-service/cache"
"go.signoz.io/signoz/pkg/query-service/interfaces"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/postprocess"
"go.signoz.io/signoz/pkg/query-service/utils/labels"
"go.uber.org/zap"
)
var (
// TODO(srikanthccv): make this configurable?
movingAvgWindowSize = 7
)
// BaseProvider is an interface that includes common methods for all provider types
type BaseProvider interface {
GetBaseSeasonalProvider() *BaseSeasonalProvider
}
// GenericProviderOption is a generic type for provider options
type GenericProviderOption[T BaseProvider] func(T)
func WithCache[T BaseProvider](cache cache.Cache) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().cache = cache
}
}
func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().keyGenerator = keyGenerator
}
}
func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().ff = ff
}
}
func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().reader = reader
}
}
type BaseSeasonalProvider struct {
querierV2 interfaces.Querier
reader interfaces.Reader
fluxInterval time.Duration
cache cache.Cache
keyGenerator cache.KeyGenerator
ff interfaces.FeatureLookup
}
func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
if !req.Seasonality.IsValid() {
req.Seasonality = SeasonalityDaily
}
return prepareAnomalyQueryParams(req.Params, req.Seasonality)
}
func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery)
if err != nil {
return nil, err
}
currentPeriodResults, err = postprocess.PostProcessResult(currentPeriodResults, params.CurrentPeriodQuery)
if err != nil {
return nil, err
}
zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery)
if err != nil {
return nil, err
}
pastPeriodResults, err = postprocess.PostProcessResult(pastPeriodResults, params.PastPeriodQuery)
if err != nil {
return nil, err
}
zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery)
if err != nil {
return nil, err
}
currentSeasonResults, err = postprocess.PostProcessResult(currentSeasonResults, params.CurrentSeasonQuery)
if err != nil {
return nil, err
}
zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery)
if err != nil {
return nil, err
}
pastSeasonResults, err = postprocess.PostProcessResult(pastSeasonResults, params.PastSeasonQuery)
if err != nil {
return nil, err
}
zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past2SeasonQuery)
if err != nil {
return nil, err
}
past2SeasonResults, err = postprocess.PostProcessResult(past2SeasonResults, params.Past2SeasonQuery)
if err != nil {
return nil, err
}
zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past3SeasonQuery)
if err != nil {
return nil, err
}
past3SeasonResults, err = postprocess.PostProcessResult(past3SeasonResults, params.Past3SeasonQuery)
if err != nil {
return nil, err
}
return &anomalyQueryResults{
CurrentPeriodResults: currentPeriodResults,
PastPeriodResults: pastPeriodResults,
CurrentSeasonResults: currentSeasonResults,
PastSeasonResults: pastSeasonResults,
Past2SeasonResults: past2SeasonResults,
Past3SeasonResults: past3SeasonResults,
}, nil
}
// getMatchingSeries gets the matching series from the query result
// for the given series
func (p *BaseSeasonalProvider) getMatchingSeries(queryResult *v3.Result, series *v3.Series) *v3.Series {
if queryResult == nil || len(queryResult.Series) == 0 {
return nil
}
for _, curr := range queryResult.Series {
currLabels := labels.FromMap(curr.Labels)
seriesLabels := labels.FromMap(series.Labels)
if currLabels.Hash() == seriesLabels.Hash() {
return curr
}
}
return nil
}
func (p *BaseSeasonalProvider) getAvg(series *v3.Series) float64 {
if series == nil || len(series.Points) == 0 {
return 0
}
var sum float64
for _, smpl := range series.Points {
sum += smpl.Value
}
return sum / float64(len(series.Points))
}
func (p *BaseSeasonalProvider) getStdDev(series *v3.Series) float64 {
if series == nil || len(series.Points) == 0 {
return 0
}
avg := p.getAvg(series)
var sum float64
for _, smpl := range series.Points {
sum += math.Pow(smpl.Value-avg, 2)
}
return math.Sqrt(sum / float64(len(series.Points)))
}
// getMovingAvg gets the moving average for the given series
// for the given window size and start index
func (p *BaseSeasonalProvider) getMovingAvg(series *v3.Series, movingAvgWindowSize, startIdx int) float64 {
if series == nil || len(series.Points) == 0 {
return 0
}
if startIdx >= len(series.Points)-movingAvgWindowSize {
startIdx = int(math.Max(0, float64(len(series.Points)-movingAvgWindowSize)))
}
var sum float64
points := series.Points[startIdx:]
for i := 0; i < movingAvgWindowSize && i < len(points); i++ {
sum += points[i].Value
}
avg := sum / float64(movingAvgWindowSize)
return avg
}
func (p *BaseSeasonalProvider) getMean(floats ...float64) float64 {
if len(floats) == 0 {
return 0
}
var sum float64
for _, f := range floats {
sum += f
}
return sum / float64(len(floats))
}
func (p *BaseSeasonalProvider) getPredictedSeries(
series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series,
) *v3.Series {
predictedSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}
// for each point in the series, get the predicted value
// the predicted value is the moving average (with window size = 7) of the previous period series
// plus the average of the current season series
// minus the mean of the past season series, past2 season series and past3 season series
for idx, curr := range series.Points {
predictedValue :=
p.getMovingAvg(prevSeries, movingAvgWindowSize, idx) +
p.getAvg(currentSeasonSeries) -
p.getMean(p.getAvg(pastSeasonSeries), p.getAvg(past2SeasonSeries), p.getAvg(past3SeasonSeries))
if predictedValue < 0 {
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
}
zap.L().Info("predictedSeries",
zap.Float64("movingAvg", p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)),
zap.Float64("avg", p.getAvg(currentSeasonSeries)),
zap.Float64("mean", p.getMean(p.getAvg(pastSeasonSeries), p.getAvg(past2SeasonSeries), p.getAvg(past3SeasonSeries))),
zap.Any("labels", series.Labels),
zap.Float64("predictedValue", predictedValue),
)
predictedSeries.Points = append(predictedSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: predictedValue,
})
}
return predictedSeries
}
// getBounds gets the upper and lower bounds for the given series
// for the given z score threshold
// moving avg of the previous period series + z score threshold * std dev of the series
// moving avg of the previous period series - z score threshold * std dev of the series
func (p *BaseSeasonalProvider) getBounds(
series, predictedSeries *v3.Series,
zScoreThreshold float64,
) (*v3.Series, *v3.Series) {
upperBoundSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}
lowerBoundSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}
for idx, curr := range series.Points {
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
upperBoundSeries.Points = append(upperBoundSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: upperBound,
})
lowerBoundSeries.Points = append(lowerBoundSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: math.Max(lowerBound, 0),
})
}
return upperBoundSeries, lowerBoundSeries
}
// getExpectedValue gets the expected value for the given series
// for the given index
// prevSeriesAvg + currentSeasonSeriesAvg - mean of past season series, past2 season series and past3 season series
func (p *BaseSeasonalProvider) getExpectedValue(
_, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, idx int,
) float64 {
prevSeriesAvg := p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
return prevSeriesAvg + currentSeasonSeriesAvg - p.getMean(pastSeasonSeriesAvg, past2SeasonSeriesAvg, past3SeasonSeriesAvg)
}
// getScore gets the anomaly score for the given series
// for the given index
// (value - expectedValue) / std dev of the series
func (p *BaseSeasonalProvider) getScore(
series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
) float64 {
expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
return (value - expectedValue) / p.getStdDev(weekSeries)
}
// getAnomalyScores gets the anomaly scores for the given series
// for the given index
// (value - expectedValue) / std dev of the series
func (p *BaseSeasonalProvider) getAnomalyScores(
series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series,
) *v3.Series {
anomalyScoreSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}
for idx, curr := range series.Points {
anomalyScore := p.getScore(series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries, curr.Value, idx)
anomalyScoreSeries.Points = append(anomalyScoreSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: anomalyScore,
})
}
return anomalyScoreSeries
}
func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
anomalyParams := p.getQueryParams(req)
anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
if err != nil {
return nil, err
}
currentPeriodResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.CurrentPeriodResults {
currentPeriodResultsMap[result.QueryName] = result
}
pastPeriodResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.PastPeriodResults {
pastPeriodResultsMap[result.QueryName] = result
}
currentSeasonResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.CurrentSeasonResults {
currentSeasonResultsMap[result.QueryName] = result
}
pastSeasonResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.PastSeasonResults {
pastSeasonResultsMap[result.QueryName] = result
}
past2SeasonResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.Past2SeasonResults {
past2SeasonResultsMap[result.QueryName] = result
}
past3SeasonResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.Past3SeasonResults {
past3SeasonResultsMap[result.QueryName] = result
}
for _, result := range currentPeriodResultsMap {
funcs := req.Params.CompositeQuery.BuilderQueries[result.QueryName].Functions
var zScoreThreshold float64
for _, f := range funcs {
if f.Name == v3.FunctionNameAnomaly {
value, ok := f.NamedArgs["z_score_threshold"]
if ok {
zScoreThreshold = value.(float64)
} else {
zScoreThreshold = 3
}
break
}
}
pastPeriodResult, ok := pastPeriodResultsMap[result.QueryName]
if !ok {
continue
}
currentSeasonResult, ok := currentSeasonResultsMap[result.QueryName]
if !ok {
continue
}
pastSeasonResult, ok := pastSeasonResultsMap[result.QueryName]
if !ok {
continue
}
past2SeasonResult, ok := past2SeasonResultsMap[result.QueryName]
if !ok {
continue
}
past3SeasonResult, ok := past3SeasonResultsMap[result.QueryName]
if !ok {
continue
}
for _, series := range result.Series {
stdDev := p.getStdDev(series)
zap.L().Info("stdDev", zap.Float64("stdDev", stdDev), zap.Any("labels", series.Labels))
pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
pastSeasonSeries := p.getMatchingSeries(pastSeasonResult, series)
past2SeasonSeries := p.getMatchingSeries(past2SeasonResult, series)
past3SeasonSeries := p.getMatchingSeries(past3SeasonResult, series)
prevSeriesAvg := p.getAvg(pastPeriodSeries)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
zap.L().Info("getAvg", zap.Float64("prevSeriesAvg", prevSeriesAvg), zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg), zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg), zap.Float64("past2SeasonSeriesAvg", past2SeasonSeriesAvg), zap.Float64("past3SeasonSeriesAvg", past3SeasonSeriesAvg), zap.Any("labels", series.Labels))
predictedSeries := p.getPredictedSeries(
series,
pastPeriodSeries,
currentSeasonSeries,
pastSeasonSeries,
past2SeasonSeries,
past3SeasonSeries,
)
result.PredictedSeries = append(result.PredictedSeries, predictedSeries)
upperBoundSeries, lowerBoundSeries := p.getBounds(
series,
predictedSeries,
zScoreThreshold,
)
result.UpperBoundSeries = append(result.UpperBoundSeries, upperBoundSeries)
result.LowerBoundSeries = append(result.LowerBoundSeries, lowerBoundSeries)
anomalyScoreSeries := p.getAnomalyScores(
series,
pastPeriodSeries,
currentSeasonSeries,
pastSeasonSeries,
past2SeasonSeries,
past3SeasonSeries,
)
result.AnomalyScores = append(result.AnomalyScores, anomalyScoreSeries)
}
}
results := make([]*v3.Result, 0, len(currentPeriodResultsMap))
for _, result := range currentPeriodResultsMap {
results = append(results, result)
}
return &GetAnomaliesResponse{
Results: results,
}, nil
}
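One subtlety in the removed `getMovingAvg`: when fewer than `movingAvgWindowSize` points remain after `startIdx`, the start index is pulled back so a full trailing window is still averaged, and the divisor stays fixed at the window size. A standalone restatement of just that behaviour (the function name and sample values below are mine, the logic mirrors the removed method):

```go
// Standalone restatement of getMovingAvg's clamping behaviour.
package main

import (
	"fmt"
	"math"
)

func movingAvg(points []float64, window, startIdx int) float64 {
	if len(points) == 0 {
		return 0
	}
	// Slide the window back so it still covers up to `window` trailing points.
	if startIdx >= len(points)-window {
		startIdx = int(math.Max(0, float64(len(points)-window)))
	}
	var sum float64
	for i := startIdx; i < len(points) && i < startIdx+window; i++ {
		sum += points[i]
	}
	// Like the original, this divides by the window size even when fewer
	// than `window` points were summed (possible only if len(points) < window).
	return sum / float64(window)
}

func main() {
	pts := []float64{1, 2, 3, 4, 5}
	fmt.Println(movingAvg(pts, 3, 4)) // startIdx clamps from 4 to 2 → (3+4+5)/3 = 4
}
```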


@@ -1,43 +0,0 @@
package anomaly
import (
"context"
querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
)
type WeeklyProvider struct {
BaseSeasonalProvider
}
var _ BaseProvider = (*WeeklyProvider)(nil)
func (wp *WeeklyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
return &wp.BaseSeasonalProvider
}
func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyProvider {
wp := &WeeklyProvider{
BaseSeasonalProvider: BaseSeasonalProvider{},
}
for _, opt := range opts {
opt(wp)
}
wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
Reader: wp.reader,
Cache: wp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: wp.fluxInterval,
FeatureLookup: wp.ff,
})
return wp
}
func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityWeekly
return p.getAnomalies(ctx, req)
}

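The WithCache/WithReader options used by NewWeeklyProvider are generic over the concrete provider type. A minimal sketch of that option plumbing, assuming every provider satisfies the BaseProvider interface shown above and that BaseSeasonalProvider holds the lowercase fields (cache, reader, ff, fluxInterval) referenced in the constructor; the real declarations live elsewhere in the package and may differ in detail:

package anomaly

import "go.signoz.io/signoz/pkg/query-service/cache"

// GenericProviderOption is a functional option parameterized on the concrete
// provider type, so a single WithCache works for the hourly, daily and
// weekly providers alike.
type GenericProviderOption[T BaseProvider] func(T)

// WithCache sets the cache on the embedded BaseSeasonalProvider.
func WithCache[T BaseProvider](c cache.Cache) GenericProviderOption[T] {
	return func(p T) {
		p.GetBaseSeasonalProvider().cache = c
	}
}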
View File

@@ -38,8 +38,7 @@ type APIHandlerOptions struct {
Cache cache.Cache Cache cache.Cache
Gateway *httputil.ReverseProxy Gateway *httputil.ReverseProxy
// Querier Influx Interval // Querier Influx Interval
FluxInterval time.Duration FluxInterval time.Duration
UseLogsNewSchema bool
} }
type APIHandler struct { type APIHandler struct {
@@ -64,7 +63,6 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
LogsParsingPipelineController: opts.LogsParsingPipelineController, LogsParsingPipelineController: opts.LogsParsingPipelineController,
Cache: opts.Cache, Cache: opts.Cache,
FluxInterval: opts.FluxInterval, FluxInterval: opts.FluxInterval,
UseLogsNewSchema: opts.UseLogsNewSchema,
}) })
if err != nil { if err != nil {
@@ -177,8 +175,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
am.ViewAccess(ah.listLicensesV2)). am.ViewAccess(ah.listLicensesV2)).
Methods(http.MethodGet) Methods(http.MethodGet)
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// Gateway // Gateway
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.AdminAccess(ah.ServeGatewayHTTP)) router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.AdminAccess(ah.ServeGatewayHTTP))

View File

@@ -1,48 +1,17 @@
package api package api
import ( import (
"encoding/json"
"fmt"
"io"
"net/http" "net/http"
"time"
"go.signoz.io/signoz/ee/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model" basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
) )
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
featureSet, err := ah.FF().GetFeatureFlags() featureSet, err := ah.FF().GetFeatureFlags()
if err != nil { if err != nil {
ah.HandleError(w, err, http.StatusInternalServerError) ah.HandleError(w, err, http.StatusInternalServerError)
return return
} }
if constants.FetchFeatures == "true" {
zap.L().Debug("fetching license")
license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
if err != nil {
zap.L().Error("failed to fetch license", zap.Error(err))
} else if license == nil {
zap.L().Debug("no active license found")
} else {
licenseKey := license.Key
zap.L().Debug("fetching zeus features")
zeusFeatures, err := fetchZeusFeatures(constants.ZeusFeaturesURL, licenseKey)
if err == nil {
zap.L().Debug("fetched zeus features", zap.Any("features", zeusFeatures))
// merge zeusFeatures into featureSet, giving higher priority to zeusFeatures
featureSet = MergeFeatureSets(zeusFeatures, featureSet)
} else {
zap.L().Error("failed to fetch zeus features", zap.Error(err))
}
}
}
if ah.opts.PreferSpanMetrics { if ah.opts.PreferSpanMetrics {
for idx := range featureSet { for idx := range featureSet {
feature := &featureSet[idx] feature := &featureSet[idx]
@@ -51,96 +20,5 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
} }
} }
} }
ah.Respond(w, featureSet) ah.Respond(w, featureSet)
} }
// fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint
// and returns the FeatureSet.
func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
// Check if the URL is empty
if url == "" {
return nil, fmt.Errorf("url is empty")
}
// Check if the licenseKey is empty
if licenseKey == "" {
return nil, fmt.Errorf("licenseKey is empty")
}
// Creating an HTTP client with a timeout for better control
client := &http.Client{
Timeout: 10 * time.Second,
}
// Creating a new GET request
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}
// Setting the custom header
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
// Making the GET request
resp, err := client.Do(req)
if err != nil {
return nil, fmt.Errorf("failed to make GET request: %w", err)
}
defer resp.Body.Close() // resp is non-nil here because err was checked above
// Check for non-OK status code
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("received non-OK HTTP status code: %d %s", resp.StatusCode, http.StatusText(resp.StatusCode))
}
// Reading and decoding the response body
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("failed to read response body: %w", err)
}
var zeusResponse ZeusFeaturesResponse
if err := json.Unmarshal(body, &zeusResponse); err != nil {
return nil, fmt.Errorf("failed to decode response body: %w", err)
}
if zeusResponse.Status != "success" {
return nil, fmt.Errorf("failed to fetch zeus features: %s", zeusResponse.Status)
}
return zeusResponse.Data, nil
}
type ZeusFeaturesResponse struct {
Status string `json:"status"`
Data basemodel.FeatureSet `json:"data"`
}
// MergeFeatureSets merges two FeatureSet slices, giving precedence to zeusFeatures.
func MergeFeatureSets(zeusFeatures, internalFeatures basemodel.FeatureSet) basemodel.FeatureSet {
// Create a map to store the merged features
featureMap := make(map[string]basemodel.Feature)
// Add all features from the internalFeatures set to the map
for _, feature := range internalFeatures {
featureMap[feature.Name] = feature
}
// Add all features from the zeusFeatures set to the map
// If a feature already exists (i.e., same name), the zeusFeature will overwrite it
for _, feature := range zeusFeatures {
featureMap[feature.Name] = feature
}
// Convert the map back to a FeatureSet slice
var mergedFeatures basemodel.FeatureSet
for _, feature := range featureMap {
mergedFeatures = append(mergedFeatures, feature)
}
return mergedFeatures
}

View File

@@ -1,88 +0,0 @@
package api
import (
"testing"
"github.com/stretchr/testify/assert"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
func TestMergeFeatureSets(t *testing.T) {
tests := []struct {
name string
zeusFeatures basemodel.FeatureSet
internalFeatures basemodel.FeatureSet
expected basemodel.FeatureSet
}{
{
name: "empty zeusFeatures and internalFeatures",
zeusFeatures: basemodel.FeatureSet{},
internalFeatures: basemodel.FeatureSet{},
expected: basemodel.FeatureSet{},
},
{
name: "non-empty zeusFeatures and empty internalFeatures",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
internalFeatures: basemodel.FeatureSet{},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
},
{
name: "empty zeusFeatures and non-empty internalFeatures",
zeusFeatures: basemodel.FeatureSet{},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
},
{
name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature3", Active: false},
},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature2", Active: true},
{Name: "Feature4", Active: false},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: true},
{Name: "Feature3", Active: false},
{Name: "Feature4", Active: false},
},
},
{
name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: false},
{Name: "Feature3", Active: true},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
{Name: "Feature3", Active: true},
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
actual := MergeFeatureSets(test.zeusFeatures, test.internalFeatures)
assert.ElementsMatch(t, test.expected, actual)
})
}
}

View File

@@ -1,119 +0,0 @@
package api
import (
"bytes"
"fmt"
"io"
"net/http"
"go.signoz.io/signoz/ee/query-service/anomaly"
baseapp "go.signoz.io/signoz/pkg/query-service/app"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.uber.org/zap"
)
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)
if apiErrorObj != nil {
zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
RespondError(w, apiErrorObj, nil)
return
}
queryRangeParams.Version = "v4"
// add temporality for each metric
temporalityErr := aH.PopulateTemporality(r.Context(), queryRangeParams)
if temporalityErr != nil {
zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
return
}
anomalyQueryExists := false
anomalyQuery := &v3.BuilderQuery{}
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
for _, fn := range query.Functions {
if fn.Name == v3.FunctionNameAnomaly {
anomalyQueryExists = true
anomalyQuery = query
break
}
}
}
}
if anomalyQueryExists {
// ensure all queries have metric data source, and there should be only one anomaly query
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
if query.DataSource != v3.DataSourceMetrics {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("all queries must have metric data source")}, nil)
return
}
}
// get the seasonality from the anomaly query (the z-score threshold is read later by the provider)
var seasonality anomaly.Seasonality
for _, fn := range anomalyQuery.Functions {
if fn.Name == v3.FunctionNameAnomaly {
seasonalityStr, ok := fn.NamedArgs["seasonality"].(string)
if !ok {
seasonalityStr = "daily"
}
if seasonalityStr == "weekly" {
seasonality = anomaly.SeasonalityWeekly
} else if seasonalityStr == "daily" {
seasonality = anomaly.SeasonalityDaily
} else {
seasonality = anomaly.SeasonalityHourly
}
break
}
}
var provider anomaly.Provider
switch seasonality {
case anomaly.SeasonalityWeekly:
provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
)
case anomaly.SeasonalityDaily:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
)
case anomaly.SeasonalityHourly:
provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
)
}
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
uniqueResults := make(map[string]*v3.Result)
// the loop variable is named result so it does not shadow the anomaly package
for _, result := range anomalies.Results {
uniqueResults[result.QueryName] = result
uniqueResults[result.QueryName].IsAnomaly = true
}
aH.Respond(w, uniqueResults)
} else {
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
aH.QueryRangeV4(w, r)
}
}

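For illustration, a hypothetical builder query that would take the anomaly branch above; the field names follow the v3 model this handler already uses, but the concrete values are made up:

package api

import v3 "go.signoz.io/signoz/pkg/query-service/model/v3"

// Hypothetical query fragment: "weekly" routes to NewWeeklyProvider above,
// a missing seasonality falls back to "daily", and any other string
// currently falls through to the hourly provider.
var exampleAnomalyQuery = &v3.BuilderQuery{
	QueryName:  "A",
	DataSource: v3.DataSourceMetrics, // every query must use metrics, per the check above
	Functions: []v3.Function{{
		Name: v3.FunctionNameAnomaly,
		NamedArgs: map[string]interface{}{
			"seasonality":       "weekly",
			"z_score_threshold": 3.0,
		},
	}},
}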
View File

@@ -0,0 +1,401 @@
package db
import (
"context"
"crypto/md5"
"encoding/json"
"fmt"
"reflect"
"regexp"
"sort"
"strings"
"time"
"go.signoz.io/signoz/ee/query-service/model"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
// GetMetricResultEE runs the query and returns list of time series
func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {
defer utils.Elapsed("GetMetricResult", nil)()
zap.L().Info("Executing metric result query: ", zap.String("query", query))
var hash string
// If getSubTreeSpans function is used in the clickhouse query
if strings.Contains(query, "getSubTreeSpans(") {
var err error
query, hash, err = r.getSubTreeSpansCustomFunction(ctx, query, hash)
if err != nil {
// a "no spans found" result is not a failure; match on the error message
// since getSubTreeSpansCustomFunction does not return a sentinel error value
if err.Error() == "no spans found for the given query" {
return nil, "", nil
}
return nil, "", err
}
}
rows, err := r.conn.Query(ctx, query)
if err != nil {
zap.L().Error("Error in processing query", zap.Error(err))
return nil, "", fmt.Errorf("error in processing query")
}
var (
columnTypes = rows.ColumnTypes()
columnNames = rows.Columns()
vars = make([]interface{}, len(columnTypes))
)
for i := range columnTypes {
vars[i] = reflect.New(columnTypes[i].ScanType()).Interface()
}
// when group by is applied, each combination in the cartesian product
// of attribute values is a separate series. Each entry in metricPointsMap
// represents a unique series.
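// For illustration (hypothetical rows, not from this diff): two scanned rows
// with labels {service="a"} and {service="b"} sort and join to different keys,
// so their points accumulate in separate metricPointsMap entries and surface
// as two distinct Series in the final seriesList.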
metricPointsMap := make(map[string][]basemodel.MetricPoint)
// attribute key-value pairs for each group selection
attributesMap := make(map[string]map[string]string)
defer rows.Close()
for rows.Next() {
if err := rows.Scan(vars...); err != nil {
return nil, "", err
}
var groupBy []string
var metricPoint basemodel.MetricPoint
groupAttributes := make(map[string]string)
// Assuming that each result row contains a timestamp, a value and optional labels.
// Label key and value are both strings.
for idx, v := range vars {
colName := columnNames[idx]
switch v := v.(type) {
case *string:
// special case for returning all labels
if colName == "fullLabels" {
var metric map[string]string
err := json.Unmarshal([]byte(*v), &metric)
if err != nil {
return nil, "", err
}
for key, val := range metric {
groupBy = append(groupBy, val)
groupAttributes[key] = val
}
} else {
groupBy = append(groupBy, *v)
groupAttributes[colName] = *v
}
case *time.Time:
metricPoint.Timestamp = v.UnixMilli()
case *float64:
metricPoint.Value = *v
case **float64:
// ch seems to return this type when column is derived from
// SELECT count(*)/ SELECT count(*)
floatVal := *v
if floatVal != nil {
metricPoint.Value = *floatVal
}
case *float32:
metricPoint.Value = float64(*v)
case *uint8, *uint64, *uint16, *uint32:
if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok {
metricPoint.Value = float64(reflect.ValueOf(v).Elem().Uint())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
}
case *int8, *int16, *int32, *int64:
if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok {
metricPoint.Value = float64(reflect.ValueOf(v).Elem().Int())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
}
default:
zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName))
}
}
sort.Strings(groupBy)
key := strings.Join(groupBy, "")
attributesMap[key] = groupAttributes
metricPointsMap[key] = append(metricPointsMap[key], metricPoint)
}
var seriesList []*basemodel.Series
for key := range metricPointsMap {
points := metricPointsMap[key]
// the first point in each series could be invalid since the
// aggregations are applied using a point carried over from the previous series
if len(points) > 1 {
points = points[1:]
}
attributes := attributesMap[key]
series := basemodel.Series{Labels: attributes, Points: points}
seriesList = append(seriesList, &series)
}
// err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash)
// if err != nil {
// zap.L().Error("Error in dropping temporary table: ", err)
// return nil, err
// }
if hash == "" {
return seriesList, hash, nil
} else {
return seriesList, "getSubTreeSpans" + hash, nil
}
}
func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) {
zap.L().Debug("Executing getSubTreeSpans function")
// str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;`
// process the query to fetch subTree query
var subtreeInput string
query, subtreeInput, hash = processQuery(query, hash)
err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash)
if err != nil {
zap.L().Error("Error in dropping temporary table", zap.Error(err))
return query, hash, err
}
// Create temporary table to store the getSubTreeSpans() results
zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash))
err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)")
if err != nil {
zap.L().Error("Error in creating temporary table", zap.Error(err))
return query, hash, err
}
var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse
getSpansSubQuery := subtreeInput
// Execute the subTree query
zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery))
err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery)
// zap.L().Info(getSpansSubQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("error in processing sql query")
}
var searchScanResponses []basemodel.SearchSpanDBResponseItem
// TODO : @ankit: I think the algorithm does not need to assume that subtrees are from the same TraceID. We can take this as an improvement later.
// Fetch all the spans of the same TraceID so that we can build the subtree
modelQuery := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
if len(getSpansSubQueryDBResponses) == 0 {
return query, hash, fmt.Errorf("no spans found for the given query")
}
zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery))
err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("error in processing sql query")
}
// Process model to fetch the spans
zap.L().Debug("Processing model to fetch the spans")
searchSpanResponses := []basemodel.SearchSpanResponseItem{}
for _, item := range searchScanResponses {
var jsonItem basemodel.SearchSpanResponseItem
if err := json.Unmarshal([]byte(item.Model), &jsonItem); err != nil {
zap.L().Error("Error unmarshalling span model", zap.Error(err))
continue
}
jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano())
if jsonItem.Events == nil {
jsonItem.Events = []string{}
}
searchSpanResponses = append(searchSpanResponses, jsonItem)
}
// Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash
// Use a map to store pointers to the spans to avoid duplicates and save memory
zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses)
if err != nil {
zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err))
return query, hash, err
}
zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
statement, err := r.conn.PrepareBatch(ctx, "INSERT INTO getSubTreeSpans"+hash)
if err != nil {
zap.L().Error("Error in preparing batch statement", zap.Error(err))
return query, hash, err
}
for _, span := range treeSearchResponse {
var parentID string
if len(span.References) > 0 && span.References[0].RefType == "CHILD_OF" {
parentID = span.References[0].SpanId
}
err = statement.Append(
time.Unix(0, int64(span.TimeUnixNano)),
span.TraceID,
span.SpanID,
parentID,
span.RootSpanID,
span.ServiceName,
span.Name,
span.RootName,
uint64(span.DurationNano),
int8(span.Kind),
span.TagMap,
span.Events,
)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, err
}
}
zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
err = statement.Send()
if err != nil {
zap.L().Error("Error in sending statement", zap.Error(err))
return query, hash, err
}
return query, hash, nil
}
//lint:ignore SA4009 the returned hash is fed into the query
func processQuery(query string, hash string) (string, string, string) {
re3 := regexp.MustCompile(`getSubTreeSpans`)
submatchall3 := re3.FindAllStringIndex(query, -1)
getSubtreeSpansMatchIndex := submatchall3[0][1]
query2countParenthesis := query[getSubtreeSpansMatchIndex:]
sqlCompleteIndex := 0
countParenthesisImbalance := 0
for i, char := range query2countParenthesis {
if char == '(' {
countParenthesisImbalance++
}
if char == ')' {
countParenthesisImbalance--
}
if countParenthesisImbalance == 0 {
sqlCompleteIndex = i
break
}
}
subtreeInput := query2countParenthesis[1:sqlCompleteIndex]
// hash the subtreeInput
hmd5 := md5.Sum([]byte(subtreeInput))
hash = fmt.Sprintf("%x", hmd5)
// Reformat the query to use the getSubTreeSpans function
query = query[:getSubtreeSpansMatchIndex] + hash + " " + query2countParenthesis[sqlCompleteIndex+1:]
return query, subtreeInput, hash
}
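// Illustration (hypothetical input, not from this diff): given the query
//   select count() from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where name='foo') group by interval
// processQuery extracts the balanced-parentheses body as subtreeInput,
// md5-hashes it, and rewrites the outer query to read from the temporary
// table instead, roughly:
//   select count() from getSubTreeSpans<md5(subtreeInput)> group by interval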
// getSubTreeAlgorithm builds the subtree for each requested span and returns the flattened list of subtree spans
func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse) (map[string]*basemodel.SearchSpanResponseItem, error) {
var spans []*model.SpanForTraceDetails
for _, spanItem := range payload {
var parentID string
if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
parentID = spanItem.References[0].SpanId
}
span := &model.SpanForTraceDetails{
TimeUnixNano: spanItem.TimeUnixNano,
SpanID: spanItem.SpanID,
TraceID: spanItem.TraceID,
ServiceName: spanItem.ServiceName,
Name: spanItem.Name,
Kind: spanItem.Kind,
DurationNano: spanItem.DurationNano,
TagMap: spanItem.TagMap,
ParentID: parentID,
Events: spanItem.Events,
HasError: spanItem.HasError,
}
spans = append(spans, span)
}
zap.L().Debug("Building Tree")
roots, err := buildSpanTrees(&spans)
if err != nil {
return nil, err
}
searchSpansResult := make(map[string]*basemodel.SearchSpanResponseItem)
// Every span fetched by the getSubTreeSpans input SQL query is considered a root
// For each root, get the subtree spans
for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses {
targetSpan := &model.SpanForTraceDetails{}
// zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
// Search target span object in the tree
for _, root := range roots {
targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID)
if targetSpan != nil {
break
}
if err != nil {
zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
return nil, err
}
}
if targetSpan == nil {
return nil, nil
}
// Build subtree for the target span
// Mark the target span as root by setting parent ID as empty string
targetSpan.ParentID = ""
preParents := []*model.SpanForTraceDetails{targetSpan}
children := []*model.SpanForTraceDetails{}
// Get the subtree child spans
for len(preParents) != 0 {
parents := []*model.SpanForTraceDetails{}
for _, parent := range preParents {
children = append(children, parent.Children...)
parents = append(parents, parent.Children...)
}
preParents = parents
}
resultSpans := children
// Add the target span to the result spans
resultSpans = append(resultSpans, targetSpan)
for _, item := range resultSpans {
references := []basemodel.OtelSpanRef{
{
TraceId: item.TraceID,
SpanId: item.ParentID,
RefType: "CHILD_OF",
},
}
if item.Events == nil {
item.Events = []string{}
}
searchSpansResult[item.SpanID] = &basemodel.SearchSpanResponseItem{
TimeUnixNano: item.TimeUnixNano,
SpanID: item.SpanID,
TraceID: item.TraceID,
ServiceName: item.ServiceName,
Name: item.Name,
Kind: item.Kind,
References: references,
DurationNano: item.DurationNano,
TagMap: item.TagMap,
Events: item.Events,
HasError: item.HasError,
RootSpanID: getSpansSubQueryDBResponse.SpanID,
RootName: targetSpan.Name,
}
}
}
return searchSpansResult, nil
}

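buildSpanTrees and breadthFirstSearch are referenced above but are not part of this diff. A minimal sketch of what a matching breadthFirstSearch could look like, assuming SpanForTraceDetails exposes the SpanID and Children fields already used in getSubTreeAlgorithm:

package db

import "go.signoz.io/signoz/ee/query-service/model"

// breadthFirstSearchSketch is a hypothetical level-order lookup matching the
// call sites above: it returns the node with the given span ID, or nil.
func breadthFirstSearchSketch(root *model.SpanForTraceDetails, spanID string) (*model.SpanForTraceDetails, error) {
	queue := []*model.SpanForTraceDetails{root}
	for len(queue) > 0 {
		node := queue[0]
		queue = queue[1:]
		if node.SpanID == spanID {
			return node, nil
		}
		queue = append(queue, node.Children...)
	}
	return nil, nil // not found under this root
}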
View File

@@ -25,9 +25,8 @@ func NewDataConnector(
maxOpenConns int, maxOpenConns int,
dialTimeout time.Duration, dialTimeout time.Duration,
cluster string, cluster string,
useLogsNewSchema bool,
) *ClickhouseReader { ) *ClickhouseReader {
ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster, useLogsNewSchema) ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster)
return &ClickhouseReader{ return &ClickhouseReader{
conn: ch.GetConn(), conn: ch.GetConn(),
appdb: localDB, appdb: localDB,

View File

@@ -1,7 +1,6 @@
package app package app
import ( import (
"bufio"
"bytes" "bytes"
"context" "context"
"encoding/json" "encoding/json"
@@ -28,9 +27,7 @@ import (
"go.signoz.io/signoz/ee/query-service/dao" "go.signoz.io/signoz/ee/query-service/dao"
"go.signoz.io/signoz/ee/query-service/integrations/gateway" "go.signoz.io/signoz/ee/query-service/integrations/gateway"
"go.signoz.io/signoz/ee/query-service/interfaces" "go.signoz.io/signoz/ee/query-service/interfaces"
"go.signoz.io/signoz/ee/query-service/rules"
baseauth "go.signoz.io/signoz/pkg/query-service/auth" baseauth "go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/migrate"
"go.signoz.io/signoz/pkg/query-service/model" "go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3" v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
@@ -53,7 +50,7 @@ import (
baseint "go.signoz.io/signoz/pkg/query-service/interfaces" baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
basemodel "go.signoz.io/signoz/pkg/query-service/model" basemodel "go.signoz.io/signoz/pkg/query-service/model"
pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine" pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine"
baserules "go.signoz.io/signoz/pkg/query-service/rules" rules "go.signoz.io/signoz/pkg/query-service/rules"
"go.signoz.io/signoz/pkg/query-service/telemetry" "go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils" "go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap" "go.uber.org/zap"
@@ -77,13 +74,12 @@ type ServerOptions struct {
FluxInterval string FluxInterval string
Cluster string Cluster string
GatewayUrl string GatewayUrl string
UseLogsNewSchema bool
} }
// Server runs HTTP api service // Server runs HTTP api service
type Server struct { type Server struct {
serverOptions *ServerOptions serverOptions *ServerOptions
ruleManager *baserules.Manager ruleManager *rules.Manager
// public http router // public http router
httpConn net.Listener httpConn net.Listener
@@ -155,7 +151,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
serverOptions.MaxOpenConns, serverOptions.MaxOpenConns,
serverOptions.DialTimeout, serverOptions.DialTimeout,
serverOptions.Cluster, serverOptions.Cluster,
serverOptions.UseLogsNewSchema,
) )
go qb.Start(readerReady) go qb.Start(readerReady)
reader = qb reader = qb
@@ -170,14 +165,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
return nil, err return nil, err
} }
} }
var c cache.Cache
if serverOptions.CacheConfigPath != "" {
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
if err != nil {
return nil, err
}
c = cache.NewCache(cacheOpts)
}
<-readerReady <-readerReady
rm, err := makeRulesManager(serverOptions.PromConfigPath, rm, err := makeRulesManager(serverOptions.PromConfigPath,
@@ -185,23 +172,13 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
serverOptions.RuleRepoURL, serverOptions.RuleRepoURL,
localDB, localDB,
reader, reader,
c,
serverOptions.DisableRules, serverOptions.DisableRules,
lm, lm)
serverOptions.UseLogsNewSchema,
)
if err != nil { if err != nil {
return nil, err return nil, err
} }
go func() {
err = migrate.ClickHouseMigrate(reader.GetConn(), serverOptions.Cluster)
if err != nil {
zap.L().Error("error while running clickhouse migrations", zap.Error(err))
}
}()
// initiate opamp // initiate opamp
_, err = opAmpModel.InitDB(localDB) _, err = opAmpModel.InitDB(localDB)
if err != nil { if err != nil {
@@ -246,6 +223,15 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
telemetry.GetInstance().SetReader(reader) telemetry.GetInstance().SetReader(reader)
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey) telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
var c cache.Cache
if serverOptions.CacheConfigPath != "" {
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
if err != nil {
return nil, err
}
c = cache.NewCache(cacheOpts)
}
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval) fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
if err != nil { if err != nil {
@@ -269,7 +255,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
Cache: c, Cache: c,
FluxInterval: fluxInterval, FluxInterval: fluxInterval,
Gateway: gatewayProxy, Gateway: gatewayProxy,
UseLogsNewSchema: serverOptions.UseLogsNewSchema,
} }
apiHandler, err := api.NewAPIHandler(apiOpts) apiHandler, err := api.NewAPIHandler(apiOpts)
@@ -324,7 +309,7 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
// ip here for alert manager // ip here for alert manager
AllowedOrigins: []string{"*"}, AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"}, AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"}, AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY"},
}) })
handler := c.Handler(r) handler := c.Handler(r)
@@ -365,13 +350,11 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
apiHandler.RegisterIntegrationRoutes(r, am) apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am) apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am) apiHandler.RegisterQueryRangeV4Routes(r, am)
apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am)
c := cors.New(cors.Options{ c := cors.New(cors.Options{
AllowedOrigins: []string{"*"}, AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"}, AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "cache-control", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"}, AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "cache-control"},
}) })
handler := c.Handler(r) handler := c.Handler(r)
@@ -383,7 +366,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
}, nil }, nil
} }
// TODO(remove): Implemented at pkg/http/middleware/logging.go
// loggingMiddleware is used for logging public api calls // loggingMiddleware is used for logging public api calls
func loggingMiddleware(next http.Handler) http.Handler { func loggingMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@@ -395,7 +377,6 @@ func loggingMiddleware(next http.Handler) http.Handler {
}) })
} }
// TODO(remove): Implemented at pkg/http/middleware/logging.go
// loggingMiddlewarePrivate is used for logging private api calls // loggingMiddlewarePrivate is used for logging private api calls
// from internal services like alert manager // from internal services like alert manager
func loggingMiddlewarePrivate(next http.Handler) http.Handler { func loggingMiddlewarePrivate(next http.Handler) http.Handler {
@@ -408,41 +389,27 @@ func loggingMiddlewarePrivate(next http.Handler) http.Handler {
}) })
} }
// TODO(remove): Implemented at pkg/http/middleware/logging.go
type loggingResponseWriter struct { type loggingResponseWriter struct {
http.ResponseWriter http.ResponseWriter
statusCode int statusCode int
} }
// TODO(remove): Implemented at pkg/http/middleware/logging.go
func NewLoggingResponseWriter(w http.ResponseWriter) *loggingResponseWriter { func NewLoggingResponseWriter(w http.ResponseWriter) *loggingResponseWriter {
// WriteHeader(int) is not called if our response implicitly returns 200 OK, so // WriteHeader(int) is not called if our response implicitly returns 200 OK, so
// we default to that status code. // we default to that status code.
return &loggingResponseWriter{w, http.StatusOK} return &loggingResponseWriter{w, http.StatusOK}
} }
// TODO(remove): Implemented at pkg/http/middleware/logging.go
func (lrw *loggingResponseWriter) WriteHeader(code int) { func (lrw *loggingResponseWriter) WriteHeader(code int) {
lrw.statusCode = code lrw.statusCode = code
lrw.ResponseWriter.WriteHeader(code) lrw.ResponseWriter.WriteHeader(code)
} }
// TODO(remove): Implemented at pkg/http/middleware/logging.go
// Flush implements the http.Flush interface. // Flush implements the http.Flush interface.
func (lrw *loggingResponseWriter) Flush() { func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush() lrw.ResponseWriter.(http.Flusher).Flush()
} }
// TODO(remove): Implemented at pkg/http/middleware/logging.go
// Support websockets
func (lrw *loggingResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {
h, ok := lrw.ResponseWriter.(http.Hijacker)
if !ok {
return nil, nil, errors.New("hijack not supported")
}
return h.Hijack()
}
func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}, bool) { func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFromV3 := "/api/v3/query_range" pathToExtractBodyFromV3 := "/api/v3/query_range"
pathToExtractBodyFromV4 := "/api/v4/query_range" pathToExtractBodyFromV4 := "/api/v4/query_range"
@@ -579,7 +546,6 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
}) })
} }
// TODO(remove): Implemented at pkg/http/middleware/timeout.go
func setTimeoutMiddleware(next http.Handler) http.Handler { func setTimeoutMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context() ctx := r.Context()
@@ -732,10 +698,8 @@ func makeRulesManager(
ruleRepoURL string, ruleRepoURL string,
db *sqlx.DB, db *sqlx.DB,
ch baseint.Reader, ch baseint.Reader,
cache cache.Cache,
disableRules bool, disableRules bool,
fm baseint.FeatureLookup, fm baseint.FeatureLookup) (*rules.Manager, error) {
useLogsNewSchema bool) (*baserules.Manager, error) {
// create engine // create engine
pqle, err := pqle.FromConfigPath(promConfigPath) pqle, err := pqle.FromConfigPath(promConfigPath)
@@ -751,9 +715,12 @@ func makeRulesManager(
} }
// create manager opts // create manager opts
managerOpts := &baserules.ManagerOptions{ managerOpts := &rules.ManagerOptions{
NotifierOpts: notifierOpts, NotifierOpts: notifierOpts,
PqlEngine: pqle, Queriers: &rules.Queriers{
PqlEngine: pqle,
Ch: ch.GetConn(),
},
RepoURL: ruleRepoURL, RepoURL: ruleRepoURL,
DBConn: db, DBConn: db,
Context: context.Background(), Context: context.Background(),
@@ -761,15 +728,10 @@ func makeRulesManager(
DisableRules: disableRules, DisableRules: disableRules,
FeatureFlags: fm, FeatureFlags: fm,
Reader: ch, Reader: ch,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),
PrepareTaskFunc: rules.PrepareTaskFunc,
UseLogsNewSchema: useLogsNewSchema,
} }
// create Manager // create Manager
manager, err := baserules.NewManager(managerOpts) manager, err := rules.NewManager(managerOpts)
if err != nil { if err != nil {
return nil, fmt.Errorf("rule manager error: %v", err) return nil, fmt.Errorf("rule manager error: %v", err)
} }

View File

@@ -11,8 +11,8 @@ const (
var LicenseSignozIo = "https://license.signoz.io/api/v1" var LicenseSignozIo = "https://license.signoz.io/api/v1"
var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "") var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "") var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false") var SpanRenderLimitStr = GetOrDefaultEnv("SPAN_RENDER_LIMIT", "2500")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL") var MaxSpansInTraceStr = GetOrDefaultEnv("MAX_SPANS_IN_TRACE", "250000")
func GetOrDefaultEnv(key string, fallback string) string { func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key) v := os.Getenv(key)

View File

@@ -147,7 +147,7 @@ func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, a
for _, l := range licenses { for _, l := range licenses {
l.ParsePlan() l.ParsePlan()
if lm.activeLicense != nil && l.Key == lm.activeLicense.Key { if l.Key == lm.activeLicense.Key {
l.IsCurrent = true l.IsCurrent = true
} }

View File

@@ -87,7 +87,6 @@ func main() {
var ruleRepoURL string var ruleRepoURL string
var cluster string var cluster string
var useLogsNewSchema bool
var cacheConfigPath, fluxInterval string var cacheConfigPath, fluxInterval string
var enableQueryServiceLogOTLPExport bool var enableQueryServiceLogOTLPExport bool
var preferSpanMetrics bool var preferSpanMetrics bool
@@ -97,7 +96,6 @@ func main() {
var dialTimeout time.Duration var dialTimeout time.Duration
var gatewayUrl string var gatewayUrl string
flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)") flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)") flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
@@ -136,7 +134,6 @@ func main() {
FluxInterval: fluxInterval, FluxInterval: fluxInterval,
Cluster: cluster, Cluster: cluster,
GatewayUrl: gatewayUrl, GatewayUrl: gatewayUrl,
UseLogsNewSchema: useLogsNewSchema,
} }
// Read the jwt secret key // Read the jwt secret key

View File

@@ -12,7 +12,6 @@ const DisableUpsell = "DISABLE_UPSELL"
const Onboarding = "ONBOARDING" const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT" const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY" const Gateway = "GATEWAY"
const PremiumSupport = "PREMIUM_SUPPORT"
var BasicPlan = basemodel.FeatureSet{ var BasicPlan = basemodel.FeatureSet{
basemodel.Feature{ basemodel.Feature{
@@ -120,20 +119,6 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1, UsageLimit: -1,
Route: "", Route: "",
}, },
basemodel.Feature{
Name: PremiumSupport,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
} }
var ProPlan = basemodel.FeatureSet{ var ProPlan = basemodel.FeatureSet{
@@ -235,20 +220,6 @@ var ProPlan = basemodel.FeatureSet{
UsageLimit: -1, UsageLimit: -1,
Route: "", Route: "",
}, },
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
} }
var EnterprisePlan = basemodel.FeatureSet{ var EnterprisePlan = basemodel.FeatureSet{
@@ -364,18 +335,4 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1, UsageLimit: -1,
Route: "", Route: "",
}, },
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
} }

View File

@@ -1,393 +0,0 @@
package rules
import (
"context"
"encoding/json"
"fmt"
"math"
"strings"
"sync"
"time"
"go.uber.org/zap"
"go.signoz.io/signoz/ee/query-service/anomaly"
"go.signoz.io/signoz/pkg/query-service/cache"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/model"
querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
"go.signoz.io/signoz/pkg/query-service/interfaces"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils/labels"
"go.signoz.io/signoz/pkg/query-service/utils/times"
"go.signoz.io/signoz/pkg/query-service/utils/timestamp"
"go.signoz.io/signoz/pkg/query-service/formatter"
baserules "go.signoz.io/signoz/pkg/query-service/rules"
yaml "gopkg.in/yaml.v2"
)
const (
RuleTypeAnomaly = "anomaly_rule"
)
type AnomalyRule struct {
*baserules.BaseRule
mtx sync.Mutex
reader interfaces.Reader
// querierV2 is used for alerts created after the introduction of the new metrics query builder
querierV2 interfaces.Querier
provider anomaly.Provider
seasonality anomaly.Seasonality
}
func NewAnomalyRule(
id string,
p *baserules.PostableRule,
featureFlags interfaces.FeatureLookup,
reader interfaces.Reader,
cache cache.Cache,
opts ...baserules.RuleOption,
) (*AnomalyRule, error) {
zap.L().Info("creating new AnomalyRule", zap.String("id", id), zap.Any("opts", opts))
baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
if err != nil {
return nil, err
}
t := AnomalyRule{
BaseRule: baseRule,
}
switch strings.ToLower(p.RuleCondition.Seasonality) {
case "hourly":
t.seasonality = anomaly.SeasonalityHourly
case "daily":
t.seasonality = anomaly.SeasonalityDaily
case "weekly":
t.seasonality = anomaly.SeasonalityWeekly
default:
t.seasonality = anomaly.SeasonalityDaily
}
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FeatureLookup: featureFlags,
}
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.reader = reader
if t.seasonality == anomaly.SeasonalityHourly {
t.provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
)
}
return &t, nil
}
func (r *AnomalyRule) Type() baserules.RuleType {
return RuleTypeAnomaly
}
func (r *AnomalyRule) prepareQueryRange(ts time.Time) (*v3.QueryRangeParamsV3, error) {
zap.L().Info("prepareQueryRange", zap.Int64("ts", ts.UnixMilli()), zap.Int64("evalWindow", r.EvalWindow().Milliseconds()), zap.Int64("evalDelay", r.EvalDelay().Milliseconds()))
start := ts.Add(-time.Duration(r.EvalWindow())).UnixMilli()
end := ts.UnixMilli()
if r.EvalDelay() > 0 {
start = start - int64(r.EvalDelay().Milliseconds())
end = end - int64(r.EvalDelay().Milliseconds())
}
// round to the minute, otherwise we could potentially miss data
start = start - (start % (60 * 1000))
end = end - (end % (60 * 1000))
compositeQuery := r.Condition().CompositeQuery
if compositeQuery.PanelType != v3.PanelTypeGraph {
compositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: compositeQuery,
Variables: make(map[string]interface{}, 0),
NoCache: false,
}, nil
}
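// Illustration of the windowing above (hypothetical values): with
// ts = 10:07:42, EvalWindow = 5m and EvalDelay = 1m, the raw window
// 10:02:42 to 10:07:42 is first shifted back by the delay to
// 10:01:42 to 10:06:42 and then truncated to the minute, yielding
// start = 10:01:00 and end = 10:06:00.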
func (r *AnomalyRule) GetSelectedQuery() string {
return r.Condition().GetSelectedQueryName()
}
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (baserules.Vector, error) {
params, err := r.prepareQueryRange(ts)
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
anomalies, err := r.provider.GetAnomalies(ctx, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
if err != nil {
return nil, err
}
var queryResult *v3.Result
for _, result := range anomalies.Results {
if result.QueryName == r.GetSelectedQuery() {
queryResult = result
break
}
}
if queryResult == nil {
// avoid a nil dereference when no result matches the selected query
return nil, fmt.Errorf("no query result found for the selected query %q", r.GetSelectedQuery())
}
var resultVector baserules.Vector
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
zap.L().Info("anomaly scores", zap.String("scores", string(scoresJSON)))
for _, series := range queryResult.AnomalyScores {
smpl, shouldAlert := r.ShouldAlert(*series)
if shouldAlert {
resultVector = append(resultVector, smpl)
}
}
return resultVector, nil
}
func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) {
prevState := r.State()
valueFormatter := formatter.FromUnit(r.Unit())
res, err := r.buildAndRunQuery(ctx, ts)
if err != nil {
return nil, err
}
r.mtx.Lock()
defer r.mtx.Unlock()
resultFPs := map[uint64]struct{}{}
var alerts = make(map[uint64]*baserules.Alert, len(res))
for _, smpl := range res {
l := make(map[string]string, len(smpl.Metric))
for _, lbl := range smpl.Metric {
l[lbl.Name] = lbl.Value
}
value := valueFormatter.Format(smpl.V, r.Unit())
threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))
tmplData := baserules.AlertTemplateData(l, value, threshold)
// Inject some convenience variables that are easier to remember for users
// who are not used to Go's templating system.
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
// utility function to apply go template on labels and annotations
expand := func(text string) string {
tmpl := baserules.NewTemplateExpander(
ctx,
defs+text,
"__alert_"+r.Name(),
tmplData,
times.Time(timestamp.FromTime(ts)),
nil,
)
result, err := tmpl.Expand()
if err != nil {
result = fmt.Sprintf("<error expanding template: %s>", err)
zap.L().Error("Expanding alert template failed", zap.Error(err), zap.Any("data", tmplData))
}
return result
}
lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel)
resultLabels := labels.NewBuilder(smpl.MetricOrig).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel).Labels()
for name, value := range r.Labels().Map() {
lb.Set(name, expand(value))
}
lb.Set(labels.AlertNameLabel, r.Name())
lb.Set(labels.AlertRuleIdLabel, r.ID())
lb.Set(labels.RuleSourceLabel, r.GeneratorURL())
annotations := make(labels.Labels, 0, len(r.Annotations().Map()))
for name, value := range r.Annotations().Map() {
annotations = append(annotations, labels.Label{Name: common.NormalizeLabelName(name), Value: expand(value)})
}
if smpl.IsMissing {
lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
}
lbs := lb.Labels()
h := lbs.Hash()
resultFPs[h] = struct{}{}
if _, ok := alerts[h]; ok {
zap.L().Error("the alert query returns duplicate records", zap.String("ruleid", r.ID()), zap.Any("alert", alerts[h]))
err = fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
return nil, err
}
alerts[h] = &baserules.Alert{
Labels: lbs,
QueryResultLables: resultLabels,
Annotations: annotations,
ActiveAt: ts,
State: model.StatePending,
Value: smpl.V,
GeneratorURL: r.GeneratorURL(),
Receivers: r.PreferredChannels(),
Missing: smpl.IsMissing,
}
}
zap.L().Info("number of alerts found", zap.String("name", r.Name()), zap.Int("count", len(alerts)))
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
// Update the last value and annotations if so, create a new alert entry otherwise.
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
alert.Value = a.Value
alert.Annotations = a.Annotations
alert.Receivers = r.PreferredChannels()
continue
}
r.Active[h] = a
}
itemsToAdd := []model.RuleStateHistory{}
// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
for fp, a := range r.Active {
labelsJSON, err := json.Marshal(a.QueryResultLables)
if err != nil {
zap.L().Error("error marshaling labels", zap.Error(err), zap.Any("labels", a.Labels))
}
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given
// retention time so it is reported as resolved to the AlertManager.
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > baserules.ResolvedRetention) {
delete(r.Active, fp)
}
if a.State != model.StateInactive {
a.State = model.StateInactive
a.ResolvedAt = ts
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: model.StateInactive,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
if a.Missing {
state = model.StateNoData
}
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
}
currentState := r.State()
overallStateChanged := currentState != prevState
for idx, item := range itemsToAdd {
item.OverallStateChanged = overallStateChanged
item.OverallState = currentState
itemsToAdd[idx] = item
}
r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)
return len(r.Active), nil
}
func (r *AnomalyRule) String() string {
ar := baserules.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: baserules.Duration(r.EvalWindow()),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),
}
byt, err := yaml.Marshal(ar)
if err != nil {
return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
}
return string(byt)
}

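The defs prefix built in Eval above exposes $labels, $value and $threshold to user-supplied templates. For illustration, a hypothetical annotation value that the expand helper would render (the label name is made up):

// Hypothetical annotation (not from this diff); the defs prefix makes these
// shorthands resolve against the fired sample's labels and values.
var exampleDescription = "anomaly score {{$value}} crossed the threshold {{$threshold}} for service {{$labels.service_name}}"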
View File

@@ -1,89 +0,0 @@
package rules
import (
"fmt"
"time"
baserules "go.signoz.io/signoz/pkg/query-service/rules"
)
func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
rules := make([]baserules.Rule, 0)
var task baserules.Task
ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
if opts.Rule.RuleType == baserules.RuleTypeThreshold {
// create a threshold rule
tr, err := baserules.NewThresholdRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
)
if err != nil {
return task, err
}
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
} else if opts.Rule.RuleType == baserules.RuleTypeProm {
// create promql rule
pr, err := baserules.NewPromRule(
ruleId,
opts.Rule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.PqlEngine,
)
if err != nil {
return task, err
}
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
} else if opts.Rule.RuleType == baserules.RuleTypeAnomaly {
// create anomaly rule
ar, err := NewAnomalyRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
)
if err != nil {
return task, err
}
rules = append(rules, ar)
// create anomaly rule task for evalution
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
} else {
return nil, fmt.Errorf("unsupported rule type. Supported types: %s, %s, %s", baserules.RuleTypeProm, baserules.RuleTypeThreshold, baserules.RuleTypeAnomaly)
}
return task, nil
}
// newTask returns an appropriate task group for
// the rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, ruleDB baserules.RuleDB) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
}
return baserules.NewPromRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
}

View File

@@ -51,7 +51,7 @@
"ansi-to-html": "0.7.2", "ansi-to-html": "0.7.2",
"antd": "5.11.0", "antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1", "antd-table-saveas-excel": "2.2.1",
"axios": "1.7.4", "axios": "1.6.4",
"babel-eslint": "^10.1.0", "babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4", "babel-jest": "^29.6.4",
"babel-loader": "9.1.3", "babel-loader": "9.1.3",
@@ -88,7 +88,7 @@
"lucide-react": "0.379.0", "lucide-react": "0.379.0",
"mini-css-extract-plugin": "2.4.5", "mini-css-extract-plugin": "2.4.5",
"papaparse": "5.4.1", "papaparse": "5.4.1",
"posthog-js": "1.160.3", "posthog-js": "1.142.1",
"rc-tween-one": "3.0.6", "rc-tween-one": "3.0.6",
"react": "18.2.0", "react": "18.2.0",
"react-addons-update": "15.6.3", "react-addons-update": "15.6.3",
@@ -207,6 +207,7 @@
"eslint-plugin-sonarjs": "^0.12.0", "eslint-plugin-sonarjs": "^0.12.0",
"husky": "^7.0.4", "husky": "^7.0.4",
"is-ci": "^3.0.1", "is-ci": "^3.0.1",
"jest-playwright-preset": "^1.7.2",
"jest-styled-components": "^7.0.8", "jest-styled-components": "^7.0.8",
"lint-staged": "^12.5.0", "lint-staged": "^12.5.0",
"msw": "1.3.2", "msw": "1.3.2",

View File

@@ -1 +0,0 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#prefix__clip0_4344_1236)" stroke="#C0C1C3" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="M4.667 1.167H2.333c-.644 0-1.166.522-1.166 1.166v2.334c0 .644.522 1.166 1.166 1.166h2.334c.644 0 1.166-.522 1.166-1.166V2.333c0-.644-.522-1.166-1.166-1.166zM8.167 1.167a1.17 1.17 0 011.166 1.166v2.334a1.17 1.17 0 01-1.166 1.166M11.667 1.167a1.17 1.17 0 011.166 1.166v2.334a1.17 1.17 0 01-1.166 1.166M5.833 10.5H2.917c-.992 0-1.75-.758-1.75-1.75v-.583"/><path d="M4.083 12.25l1.75-1.75-1.75-1.75M11.667 8.167H9.333c-.644 0-1.166.522-1.166 1.166v2.334c0 .644.522 1.166 1.166 1.166h2.334c.644 0 1.166-.522 1.166-1.166V9.333c0-.644-.522-1.166-1.166-1.166z"/></g><defs><clipPath id="prefix__clip0_4344_1236"><path fill="#fff" d="M0 0h14v14H0z"/></clipPath></defs></svg>

Before: 878 B


@@ -1 +0,0 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#prefix__clip0_4062_7291)" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="M7 12.833A5.833 5.833 0 107 1.167a5.833 5.833 0 000 11.666z" fill="#E5484D" stroke="#E5484D"/><path d="M8.75 5.25l-3.5 3.5M5.25 5.25l3.5 3.5" stroke="#121317"/></g><defs><clipPath id="prefix__clip0_4062_7291"><path fill="#fff" d="M0 0h14v14H0z"/></clipPath></defs></svg>

Before: 467 B


@@ -1,7 +1,11 @@
<svg xmlns="http://www.w3.org/2000/svg" width="100" height="100" fill="none"><rect width="100" height="100" fill="url(#a)" rx="20"/><g fill="#fff" fill-rule="evenodd" clip-rule="evenodd" filter="url(#b)"><path d="M11 49.941v-.003l.002-.005.003-.014.007-.035a8.37 8.37 0 0 1 .105-.42c.073-.263.184-.624.348-1.072.328-.896.866-2.135 1.73-3.617 1.732-2.97 4.753-6.883 9.95-10.955 5.223-4.092 10.295-6.293 14.08-7.471a35.328 35.328 0 0 1 4.585-1.114 23.628 23.628 0 0 1 1.687-.223 9.17 9.17 0 0 1 .108-.009l.034-.002h.011l.007-.001s.002 0 .133 2.217c.13 2.218.132 2.218.132 2.218h-.002l-.053.004a19.098 19.098 0 0 0-1.326.178c-.809.136-1.937.37-3.302.763l-.127.043c-2.745.94-6.666 2.775-11.249 6.362-4.572 3.577-7.142 6.95-8.563 9.393-.711 1.222-1.137 2.215-1.383 2.889a9.995 9.995 0 0 0-.29.933c.008.037.022.095.044.173.046.166.123.423.246.76.246.674.672 1.667 1.383 2.89 1.421 2.441 3.991 5.815 8.563 9.392 4.584 3.587 8.504 5.423 11.25 6.362l.126.043c1.365.393 2.493.627 3.302.763a19.098 19.098 0 0 0 1.326.178l.053.004h.002s-.002 0-.133 2.218C43.66 75 43.657 75 43.657 75h-.007l-.011-.001-.034-.002a9.17 9.17 0 0 1-.478-.046 23.628 23.628 0 0 1-1.317-.186 35.328 35.328 0 0 1-4.584-1.114c-3.786-1.178-8.858-3.38-14.081-7.471-5.197-4.072-8.218-7.985-9.95-10.955-.864-1.482-1.402-2.72-1.73-3.617-.164-.448-.275-.81-.348-1.072a8.37 8.37 0 0 1-.105-.42l-.007-.035-.003-.014-.002-.005v-.121Zm78 0v-.003l-.002-.005-.002-.014-.008-.035a8.532 8.532 0 0 0-.105-.42 14.049 14.049 0 0 0-.348-1.072c-.328-.896-.866-2.135-1.73-3.617-1.732-2.97-4.753-6.883-9.95-10.955-5.223-4.092-10.295-6.293-14.08-7.471a35.328 35.328 0 0 0-4.585-1.114 23.628 23.628 0 0 0-1.687-.223 9.17 9.17 0 0 0-.108-.009l-.034-.002h-.011L56.343 25s-.002 0-.133 2.217c-.13 2.218-.132 2.218-.132 2.218h.002l.053.004a19.098 19.098 0 0 1 1.326.178c.809.136 1.937.37 3.302.763l.127.043c2.745.94 6.666 2.775 11.249 6.362 4.572 3.577 7.141 6.95 8.563 9.393.711 1.222 1.137 2.215 1.383 2.889a9.995 9.995 0 0 1 .29.933 9.995 9.995 0 0 1-.29.934c-.246.673-.672 1.666-1.383 2.888-1.422 2.442-3.991 5.816-8.563 9.393-4.584 3.587-8.504 5.423-11.25 6.362l-.126.043a30.108 30.108 0 0 1-3.302.763 19.098 19.098 0 0 1-1.326.178l-.053.004h-.002s.002 0 .133 2.218C56.34 75 56.343 75 56.343 75h.007l.011-.001.034-.002a9.17 9.17 0 0 0 .478-.046c.314-.034.758-.092 1.317-.186a35.328 35.328 0 0 0 4.584-1.114c3.786-1.178 8.858-3.38 14.081-7.471 5.197-4.072 8.218-7.985 9.95-10.955.864-1.482 1.402-2.72 1.73-3.617.164-.448.275-.81.348-1.072a8.532 8.532 0 0 0 .105-.42l.008-.035.002-.014.001-.005.001-.003v-.118Z"/><path d="M68.342 49.998c0 9.846-7.924 17.827-17.7 17.827-9.775 0-17.7-7.981-17.7-17.827 0-9.846 7.925-17.827 17.7-17.827 9.776 0 17.7 7.981 17.7 17.827ZM46.218 39.97s-2.127 2.508-2.766 4.457c-.412 1.257-.553 3.343-.553 3.343h-5.531s0-1.672 1.106-4.457c1.106-2.786 2.212-3.343 2.212-3.343h5.532Zm-2.766 15.6c.639 1.949 2.766 4.457 2.766 4.457h-5.532s-1.106-.557-2.212-3.343c-1.106-2.785-1.106-4.457-1.106-4.457h5.53s.142 2.086.554 3.343Z"/></g><defs><radialGradient id="a" cx="0" cy="0" r="1" gradientTransform="matrix(40.99997 42 -42 40.99997 50 50)" gradientUnits="userSpaceOnUse"><stop offset=".33" stop-color="#F76526"/><stop offset="1" stop-color="#F43030"/></radialGradient><filter id="b" width="90" height="62" x="5" y="23" color-interpolation-filters="sRGB" filterUnits="userSpaceOnUse"><feFlood flood-opacity="0" result="BackgroundImageFix"/><feColorMatrix in="SourceAlpha" result="hardAlpha" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/><feOffset dy="4"/><feGaussianBlur 
stdDeviation="3"/><feComposite in2="hardAlpha" operator="out"/><feColorMatrix values="0 0 0 0 0.368384 0 0 0 0 0.0623777 0 0 0 0 0.0623777 0 0 0 0.25 0"/><feBlend in2="BackgroundImageFix" result="effect1_dropShadow_3909_18731"/><feBlend in="SourceGraphic" in2="effect1_dropShadow_3909_18731" result="shape"/><feColorMatrix in="SourceAlpha" result="hardAlpha" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/><feOffset dy="4"/><feGaussianBlur stdDeviation="3"/><feComposite in2="hardAlpha" k2="-1" k3="1" operator="arithmetic"/><feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.06 0"/><feBlend in2="shape" result="effect2_innerShadow_3909_18731"/></filter></defs></svg> <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_2048_2251)">
<path opacity="0.9" d="M8.02226 15.9866C3.56539 15.9866 -6.10352e-05 12.4896 -6.10352e-05 8.11832C-6.10352e-05 3.79075 3.56539 0.25 8.02226 0.25H13.0584C14.7075 0.25 15.9999 1.56139 15.9999 3.13506V8.11832C15.9999 12.4896 12.4345 15.9866 8.02226 15.9866Z" fill="#F25733"/>
<path d="M7.95919 4.71207C4.63025 4.71207 2.75514 7.46868 2.67693 7.58603C2.48413 7.87508 2.48413 8.24888 2.67707 8.53816C2.75514 8.65528 4.63025 11.4119 7.95919 11.4119C11.2881 11.4119 13.1633 8.65528 13.2414 8.53792C13.4342 8.24888 13.4342 7.87508 13.2413 7.58582C13.1632 7.46868 11.2881 4.71207 7.95919 4.71207ZM3.13771 8.23088C3.06925 8.12832 3.06925 7.99571 3.13771 7.89307C3.20059 7.79867 4.53564 5.83764 6.92256 5.36723C5.84092 5.78476 5.07127 6.83485 5.07127 8.062C5.07127 9.28912 5.84092 10.3392 6.92256 10.7567C4.53564 10.2863 3.20059 8.32528 3.13771 8.23088ZM6.62838 8.062C6.62838 8.21488 6.50443 8.3388 6.35151 8.3388C6.19859 8.3388 6.07465 8.21488 6.07465 8.062C6.07465 7.02287 6.92003 6.17748 7.95916 6.17748C8.11207 6.17748 8.23599 6.30141 8.23599 6.45434C8.23599 6.60727 8.11207 6.73119 7.95916 6.73119C7.22535 6.73119 6.62838 7.32815 6.62838 8.062ZM7.95919 8.73504C7.58803 8.73504 7.2861 8.43312 7.2861 8.062C7.2861 7.69085 7.58803 7.3889 7.95919 7.3889C8.33039 7.3889 8.63231 7.69083 8.63231 8.062C8.63231 8.43312 8.33039 8.73504 7.95919 8.73504ZM12.7806 8.23088C12.7178 8.32528 11.3827 10.2863 8.99583 10.7567C10.0775 10.3392 10.8471 9.28912 10.8471 8.062C10.8471 6.83487 10.0775 5.78477 8.99583 5.36724C11.3827 5.83768 12.7178 7.7987 12.7806 7.89307C12.8491 7.99571 12.8491 8.12832 12.7806 8.23088Z" fill="#F9F2F9"/>
</g>
<defs>
<clipPath id="clip0_2048_2251">
<rect width="16" height="16" fill="white"/>
</clipPath>
</defs>
</svg>

Before: 4.1 KiB, After: 1.8 KiB

Binary file not shown.

Before: 13 KiB, After: 2.2 KiB


@@ -53,7 +53,6 @@
"option_atleastonce": "at least once", "option_atleastonce": "at least once",
"option_onaverage": "on average", "option_onaverage": "on average",
"option_intotal": "in total", "option_intotal": "in total",
"option_last": "last",
"option_above": "above", "option_above": "above",
"option_below": "below", "option_below": "below",
"option_equal": "is equal to", "option_equal": "is equal to",


@@ -1,30 +0,0 @@
{
"breadcrumb": "Messaging Queues",
"header": "Kafka / Overview",
"overview": {
"title": "Start sending data in as little as 20 minutes",
"subtitle": "Connect and Monitor Your Data Streams"
},
"configureConsumer": {
"title": "Configure Consumer",
"description": "Add consumer data sources to gain insights and enhance monitoring.",
"button": "Get Started"
},
"configureProducer": {
"title": "Configure Producer",
"description": "Add producer data sources to gain insights and enhance monitoring.",
"button": "Get Started"
},
"monitorKafka": {
"title": "Monitor kafka",
"description": "Add your Kafka source to gain insights and enhance activity tracking.",
"button": "Get Started"
},
"summarySection": {
"viewDetailsButton": "View Details"
},
"confirmModal": {
"content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.",
"okText": "Proceed"
}
}


@@ -40,7 +40,6 @@
"option_atleastonce": "at least once", "option_atleastonce": "at least once",
"option_onaverage": "on average", "option_onaverage": "on average",
"option_intotal": "in total", "option_intotal": "in total",
"option_last": "last",
"option_above": "above", "option_above": "above",
"option_below": "below", "option_below": "below",
"option_equal": "is equal to", "option_equal": "is equal to",


@@ -1,3 +1,3 @@
{ {
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or " "rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support."
} }


@@ -38,7 +38,5 @@
"LIST_LICENSES": "SigNoz | List of Licenses", "LIST_LICENSES": "SigNoz | List of Licenses",
"WORKSPACE_LOCKED": "SigNoz | Workspace Locked", "WORKSPACE_LOCKED": "SigNoz | Workspace Locked",
"SUPPORT": "SigNoz | Support", "SUPPORT": "SigNoz | Support",
"DEFAULT": "Open source Observability Platform | SigNoz", "DEFAULT": "Open source Observability Platform | SigNoz"
"ALERT_HISTORY": "SigNoz | Alert Rule History",
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview"
} }


@@ -1,22 +0,0 @@
{
"trialPlanExpired": "Trial Plan Expired",
"gotQuestions": "Got Questions?",
"contactUs": "Contact Us",
"upgradeToContinue": "Upgrade to Continue",
"upgradeNow": "Upgrade now to keep enjoying all the great features youve been using.",
"yourDataIsSafe": "Your data is safe with us until",
"actNow": "Act now to avoid any disruptions and continue where you left off.",
"contactAdmin": "Contact your admin to proceed with the upgrade.",
"continueMyJourney": "Continue My Journey",
"needMoreTime": "Need More Time?",
"extendTrial": "Extend Trial",
"extendTrialMsgPart1": "If you have a specific reason why you were not able to finish your PoC in the trial period, please write to us on",
"extendTrialMsgPart2": "with the reason. Sometimes we can extend trial by a few days on a case by case basis",
"whyChooseSignoz": "Why choose Signoz",
"enterpriseGradeObservability": "Enterprise-grade Observability",
"observabilityDescription": "Get access to observability at any scale with advanced security and compliance.",
"continueToUpgrade": "Continue to Upgrade",
"youAreInGoodCompany": "You are in good company",
"faqs": "FAQs",
"somethingWentWrong": "Something went wrong"
}


@@ -53,7 +53,6 @@
"option_atleastonce": "at least once", "option_atleastonce": "at least once",
"option_onaverage": "on average", "option_onaverage": "on average",
"option_intotal": "in total", "option_intotal": "in total",
"option_last": "last",
"option_above": "above", "option_above": "above",
"option_below": "below", "option_below": "below",
"option_equal": "is equal to", "option_equal": "is equal to",


@@ -1,7 +1,6 @@
{ {
"create_dashboard": "Create Dashboard", "create_dashboard": "Create Dashboard",
"import_json": "Import Dashboard JSON", "import_json": "Import Dashboard JSON",
"view_template": "View templates",
"import_grafana_json": "Import Grafana JSON", "import_grafana_json": "Import Grafana JSON",
"copy_to_clipboard": "Copy To ClipBoard", "copy_to_clipboard": "Copy To ClipBoard",
"download_json": "Download JSON", "download_json": "Download JSON",


@@ -1,30 +0,0 @@
{
"breadcrumb": "Messaging Queues",
"header": "Kafka / Overview",
"overview": {
"title": "Start sending data in as little as 20 minutes",
"subtitle": "Connect and Monitor Your Data Streams"
},
"configureConsumer": {
"title": "Configure Consumer",
"description": "Add consumer data sources to gain insights and enhance monitoring.",
"button": "Get Started"
},
"configureProducer": {
"title": "Configure Producer",
"description": "Add producer data sources to gain insights and enhance monitoring.",
"button": "Get Started"
},
"monitorKafka": {
"title": "Monitor kafka",
"description": "Add your Kafka source to gain insights and enhance activity tracking.",
"button": "Get Started"
},
"summarySection": {
"viewDetailsButton": "View Details"
},
"confirmModal": {
"content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.",
"okText": "Proceed"
}
}


@@ -40,7 +40,6 @@
"option_atleastonce": "at least once", "option_atleastonce": "at least once",
"option_onaverage": "on average", "option_onaverage": "on average",
"option_intotal": "in total", "option_intotal": "in total",
"option_last": "last",
"option_above": "above", "option_above": "above",
"option_below": "below", "option_below": "below",
"option_equal": "is equal to", "option_equal": "is equal to",


@@ -1,3 +1,3 @@
{ {
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or " "rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support."
} }


@@ -49,8 +49,5 @@
"TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views", "TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views",
"DEFAULT": "Open source Observability Platform | SigNoz", "DEFAULT": "Open source Observability Platform | SigNoz",
"SHORTCUTS": "SigNoz | Shortcuts", "SHORTCUTS": "SigNoz | Shortcuts",
"INTEGRATIONS": "SigNoz | Integrations", "INTEGRATIONS": "SigNoz | Integrations"
"ALERT_HISTORY": "SigNoz | Alert Rule History",
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
"MESSAGING_QUEUES": "SigNoz | Messaging Queues"
} }


@@ -1,22 +0,0 @@
{
"trialPlanExpired": "Trial Plan Expired",
"gotQuestions": "Got Questions?",
"contactUs": "Contact Us",
"upgradeToContinue": "Upgrade to Continue",
"upgradeNow": "Upgrade now to keep enjoying all the great features youve been using.",
"yourDataIsSafe": "Your data is safe with us until",
"actNow": "Act now to avoid any disruptions and continue where you left off.",
"contactAdmin": "Contact your admin to proceed with the upgrade.",
"continueMyJourney": "Continue My Journey",
"needMoreTime": "Need More Time?",
"extendTrial": "Extend Trial",
"extendTrialMsgPart1": "If you have a specific reason why you were not able to finish your PoC in the trial period, please write to us on",
"extendTrialMsgPart2": "with the reason. Sometimes we can extend trial by a few days on a case by case basis",
"whyChooseSignoz": "Why choose Signoz",
"enterpriseGradeObservability": "Enterprise-grade Observability",
"observabilityDescription": "Get access to observability at any scale with advanced security and compliance.",
"continueToUpgrade": "Continue to Upgrade",
"youAreInGoodCompany": "You are in good company",
"faqs": "FAQs",
"somethingWentWrong": "Something went wrong"
}

File diff suppressed because one or more lines are too long

After: 10 KiB


@@ -76,8 +76,9 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
isUserFetching: false, isUserFetching: false,
}, },
}); });
if (!isLoggedIn) { if (!isLoggedIn) {
history.push(ROUTES.LOGIN, { from: pathname }); history.push(ROUTES.LOGIN);
} }
}; };


@@ -12,7 +12,6 @@ import useAnalytics from 'hooks/analytics/useAnalytics';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys'; import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode'; import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
import { THEME_MODE } from 'hooks/useDarkMode/constant'; import { THEME_MODE } from 'hooks/useDarkMode/constant';
import useFeatureFlags from 'hooks/useFeatureFlag';
import useGetFeatureFlag from 'hooks/useGetFeatureFlag'; import useGetFeatureFlag from 'hooks/useGetFeatureFlag';
import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense'; import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications'; import { NotificationProvider } from 'hooks/useNotifications';
@@ -20,7 +19,6 @@ import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history'; import history from 'lib/history';
import { identity, pick, pickBy } from 'lodash-es'; import { identity, pick, pickBy } from 'lodash-es';
import posthog from 'posthog-js'; import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { DashboardProvider } from 'providers/Dashboard/Dashboard'; import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { QueryBuilderProvider } from 'providers/QueryBuilder'; import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useEffect, useState } from 'react'; import { Suspense, useEffect, useState } from 'react';
@@ -59,16 +57,15 @@ function App(): JSX.Element {
const isDarkMode = useIsDarkMode(); const isDarkMode = useIsDarkMode();
const isOnboardingEnabled =
useFeatureFlags(FeatureKeys.ONBOARDING)?.active || false;
const isChatSupportEnabled =
useFeatureFlags(FeatureKeys.CHAT_SUPPORT)?.active || false;
const isPremiumSupportEnabled =
useFeatureFlags(FeatureKeys.PREMIUM_SUPPORT)?.active || false;
const featureResponse = useGetFeatureFlag((allFlags) => { const featureResponse = useGetFeatureFlag((allFlags) => {
const isOnboardingEnabled =
allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
false;
const isChatSupportEnabled =
allFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)?.active ||
false;
dispatch({ dispatch({
type: UPDATE_FEATURE_FLAG_RESPONSE, type: UPDATE_FEATURE_FLAG_RESPONSE,
payload: { payload: {
@@ -84,6 +81,16 @@ function App(): JSX.Element {
setRoutes(newRoutes); setRoutes(newRoutes);
} }
if (isLoggedInState && isChatSupportEnabled) {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.Intercom('boot', {
app_id: process.env.INTERCOM_APP_ID,
email: user?.email || '',
name: user?.name || '',
});
}
}); });
const isOnBasicPlan = const isOnBasicPlan =
@@ -121,6 +128,7 @@ function App(): JSX.Element {
window.analytics.identify(email, sanitizedIdentifyPayload); window.analytics.identify(email, sanitizedIdentifyPayload);
window.analytics.group(domain, groupTraits); window.analytics.group(domain, groupTraits);
window.clarity('identify', email, name);
posthog?.identify(email, { posthog?.identify(email, {
email, email,
@@ -185,26 +193,6 @@ function App(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [pathname]); }, [pathname]);
useEffect(() => {
const showAddCreditCardModal =
!isPremiumSupportEnabled &&
!licenseData?.payload?.trialConvertedToSubscription;
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
window.Intercom('boot', {
app_id: process.env.INTERCOM_APP_ID,
email: user?.email || '',
name: user?.name || '',
});
}
}, [
isLoggedInState,
isChatSupportEnabled,
user,
licenseData,
isPremiumSupportEnabled,
]);
useEffect(() => { useEffect(() => {
if (user && user?.email && user?.userId && user?.name) { if (user && user?.email && user?.userId && user?.name) {
try { try {
@@ -240,24 +228,22 @@ function App(): JSX.Element {
<QueryBuilderProvider> <QueryBuilderProvider>
<DashboardProvider> <DashboardProvider>
<KeyboardHotkeysProvider> <KeyboardHotkeysProvider>
<AlertRuleProvider> <AppLayout>
<AppLayout> <Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}> <Switch>
<Switch> {routes.map(({ path, component, exact }) => (
{routes.map(({ path, component, exact }) => ( <Route
<Route key={`${path}`}
key={`${path}`} exact={exact}
exact={exact} path={path}
path={path} component={component}
component={component} />
/> ))}
))}
<Route path="*" component={NotFound} /> <Route path="*" component={NotFound} />
</Switch> </Switch>
</Suspense> </Suspense>
</AppLayout> </AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider> </KeyboardHotkeysProvider>
</DashboardProvider> </DashboardProvider>
</QueryBuilderProvider> </QueryBuilderProvider>


@@ -92,14 +92,6 @@ export const CreateNewAlerts = Loadable(
() => import(/* webpackChunkName: "Create Alerts" */ 'pages/CreateAlert'), () => import(/* webpackChunkName: "Create Alerts" */ 'pages/CreateAlert'),
); );
export const AlertHistory = Loadable(
() => import(/* webpackChunkName: "Alert History" */ 'pages/AlertList'),
);
export const AlertOverview = Loadable(
() => import(/* webpackChunkName: "Alert Overview" */ 'pages/AlertList'),
);
export const CreateAlertChannelAlerts = Loadable( export const CreateAlertChannelAlerts = Loadable(
() => () =>
import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'), import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'),
@@ -212,15 +204,3 @@ export const InstalledIntegrations = Loadable(
/* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage' /* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage'
), ),
); );
export const MessagingQueues = Loadable(
() =>
import(/* webpackChunkName: "MessagingQueues" */ 'pages/MessagingQueues'),
);
export const MQDetailPage = Loadable(
() =>
import(
/* webpackChunkName: "MQDetailPage" */ 'pages/MessagingQueues/MQDetailPage'
),
);


@@ -2,8 +2,6 @@ import ROUTES from 'constants/routes';
import { RouteProps } from 'react-router-dom'; import { RouteProps } from 'react-router-dom';
import { import {
AlertHistory,
AlertOverview,
AllAlertChannels, AllAlertChannels,
AllErrors, AllErrors,
APIKeys, APIKeys,
@@ -25,8 +23,6 @@ import {
LogsExplorer, LogsExplorer,
LogsIndexToFields, LogsIndexToFields,
LogsSaveViews, LogsSaveViews,
MessagingQueues,
MQDetailPage,
MySettings, MySettings,
NewDashboardPage, NewDashboardPage,
OldLogsExplorer, OldLogsExplorer,
@@ -173,20 +169,6 @@ const routes: AppRoutes[] = [
isPrivate: true, isPrivate: true,
key: 'ALERTS_NEW', key: 'ALERTS_NEW',
}, },
{
path: ROUTES.ALERT_HISTORY,
exact: true,
component: AlertHistory,
isPrivate: true,
key: 'ALERT_HISTORY',
},
{
path: ROUTES.ALERT_OVERVIEW,
exact: true,
component: AlertOverview,
isPrivate: true,
key: 'ALERT_OVERVIEW',
},
{ {
path: ROUTES.TRACE, path: ROUTES.TRACE,
exact: true, exact: true,
@@ -369,20 +351,6 @@ const routes: AppRoutes[] = [
isPrivate: true, isPrivate: true,
key: 'INTEGRATIONS', key: 'INTEGRATIONS',
}, },
{
path: ROUTES.MESSAGING_QUEUES,
exact: true,
component: MessagingQueues,
key: 'MESSAGING_QUEUES',
isPrivate: true,
},
{
path: ROUTES.MESSAGING_QUEUES_DETAIL,
exact: true,
component: MQDetailPage,
key: 'MESSAGING_QUEUES_DETAIL',
isPrivate: true,
},
]; ];
export const SUPPORT_ROUTE: AppRoutes = { export const SUPPORT_ROUTE: AppRoutes = {


@@ -9,9 +9,9 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
// normalize the response status to the standard ErrorStatusCode type // normalize the response status to the standard ErrorStatusCode type
const statusCode = response.status as ErrorStatusCode; const statusCode = response.status as ErrorStatusCode;
const { data } = response as AxiosResponse;
if (statusCode >= 400 && statusCode < 500) { if (statusCode >= 400 && statusCode < 500) {
const { data } = response as AxiosResponse;
if (statusCode === 404) { if (statusCode === 404) {
return { return {
statusCode, statusCode,
@@ -34,11 +34,12 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
body: JSON.stringify((response.data as any).data), body: JSON.stringify((response.data as any).data),
}; };
} }
return { return {
statusCode, statusCode,
payload: null, payload: null,
error: 'Something went wrong', error: 'Something went wrong',
message: data?.error, message: null,
}; };
} }
if (request) { if (request) {


@@ -1,20 +1,26 @@
import axios from 'api'; import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api'; import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/create'; import { PayloadProps, Props } from 'types/api/alerts/create';
const create = async ( const create = async (
props: Props, props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => { ): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.post('/rules', { try {
...props.data, const response = await axios.post('/rules', {
}); ...props.data,
});
return { return {
statusCode: 200, statusCode: 200,
error: null, error: null,
message: response.data.status, message: response.data.status,
payload: response.data.data, payload: response.data.data,
}; };
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
}; };
export default create; export default create;


@@ -1,18 +1,24 @@
import axios from 'api'; import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api'; import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/delete'; import { PayloadProps, Props } from 'types/api/alerts/delete';
const deleteAlerts = async ( const deleteAlerts = async (
props: Props, props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => { ): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.delete(`/rules/${props.id}`); try {
const response = await axios.delete(`/rules/${props.id}`);
return { return {
statusCode: 200, statusCode: 200,
error: null, error: null,
message: response.data.status, message: response.data.status,
payload: response.data.data.rules, payload: response.data.data.rules,
}; };
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
}; };
export default deleteAlerts; export default deleteAlerts;


@@ -1,16 +1,24 @@
import axios from 'api'; import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api'; import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/get'; import { PayloadProps, Props } from 'types/api/alerts/get';
const get = async ( const get = async (
props: Props, props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => { ): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.get(`/rules/${props.id}`); try {
return { const response = await axios.get(`/rules/${props.id}`);
statusCode: 200,
error: null, return {
message: response.data.status, statusCode: 200,
payload: response.data, error: null,
}; message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
}; };
export default get; export default get;


@@ -1,20 +1,26 @@
import axios from 'api'; import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api'; import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/patch'; import { PayloadProps, Props } from 'types/api/alerts/patch';
const patch = async ( const patch = async (
props: Props, props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => { ): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.patch(`/rules/${props.id}`, { try {
...props.data, const response = await axios.patch(`/rules/${props.id}`, {
}); ...props.data,
});
return { return {
statusCode: 200, statusCode: 200,
error: null, error: null,
message: response.data.status, message: response.data.status,
payload: response.data.data, payload: response.data.data,
}; };
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
}; };
export default patch; export default patch;


@@ -1,20 +1,26 @@
import axios from 'api'; import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api'; import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/save'; import { PayloadProps, Props } from 'types/api/alerts/save';
const put = async ( const put = async (
props: Props, props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => { ): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.put(`/rules/${props.id}`, { try {
...props.data, const response = await axios.put(`/rules/${props.id}`, {
}); ...props.data,
});
return { return {
statusCode: 200, statusCode: 200,
error: null, error: null,
message: response.data.status, message: response.data.status,
payload: response.data.data, payload: response.data.data,
}; };
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
}; };
export default put; export default put;


@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertRuleStatsPayload } from 'types/api/alerts/def';
import { RuleStatsProps } from 'types/api/alerts/ruleStats';
const ruleStats = async (
props: RuleStatsProps,
): Promise<SuccessResponse<AlertRuleStatsPayload> | ErrorResponse> => {
try {
const response = await axios.post(`/rules/${props.id}/history/stats`, {
start: props.start,
end: props.end,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default ruleStats;


@@ -1,33 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertRuleTimelineGraphResponsePayload } from 'types/api/alerts/def';
import { GetTimelineGraphRequestProps } from 'types/api/alerts/timelineGraph';
const timelineGraph = async (
props: GetTimelineGraphRequestProps,
): Promise<
SuccessResponse<AlertRuleTimelineGraphResponsePayload> | ErrorResponse
> => {
try {
const response = await axios.post(
`/rules/${props.id}/history/overall_status`,
{
start: props.start,
end: props.end,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default timelineGraph;


@@ -1,36 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertRuleTimelineTableResponsePayload } from 'types/api/alerts/def';
import { GetTimelineTableRequestProps } from 'types/api/alerts/timelineTable';
const timelineTable = async (
props: GetTimelineTableRequestProps,
): Promise<
SuccessResponse<AlertRuleTimelineTableResponsePayload> | ErrorResponse
> => {
try {
const response = await axios.post(`/rules/${props.id}/history/timeline`, {
start: props.start,
end: props.end,
offset: props.offset,
limit: props.limit,
order: props.order,
state: props.state,
// TODO(shaheer): implement filters
filters: props.filters,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default timelineTable;


@@ -1,33 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertRuleTopContributorsPayload } from 'types/api/alerts/def';
import { TopContributorsProps } from 'types/api/alerts/topContributors';
const topContributors = async (
props: TopContributorsProps,
): Promise<
SuccessResponse<AlertRuleTopContributorsPayload> | ErrorResponse
> => {
try {
const response = await axios.post(
`/rules/${props.id}/history/top_contributors`,
{
start: props.start,
end: props.end,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default topContributors;


@@ -1,62 +0,0 @@
import getLocalStorageApi from 'api/browser/localstorage/get';
import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import { isEmpty } from 'lodash-es';
export interface WsDataEvent {
read_rows: number;
read_bytes: number;
elapsed_ms: number;
}
interface GetQueryStatsProps {
queryId: string;
setData: React.Dispatch<React.SetStateAction<WsDataEvent | undefined>>;
}
function getURL(baseURL: string, queryId: string): URL | string {
if (baseURL && !isEmpty(baseURL)) {
return `${baseURL}/ws/query_progress?q=${queryId}`;
}
const url = new URL(`/ws/query_progress?q=${queryId}`, window.location.href);
if (window.location.protocol === 'http:') {
url.protocol = 'ws';
} else {
url.protocol = 'wss';
}
return url;
}
export function getQueryStats(props: GetQueryStatsProps): void {
const { queryId, setData } = props;
const token = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) || '';
// https://github.com/whatwg/websockets/issues/20 reason for not using the relative URLs
const url = getURL(ENVIRONMENT.wsURL, queryId);
const socket = new WebSocket(url, token);
socket.addEventListener('message', (event) => {
try {
const parsedData = JSON.parse(event?.data);
setData(parsedData);
} catch {
setData(event?.data);
}
});
socket.addEventListener('error', (event) => {
console.error(event);
});
socket.addEventListener('close', (event) => {
// 1000 is a normal closure status code
if (event.code !== 1000) {
console.error('WebSocket closed with error:', event);
} else {
console.info('WebSocket closed normally.');
}
});
}
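The removed helper above pins down the wire contract: open a WebSocket to /ws/query_progress?q=<queryId>, pass the auth token as the subprotocol, and read JSON frames shaped like WsDataEvent. A minimal sketch of a non-browser client for the same endpoint, assuming gorilla/websocket and placeholder host/token values (none of which appear in the original code):

```go
package main

import (
	"encoding/json"
	"log"
	"net/http"

	"github.com/gorilla/websocket"
)

// wsDataEvent mirrors the WsDataEvent interface from the removed helper.
type wsDataEvent struct {
	ReadRows  int64 `json:"read_rows"`
	ReadBytes int64 `json:"read_bytes"`
	ElapsedMs int64 `json:"elapsed_ms"`
}

func main() {
	// The TypeScript helper passes the auth token as the WebSocket
	// subprotocol (second argument to `new WebSocket`); Dialer.Subprotocols
	// is the gorilla equivalent. Host, token, and query id are placeholders.
	dialer := websocket.Dialer{Subprotocols: []string{"AUTH_TOKEN"}}
	conn, _, err := dialer.Dial("ws://localhost:8080/ws/query_progress?q=QUERY_ID", http.Header{})
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	for {
		_, msg, err := conn.ReadMessage()
		if err != nil {
			// Includes the normal-closure case (close code 1000).
			log.Printf("connection closed: %v", err)
			return
		}
		var ev wsDataEvent
		if err := json.Unmarshal(msg, &ev); err != nil {
			log.Printf("non-JSON frame: %s", msg)
			continue
		}
		log.Printf("progress: rows=%d bytes=%d elapsed=%dms",
			ev.ReadRows, ev.ReadBytes, ev.ElapsedMs)
	}
}
```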


@@ -1,8 +1,6 @@
import { ApiV2Instance as axios } from 'api'; import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios'; import { AxiosError } from 'axios';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import store from 'store';
import { ErrorResponse, SuccessResponse } from 'types/api'; import { ErrorResponse, SuccessResponse } from 'types/api';
import { import {
Props, Props,
@@ -13,26 +11,7 @@ const dashboardVariablesQuery = async (
props: Props, props: Props,
): Promise<SuccessResponse<VariableResponseProps> | ErrorResponse> => { ): Promise<SuccessResponse<VariableResponseProps> | ErrorResponse> => {
try { try {
const { globalTime } = store.getState(); const response = await axios.post(`/variables/query`, props);
const { start, end } = getStartEndRangeTime({
type: 'GLOBAL_TIME',
interval: globalTime.selectedTime,
});
const timeVariables: Record<string, number> = {
start_timestamp_ms: parseInt(start, 10) * 1e3,
end_timestamp_ms: parseInt(end, 10) * 1e3,
start_timestamp_nano: parseInt(start, 10) * 1e9,
end_timestamp_nano: parseInt(end, 10) * 1e9,
start_timestamp: parseInt(start, 10),
end_timestamp: parseInt(end, 10),
};
const payload = { ...props };
payload.variables = { ...payload.variables, ...timeVariables };
const response = await axios.post(`/variables/query`, payload);
return { return {
statusCode: 200, statusCode: 200,


@@ -12,13 +12,10 @@ export const getMetricsQueryRange = async (
props: QueryRangePayload, props: QueryRangePayload,
version: string, version: string,
signal: AbortSignal, signal: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<MetricRangePayloadV3> | ErrorResponse> => { ): Promise<SuccessResponse<MetricRangePayloadV3> | ErrorResponse> => {
try { try {
if (version && version === ENTITY_VERSION_V4) { if (version && version === ENTITY_VERSION_V4) {
const response = await ApiV4Instance.post('/query_range', props, { const response = await ApiV4Instance.post('/query_range', props, { signal });
signal,
});
return { return {
statusCode: 200, statusCode: 200,
@@ -29,10 +26,7 @@ export const getMetricsQueryRange = async (
}; };
} }
const response = await ApiV3Instance.post('/query_range', props, { const response = await ApiV3Instance.post('/query_range', props, { signal });
signal,
headers,
});
return { return {
statusCode: 200, statusCode: 200,


@@ -1,20 +1,7 @@
import axios from 'api'; import axios from 'api';
import { isNil } from 'lodash-es';
interface GetTopLevelOperationsProps { const getTopLevelOperations = async (): Promise<ServiceDataProps> => {
service?: string; const response = await axios.post(`/service/top_level_operations`);
start?: number;
end?: number;
}
const getTopLevelOperations = async (
props: GetTopLevelOperationsProps,
): Promise<ServiceDataProps> => {
const response = await axios.post(`/service/top_level_operations`, {
start: !isNil(props.start) ? `${props.start}` : undefined,
end: !isNil(props.end) ? `${props.end}` : undefined,
service: props.service,
});
return response.data; return response.data;
}; };


@@ -1,7 +1,6 @@
import axios from 'api'; import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios'; import { AxiosError } from 'axios';
import { Dayjs } from 'dayjs';
import { ErrorResponse, SuccessResponse } from 'types/api'; import { ErrorResponse, SuccessResponse } from 'types/api';
import { Recurrence } from './getAllDowntimeSchedules'; import { Recurrence } from './getAllDowntimeSchedules';
@@ -12,8 +11,8 @@ export interface DowntimeSchedulePayload {
alertIds: string[]; alertIds: string[];
schedule: { schedule: {
timezone?: string; timezone?: string;
startTime?: string | Dayjs; startTime?: string;
endTime?: string | Dayjs; endTime?: string;
recurrence?: Recurrence; recurrence?: Recurrence;
}; };
} }


@@ -1,6 +1,6 @@
import axios from 'api'; import axios from 'api';
import { AxiosError, AxiosResponse } from 'axios'; import { AxiosError, AxiosResponse } from 'axios';
import { Option } from 'container/PlannedDowntime/PlannedDowntimeutils'; import { Option } from 'container/PlannedDowntime/DropdownWithSubMenu/DropdownWithSubMenu';
import { useQuery, UseQueryResult } from 'react-query'; import { useQuery, UseQueryResult } from 'react-query';
export type Recurrence = { export type Recurrence = {
@@ -28,7 +28,6 @@ export interface DowntimeSchedules {
createdBy: string | null; createdBy: string | null;
updatedAt: string | null; updatedAt: string | null;
updatedBy: string | null; updatedBy: string | null;
kind: string | null;
} }
export type PayloadProps = { data: DowntimeSchedules[] }; export type PayloadProps = { data: DowntimeSchedules[] };


@@ -1,63 +0,0 @@
import { ApiV3Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError, AxiosResponse } from 'axios';
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
import { encode } from 'js-base64';
import { createIdFromObjectFields } from 'lib/createIdFromObjectFields';
import createQueryParams from 'lib/createQueryParams';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
IGetAttributeSuggestionsPayload,
IGetAttributeSuggestionsSuccessResponse,
} from 'types/api/queryBuilder/getAttributeSuggestions';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
export const getAttributeSuggestions = async ({
searchText,
dataSource,
filters,
}: IGetAttributeSuggestionsPayload): Promise<
SuccessResponse<IGetAttributeSuggestionsSuccessResponse> | ErrorResponse
> => {
try {
let base64EncodedFiltersString;
try {
// strip the trailing '=' padding that base64 adds to reach a multiple of 4;
// the downstream qs query-string handling doesn't cope well with padded values
base64EncodedFiltersString = encode(JSON.stringify(filters)).replace(
/=+$/,
'',
);
} catch {
// default base64 encoded string for empty filters object
base64EncodedFiltersString = 'eyJpdGVtcyI6W10sIm9wIjoiQU5EIn0';
}
const response: AxiosResponse<{
data: IGetAttributeSuggestionsSuccessResponse;
}> = await ApiV3Instance.get(
`/filter_suggestions?${createQueryParams({
searchText,
dataSource,
existingFilter: base64EncodedFiltersString,
})}`,
);
const payload: BaseAutocompleteData[] =
response.data.data.attributes?.map(({ id: _, ...item }) => ({
...item,
id: createIdFromObjectFields(item, baseAutoCompleteIdKeysOrder),
})) || [];
return {
statusCode: 200,
error: null,
message: response.statusText,
payload: {
attributes: payload,
example_queries: response.data.data.example_queries,
},
};
} catch (e) {
return ErrorResponseHandler(e as AxiosError);
}
};
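The padding trick above has a natural mirror image on the decode side: base64 without trailing '=' is exactly what Go's base64.RawStdEncoding consumes, so the default filter string in the catch block round-trips cleanly. A minimal sketch, assuming (the diff does not show this) that a server-side handler would decode the existingFilter parameter along these lines:

```go
package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// Default value used above for an empty filters object; the '=' padding
	// has already been stripped, so RawStdEncoding (the padding-free variant)
	// decodes it directly where StdEncoding would reject the length.
	const existingFilter = "eyJpdGVtcyI6W10sIm9wIjoiQU5EIn0"

	raw, err := base64.RawStdEncoding.DecodeString(existingFilter)
	if err != nil {
		fmt.Println("decode error:", err)
		return
	}
	fmt.Println(string(raw)) // {"items":[],"op":"AND"}
}
```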


@@ -1,41 +0,0 @@
interface ConfigureIconProps {
width?: number;
height?: number;
fill?: string;
}
function ConfigureIcon({
width,
height,
fill,
}: ConfigureIconProps): JSX.Element {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
width={width}
height={height}
fill={fill}
>
<path
stroke="#C0C1C3"
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="1.333"
d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z"
/>
<path
stroke="#C0C1C3"
strokeLinecap="round"
strokeWidth="1.333"
d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3m5.417 7.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"
/>
</svg>
);
}
ConfigureIcon.defaultProps = {
width: 16,
height: 16,
fill: 'none',
};
export default ConfigureIcon;


@@ -1,65 +0,0 @@
interface LogsIconProps {
width?: number;
height?: number;
fill?: string;
strokeColor?: string;
strokeWidth?: number;
}
function LogsIcon({
width,
height,
fill,
strokeColor,
strokeWidth,
}: LogsIconProps): JSX.Element {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
width={width}
height={height}
fill={fill}
>
<path
stroke={strokeColor}
strokeWidth={strokeWidth}
d="M2.917 3.208v7.875"
/>
<ellipse
cx="6.417"
cy="3.208"
stroke={strokeColor}
strokeWidth={strokeWidth}
rx="3.5"
ry="1.458"
/>
<ellipse cx="6.417" cy="3.165" fill={strokeColor} rx="0.875" ry="0.365" />
<path
stroke={strokeColor}
strokeWidth={strokeWidth}
d="M9.917 11.083c0 .645-1.567 1.167-3.5 1.167s-3.5-.522-3.5-1.167"
/>
<path
stroke={strokeColor}
strokeLinecap="round"
strokeWidth={strokeWidth}
d="M5.25 6.417v1.117c0 .028.02.053.049.057l1.652.276A.058.058 0 017 7.924v1.993"
/>
<path
stroke={strokeColor}
strokeWidth={strokeWidth}
d="M9.917 3.208v3.103c0 .046.05.074.089.05L12.182 5a.058.058 0 01.088.035l.264 1.059a.058.058 0 01-.013.053l-2.59 2.877a.058.058 0 00-.014.04v2.018"
/>
</svg>
);
}
LogsIcon.defaultProps = {
width: 14,
height: 14,
fill: 'none',
strokeColor: '#C0C1C3',
strokeWidth: 1.167,
};
export default LogsIcon;


@@ -1,39 +0,0 @@
interface SeverityCriticalIconProps {
width?: number;
height?: number;
fill?: string;
stroke?: string;
}
function SeverityCriticalIcon({
width,
height,
fill,
stroke,
}: SeverityCriticalIconProps): JSX.Element {
return (
<svg
width={width}
height={height}
fill={fill}
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M.99707.666056.99707 2.99939M.99707 5.33337H.991237M3.00293.666056 3.00293 2.99939M3.00293 5.33337H2.9971M5.00879.666056V2.99939M5.00879 5.33337H5.00296"
stroke={stroke}
strokeWidth="1.16667"
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}
SeverityCriticalIcon.defaultProps = {
width: 6,
height: 6,
fill: 'none',
stroke: '#F56C87',
};
export default SeverityCriticalIcon;


@@ -1,42 +0,0 @@
interface SeverityErrorIconProps {
width?: number;
height?: number;
fill?: string;
stroke?: string;
strokeWidth?: string;
}
function SeverityErrorIcon({
width,
height,
fill,
stroke,
strokeWidth,
}: SeverityErrorIconProps): JSX.Element {
return (
<svg
width={width}
height={height}
fill={fill}
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M1.00781.957845 1.00781 2.99951M1.00781 5.04175H1.00228"
stroke={stroke}
strokeWidth={strokeWidth}
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}
SeverityErrorIcon.defaultProps = {
width: 2,
height: 6,
fill: 'none',
stroke: '#F56C87',
strokeWidth: '1.02083',
};
export default SeverityErrorIcon;


@@ -1,46 +0,0 @@
interface SeverityInfoIconProps {
width?: number;
height?: number;
fill?: string;
stroke?: string;
}
function SeverityInfoIcon({
width,
height,
fill,
stroke,
}: SeverityInfoIconProps): JSX.Element {
return (
<svg
width={width}
height={height}
fill={fill}
xmlns="http://www.w3.org/2000/svg"
>
<rect
width={width}
height={height}
rx="3.5"
fill={stroke}
fillOpacity=".2"
/>
<path
d="M7 9.33346V7.00012M7 4.66675H7.00583"
stroke={stroke}
strokeWidth="1.16667"
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}
SeverityInfoIcon.defaultProps = {
width: 14,
height: 14,
fill: 'none',
stroke: '#7190F9',
};
export default SeverityInfoIcon;


@@ -1,42 +0,0 @@
interface SeverityWarningIconProps {
width?: number;
height?: number;
fill?: string;
stroke?: string;
strokeWidth?: string;
}
function SeverityWarningIcon({
width,
height,
fill,
stroke,
strokeWidth,
}: SeverityWarningIconProps): JSX.Element {
return (
<svg
width={width}
height={height}
fill={fill}
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M1.00732.957845 1.00732 2.99951M1.00732 5.04175H1.00179"
stroke={stroke}
strokeWidth={strokeWidth}
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}
SeverityWarningIcon.defaultProps = {
width: 2,
height: 6,
fill: 'none',
stroke: '#FFD778',
strokeWidth: '0.978299',
};
export default SeverityWarningIcon;


@@ -1,27 +0,0 @@
import { Color } from '@signozhq/design-tokens';
import { useIsDarkMode } from 'hooks/useDarkMode';
function GroupByIcon(): JSX.Element {
const isDarkMode = useIsDarkMode();
return (
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg">
<g
clipPath="url(#prefix__clip0_4344_1236)"
stroke={isDarkMode ? Color.BG_VANILLA_100 : Color.BG_INK_500}
strokeWidth="1.167"
strokeLinecap="round"
strokeLinejoin="round"
>
<path d="M4.667 1.167H2.333c-.644 0-1.166.522-1.166 1.166v2.334c0 .644.522 1.166 1.166 1.166h2.334c.644 0 1.166-.522 1.166-1.166V2.333c0-.644-.522-1.166-1.166-1.166zM8.167 1.167a1.17 1.17 0 011.166 1.166v2.334a1.17 1.17 0 01-1.166 1.166M11.667 1.167a1.17 1.17 0 011.166 1.166v2.334a1.17 1.17 0 01-1.166 1.166M5.833 10.5H2.917c-.992 0-1.75-.758-1.75-1.75v-.583" />
<path d="M4.083 12.25l1.75-1.75-1.75-1.75M11.667 8.167H9.333c-.644 0-1.166.522-1.166 1.166v2.334c0 .644.522 1.166 1.166 1.166h2.334c.644 0 1.166-.522 1.166-1.166V9.333c0-.644-.522-1.166-1.166-1.166z" />
</g>
<defs>
<clipPath id="prefix__clip0_4344_1236">
<path fill="#fff" d="M0 0h14v14H0z" />
</clipPath>
</defs>
</svg>
);
}
export default GroupByIcon;


@@ -1,14 +0,0 @@
.reset-button {
display: flex;
justify-content: space-between;
align-items: center;
background: var(--bg-ink-300);
border: 1px solid var(--bg-slate-400);
}
.lightMode {
.reset-button {
background: var(--bg-vanilla-100);
border-color: var(--bg-vanilla-300);
}
}


@@ -1,11 +0,0 @@
import './Filters.styles.scss';
import DateTimeSelector from 'container/TopNav/DateTimeSelectionV2';
export function Filters(): JSX.Element {
return (
<div className="filters">
<DateTimeSelector showAutoRefresh={false} hideShareModal showResetButton />
</div>
);
}


@@ -1,137 +0,0 @@
import { Button, Modal, Typography } from 'antd';
import updateCreditCardApi from 'api/billing/checkout';
import logEvent from 'api/common/logEvent';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import { CreditCard, X } from 'lucide-react';
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useLocation } from 'react-router-dom';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';
import { License } from 'types/api/licenses/def';
export default function ChatSupportGateway(): JSX.Element {
const { notifications } = useNotifications();
const [activeLicense, setActiveLicense] = useState<License | null>(null);
const [isAddCreditCardModalOpen, setIsAddCreditCardModalOpen] = useState(
false,
);
const { data: licenseData, isFetching } = useLicense();
useEffect(() => {
const activeValidLicense =
licenseData?.payload?.licenses?.find(
(license) => license.isCurrent === true,
) || null;
setActiveLicense(activeValidLicense);
}, [licenseData, isFetching]);
const handleBillingOnSuccess = (
data: ErrorResponse | SuccessResponse<CheckoutSuccessPayloadProps, unknown>,
): void => {
if (data?.payload?.redirectURL) {
const newTab = document.createElement('a');
newTab.href = data.payload.redirectURL;
newTab.target = '_blank';
newTab.rel = 'noopener noreferrer';
newTab.click();
}
};
const handleBillingOnError = (): void => {
notifications.error({
message: SOMETHING_WENT_WRONG,
});
};
const { mutate: updateCreditCard, isLoading: isLoadingBilling } = useMutation(
updateCreditCardApi,
{
onSuccess: (data) => {
handleBillingOnSuccess(data);
},
onError: handleBillingOnError,
},
);
const { pathname } = useLocation();
const handleAddCreditCard = (): void => {
logEvent('Add Credit card modal: Clicked', {
source: `intercom icon`,
page: pathname,
});
updateCreditCard({
licenseKey: activeLicense?.key || '',
successURL: window.location.href,
cancelURL: window.location.href,
});
};
return (
<>
<div className="chat-support-gateway">
<Button
className="chat-support-gateway-btn"
onClick={(): void => {
logEvent('Disabled Chat Support: Clicked', {
source: `intercom icon`,
page: pathname,
});
setIsAddCreditCardModalOpen(true);
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 28 32"
className="chat-support-gateway-btn-icon"
>
<path d="M28 32s-4.714-1.855-8.527-3.34H3.437C1.54 28.66 0 27.026 0 25.013V3.644C0 1.633 1.54 0 3.437 0h21.125c1.898 0 3.437 1.632 3.437 3.645v18.404H28V32zm-4.139-11.982a.88.88 0 00-1.292-.105c-.03.026-3.015 2.681-8.57 2.681-5.486 0-8.517-2.636-8.571-2.684a.88.88 0 00-1.29.107 1.01 1.01 0 00-.219.708.992.992 0 00.318.664c.142.128 3.537 3.15 9.762 3.15 6.226 0 9.621-3.022 9.763-3.15a.992.992 0 00.317-.664 1.01 1.01 0 00-.218-.707z" />
</svg>
</Button>
</div>
{/* Add Credit Card Modal */}
<Modal
className="add-credit-card-modal"
title={<span className="title">Add Credit Card for Chat Support</span>}
open={isAddCreditCardModalOpen}
closable
onCancel={(): void => setIsAddCreditCardModalOpen(false)}
destroyOnClose
footer={[
<Button
key="cancel"
onClick={(): void => setIsAddCreditCardModalOpen(false)}
className="cancel-btn"
icon={<X size={16} />}
>
Cancel
</Button>,
<Button
key="submit"
type="primary"
icon={<CreditCard size={16} />}
size="middle"
loading={isLoadingBilling}
disabled={isLoadingBilling}
onClick={handleAddCreditCard}
className="add-credit-card-btn"
>
Add Credit Card
</Button>,
]}
>
<Typography.Text className="add-credit-card-text">
You&apos;re currently on <span className="highlight-text">Trial plan</span>
. Add a credit card to enable SigNoz chat support for your workspace.
</Typography.Text>
</Modal>
</>
);
}


@@ -12,20 +12,6 @@ beforeAll(() => {
matchMedia(); matchMedia();
}); });
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
jest.mock('react-dnd', () => ({ jest.mock('react-dnd', () => ({
useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]), useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]), useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),


@@ -139,7 +139,6 @@ export const getGraphOptions = (
}, },
scales: { scales: {
x: { x: {
stacked: isStacked,
grid: { grid: {
display: true, display: true,
color: getGridColor(), color: getGridColor(),
@@ -166,7 +165,6 @@ export const getGraphOptions = (
ticks: { color: getAxisLabelColor(currentTheme) }, ticks: { color: getAxisLabelColor(currentTheme) },
}, },
y: { y: {
stacked: isStacked,
display: true, display: true,
grid: { grid: {
display: true, display: true,
@@ -180,6 +178,9 @@ export const getGraphOptions = (
}, },
}, },
}, },
stacked: {
display: isStacked === undefined ? false : 'auto',
},
}, },
elements: { elements: {
line: { line: {


@@ -1,191 +0,0 @@
-import './LaunchChatSupport.styles.scss';
-
-import { Button, Modal, Tooltip, Typography } from 'antd';
-import updateCreditCardApi from 'api/billing/checkout';
-import logEvent from 'api/common/logEvent';
-import cx from 'classnames';
-import { SOMETHING_WENT_WRONG } from 'constants/api';
-import { FeatureKeys } from 'constants/features';
-import useFeatureFlags from 'hooks/useFeatureFlag';
-import useLicense from 'hooks/useLicense';
-import { useNotifications } from 'hooks/useNotifications';
-import { defaultTo } from 'lodash-es';
-import { CreditCard, HelpCircle, X } from 'lucide-react';
-import { useEffect, useState } from 'react';
-import { useMutation } from 'react-query';
-import { useLocation } from 'react-router-dom';
-import { ErrorResponse, SuccessResponse } from 'types/api';
-import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';
-import { License } from 'types/api/licenses/def';
-import { isCloudUser } from 'utils/app';
-
-export interface LaunchChatSupportProps {
-	eventName: string;
-	attributes: Record<string, unknown>;
-	message?: string;
-	buttonText?: string;
-	className?: string;
-	onHoverText?: string;
-	intercomMessageDisabled?: boolean;
-}
-
-// eslint-disable-next-line sonarjs/cognitive-complexity
-function LaunchChatSupport({
-	attributes,
-	eventName,
-	message = '',
-	buttonText = '',
-	className = '',
-	onHoverText = '',
-	intercomMessageDisabled = false,
-}: LaunchChatSupportProps): JSX.Element | null {
-	const isChatSupportEnabled = useFeatureFlags(FeatureKeys.CHAT_SUPPORT)?.active;
-	const isCloudUserVal = isCloudUser();
-	const { notifications } = useNotifications();
-	const { data: licenseData, isFetching } = useLicense();
-	const [activeLicense, setActiveLicense] = useState<License | null>(null);
-	const [isAddCreditCardModalOpen, setIsAddCreditCardModalOpen] = useState(
-		false,
-	);
-	const { pathname } = useLocation();
-
-	const isPremiumChatSupportEnabled =
-		useFeatureFlags(FeatureKeys.PREMIUM_SUPPORT)?.active || false;
-
-	const showAddCreditCardModal =
-		!isPremiumChatSupportEnabled &&
-		!licenseData?.payload?.trialConvertedToSubscription;
-
-	useEffect(() => {
-		const activeValidLicense =
-			licenseData?.payload?.licenses?.find(
-				(license) => license.isCurrent === true,
-			) || null;
-
-		setActiveLicense(activeValidLicense);
-	}, [licenseData, isFetching]);
-
-	const handleFacingIssuesClick = (): void => {
-		if (showAddCreditCardModal) {
-			logEvent('Disabled Chat Support: Clicked', {
-				source: `facing issues button`,
-				page: pathname,
-				...attributes,
-			});
-			setIsAddCreditCardModalOpen(true);
-		} else {
-			logEvent(eventName, attributes);
-
-			if (window.Intercom && !intercomMessageDisabled) {
-				window.Intercom('showNewMessage', defaultTo(message, ''));
-			}
-		}
-	};
-
-	const handleBillingOnSuccess = (
-		data: ErrorResponse | SuccessResponse<CheckoutSuccessPayloadProps, unknown>,
-	): void => {
-		if (data?.payload?.redirectURL) {
-			const newTab = document.createElement('a');
-
-			newTab.href = data.payload.redirectURL;
-			newTab.target = '_blank';
-			newTab.rel = 'noopener noreferrer';
-			newTab.click();
-		}
-	};
-
-	const handleBillingOnError = (): void => {
-		notifications.error({
-			message: SOMETHING_WENT_WRONG,
-		});
-	};
-
-	const { mutate: updateCreditCard, isLoading: isLoadingBilling } = useMutation(
-		updateCreditCardApi,
-		{
-			onSuccess: (data) => {
-				handleBillingOnSuccess(data);
-			},
-			onError: handleBillingOnError,
-		},
-	);
-
-	const handleAddCreditCard = (): void => {
-		logEvent('Add Credit card modal: Clicked', {
-			source: `facing issues button`,
-			page: pathname,
-			...attributes,
-		});
-
-		updateCreditCard({
-			licenseKey: activeLicense?.key || '',
-			successURL: window.location.href,
-			cancelURL: window.location.href,
-		});
-	};
-
-	return isCloudUserVal && isChatSupportEnabled ? ( // Note: we would need to move this condition to license based in future
-		<div className="facing-issue-button">
-			<Tooltip
-				title={onHoverText}
-				autoAdjustOverflow
-				style={{ padding: 8 }}
-				overlayClassName="tooltip-overlay"
-			>
-				<Button
-					className={cx('periscope-btn', 'facing-issue-button', className)}
-					onClick={handleFacingIssuesClick}
-					icon={<HelpCircle size={14} />}
-				>
-					{buttonText || 'Facing issues?'}
-				</Button>
-			</Tooltip>
-
-			{/* Add Credit Card Modal */}
-			<Modal
-				className="add-credit-card-modal"
-				title={<span className="title">Add Credit Card for Chat Support</span>}
-				open={isAddCreditCardModalOpen}
-				closable
-				onCancel={(): void => setIsAddCreditCardModalOpen(false)}
-				destroyOnClose
-				footer={[
-					<Button
-						key="cancel"
-						onClick={(): void => setIsAddCreditCardModalOpen(false)}
-						className="cancel-btn"
-						icon={<X size={16} />}
-					>
-						Cancel
-					</Button>,
-					<Button
-						key="submit"
-						type="primary"
-						icon={<CreditCard size={16} />}
-						size="middle"
-						loading={isLoadingBilling}
-						disabled={isLoadingBilling}
-						onClick={handleAddCreditCard}
-						className="add-credit-card-btn"
-					>
-						Add Credit Card
-					</Button>,
-				]}
-			>
-				<Typography.Text className="add-credit-card-text">
-					You&apos;re currently on <span className="highlight-text">Trial plan</span>
-					. Add a credit card to access SigNoz chat support to your workspace.
-				</Typography.Text>
-			</Modal>
-		</div>
-	) : null;
-}
-
-LaunchChatSupport.defaultProps = {
-	message: '',
-	buttonText: '',
-	className: '',
-	onHoverText: '',
-	intercomMessageDisabled: false,
-};
-
-export default LaunchChatSupport;
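For readers skimming the file deleted above: the component gated Intercom chat behind a cloud-user check plus the CHAT_SUPPORT feature flag, and diverted trial workspaces without premium support into a credit-card checkout instead of opening chat. A condensed sketch of that decision path; the type and function names below are stand-ins, not repo exports:

// Condensed decision path of the removed LaunchChatSupport component.
interface ChatGateInput {
	isCloudUser: boolean; // isCloudUser() in the deleted code
	chatSupportEnabled: boolean; // FeatureKeys.CHAT_SUPPORT flag
	premiumSupportEnabled: boolean; // FeatureKeys.PREMIUM_SUPPORT flag
	trialConvertedToSubscription: boolean; // from the license payload
}

type ChatGateResult = 'hidden' | 'open-credit-card-modal' | 'open-intercom';

export function resolveChatAction(input: ChatGateInput): ChatGateResult {
	// The button renders only for cloud users with chat support enabled.
	if (!input.isCloudUser || !input.chatSupportEnabled) return 'hidden';

	// Trial workspaces without premium support are asked for a card first.
	const needsCreditCard =
		!input.premiumSupportEnabled && !input.trialConvertedToSubscription;

	return needsCreditCard ? 'open-credit-card-modal' : 'open-intercom';
}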

View File

@@ -1,22 +1,14 @@
 import { DrawerProps } from 'antd';
 import { AddToQueryHOCProps } from 'components/Logs/AddToQueryHOC';
 import { ActionItemProps } from 'container/LogDetailedView/ActionItem';
-import { IField } from 'types/api/logs/fields';
 import { ILog } from 'types/api/logs/log';
-import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
 
 import { VIEWS } from './constants';
 
 export type LogDetailProps = {
 	log: ILog | null;
 	selectedTab: VIEWS;
-	onGroupByAttribute?: (
-		fieldKey: string,
-		isJSON?: boolean,
-		dataType?: DataTypes,
-	) => Promise<void>;
 	isListViewPanel?: boolean;
-	listViewPanelSelectedFields?: IField[] | null;
 } & Pick<AddToQueryHOCProps, 'onAddToQuery'> &
 	Partial<Pick<ActionItemProps, 'onClickActionItem'>> &
 	Pick<DrawerProps, 'onClose'>;
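The surviving type keeps the intersection-of-Picks idiom: required fields are declared inline, while callback signatures are borrowed from the components that will actually receive them, so the two can never drift apart. A self-contained sketch of the idiom with simplified stand-in types (DrawerLikeProps and QueryHocProps are not repo exports):

interface DrawerLikeProps {
	onClose?: () => void;
	placement?: 'left' | 'right';
}

interface QueryHocProps {
	onAddToQuery: (key: string, value: string, operator: string) => void;
}

// Own fields inline; borrowed callbacks via Pick, wrapped in Partial when
// the consumer may omit them.
type DetailProps = {
	log: string | null;
	isListViewPanel?: boolean;
} & Pick<QueryHocProps, 'onAddToQuery'> &
	Partial<Pick<DrawerLikeProps, 'onClose'>>;

const example: DetailProps = {
	log: null,
	onAddToQuery: (k, v, op): void => console.log(k, op, v),
};

console.log(example.log);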

View File

@@ -2,14 +2,6 @@ export const VIEW_TYPES = {
 	OVERVIEW: 'OVERVIEW',
 	JSON: 'JSON',
 	CONTEXT: 'CONTEXT',
-	INFRAMETRICS: 'INFRAMETRICS',
 } as const;
 
 export type VIEWS = typeof VIEW_TYPES[keyof typeof VIEW_TYPES];
-
-export const RESOURCE_KEYS = {
-	CLUSTER_NAME: 'k8s.cluster.name',
-	POD_NAME: 'k8s.pod.name',
-	NODE_NAME: 'k8s.node.name',
-	HOST_NAME: 'host.name',
-} as const;
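The retained pair is the usual const-object-to-union idiom, which is also why dropping the INFRAMETRICS key is safe: the derived union narrows with the object, so any stale reference fails to compile. A minimal reproduction of the pattern:

// Const object + indexed-access type: the union tracks the object's values.
const VIEW_TYPES = {
	OVERVIEW: 'OVERVIEW',
	JSON: 'JSON',
	CONTEXT: 'CONTEXT',
} as const;

type VIEWS = typeof VIEW_TYPES[keyof typeof VIEW_TYPES];
// VIEWS is 'OVERVIEW' | 'JSON' | 'CONTEXT'. Removing a key (as this diff
// does with INFRAMETRICS) narrows the union automatically.

const view: VIEWS = VIEW_TYPES.JSON; // OK
// const stale: VIEWS = 'INFRAMETRICS'; // no longer type-checks
console.log(view);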

View File

@@ -2,28 +2,17 @@
 import './LogDetails.styles.scss';
 
 import { Color, Spacing } from '@signozhq/design-tokens';
-import Convert from 'ansi-to-html';
 import { Button, Divider, Drawer, Radio, Tooltip, Typography } from 'antd';
 import { RadioChangeEvent } from 'antd/lib';
 import cx from 'classnames';
 import { LogType } from 'components/Logs/LogStateIndicator/LogStateIndicator';
-import { LOCALSTORAGE } from 'constants/localStorage';
 import ContextView from 'container/LogDetailedView/ContextView/ContextView';
-import InfraMetrics from 'container/LogDetailedView/InfraMetrics/InfraMetrics';
 import JSONView from 'container/LogDetailedView/JsonView';
 import Overview from 'container/LogDetailedView/Overview';
-import {
-	aggregateAttributesResourcesToString,
-	removeEscapeCharacters,
-	unescapeString,
-} from 'container/LogDetailedView/utils';
-import { useOptionsMenu } from 'container/OptionsMenu';
-import dompurify from 'dompurify';
-import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
+import { aggregateAttributesResourcesToString } from 'container/LogDetailedView/utils';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useNotifications } from 'hooks/useNotifications';
 import {
-	BarChart2,
 	Braces,
 	Copy,
 	Filter,
@@ -32,27 +21,21 @@ import {
 	TextSelect,
 	X,
 } from 'lucide-react';
-import { useMemo, useState } from 'react';
+import { useState } from 'react';
 import { useCopyToClipboard } from 'react-use';
 import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData';
-import { DataSource, StringOperators } from 'types/common/queryBuilder';
-import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
 
-import { RESOURCE_KEYS, VIEW_TYPES, VIEWS } from './constants';
+import { VIEW_TYPES, VIEWS } from './constants';
 import { LogDetailProps } from './LogDetail.interfaces';
 import QueryBuilderSearchWrapper from './QueryBuilderSearchWrapper';
 
-const convert = new Convert();
-
 function LogDetail({
 	log,
 	onClose,
 	onAddToQuery,
-	onGroupByAttribute,
 	onClickActionItem,
 	selectedTab,
 	isListViewPanel = false,
-	listViewPanelSelectedFields,
 }: LogDetailProps): JSX.Element {
 	const [, copyToClipboard] = useCopyToClipboard();
 	const [selectedView, setSelectedView] = useState<VIEWS>(selectedTab);
@@ -62,19 +45,6 @@ function LogDetail({
 	const [contextQuery, setContextQuery] = useState<Query | undefined>();
 	const [filters, setFilters] = useState<TagFilter | null>(null);
 	const [isEdit, setIsEdit] = useState<boolean>(false);
-	const { initialDataSource, stagedQuery } = useQueryBuilder();
-
-	const listQuery = useMemo(() => {
-		if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
-		return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
-	}, [stagedQuery]);
-
-	const { options } = useOptionsMenu({
-		storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
-		dataSource: initialDataSource || DataSource.LOGS,
-		aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
-	});
 
 	const isDarkMode = useIsDarkMode();
@@ -101,17 +71,6 @@ function LogDetail({
 		}
 	};
 
-	const htmlBody = useMemo(
-		() => ({
-			__html: convert.toHtml(
-				dompurify.sanitize(unescapeString(log?.body || ''), {
-					FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS],
-				}),
-			),
-		}),
-		[log?.body],
-	);
-
 	const handleJSONCopy = (): void => {
 		copyToClipboard(LogJsonData);
 		notifications.success({
@@ -149,8 +108,8 @@ function LogDetail({
 		>
 			<div className="log-detail-drawer__log">
 				<Divider type="vertical" className={cx('log-type-indicator', logType)} />
-				<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
-					<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
+				<Tooltip title={log?.body} placement="left">
+					<Typography.Text className="log-body">{log?.body}</Typography.Text>
 				</Tooltip>
 
 				<div className="log-overflow-shadow">&nbsp;</div>
@@ -194,17 +153,6 @@ function LogDetail({
 						Context
 					</div>
 				</Radio.Button>
-				<Radio.Button
-					className={
-						selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
-					}
-					value={VIEW_TYPES.INFRAMETRICS}
-				>
-					<div className="view-title">
-						<BarChart2 size={14} />
-						Metrics
-					</div>
-				</Radio.Button>
 			</Radio.Group>
 
 			{selectedView === VIEW_TYPES.JSON && (
@@ -243,10 +191,7 @@ function LogDetail({
 					logData={log}
 					onAddToQuery={onAddToQuery}
 					onClickActionItem={onClickActionItem}
-					onGroupByAttribute={onGroupByAttribute}
 					isListViewPanel={isListViewPanel}
-					selectedOptions={options}
-					listViewPanelSelectedFields={listViewPanelSelectedFields}
 				/>
 			)}
 			{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
@@ -259,15 +204,6 @@ function LogDetail({
 					isEdit={isEdit}
 				/>
 			)}
-			{selectedView === VIEW_TYPES.INFRAMETRICS && (
-				<InfraMetrics
-					clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
-					podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
-					nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
-					hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
-					logLineTimestamp={log.timestamp.toString()}
-				/>
-			)}
 		</Drawer>
 	);
 }
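The removed htmlBody memo was a two-stage rendering pipeline: DOMPurify strips disallowed markup from the raw log body, then ansi-to-html rewrites ANSI escape codes as inline-styled spans for dangerouslySetInnerHTML. A sketch of that pipeline; FORBID_TAGS below is a stand-in for the repo's FORBID_DOM_PURIFY_TAGS constant:

import Convert from 'ansi-to-html';
import dompurify from 'dompurify';

const convert = new Convert();

// Stand-in for the repo's FORBID_DOM_PURIFY_TAGS list.
const FORBID_TAGS = ['img', 'form', 'input'];

// Sanitize first, then colourise: DOMPurify removes dangerous tags and
// attributes, ansi-to-html turns escape sequences into <span> styling.
export function toSafeHtmlBody(rawBody: string): { __html: string } {
	return {
		__html: convert.toHtml(dompurify.sanitize(rawBody, { FORBID_TAGS })),
	};
}

// Usage with React:
// <div className="log-body" dangerouslySetInnerHTML={toSafeHtmlBody(log.body)} />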

View File

@@ -1,16 +1,3 @@
 .addToQueryContainer {
 	cursor: pointer;
-	display: flex;
-	align-items: center;
-
-	&.small {
-		line-height: 16px;
-	}
-	&.medium {
-		line-height: 20px;
-	}
-	&.large {
-		line-height: 24px;
-	}
 }

View File

@@ -1,21 +1,18 @@
 import './AddToQueryHOC.styles.scss';
 
 import { Popover } from 'antd';
-import cx from 'classnames';
 import { OPERATORS } from 'constants/queryBuilder';
-import { FontSize } from 'container/OptionsMenu/types';
 import { memo, MouseEvent, ReactNode, useMemo } from 'react';
 
 function AddToQueryHOC({
 	fieldKey,
 	fieldValue,
 	onAddToQuery,
-	fontSize,
 	children,
 }: AddToQueryHOCProps): JSX.Element {
 	const handleQueryAdd = (event: MouseEvent<HTMLDivElement>): void => {
 		event.stopPropagation();
-		onAddToQuery(fieldKey, fieldValue, OPERATORS['=']);
+		onAddToQuery(fieldKey, fieldValue, OPERATORS.IN);
 	};
 
 	const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
@@ -24,7 +21,7 @@ function AddToQueryHOC({
 	return (
 		// eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions
-		<div className={cx('addToQueryContainer', fontSize)} onClick={handleQueryAdd}>
+		<div className="addToQueryContainer" onClick={handleQueryAdd}>
 			<Popover placement="top" content={popOverContent}>
 				{children}
 			</Popover>
@@ -36,7 +33,6 @@ export interface AddToQueryHOCProps {
 	fieldKey: string;
 	fieldValue: string;
 	onAddToQuery: (fieldKey: string, fieldValue: string, operator: string) => void;
-	fontSize: FontSize;
 	children: ReactNode;
 }
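One behavioural change in this file beyond the fontSize cleanup: the operator forwarded to onAddToQuery moves from OPERATORS['='] on the removed side to OPERATORS.IN on the kept side, so the same click builds a different filter expression. A sketch of the callback contract, with OPERATORS reduced to the two members in play and makeClickHandler as a hypothetical wrapper:

// Reduced OPERATORS map; the repo's constants/queryBuilder defines more.
const OPERATORS = { '=': '=', IN: 'IN' } as const;

type OnAddToQuery = (
	fieldKey: string,
	fieldValue: string,
	operator: string,
) => void;

// The HOC only forwards the triple; the operator it hard-codes decides
// whether a click produces `key = value` or `key IN value`.
function makeClickHandler(onAddToQuery: OnAddToQuery) {
	return (fieldKey: string, fieldValue: string): void =>
		onAddToQuery(fieldKey, fieldValue, OPERATORS.IN);
}

const handler = makeClickHandler((k, v, op) => console.log(`${k} ${op} ${v}`));
handler('service.name', 'frontend'); // → service.name IN frontend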

View File

@@ -4,7 +4,6 @@ import { ReactNode, useCallback, useEffect } from 'react';
 import { useCopyToClipboard } from 'react-use';
 
 function CopyClipboardHOC({
-	entityKey,
 	textToCopy,
 	children,
 }: CopyClipboardHOCProps): JSX.Element {
@@ -12,15 +11,11 @@ function CopyClipboardHOC({
 	const { notifications } = useNotifications();
 
 	useEffect(() => {
 		if (value.value) {
-			const key = entityKey || '';
-			const notificationMessage = `${key} copied to clipboard`;
-
 			notifications.success({
-				message: notificationMessage,
+				message: 'Copied to clipboard',
 			});
 		}
-	}, [value, notifications, entityKey]);
+	}, [value, notifications]);
 
 	const onClick = useCallback((): void => {
 		setCopy(textToCopy);
@@ -39,7 +34,6 @@ function CopyClipboardHOC({
 }
 
 interface CopyClipboardHOCProps {
-	entityKey: string | undefined;
 	textToCopy: string;
 	children: ReactNode;
 }
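The shape that survives here is a common react-use pattern: useCopyToClipboard exposes the result of the last copy as state, and an effect fires the success toast whenever that state changes. A standalone sketch of the same shape, with the antd notification reduced to a console.log:

import { ReactNode, useCallback, useEffect } from 'react';
import { useCopyToClipboard } from 'react-use';

// Copy-on-click wrapper: state.value is set only after a successful copy,
// so the effect doubles as the success callback.
function CopyOnClick({
	textToCopy,
	children,
}: {
	textToCopy: string;
	children: ReactNode;
}): JSX.Element {
	const [state, copyToClipboard] = useCopyToClipboard();

	useEffect(() => {
		if (state.value) console.log('Copied to clipboard');
	}, [state]);

	const onClick = useCallback((): void => copyToClipboard(textToCopy), [
		copyToClipboard,
		textToCopy,
	]);

	return (
		// eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions
		<div onClick={onClick}>{children}</div>
	);
}

export default CopyOnClick;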

View File

@@ -6,21 +6,6 @@
 	font-weight: 400;
 	line-height: 18px; /* 128.571% */
 	letter-spacing: -0.07px;
-
-	&.small {
-		font-size: 11px;
-		line-height: 16px;
-	}
-	&.medium {
-		font-size: 13px;
-		line-height: 20px;
-	}
-	&.large {
-		font-size: 14px;
-		line-height: 24px;
-	}
 }
 
 .log-value {
 	color: var(--text-vanilla-400, #c0c1c3);
@@ -29,21 +14,6 @@
 	font-weight: 400;
 	line-height: 18px; /* 128.571% */
 	letter-spacing: -0.07px;
-
-	&.small {
-		font-size: 11px;
-		line-height: 16px;
-	}
-	&.medium {
-		font-size: 13px;
-		line-height: 20px;
-	}
-	&.large {
-		font-size: 14px;
-		line-height: 24px;
-	}
 }
 
 .log-line {
 	display: flex;
@@ -70,20 +40,6 @@
 	font-weight: 400;
 	line-height: 18px; /* 128.571% */
 	letter-spacing: -0.07px;
-
-	&.small {
-		font-size: 11px;
-		line-height: 16px;
-	}
-	&.medium {
-		font-size: 13px;
-		line-height: 20px;
-	}
-	&.large {
-		font-size: 14px;
-		line-height: 24px;
-	}
 }
 
 .selected-log-value {
@@ -96,37 +52,12 @@
 		line-height: 18px;
 		letter-spacing: -0.07px;
 		font-size: 14px;
-
-		&.small {
-			font-size: 11px;
-			line-height: 16px;
-		}
-		&.medium {
-			font-size: 13px;
-			line-height: 20px;
-		}
-		&.large {
-			font-size: 14px;
-			line-height: 24px;
-		}
 	}
 
 	.selected-log-kv {
 		min-height: 24px;
 		display: flex;
 		align-items: center;
-
-		&.small {
-			min-height: 16px;
-		}
-		&.medium {
-			min-height: 20px;
-		}
-		&.large {
-			min-height: 24px;
-		}
 	}
 }

View File

@@ -3,11 +3,8 @@ import './ListLogView.styles.scss';
 import { blue } from '@ant-design/colors';
 import Convert from 'ansi-to-html';
 import { Typography } from 'antd';
-import cx from 'classnames';
 import LogDetail from 'components/LogDetail';
 import { VIEW_TYPES } from 'components/LogDetail/constants';
-import { unescapeString } from 'container/LogDetailedView/utils';
-import { FontSize } from 'container/OptionsMenu/types';
 import dayjs from 'dayjs';
 import dompurify from 'dompurify';
 import { useActiveLog } from 'hooks/logs/useActiveLog';
@@ -42,7 +39,6 @@ interface LogFieldProps {
 	fieldKey: string;
 	fieldValue: string;
 	linesPerRow?: number;
-	fontSize: FontSize;
 }
 
 type LogSelectedFieldProps = Omit<LogFieldProps, 'linesPerRow'> &
@@ -52,12 +48,11 @@ function LogGeneralField({
 	fieldKey,
 	fieldValue,
 	linesPerRow = 1,
-	fontSize,
 }: LogFieldProps): JSX.Element {
 	const html = useMemo(
 		() => ({
 			__html: convert.toHtml(
-				dompurify.sanitize(unescapeString(fieldValue), {
+				dompurify.sanitize(fieldValue, {
 					FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS],
 				}),
 			),
@@ -67,12 +62,12 @@ function LogGeneralField({
 	return (
 		<TextContainer>
-			<Text ellipsis type="secondary" className={cx('log-field-key', fontSize)}>
+			<Text ellipsis type="secondary" className="log-field-key">
 				{`${fieldKey} : `}
 			</Text>
 			<LogText
 				dangerouslySetInnerHTML={html}
-				className={cx('log-value', fontSize)}
+				className="log-value"
 				linesPerRow={linesPerRow > 1 ? linesPerRow : undefined}
 			/>
 		</TextContainer>
@@ -83,7 +78,6 @@ function LogSelectedField({
 	fieldKey = '',
 	fieldValue = '',
 	onAddToQuery,
-	fontSize,
 }: LogSelectedFieldProps): JSX.Element {
 	return (
 		<div className="log-selected-fields">
@@ -91,22 +85,16 @@ function LogSelectedField({
 				fieldKey={fieldKey}
 				fieldValue={fieldValue}
 				onAddToQuery={onAddToQuery}
-				fontSize={fontSize}
 			>
 				<Typography.Text>
-					<span
-						style={{ color: blue[4] }}
-						className={cx('selected-log-field-key', fontSize)}
-					>
+					<span style={{ color: blue[4] }} className="selected-log-field-key">
 						{fieldKey}
 					</span>
 				</Typography.Text>
 			</AddToQueryHOC>
-			<Typography.Text ellipsis className={cx('selected-log-kv', fontSize)}>
-				<span className={cx('selected-log-field-key', fontSize)}>{': '}</span>
-				<span className={cx('selected-log-value', fontSize)}>
-					{fieldValue || "''"}
-				</span>
+			<Typography.Text ellipsis className="selected-log-kv">
+				<span className="selected-log-field-key">{': '}</span>
+				<span className="selected-log-value">{fieldValue || "''"}</span>
 			</Typography.Text>
 		</div>
 	);
@@ -119,7 +107,6 @@ type ListLogViewProps = {
 	onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
 	activeLog?: ILog | null;
 	linesPerRow: number;
-	fontSize: FontSize;
 };
 
 function ListLogView({
@@ -129,7 +116,6 @@ function ListLogView({
 	onAddToQuery,
 	activeLog,
 	linesPerRow,
-	fontSize,
 }: ListLogViewProps): JSX.Element {
 	const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
@@ -142,7 +128,6 @@ function ListLogView({
 		onAddToQuery: handleAddToQuery,
 		onSetActiveLog: handleSetActiveContextLog,
 		onClearActiveLog: handleClearActiveContextLog,
-		onGroupByAttribute,
 	} = useActiveLog();
 
 	const isDarkMode = useIsDarkMode();
@@ -200,7 +185,6 @@ function ListLogView({
 			onMouseEnter={handleMouseEnter}
 			onMouseLeave={handleMouseLeave}
 			onClick={handleDetailedView}
-			fontSize={fontSize}
 		>
 			<div className="log-line">
 				<LogStateIndicator
@@ -208,28 +192,18 @@ function ListLogView({
 					isActive={
 						activeLog?.id === logData.id || activeContextLog?.id === logData.id
 					}
-					fontSize={fontSize}
 				/>
 				<div>
-					<LogContainer fontSize={fontSize}>
+					<LogContainer>
 						<LogGeneralField
 							fieldKey="Log"
 							fieldValue={flattenLogData.body}
 							linesPerRow={linesPerRow}
-							fontSize={fontSize}
 						/>
 						{flattenLogData.stream && (
-							<LogGeneralField
-								fieldKey="Stream"
-								fieldValue={flattenLogData.stream}
-								fontSize={fontSize}
-							/>
+							<LogGeneralField fieldKey="Stream" fieldValue={flattenLogData.stream} />
 						)}
-						<LogGeneralField
-							fieldKey="Timestamp"
-							fieldValue={timestampValue}
-							fontSize={fontSize}
-						/>
+						<LogGeneralField fieldKey="Timestamp" fieldValue={timestampValue} />
 						{updatedSelecedFields.map((field) =>
 							isValidLogField(flattenLogData[field.name] as never) ? (
@@ -238,7 +212,6 @@ function ListLogView({
 								fieldKey={field.name}
 								fieldValue={flattenLogData[field.name] as never}
 								onAddToQuery={onAddToQuery}
-								fontSize={fontSize}
 							/>
 						) : null,
 					)}
@@ -259,7 +232,6 @@ function ListLogView({
 					onAddToQuery={handleAddToQuery}
 					selectedTab={VIEW_TYPES.CONTEXT}
 					onClose={handlerClearActiveContextLog}
-					onGroupByAttribute={onGroupByAttribute}
 				/>
 			)}
 		</>
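The removed fontSize plumbing follows one pattern throughout this file: a single value is drilled from the list into every leaf renderer and appended as a CSS modifier class via cx, pairing with the .small/.medium/.large blocks deleted from the SCSS above. A minimal sketch; the FontSize enum is assumed to mirror container/OptionsMenu/types, and FieldKey/Row are illustrative names:

import cx from 'classnames';

// Assumed to mirror the repo's container/OptionsMenu/types enum.
enum FontSize {
	SMALL = 'small',
	MEDIUM = 'medium',
	LARGE = 'large',
}

// Leaf renderer: the enum value lands in the class list, where SCSS
// blocks like &.small { ... } pick it up.
function FieldKey({
	name,
	fontSize,
}: {
	name: string;
	fontSize: FontSize;
}): JSX.Element {
	return <span className={cx('log-field-key', fontSize)}>{`${name} : `}</span>;
}

export function Row({ fontSize }: { fontSize: FontSize }): JSX.Element {
	return <FieldKey name="Timestamp" fontSize={fontSize} />;
}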

View File

@@ -1,46 +1,21 @@
-/* eslint-disable no-nested-ternary */
 import { Color } from '@signozhq/design-tokens';
 import { Card, Typography } from 'antd';
-import { FontSize } from 'container/OptionsMenu/types';
 import styled from 'styled-components';
 
 interface LogTextProps {
 	linesPerRow?: number;
 }
 
-interface LogContainerProps {
-	fontSize: FontSize;
-}
-
 export const Container = styled(Card)<{
 	$isActiveLog: boolean;
 	$isDarkMode: boolean;
-	fontSize: FontSize;
 }>`
 	width: 100% !important;
 	margin-bottom: 0.3rem;
-	${({ fontSize }): string =>
-		fontSize === FontSize.SMALL
-			? `margin-bottom:0.1rem;`
-			: fontSize === FontSize.MEDIUM
-			? `margin-bottom: 0.2rem;`
-			: fontSize === FontSize.LARGE
-			? `margin-bottom:0.3rem;`
-			: ``}
 	cursor: pointer;
 	.ant-card-body {
 		padding: 0.3rem 0.6rem;
-		${({ fontSize }): string =>
-			fontSize === FontSize.SMALL
-				? `padding:0.1rem 0.6rem;`
-				: fontSize === FontSize.MEDIUM
-				? `padding: 0.2rem 0.6rem;`
-				: fontSize === FontSize.LARGE
-				? `padding:0.3rem 0.6rem;`
-				: ``}
 		${({ $isActiveLog, $isDarkMode }): string =>
 			$isActiveLog
 				? `background-color: ${
@@ -63,17 +38,11 @@ export const TextContainer = styled.div`
 	width: 100%;
 `;
 
-export const LogContainer = styled.div<LogContainerProps>`
+export const LogContainer = styled.div`
 	margin-left: 0.5rem;
 	display: flex;
 	flex-direction: column;
 	gap: 6px;
-	${({ fontSize }): string =>
-		fontSize === FontSize.SMALL
-			? `gap: 2px;`
-			: fontSize === FontSize.MEDIUM
-			? ` gap:4px;`
-			: `gap:6px;`}
 `;
 
 export const LogText = styled.div<LogTextProps>`
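The deleted interpolations are the styled-components prop-function pattern: the component's typed props reach the template literal, and the function returns a CSS fragment on every render. A compact sketch of that pattern, including the nested ternary that the removed eslint-disable existed for; SizedList is a stand-in name:

/* eslint-disable no-nested-ternary */
import styled from 'styled-components';

enum FontSize {
	SMALL = 'small',
	MEDIUM = 'medium',
	LARGE = 'large',
}

// Prop-driven CSS: the interpolation function runs with the component's
// props and splices its returned string into the generated stylesheet.
export const SizedList = styled.div<{ fontSize: FontSize }>`
	display: flex;
	flex-direction: column;
	${({ fontSize }): string =>
		fontSize === FontSize.SMALL
			? 'gap: 2px;'
			: fontSize === FontSize.MEDIUM
			? 'gap: 4px;'
			: 'gap: 6px;'}
`;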

View File

@@ -9,34 +9,26 @@
 	border-radius: 50px;
 	background-color: transparent;
 
-	&.small {
-		min-height: 16px;
-	}
-	&.medium {
-		min-height: 20px;
-	}
-	&.large {
-		min-height: 24px;
-	}
-
 	&.INFO {
-		background-color: var(--bg-robin-500);
+		background-color: var(--bg-slate-400);
 	}
 
-	&.WARNING,
-	&.WARN {
+	&.WARNING, &.WARN {
 		background-color: var(--bg-amber-500);
 	}
 
 	&.ERROR {
 		background-color: var(--bg-cherry-500);
 	}
 
 	&.TRACE {
-		background-color: var(--bg-forest-400);
+		background-color: var(--bg-robin-300);
 	}
 
 	&.DEBUG {
-		background-color: var(--bg-aqua-500);
+		background-color: var(--bg-forest-500);
 	}
 
 	&.FATAL {
 		background-color: var(--bg-sakura-500);
 	}
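The colour swap in this hunk is easier to audit as data. Expressed as maps from log level to design token (map names hypothetical, token values copied from the hunk), the removed and kept sides differ only at INFO, TRACE, and DEBUG:

type LogLevel = 'INFO' | 'WARN' | 'ERROR' | 'TRACE' | 'DEBUG' | 'FATAL';

const REMOVED: Record<LogLevel, string> = {
	INFO: 'var(--bg-robin-500)',
	WARN: 'var(--bg-amber-500)',
	ERROR: 'var(--bg-cherry-500)',
	TRACE: 'var(--bg-forest-400)',
	DEBUG: 'var(--bg-aqua-500)',
	FATAL: 'var(--bg-sakura-500)',
};

const KEPT: Record<LogLevel, string> = {
	...REMOVED,
	INFO: 'var(--bg-slate-400)',
	TRACE: 'var(--bg-robin-300)',
	DEBUG: 'var(--bg-forest-500)',
};

const changed = (Object.keys(REMOVED) as LogLevel[]).filter(
	(level) => REMOVED[level] !== KEPT[level],
);
console.log(changed); // → ['INFO', 'TRACE', 'DEBUG']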

Some files were not shown because too many files have changed in this diff.