Compare commits

...

83 Commits

Author SHA1 Message Date
Ankit Nayan
8fde9008b2 chore: different ticker interval for active user
(cherry picked from commit 215ea8d819)
2023-01-08 21:11:31 +00:00
Prashant Shahi
b35bdf01cc chore: 📌 pin versions: SigNoz 0.13.1 2023-01-07 18:22:02 +05:30
Ankit Nayan
9b654143bb chore: update latest loggedin user 2023-01-07 02:54:27 +05:30
Ankit Nayan
4841f150f4 fix: minor changes 2023-01-07 02:31:54 +05:30
Ankit Nayan
16a49a8b04 fix: minor changes 2023-01-07 02:21:44 +05:30
Ankit Nayan
1fd819b806 fix: added ratelimit to specific event 2023-01-07 00:16:57 +05:30
Ankit Nayan
cab9e04cdd fix: concurrent writes to map 2023-01-06 16:10:13 +05:30
Ankit Nayan
e8f341b850 Revert "feat: antdv5 is updated (#1880)" (#1991)
This reverts commit 7b86022280.
2023-01-06 13:40:31 +05:30
Ankit Nayan
1f6fcb9b8c Revert "feat: react is updated to v18 (#1948)" (#1990)
This reverts commit 1c7202b5bf.
2023-01-06 13:32:27 +05:30
Palash Gupta
1c7202b5bf feat: react is updated to v18 (#1948)
* feat: v5 is in progress

* feat: antdv5 is updated

* fix: build is fixed

* fix: default config is over written by custom one

* chore: onchange handler is updated

* chore: overflow is hidden in the layout

* feat: react is updated from v17 to v18

* feat: antdv5 is updated (#1880)

* feat: v5 is in progress

* feat: antdv5 is updated

* fix: build is fixed

* fix: default config is over written by custom one

* chore: onchange handler is updated

* chore: overflow is hidden in the layout

* Update index.tsx

* fix: import is fixed

* chore: un used import is fixed

* fix: dark mode is updated in service map

* fix: config dropdown is updated

* fix: logs types is updated

* fix: copy clipboard notification is updated

Co-authored-by: Pranay Prateek <pranay@signoz.io>

* chore: all channel is updated move from usefetch to usequery

* fix: typescript is fixed

Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2023-01-04 22:58:05 +05:30
Ankit Nayan
24ac062bf5 Fix/analytics (#1987)
* fix: added server code to ee
2023-01-04 22:48:38 +05:30
Axay Sagathiya
b776bf5b09 Add Docs to install SQLite3 (#1924)
* add commands to install sqlite3 in Makefile.

* Add code to check if it's running on Linux System.

* Revert "Add code to check if its running on Linux"

This reverts commit 552cfb08c9.

* Revert "add commands to install sqlite3 in Makefi"

This reverts commit 781c23d12d.

* Add Documentation to install SQLite3.

Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
Co-authored-by: Prashant Shahi <prashant@signoz.io>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2023-01-04 22:02:48 +05:30
Yash Joshi
144076e029 fix: disable button unless org name is different (#1984) 2023-01-04 18:20:02 +05:30
Vishal Sharma
835251b342 fix: use rpc method and responseStatusCode (#1971)
Co-authored-by: Palash Gupta <palashgdev@gmail.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2023-01-04 16:15:08 +05:30
Prashant Shahi
ebbad5812f ci: 👷 fix testing and staging deployments (#1980)
Signed-off-by: Prashant Shahi <prashant@signoz.io>

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2023-01-04 14:33:52 +05:30
Palash Gupta
7b86022280 feat: antdv5 is updated (#1880)
* feat: v5 is in progress

* feat: antdv5 is updated

* fix: build is fixed

* fix: default config is over written by custom one

* chore: onchange handler is updated

* chore: overflow is hidden in the layout

* Update index.tsx

* fix: import is fixed

* chore: un used import is fixed

* fix: dark mode is updated in service map

* fix: config dropdown is updated

* fix: logs types is updated

* fix: copy clipboard notification is updated

Co-authored-by: Pranay Prateek <pranay@signoz.io>
2023-01-04 12:48:12 +05:30
Prashant Shahi
da1fd4b0cd ci(deployments): workflows for staging and testing deployments and related changes (#1933)
* chore(Makefile): remove no-cache from all docker build commands
* chore(Makefile): 🔧 update target name
* feat(docker-standalone): introduce tag environment variables for easy custom deployments
* ci(deployments): 👷 workflows for staging and testing deployments
* ci(deployments): 👷 pass DEV_BUILD env to remote host
2023-01-03 22:28:48 +05:30
Prashant Shahi
57d28be9f5 fix: 🐛 resolve redundant metrics issue (#1946)
Signed-off-by: Prashant Shahi <prashant@signoz.io>

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2023-01-03 10:55:58 +05:30
Palash Gupta
126c9238ba feat: loading is added in the button (#1927)
* feat: loading is added in the button

* chore: disable condition is updated

Co-authored-by: Pranay Prateek <pranay@signoz.io>
2023-01-02 12:08:35 +05:30
Pranay Prateek
31a3bc09c8 Removing Beta tag from Logs (#1952) 2022-12-31 11:11:48 +05:30
Vishal Sharma
6ba5c0ecad fix: apply filters on count of exceptions (#1945) 2022-12-30 16:46:13 +05:30
Palash Gupta
27cd514fa5 fix: Logs double api is called (#1947) 2022-12-30 13:59:02 +05:30
Yash Joshi
f0e13784e5 fix(sidebar): highlight active feature in nested route (#1929)
Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Palash Gupta <palashgdev@gmail.com>
2022-12-30 01:10:02 +05:30
Yash Joshi
742ceac32c fix(logs): prevent duplicate logs dispatch (#1934)
* fix(logs): prevent duplicate logs dispatch

* refactor: use useMountedstate hook

Co-authored-by: Palash Gupta <palashgdev@gmail.com>
Co-authored-by: Pranay Prateek <pranay@signoz.io>
2022-12-30 00:51:53 +05:30
Ankit Nayan
545d46c39c Merge pull request #1943 from SigNoz/release/v0.13.0
Release/v0.13.0
2022-12-29 17:32:15 +05:30
Prashant Shahi
d134e4f4d9 chore: 📌 pin versions: SigNoz 0.13.0, SigNoz OtelCollector 0.66.1
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2022-12-29 14:27:24 +05:30
Ankit Nayan
e03b0aa45f chore/analytics (#1939)
* fix: not capturing empty filters

* feat: removing signoz_ metrics using grep

* fix: initialise companyDomain

* feat: added ttl status
2022-12-29 01:14:57 +05:30
Vishal Sharma
46e131698e fix: exception filter clear (#1936) 2022-12-28 17:48:39 +05:30
Ankit Nayan
d1ee15c372 fix: nil pointer 2022-12-28 15:30:24 +05:30
Ankit Nayan
1e035be978 Merge branch 'develop' into chore/analytics 2022-12-28 15:26:59 +05:30
Vishal Sharma
88a97fc4b8 add exception page filters support (#1919)
* feat: backend changes for supporting exception filters

* feat: frontend changes for exception page filter support

* chore: extractSingleFilterValue is updated

* fix: handle frontend edge case

Co-authored-by: Ankit Nayan <ankit@signoz.io>
Co-authored-by: Palash Gupta <palashgdev@gmail.com>
2022-12-28 14:54:15 +05:30
Nityananda Gohain
2e58f6db7a fix: error handling for index removal from selected field (#1935)
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-28 14:31:57 +05:30
Amol Umbark
1916fc87b0 fix: added clear filters button (#1920)
* fix: added clear filters button

* fix: removed console log


Co-authored-by: mindhash <mindhash@mindhashs-MacBook-Pro.local>
Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-28 14:30:37 +05:30
Ankit Nayan
d8882acdd7 fix: changed or to and 2022-12-28 02:34:07 +05:30
Ankit Nayan
7f42b39684 fix: changed or to and 2022-12-28 02:33:21 +05:30
Ankit Nayan
b11f79b4c7 Chore/analytics (#1922)
* fix: reduced rate limit to 2 of each events in 1 min

* feat: added new event for length of filters in logs search page

* feat: added distributed cluster info

* fix: length of filters in logs

* feat: dashboard metadata with no rateLimit

* feat: active user

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2022-12-28 02:16:46 +05:30
Ankit Nayan
c717e39a1a Merge branch 'chore/analytics' of https://github.com/SigNoz/signoz into chore/analytics 2022-12-28 02:10:36 +05:30
Ankit Nayan
c3253687d0 feat: active user 2022-12-28 02:09:44 +05:30
Yash Joshi
895c721b37 fix(version): use link instead of click handler (#1931)
Co-authored-by: Palash Gupta <palashgdev@gmail.com>
2022-12-27 23:13:13 +05:30
Vishal Sharma
35f5fb6957 fix: respect durationSort feature flag on getSpanFilters API (#1900)
* fix: respect durationSort feature flag on getSpanFilters API

* chore: update DB query
2022-12-27 21:09:36 +05:30
Palash Gupta
40ec4517c2 fix: per page is added in the dependancy (#1926) 2022-12-27 19:01:56 +05:30
Srikanth Chekuri
48a6f536fa chore: increase dimensions_cache_size for signozspanmetrics processor (#1925) 2022-12-27 15:44:39 +05:30
Palash Gupta
13a6d7f7c6 fix: live tail time out is updated (#1899)
* fix: live tail time out is updated
* Update livetail.ts

Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-27 13:36:37 +05:30
Srikanth Chekuri
8b6ed0f951 Merge branch 'develop' into chore/analytics 2022-12-27 12:21:51 +05:30
Srikanth Chekuri
eef48c54f8 fix(query_range): invalid memory address or nil pointer dereference (#1875)
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-27 11:28:15 +05:30
Ankit Nayan
aad962d07d feat: dashboard metadata with no rateLimit 2022-12-27 01:10:01 +05:30
Ankit Nayan
18bbb3cf36 fix: length of filters in logs 2022-12-26 23:10:55 +05:30
Ankit Nayan
a3455fb553 feat: added distributed cluster info 2022-12-26 23:01:54 +05:30
Ankit Nayan
ece2988d0d feat: added new event for length of filters in logs search page 2022-12-26 22:11:23 +05:30
Ankit Nayan
db704b212d fix: reduced rate limit to 2 of each events in 1 min 2022-12-26 21:52:54 +05:30
Amol Umbark
4b13b0a8a4 fix: resolves issue related ops not flowing from search box to panel (#1918) 2022-12-26 20:31:50 +05:30
Palash Gupta
6f6499c267 fix: flush logs before starting (#1912)
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-26 17:25:55 +05:30
Prashant Shahi
3dcb44a758 fix docker-compose for swarm and related changes for distributed clickhouse (#1863)
* chore: 🔧 fix docker-compose.yaml for swarm

Signed-off-by: Prashant Shahi <prashant@signoz.io>

* chore: 🔧 add .gitkeep files for docker and swarm

Signed-off-by: Prashant Shahi <prashant@signoz.io>

Signed-off-by: Prashant Shahi <prashant@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-26 17:16:47 +05:30
Palash Gupta
0595cdc7af fix: scroll is added (#1873)
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-26 17:14:54 +05:30
Palash Gupta
092c02762f feat: add no found with no events are present (#1874)
* chore: not found component is updated
* feat: no events handling is updated

Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-26 17:14:17 +05:30
Palash Gupta
d1d2829d2b fix: logs issues (#1889)
* changed debounce interval to 600ms

Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-26 16:45:28 +05:30
Palash Gupta
ac446294e7 fix: logs selection of filter is fixed (#1910)
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-26 16:20:34 +05:30
Marius Kimmina
1cceab4d5e fix(FE): remove unnecessary complexity from password check (#1904)
Signed-off-by: Marius Kimmina <mar.kimmina@gmail.com>
2022-12-26 16:02:18 +05:30
Ankit Nayan
02898d14f9 fix: removes password validations other than length (#1909) 2022-12-26 15:42:08 +05:30
Nityananda Gohain
09af6c262c fix: proxy_read_timeout updated in nginx conf (#1885)
* fix: proxy_read_timeout updated in nginx conf
* fix: live tail endpoint-flush the headers first

Co-authored-by: Prashant Shahi <prashant@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-26 15:29:49 +05:30
Amol Umbark
faeaeb61a0 fix: added validations on query builder (#1906)
Co-authored-by: mindhash <mindhash@mindhashs-MacBook-Pro.local>
Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-26 15:10:01 +05:30
Nityananda Gohain
9c80ba6b78 fix: allow multiple spaces between a filter expression (#1897)
* fix: allow multiple spaces between a filter expression

* fix: regex updated to respect spaces between a search string


Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2022-12-26 15:08:43 +05:30
Palash Gupta
dbba8b5b55 feat: event time is updated when root span is missing 2022-12-22 17:35:20 +05:30
Pranay Prateek
58ce838023 chore: Updating stale edition message (#1896) 2022-12-22 11:44:28 +05:30
Srikanth Chekuri
5260b152f5 fix: do not show result of sub queries in external calls (#1858) 2022-12-20 19:54:27 +05:30
Ankit Nayan
f2dd254d83 Merge pull request #1849 from SigNoz/release/v0.12.0
Release/v0.12.0
2022-12-11 00:14:59 +05:30
Prashant Shahi
82d53fa45c chore: 📌 pin versions: SigNoz 0.12.0, SigNoz OtelCollector 0.66.0
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2022-12-10 20:17:49 +05:30
Zsombor
c38d1c150d Fix case sensitivity in query parsing (#1670)
* Fix case sensitivity in query parsing - now the parser correctly recognizes fields which contain uppercase letters

* fix: logs parser respects the case of fields

Co-authored-by: nityanandagohain <nityanandagohain@gmail.com>
Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-10 19:27:57 +05:30
Srikanth Chekuri
16170eacc0 Revert "chore: use local table for innery query (#1815)" (#1847)
* Revert "chore: use local table for innery query (#1815)"

This reverts commit 1b52edb056.

* chore: use localhost
2022-12-10 19:25:44 +05:30
Amol Umbark
66ddbfc085 fix: solves issue legend update causing null ch query (#1845)
Co-authored-by: mindhash <mindhash@mindhashs-MacBook-Pro.local>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-10 12:21:20 +05:30
Vishal Sharma
2715ab61a4 chore: introduce docker_multi_node_cluster and by default set to false (#1839)
* chore: introduce docker_multi_node_cluster and by default set to false

* chore(query-service): 🔧 include docker_multi_node_cluster for tests

Co-authored-by: Prashant Shahi <prashant@signoz.io>
Co-authored-by: Prashant Shahi <me@prashantshahi.dev>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-09 21:57:25 +05:30
Amol Umbark
4d291e92b9 fix: changed table names in default alert queries (#1843)
Co-authored-by: mindhash <mindhash@mindhashs-MacBook-Pro.local>
2022-12-09 21:54:51 +05:30
Nityananda Gohain
1b73649f8e fix: add default value for materialized column in distributed logs table (#1835)
Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-09 20:18:58 +05:30
Amol Umbark
0abae1c09c feat: show release note in alerts dashboards and services pages (#1840)
* feat: show release note in alerts dashboards and services pages

* fix: made code changes as per review and changed message in release note

* fix: solved build pipeline issue

* fix: solved lint issue

Co-authored-by: mindhash <mindhash@mindhashs-MacBook-Pro.local>
Co-authored-by: Pranay Prateek <pranay@signoz.io>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2022-12-09 20:16:09 +05:30
Pranay Prateek
4d02603aed Update README.md 2022-12-09 14:01:05 +05:30
Pranay Prateek
c58e43a678 Update README.md 2022-12-09 12:54:34 +05:30
Pranay Prateek
b77bbe1e4f Update README.md 2022-12-09 12:50:41 +05:30
Pranay Prateek
d4eb241c04 Update README.md 2022-12-09 12:48:57 +05:30
Pranay Prateek
98e1a77a43 Update README.md 2022-12-09 12:48:30 +05:30
Pranay Prateek
498b04491b updated logs image 2022-12-09 12:42:25 +05:30
Pranay Prateek
4e58414cc2 Update README.md 2022-12-09 12:36:05 +05:30
Pranay Prateek
67943cfec0 Update README.md 2022-12-09 12:32:03 +05:30
Palash Gupta
f170eb1b23 fix: scroll is added in case of extra space (#1838) 2022-12-09 10:00:55 +05:30
103 changed files with 2048 additions and 596 deletions


@@ -11,6 +11,6 @@ jobs:
- name: Remove label
uses: buildsville/add-remove-label@v1
with:
label: ok-to-test
label: ok-to-test,testing-deploy
type: remove
token: ${{ secrets.GITHUB_TOKEN }}


@@ -0,0 +1,38 @@
name: staging-deployment
# Trigger deployment only on push to develop branch
on:
push:
branches:
- develop
jobs:
deploy:
name: Deploy latest develop branch to staging
runs-on: ubuntu-latest
environment: staging
steps:
- name: Executing remote ssh commands using ssh key
uses: appleboy/ssh-action@v0.1.6
env:
GITHUB_BRANCH: develop
GITHUB_SHA: ${{ github.sha }}
with:
host: ${{ secrets.HOST_DNS }}
username: ${{ secrets.USERNAME }}
key: ${{ secrets.EC2_SSH_KEY }}
envs: GITHUB_BRANCH,GITHUB_SHA
command_timeout: 60m
script: |
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
docker system prune --force
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout ${GITHUB_BRANCH}
git pull
make build-ee-query-service-amd64
make build-frontend-amd64
make run-signoz


@@ -0,0 +1,39 @@
name: testing-deployment
# Trigger deployment only on testing-deploy label on pull request
on:
pull_request:
types: [labeled]
jobs:
deploy:
name: Deploy PR branch to testing
runs-on: ubuntu-latest
environment: testing
if: ${{ github.event.label.name == 'testing-deploy' }}
steps:
- name: Executing remote ssh commands using ssh key
uses: appleboy/ssh-action@v0.1.6
env:
GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
GITHUB_SHA: ${{ github.sha }}
with:
host: ${{ secrets.HOST_DNS }}
username: ${{ secrets.USERNAME }}
key: ${{ secrets.EC2_SSH_KEY }}
envs: GITHUB_BRANCH,GITHUB_SHA
command_timeout: 60m
script: |
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export DEV_BUILD="1"
docker system prune --force
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout ${GITHUB_BRANCH}
git pull
make build-ee-query-service-amd64
make build-frontend-amd64
make run-signoz


@@ -215,9 +215,26 @@ Please ping us in the [`#contributing`](https://signoz-community.slack.com/archi
# 4. Contribute to Backend (Query-Service) 🌑
[**https://github.com/SigNoz/signoz/tree/develop/pkg/query-service**](https://github.com/SigNoz/signoz/tree/develop/pkg/query-service)
**Need to Update: [https://github.com/SigNoz/signoz/tree/develop/pkg/query-service](https://github.com/SigNoz/signoz/tree/develop/pkg/query-service)**
## 4.1 To run ClickHouse setup (recommended for local development)
## 4.1 Prerequisites
### 4.1.1 Install SQLite3
- Run `sqlite3` command to check if you already have SQLite3 installed on your machine.
- If not installed already, Install using below command
- on Linux
- on Debian / Ubuntu
```
sudo apt install sqlite3
```
- on CentOS / Fedora / RedHat
```
sudo yum install sqlite3
```
## 4.2 To run ClickHouse setup (recommended for local development)
- Clone the SigNoz repository and cd into signoz directory,
```


@@ -45,7 +45,7 @@ build-frontend-amd64:
@echo "--> Building frontend docker image for amd64"
@echo "------------------"
@cd $(FRONTEND_DIRECTORY) && \
docker build --file Dockerfile --no-cache -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \
docker build --file Dockerfile -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \
--build-arg TARGETPLATFORM="linux/amd64" .
# Step to build and push docker image of frontend(used in push pipeline)
@@ -54,7 +54,7 @@ build-push-frontend:
@echo "--> Building and pushing frontend docker image"
@echo "------------------"
@cd $(FRONTEND_DIRECTORY) && \
docker buildx build --file Dockerfile --progress plane --no-cache --push --platform linux/amd64 \
docker buildx build --file Dockerfile --progress plane --push --platform linux/amd64 \
--tag $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) .
# Steps to build and push docker image of query service
@@ -65,7 +65,7 @@ build-query-service-amd64:
@echo "--> Building query-service docker image for amd64"
@echo "------------------"
@docker build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile \
--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="$(LD_FLAGS)" .
# Step to build and push docker image of query in amd64 and arm64 (used in push pipeline)
@@ -73,7 +73,7 @@ build-push-query-service:
@echo "------------------"
@echo "--> Building and pushing query-service docker image"
@echo "------------------"
@docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plane --no-cache \
@docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plane \
--push --platform linux/arm64,linux/amd64 --build-arg LD_FLAGS="$(LD_FLAGS)" \
--tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
@@ -84,11 +84,11 @@ build-ee-query-service-amd64:
@echo "------------------"
@if [ $(DEV_BUILD) != "" ]; then \
docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="${LD_FLAGS} ${DEV_LD_FLAGS}" .; \
else \
docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="$(LD_FLAGS)" .; \
fi
@@ -98,7 +98,7 @@ build-push-ee-query-service:
@echo "--> Building and pushing query-service docker image"
@echo "------------------"
@docker buildx build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
--progress plane --no-cache --push --platform linux/arm64,linux/amd64 \
--progress plane --push --platform linux/arm64,linux/amd64 \
--build-arg LD_FLAGS="$(LD_FLAGS)" --tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
dev-setup:
@@ -119,16 +119,19 @@ down-local:
$(STANDALONE_DIRECTORY)/docker-compose-core.yaml -f $(STANDALONE_DIRECTORY)/docker-compose-local.yaml \
down -v
run-x86:
pull-signoz:
@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml pull
run-signoz:
@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml up --build -d
down-x86:
down-signoz:
@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml down -v
clear-standalone-data:
@docker run --rm -v "$(PWD)/$(STANDALONE_DIRECTORY)/data:/pwd" busybox \
sh -c "cd /pwd && rm -rf alertmanager/* clickhous*/* signoz/* zookeeper-*/*"
sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*"
clear-swarm-data:
@docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \
sh -c "cd /pwd && rm -rf alertmanager/* clickhous*/* signoz/* zookeeper-*/*"
sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*"


@@ -25,17 +25,25 @@
SigNoz helps developers monitor applications and troubleshoot problems in their deployed applications. SigNoz uses distributed tracing to gain visibility into your software stack.
👉 Visualise Metrics, Traces and Logs in a single pane of glass
👉 You can see metrics like p99 latency, error rates for your services, external API calls and individual end points.
👉 You can find the root cause of the problem by going to the exact traces which are causing the problem and see detailed flamegraphs of individual request traces.
👉 Run aggregates on trace data to get business relevant metrics
![screenzy-1644432902955](https://user-images.githubusercontent.com/504541/153270713-1b2156e6-ec03-42de-975b-3c02b8ec1836.png)
👉 Filter and query logs, build dashboards and alerts based on attributes in logs
![screenzy-1670570187181](https://user-images.githubusercontent.com/504541/206646629-829fdafe-70e2-4503-a9c4-1301b7918586.png)
<br />
![screenzy-1644432986784](https://user-images.githubusercontent.com/504541/153270725-0efb73b3-06ed-4207-bf13-9b7e2e17c4b8.png)
![screenzy-1670570193901](https://user-images.githubusercontent.com/504541/206646676-a676fdeb-331c-4847-aea9-d1cabf7c47e1.png)
<br />
![screenzy-1647005040573](https://user-images.githubusercontent.com/504541/157875938-a3d57904-ea6d-4278-b929-bd1408d7f94c.png)
![screenzy-1670570199026](https://user-images.githubusercontent.com/504541/206646754-28c5534f-0377-428c-9c6e-5c7c0d9dd22d.png)
<br />
![screenzy-1670569888865](https://user-images.githubusercontent.com/504541/206645819-1e865a56-71b4-4fde-80cc-fbdb137a4da5.png)
<br /><br />
@@ -51,12 +59,12 @@ Come say Hi to us on [Slack](https://signoz.io/slack) 👋
## Features:
- Unified UI for metrics, traces and logs. No need to switch from Prometheus to Jaeger to debug issues, or use a logs tool like Elastic separate from your metrics and traces stack.
- Application overview metrics like RPS, 50th/90th/99th Percentile latencies, and Error Rate
- Slowest endpoints in your application
- See exact request trace to figure out issues in downstream services, slow DB queries, call to 3rd party services like payment gateways, etc
- Filter traces by service name, operation, latency, error, tags/annotations.
- Run aggregates on trace data (events/spans) to get business relevant metrics. e.g. You can get error rate and 99th percentile latency of `customer_type: gold` or `deployment_version: v2` or `external_call: paypal`
- Unified UI for metrics and traces. No need to switch from Prometheus to Jaeger to debug issues.
<br /><br />
@@ -129,6 +137,21 @@ Moreover, SigNoz has few more advanced features wrt Jaeger:
- Jaegar UI doesnt show any metrics on traces or on filtered traces
- Jaeger cant get aggregates on filtered traces. For example, p99 latency of requests which have tag - customer_type='premium'. This can be done easily on SigNoz
<p>&nbsp </p>
### SigNoz vs Elastic
- SigNoz Logs management are based on ClickHouse, a columnar OLAP datastore which makes aggregate log analytics queries much more efficient
- 50% lower resource requirement compared to Elastic during ingestion
<p>&nbsp </p>
### SigNoz vs Loki
- SigNoz supports aggregations on high-cardinality data over a huge volume while loki doesnt.
- SigNoz supports indexes over high cardinality data and has no limitations on the number of indexes, while Loki reaches max streams with a few indexes added to it.
- Searching over a huge volume of data is difficult and slow in Loki compared to SigNoz
<br /><br />
<img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/Contributors.svg" width="50px" />


@@ -35,7 +35,6 @@ x-clickhouse-depend: &clickhouse-depend
services:
zookeeper-1:
image: bitnami/zookeeper:3.7.0
container_name: zookeeper-1
hostname: zookeeper-1
user: root
ports:
@@ -52,7 +51,6 @@ services:
# zookeeper-2:
# image: bitnami/zookeeper:3.7.0
# container_name: zookeeper-2
# hostname: zookeeper-2
# user: root
# ports:
@@ -69,7 +67,6 @@ services:
# zookeeper-3:
# image: bitnami/zookeeper:3.7.0
# container_name: zookeeper-3
# hostname: zookeeper-3
# user: root
# ports:
@@ -86,7 +83,6 @@ services:
clickhouse:
<<: *clickhouse-defaults
container_name: clickhouse
hostname: clickhouse
# ports:
# - "9000:9000"
@@ -101,7 +97,6 @@ services:
# clickhouse-2:
# <<: *clickhouse-defaults
# container_name: clickhouse-2
# hostname: clickhouse-2
# ports:
# - "9001:9000"
@@ -116,7 +111,6 @@ services:
# clickhouse-3:
# <<: *clickhouse-defaults
# container_name: clickhouse-3
# hostname: clickhouse-3
# ports:
# - "9002:9000"
@@ -143,7 +137,7 @@ services:
condition: on-failure
query-service:
image: signoz/query-service:0.11.4
image: signoz/query-service:0.13.1
command: ["-config=/root/config/prometheus.yml"]
# ports:
# - "6060:6060" # pprof port
@@ -172,7 +166,7 @@ services:
<<: *clickhouse-depend
frontend:
image: signoz/frontend:0.11.4
image: signoz/frontend:0.13.1
deploy:
restart_policy:
condition: on-failure
@@ -185,7 +179,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.63.0
image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs
volumes:
@@ -193,6 +187,7 @@ services:
- /var/lib/docker/containers:/var/lib/docker/containers:ro
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}},dockerswarm.service.name={{.Service.Name}},dockerswarm.task.name={{.Task.Name}}
- DOCKER_MULTI_NODE_CLUSTER=false
ports:
# - "1777:1777" # pprof extension
- "4317:4317" # OTLP gRPC receiver
@@ -212,7 +207,7 @@ services:
<<: *clickhouse-depend
otel-collector-metrics:
image: signoz/signoz-otel-collector:0.63.0
image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml


@@ -64,7 +64,9 @@ receivers:
- job_name: otel-collector
static_configs:
- targets:
- localhost:8888
- localhost:8888
labels:
job_name: otel-collector
processors:
batch:
@@ -78,7 +80,7 @@ processors:
signozspanmetrics/prometheus:
metrics_exporter: prometheus
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 10000
dimensions_cache_size: 100000
dimensions:
- name: service.namespace
default: default
@@ -103,15 +105,19 @@ processors:
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/?database=signoz_traces
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
prometheus:
endpoint: 0.0.0.0:8889
# logging: {}
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
timeout: 5s
sending_queue:
queue_size: 100
@@ -144,9 +150,13 @@ service:
processors: [batch]
exporters: [clickhousemetricswrite]
metrics/generic:
receivers: [hostmetrics, prometheus]
receivers: [hostmetrics]
processors: [resourcedetection, batch]
exporters: [clickhousemetricswrite]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus]
metrics/spanmetrics:
receivers: [otlp/spanmetrics]
exporters: [prometheus]


@@ -7,7 +7,9 @@ receivers:
scrape_interval: 60s
static_configs:
- targets:
- localhost:8888
- localhost:8888
labels:
job_name: otel-collector-metrics
# SigNoz span metrics
- job_name: signozspanmetrics-collector
scrape_interval: 60s


@@ -30,6 +30,8 @@ server {
location /api {
proxy_pass http://query-service:8080/api;
# connection will be closed if no data is read for 600s between successive read operations
proxy_read_timeout 600s;
}
# redirect server error pages to the static page /50x.html


@@ -41,7 +41,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: otel-collector
image: signoz/signoz-otel-collector:0.63.0
image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-config.yaml"]
# user: root # required for reading docker container logs
volumes:
@@ -67,7 +67,7 @@ services:
otel-collector-metrics:
container_name: otel-collector-metrics
image: signoz/signoz-otel-collector:0.63.0
image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml


@@ -132,7 +132,7 @@ services:
# - ./data/clickhouse-3/:/var/lib/clickhouse/
alertmanager:
image: signoz/alertmanager:0.23.0-0.2
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.0-0.2}
volumes:
- ./data/alertmanager:/data
depends_on:
@@ -146,7 +146,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:0.11.4
image: signoz/query-service:${DOCKER_TAG:-0.13.1}
container_name: query-service
command: ["-config=/root/config/prometheus.yml"]
# ports:
@@ -174,7 +174,7 @@ services:
<<: *clickhouse-depend
frontend:
image: signoz/frontend:0.11.4
image: signoz/frontend:${DOCKER_TAG:-0.13.1}
container_name: frontend
restart: on-failure
depends_on:
@@ -186,7 +186,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.63.0
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.66.1}
command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs
volumes:
@@ -194,6 +194,7 @@ services:
- /var/lib/docker/containers:/var/lib/docker/containers:ro
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
- DOCKER_MULTI_NODE_CLUSTER=false
ports:
# - "1777:1777" # pprof extension
- "4317:4317" # OTLP gRPC receiver
@@ -210,7 +211,7 @@ services:
<<: *clickhouse-depend
otel-collector-metrics:
image: signoz/signoz-otel-collector:0.63.0
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.66.1}
command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml


@@ -64,7 +64,10 @@ receivers:
- job_name: otel-collector
static_configs:
- targets:
- localhost:8888
- localhost:8888
labels:
job_name: otel-collector
processors:
batch:
@@ -74,7 +77,7 @@ processors:
signozspanmetrics/prometheus:
metrics_exporter: prometheus
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 10000
dimensions_cache_size: 100000
dimensions:
- name: service.namespace
default: default
@@ -111,16 +114,20 @@ extensions:
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/?database=signoz_traces
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
prometheus:
endpoint: 0.0.0.0:8889
# logging: {}
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
timeout: 5s
sending_queue:
queue_size: 100
@@ -148,9 +155,13 @@ service:
processors: [batch]
exporters: [clickhousemetricswrite]
metrics/generic:
receivers: [hostmetrics, prometheus]
receivers: [hostmetrics]
processors: [resourcedetection, batch]
exporters: [clickhousemetricswrite]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus]
metrics/spanmetrics:
receivers: [otlp/spanmetrics]
exporters: [prometheus]


@@ -11,7 +11,9 @@ receivers:
scrape_interval: 60s
static_configs:
- targets:
- localhost:8888
- localhost:8888
labels:
job_name: otel-collector-metrics
# SigNoz span metrics
- job_name: signozspanmetrics-collector
scrape_interval: 60s


@@ -30,6 +30,8 @@ server {
location /api {
proxy_pass http://query-service:8080/api;
# connection will be closed if no data is read for 600s between successive read operations
proxy_read_timeout 600s;
}
# redirect server error pages to the static page /50x.html


@@ -1,8 +1,11 @@
package app
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net"
"net/http"
_ "net/http/pprof" // http profiler
@@ -266,15 +269,82 @@ func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush()
}
func extractDashboardMetaData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFrom := "/api/v2/metrics/query_range"
var requestBody map[string]interface{}
data := map[string]interface{}{}
if path == pathToExtractBodyFrom && (r.Method == "POST") {
bodyBytes, _ := ioutil.ReadAll(r.Body)
r.Body.Close() // must close
r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))
json.Unmarshal(bodyBytes, &requestBody)
} else {
return nil, false
}
compositeMetricQuery, compositeMetricQueryExists := requestBody["compositeMetricQuery"]
compositeMetricQueryMap := compositeMetricQuery.(map[string]interface{})
signozMetricFound := false
if compositeMetricQueryExists {
signozMetricFound = telemetry.GetInstance().CheckSigNozMetrics(compositeMetricQueryMap)
queryType, queryTypeExists := compositeMetricQueryMap["queryType"]
if queryTypeExists {
data["queryType"] = queryType
}
panelType, panelTypeExists := compositeMetricQueryMap["panelType"]
if panelTypeExists {
data["panelType"] = panelType
}
}
datasource, datasourceExists := requestBody["dataSource"]
if datasourceExists {
data["datasource"] = datasource
}
if !signozMetricFound {
telemetry.GetInstance().AddActiveMetricsUser()
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_DASHBOARDS_METADATA, data, true)
}
return data, true
}
func getActiveLogs(path string, r *http.Request) {
// if path == "/api/v1/dashboards/{uuid}" {
// telemetry.GetInstance().AddActiveMetricsUser()
// }
if path == "/api/v1/logs" {
hasFilters := len(r.URL.Query().Get("q"))
if hasFilters > 0 {
telemetry.GetInstance().AddActiveLogsUser()
}
}
}
func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
route := mux.CurrentRoute(r)
path, _ := route.GetPathTemplate()
dashboardMetadata, metadataExists := extractDashboardMetaData(path, r)
getActiveLogs(path, r)
lrw := NewLoggingResponseWriter(w)
next.ServeHTTP(lrw, r)
data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if metadataExists {
for key, value := range dashboardMetadata {
data[key] = value
}
}
if _, ok := telemetry.IgnoredPaths()[path]; !ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)


@@ -6,7 +6,7 @@
"release_notes": "Release Notes",
"read_how_to_upgrade": "Read instructions on how to upgrade",
"latest_version_signoz": "You are running the latest version of SigNoz.",
"stale_version": "You are on an older version and may be losing out on the latest features we have shipped. We recommend to upgrade to the latest version",
"stale_version": "You are on an older version and may be missing out on the latest features we have shipped. We recommend to upgrade to the latest version",
"oops_something_went_wrong_version": "Oops.. facing issues with fetching updated version information",
"n_a": "N/A",
"routes": {


@@ -57,6 +57,7 @@ const afterLogin = async (
profilePictureURL: payload.profilePictureURL,
userId: payload.id,
orgId: payload.orgId,
userFlags: payload.flags,
},
});


@@ -4,14 +4,16 @@ import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import { EventSourcePolyfill } from 'event-source-polyfill';
export const LiveTail = (queryParams: string): EventSourcePolyfill => {
const dict = {
headers: {
Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
},
};
return new EventSourcePolyfill(
// 10 min in ms
const TIMEOUT_IN_MS = 10 * 60 * 1000;
export const LiveTail = (queryParams: string): EventSourcePolyfill =>
new EventSourcePolyfill(
`${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`,
dict,
{
headers: {
Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
},
heartbeatTimeout: TIMEOUT_IN_MS,
},
);
};
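
As a point of reference, here is a minimal consumer sketch for the refactored helper above. The import path, the query string, and the handler bodies are illustrative assumptions; only `LiveTail` and the standard `EventSource` surface (`onmessage`/`onerror`/`close`) follow from the diff.

```
import { LiveTail } from 'api/logs/livetail'; // assumed import path

// Hypothetical consumer: stream live log lines for an encoded query filter.
// LiveTail returns an EventSourcePolyfill, so the heartbeatTimeout above
// now keeps the stream open for up to 10 minutes without data.
const source = LiveTail('q=serviceName%20IN%20(%27frontend%27)'); // illustrative query

source.onmessage = (event): void => {
	console.log('live log line:', event.data); // payload shape is an assumption
};

source.onerror = (): void => {
	source.close(); // stop streaming; reconnection is left to the caller
};
```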


@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/user/setFlags';
const setFlags = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.patch(`/user/${props.userId}/flags`, {
...props.flags,
});
return {
statusCode: 200,
error: null,
message: response.data?.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default setFlags;
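
A short usage sketch for this new endpoint wrapper, mirroring how `ReleaseNote0120` (later in this changeset) calls it; the wrapper function name is hypothetical.

```
import setFlags from 'api/user/setFlags';

// Hypothetical caller: persist the "do not show again" flag for the
// v0.12.0 release note, in the same shape ReleaseNote0120 uses below.
async function hideReleaseNote(userId: string): Promise<void> {
	const response = await setFlags({
		userId,
		flags: { ReleaseNote0120Hide: 'Y' }, // flag key consumed by ReleaseNote0120
	});
	if (response.statusCode !== 200) {
		console.error('failed to save user flags', response.error);
	}
}
```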


@@ -41,6 +41,7 @@ export const Logout = (): void => {
orgName: '',
profilePictureURL: '',
userId: '',
userFlags: {},
},
});


@@ -1,46 +1,21 @@
import { Button, Popover } from 'antd';
import getStep from 'lib/getStep';
import { generateFilterQuery } from 'lib/logs/generateFilterQuery';
import React, { memo, useCallback, useMemo } from 'react';
import { connect, useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { getLogs } from 'store/actions/logs/getLogs';
import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
import { useDispatch, useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { SET_SEARCH_QUERY_STRING, TOGGLE_LIVE_TAIL } from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime';
import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs';
import { ILogsReducer } from 'types/reducer/logs';
interface AddToQueryHOCProps {
fieldKey: string;
fieldValue: string;
children: React.ReactNode;
getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0],
) => ReturnType<typeof getLogsAggregate>;
}
function AddToQueryHOC({
fieldKey,
fieldValue,
children,
getLogs,
getLogsAggregate,
}: AddToQueryHOCProps): JSX.Element {
const {
searchFilter: { queryString },
logLinesPerPage,
idStart,
idEnd,
liveTail,
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
const dispatch = useDispatch();
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const generatedQuery = useMemo(
() => generateFilterQuery({ fieldKey, fieldValue, type: 'IN' }),
[fieldKey, fieldValue],
@@ -58,69 +33,14 @@ function AddToQueryHOC({
type: SET_SEARCH_QUERY_STRING,
payload: updatedQueryString,
});
if (liveTail === 'STOPPED') {
getLogs({
q: updatedQueryString,
limit: logLinesPerPage,
orderBy: 'timestamp',
order: 'desc',
timestampStart: minTime,
timestampEnd: maxTime,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
getLogsAggregate({
timestampStart: minTime,
timestampEnd: maxTime,
step: getStep({
start: minTime,
end: maxTime,
inputFormat: 'ns',
}),
q: updatedQueryString,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
} else if (liveTail === 'PLAYING') {
dispatch({
type: TOGGLE_LIVE_TAIL,
payload: 'PAUSED',
});
setTimeout(
() =>
dispatch({
type: TOGGLE_LIVE_TAIL,
payload: liveTail,
}),
0,
);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
dispatch,
generatedQuery,
getLogs,
idEnd,
idStart,
logLinesPerPage,
maxTime,
minTime,
queryString,
}, [dispatch, generatedQuery, queryString]);
const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
fieldKey,
]);
const popOverContent = (
<span style={{ fontSize: '0.9rem' }}>Add to query: {fieldKey}</span>
);
return (
<Button
size="small"
type="text"
style={{
margin: 0,
padding: 0,
}}
onClick={handleQueryAdd}
>
<Button size="small" type="text" onClick={handleQueryAdd}>
<Popover placement="top" content={popOverContent}>
{children}
</Popover>
@@ -128,20 +48,10 @@ function AddToQueryHOC({
);
}
interface DispatchProps {
getLogs: (
props: Parameters<typeof getLogs>[0],
) => (dispatch: Dispatch<AppActions>) => void;
getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0],
) => (dispatch: Dispatch<AppActions>) => void;
interface AddToQueryHOCProps {
fieldKey: string;
fieldValue: string;
children: React.ReactNode;
}
const mapDispatchToProps = (
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
getLogs: bindActionCreators(getLogs, dispatch),
getLogsAggregate: bindActionCreators(getLogsAggregate, dispatch),
});
export default connect(null, mapDispatchToProps)(memo(AddToQueryHOC));
export default memo(AddToQueryHOC);
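
A usage sketch for the slimmed-down component; the import path and field names are illustrative, only the `fieldKey`/`fieldValue`/`children` prop shape comes from the diff.

```
import AddToQueryHOC from 'components/Logs/AddToQueryHOC'; // assumed import path
import React from 'react';

// Hypothetical usage: wrap a rendered log attribute so that clicking it
// appends a filter like `container_id IN ('abc123')` to the search query,
// without the component itself re-triggering the log fetches anymore.
function LogAttribute(): JSX.Element {
	return (
		<AddToQueryHOC fieldKey="container_id" fieldValue="abc123">
			<span>abc123</span>
		</AddToQueryHOC>
	);
}

export default LogAttribute;
```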


@@ -0,0 +1,27 @@
import React from 'react';
import { StyledAlert } from './styles';
interface MessageTipProps {
show?: boolean;
message: React.ReactNode | string;
action: React.ReactNode | undefined;
}
function MessageTip({
show,
message,
action,
}: MessageTipProps): JSX.Element | null {
if (!show) return null;
return (
<StyledAlert showIcon description={message} type="info" action={action} />
);
}
MessageTip.defaultProps = {
show: false,
};
export default MessageTip;
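
A minimal usage sketch for the new component; the banner copy and action button are illustrative, only the `show`/`message`/`action` prop shape comes from the diff.

```
import { Button } from 'antd';
import MessageTip from 'components/MessageTip';
import React from 'react';

// Hypothetical usage: an informational banner with a single action,
// in the same shape ReleaseNote0120 uses later in this changeset.
function UpgradeBanner(): JSX.Element {
	// The message and action contents here are illustrative.
	return (
		<MessageTip
			show
			message="A new SigNoz version is available."
			action={<Button size="small">Dismiss</Button>}
		/>
	);
}

export default UpgradeBanner;
```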


@@ -0,0 +1,6 @@
import { Alert } from 'antd';
import styled from 'styled-components';
export const StyledAlert = styled(Alert)`
align-items: center;
`;


@@ -0,0 +1 @@
export const defaultText = 'Ah, seems like we reached a dead end!';


@@ -2,45 +2,52 @@ import getLocalStorageKey from 'api/browser/localstorage/get';
import NotFoundImage from 'assets/NotFound';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import React from 'react';
import React, { useCallback } from 'react';
import { useDispatch } from 'react-redux';
import { Dispatch } from 'redux';
import AppActions from 'types/actions';
import { LOGGED_IN } from 'types/actions/app';
import { defaultText } from './constant';
import { Button, Container, Text, TextContainer } from './styles';
function NotFound(): JSX.Element {
function NotFound({ text = defaultText }: Props): JSX.Element {
const dispatch = useDispatch<Dispatch<AppActions>>();
const isLoggedIn = getLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN);
const onClickHandler = useCallback(() => {
if (isLoggedIn) {
dispatch({
type: LOGGED_IN,
payload: {
isLoggedIn: true,
},
});
}
}, [dispatch, isLoggedIn]);
return (
<Container>
<NotFoundImage />
<TextContainer>
<Text>Ah, seems like we reached a dead end!</Text>
<Text>{text}</Text>
<Text>Page Not Found</Text>
</TextContainer>
<Button
onClick={(): void => {
if (isLoggedIn) {
dispatch({
type: LOGGED_IN,
payload: {
isLoggedIn: true,
},
});
}
}}
to={ROUTES.APPLICATION}
tabIndex={0}
>
<Button onClick={onClickHandler} to={ROUTES.APPLICATION} tabIndex={0}>
Return To Services Page
</Button>
</Container>
);
}
interface Props {
text?: string;
}
NotFound.defaultProps = {
text: defaultText,
};
export default NotFound;
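
A small sketch of the new optional `text` prop in use; the import path and message are illustrative.

```
import NotFound from 'components/NotFound'; // assumed import path
import React from 'react';

// Hypothetical usage: override the default dead-end copy, e.g. for the
// "no events" case this changeset introduces elsewhere.
function NoEventsFallback(): JSX.Element {
	return <NotFound text="No events found for this trace!" />;
}

export default NoEventsFallback;
```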


@@ -0,0 +1,4 @@
export default interface ReleaseNoteProps {
path?: string;
release?: string;
}


@@ -0,0 +1,73 @@
import { Button, Space } from 'antd';
import setFlags from 'api/user/setFlags';
import MessageTip from 'components/MessageTip';
import React, { useCallback } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { UPDATE_USER_FLAG } from 'types/actions/app';
import { UserFlags } from 'types/api/user/setFlags';
import AppReducer from 'types/reducer/app';
import ReleaseNoteProps from '../ReleaseNoteProps';
export default function ReleaseNote0120({
release,
}: ReleaseNoteProps): JSX.Element | null {
const { user } = useSelector<AppState, AppReducer>((state) => state.app);
const dispatch = useDispatch<Dispatch<AppActions>>();
const handleDontShow = useCallback(async (): Promise<void> => {
const flags: UserFlags = { ReleaseNote0120Hide: 'Y' };
try {
dispatch({
type: UPDATE_USER_FLAG,
payload: {
flags,
},
});
if (!user) {
// no user is set, so escape the routine
return;
}
const response = await setFlags({ userId: user?.userId, flags });
if (response.statusCode !== 200) {
console.log('failed to complete do not show status', response.error);
}
} catch (e) {
// here we do not nothing as the cost of error is minor,
// the user can switch the do no show option again in the further.
console.log('unexpected error: failed to complete do not show status', e);
}
}, [dispatch, user]);
return (
<MessageTip
show
message={
<div>
You are using {release} of SigNoz. We have introduced distributed setup in
v0.12.0 release. If you use or plan to use clickhouse queries in dashboard
or alerts, you might want to read about querying the new distributed tables{' '}
<a
href="https://signoz.io/docs/operate/migration/upgrade-0.12/#querying-distributed-tables"
target="_blank"
rel="noreferrer"
>
here
</a>
</div>
}
action={
<Space>
<Button onClick={handleDontShow}>Do not show again</Button>
</Space>
}
/>
);
}


@@ -0,0 +1,66 @@
import ReleaseNoteProps from 'components/ReleaseNote/ReleaseNoteProps';
import ReleaseNote0120 from 'components/ReleaseNote/Releases/ReleaseNote0120';
import ROUTES from 'constants/routes';
import React from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { UserFlags } from 'types/api/user/setFlags';
import AppReducer from 'types/reducer/app';
interface ComponentMapType {
match: (
path: string | undefined,
version: string,
userFlags: UserFlags | null,
) => boolean;
component: ({ path, release }: ReleaseNoteProps) => JSX.Element | null;
}
const allComponentMap: ComponentMapType[] = [
{
match: (
path: string | undefined,
version: string,
userFlags: UserFlags | null,
): boolean => {
if (!path) {
return false;
}
const allowedPaths = [
ROUTES.LIST_ALL_ALERT,
ROUTES.APPLICATION,
ROUTES.ALL_DASHBOARD,
];
return (
userFlags?.ReleaseNote0120Hide !== 'Y' &&
allowedPaths.includes(path) &&
version.startsWith('v0.12')
);
},
component: ReleaseNote0120,
},
];
// ReleaseNote prints release specific warnings and notes that
// user needs to be aware of before using the upgraded version.
function ReleaseNote({ path }: ReleaseNoteProps): JSX.Element | null {
const { userFlags, currentVersion } = useSelector<AppState, AppReducer>(
(state) => state.app,
);
const c = allComponentMap.find((item) => {
return item.match(path, currentVersion, userFlags);
});
if (!c) {
return null;
}
return <c.component path={path} release={currentVersion} />;
}
ReleaseNote.defaultProps = {
path: '',
};
export default ReleaseNote;
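
For orientation, a sketch of how a page might mount this component; the `useLocation` wiring and import path are assumptions, only the `path` prop comes from the diff.

```
import ReleaseNote from 'components/ReleaseNote'; // assumed import path
import React from 'react';
import { useLocation } from 'react-router-dom';

// Hypothetical host page: mount the release-note banner and let the
// component decide whether anything matches the current route, the
// running app version, and the user's hide flags.
function ServicesHeader(): JSX.Element {
	const { pathname } = useLocation();
	return <ReleaseNote path={pathname} />;
}

export default ServicesHeader;
```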


@@ -0,0 +1,9 @@
const DEFAULT_FILTER_VALUE = '';
const EXCEPTION_TYPE_FILTER_NAME = 'exceptionType';
const SERVICE_NAME_FILTER_NAME = 'serviceName';
export {
DEFAULT_FILTER_VALUE,
EXCEPTION_TYPE_FILTER_NAME,
SERVICE_NAME_FILTER_NAME,
};


@@ -17,6 +17,7 @@ import getAll from 'api/errors/getAll';
import getErrorCounts from 'api/errors/getErrorCounts';
import ROUTES from 'constants/routes';
import dayjs from 'dayjs';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import React, { useCallback, useEffect, useMemo } from 'react';
@@ -30,7 +31,11 @@ import { Exception, PayloadProps } from 'types/api/errors/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import {
extractFilterValues,
getDefaultFilterValue,
getDefaultOrder,
getFilterString,
getFilterValues,
getNanoSeconds,
getOffSet,
getOrder,
@@ -43,15 +48,27 @@ function AllErrors(): JSX.Element {
const { maxTime, minTime, loading } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const { search, pathname } = useLocation();
const params = useMemo(() => new URLSearchParams(search), [search]);
const { pathname } = useLocation();
const params = useUrlQuery();
const { t } = useTranslation(['common']);
const updatedOrder = getOrder(params.get(urlKey.order));
const getUpdatedOffset = getOffSet(params.get(urlKey.offset));
const getUpdatedParams = getOrderParams(params.get(urlKey.orderParam));
const getUpdatedPageSize = getUpdatePageSize(params.get(urlKey.pageSize));
const {
updatedOrder,
getUpdatedOffset,
getUpdatedParams,
getUpdatedPageSize,
getUpdatedExceptionType,
getUpdatedServiceName,
} = useMemo(
() => ({
updatedOrder: getOrder(params.get(urlKey.order)),
getUpdatedOffset: getOffSet(params.get(urlKey.offset)),
getUpdatedParams: getOrderParams(params.get(urlKey.orderParam)),
getUpdatedPageSize: getUpdatePageSize(params.get(urlKey.pageSize)),
getUpdatedExceptionType: getFilterString(params.get(urlKey.exceptionType)),
getUpdatedServiceName: getFilterString(params.get(urlKey.serviceName)),
}),
[params],
);
const updatedPath = useMemo(
() =>
@@ -60,6 +77,8 @@ function AllErrors(): JSX.Element {
offset: getUpdatedOffset,
orderParam: getUpdatedParams,
pageSize: getUpdatedPageSize,
exceptionType: getUpdatedExceptionType,
serviceName: getUpdatedServiceName,
})}`,
[
pathname,
@@ -67,6 +86,8 @@ function AllErrors(): JSX.Element {
getUpdatedOffset,
getUpdatedParams,
getUpdatedPageSize,
getUpdatedExceptionType,
getUpdatedServiceName,
],
);
@@ -81,16 +102,27 @@ function AllErrors(): JSX.Element {
limit: getUpdatedPageSize,
offset: getUpdatedOffset,
orderParam: getUpdatedParams,
exceptionType: getUpdatedExceptionType,
serviceName: getUpdatedServiceName,
}),
enabled: !loading,
},
{
queryKey: ['getErrorCounts', maxTime, minTime],
queryKey: [
'getErrorCounts',
maxTime,
minTime,
getUpdatedExceptionType,
getUpdatedServiceName,
],
queryFn: (): Promise<ErrorResponse | SuccessResponse<number>> =>
getErrorCounts({
end: maxTime,
start: minTime,
exceptionType: getUpdatedExceptionType,
serviceName: getUpdatedServiceName,
}),
enabled: !loading,
},
]);
@@ -108,14 +140,43 @@ function AllErrors(): JSX.Element {
const filterIcon = useCallback(() => <SearchOutlined />, []);
const handleSearch = (
confirm: (param?: FilterConfirmProps) => void,
): VoidFunction => (): void => {
confirm();
};
const handleSearch = useCallback(
(
confirm: (param?: FilterConfirmProps) => void,
filterValue: string,
filterKey: string,
): VoidFunction => (): void => {
const { exceptionFilterValue, serviceFilterValue } = getFilterValues(
getUpdatedServiceName || '',
getUpdatedExceptionType || '',
filterKey,
filterValue || '',
);
history.replace(
`${pathname}?${createQueryParams({
order: updatedOrder,
offset: getUpdatedOffset,
orderParam: getUpdatedParams,
pageSize: getUpdatedPageSize,
exceptionType: exceptionFilterValue,
serviceName: serviceFilterValue,
})}`,
);
confirm();
},
[
getUpdatedExceptionType,
getUpdatedOffset,
getUpdatedPageSize,
getUpdatedParams,
getUpdatedServiceName,
pathname,
updatedOrder,
],
);
const filterDropdownWrapper = useCallback(
({ setSelectedKeys, selectedKeys, confirm, placeholder }) => {
({ setSelectedKeys, selectedKeys, confirm, placeholder, filterKey }) => {
return (
<Card size="small">
<Space align="start" direction="vertical">
@@ -126,11 +187,16 @@ function AllErrors(): JSX.Element {
setSelectedKeys(e.target.value ? [e.target.value] : [])
}
allowClear
onPressEnter={handleSearch(confirm)}
defaultValue={getDefaultFilterValue(
filterKey,
getUpdatedServiceName,
getUpdatedExceptionType,
)}
onPressEnter={handleSearch(confirm, selectedKeys[0], filterKey)}
/>
<Button
type="primary"
onClick={handleSearch(confirm)}
onClick={handleSearch(confirm, selectedKeys[0], filterKey)}
icon={<SearchOutlined />}
size="small"
>
@@ -140,7 +206,7 @@ function AllErrors(): JSX.Element {
</Card>
);
},
[],
[getUpdatedExceptionType, getUpdatedServiceName, handleSearch],
);
const onExceptionTypeFilter = useCallback(
@@ -167,6 +233,7 @@ function AllErrors(): JSX.Element {
(
onFilter: ColumnType<Exception>['onFilter'],
placeholder: string,
filterKey: string,
): ColumnType<Exception> => ({
onFilter,
filterIcon,
@@ -176,6 +243,7 @@ function AllErrors(): JSX.Element {
selectedKeys,
confirm,
placeholder,
filterKey,
}),
}),
[filterIcon, filterDropdownWrapper],
@@ -186,7 +254,7 @@ function AllErrors(): JSX.Element {
title: 'Exception Type',
dataIndex: 'exceptionType',
key: 'exceptionType',
...getFilter(onExceptionTypeFilter, 'Search By Exception'),
...getFilter(onExceptionTypeFilter, 'Search By Exception', 'exceptionType'),
render: (value, record): JSX.Element => (
<Tooltip overlay={(): JSX.Element => value}>
<Link
@@ -266,30 +334,39 @@ function AllErrors(): JSX.Element {
updatedOrder,
'serviceName',
),
...getFilter(onApplicationTypeFilter, 'Search By Application'),
...getFilter(
onApplicationTypeFilter,
'Search By Application',
'serviceName',
),
},
];
const onChangeHandler: TableProps<Exception>['onChange'] = (
paginations,
_,
sorter,
) => {
if (!Array.isArray(sorter)) {
const { pageSize = 0, current = 0 } = paginations;
const { columnKey = '', order } = sorter;
const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';
history.replace(
`${pathname}?${createQueryParams({
order: updatedOrder,
offset: (current - 1) * pageSize,
orderParam: columnKey,
pageSize,
})}`,
);
}
};
const onChangeHandler: TableProps<Exception>['onChange'] = useCallback(
(paginations, filters, sorter) => {
if (!Array.isArray(sorter)) {
const { pageSize = 0, current = 0 } = paginations;
const { columnKey = '', order } = sorter;
const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';
const params = new URLSearchParams(window.location.search);
const { exceptionType, serviceName } = extractFilterValues(filters, {
serviceName: getFilterString(params.get(urlKey.serviceName)),
exceptionType: getFilterString(params.get(urlKey.exceptionType)),
});
history.replace(
`${pathname}?${createQueryParams({
order: updatedOrder,
offset: (current - 1) * pageSize,
orderParam: columnKey,
pageSize,
exceptionType,
serviceName,
})}`,
);
}
},
[pathname],
);
return (
<Table

View File

@@ -1,7 +1,13 @@
import { SortOrder } from 'antd/lib/table/interface';
import { FilterValue, SortOrder } from 'antd/lib/table/interface';
import Timestamp from 'timestamp-nano';
import { Order, OrderBy } from 'types/api/errors/getAll';
import {
DEFAULT_FILTER_VALUE,
EXCEPTION_TYPE_FILTER_NAME,
SERVICE_NAME_FILTER_NAME,
} from './constant';
export const isOrder = (order: string | null): order is Order =>
!!(order === 'ascending' || order === 'descending');
@@ -10,6 +16,8 @@ export const urlKey = {
offset: 'offset',
orderParam: 'orderParam',
pageSize: 'pageSize',
exceptionType: 'exceptionType',
serviceName: 'serviceName',
};
export const isOrderParams = (orderBy: string | null): orderBy is OrderBy => {
@@ -87,3 +95,94 @@ export const getUpdatePageSize = (pageSize: string | null): number => {
}
return 10;
};
export const getFilterString = (filter: string | null): string => {
if (filter) {
return filter;
}
return '';
};
export const getDefaultFilterValue = (
filterKey: string | null,
serviceName: string,
exceptionType: string,
): string | undefined => {
let defaultValue: string | undefined;
switch (filterKey) {
case SERVICE_NAME_FILTER_NAME:
defaultValue = serviceName;
break;
case EXCEPTION_TYPE_FILTER_NAME:
defaultValue = exceptionType;
break;
default:
break;
}
return defaultValue;
};
export const getFilterValues = (
serviceName: string,
exceptionType: string,
filterKey: string,
filterValue: string,
): { exceptionFilterValue: string; serviceFilterValue: string } => {
let serviceFilterValue = serviceName;
let exceptionFilterValue = exceptionType;
switch (filterKey) {
case EXCEPTION_TYPE_FILTER_NAME:
exceptionFilterValue = filterValue;
break;
case SERVICE_NAME_FILTER_NAME:
serviceFilterValue = filterValue;
break;
default:
break;
}
return { exceptionFilterValue, serviceFilterValue };
};
type FilterValues = { exceptionType: string; serviceName: string };
const extractSingleFilterValue = (
filterName: string,
filters: Filter,
): string => {
const filterValues = filters[filterName];
if (
!filterValues ||
!Array.isArray(filterValues) ||
filterValues.length === 0
) {
return DEFAULT_FILTER_VALUE;
}
return String(filterValues[0]);
};
type Filter = Record<string, FilterValue | null>;
export const extractFilterValues = (
filters: Filter,
prefilledFilters: FilterValues,
): FilterValues => {
const filterValues: FilterValues = {
exceptionType: prefilledFilters.exceptionType,
serviceName: prefilledFilters.serviceName,
};
if (filters[EXCEPTION_TYPE_FILTER_NAME]) {
filterValues.exceptionType = extractSingleFilterValue(
EXCEPTION_TYPE_FILTER_NAME,
filters,
);
}
if (filters[SERVICE_NAME_FILTER_NAME]) {
filterValues.serviceName = extractSingleFilterValue(
SERVICE_NAME_FILTER_NAME,
filters,
);
}
return filterValues;
};
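Taken together, these helpers round-trip the column filters through the URL. A minimal sketch of how they compose, assuming EXCEPTION_TYPE_FILTER_NAME and SERVICE_NAME_FILTER_NAME resolve to 'exceptionType' and 'serviceName':

// Hypothetical round trip over the helpers above.
// antd supplies the raw column filters; the URL holds the previous values.
const merged = extractFilterValues(
  { exceptionType: ['Timeout'], serviceName: null },
  { exceptionType: '', serviceName: 'frontend' },
);
// => { exceptionType: 'Timeout', serviceName: 'frontend' }

// Editing a single column overwrites only that key.
const { exceptionFilterValue, serviceFilterValue } = getFilterValues(
  merged.serviceName,
  merged.exceptionType,
  'exceptionType',
  'OSError',
);
// => exceptionFilterValue === 'OSError', serviceFilterValue === 'frontend'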

View File

@@ -70,8 +70,8 @@ export const logAlertDefaults: AlertDef = {
chQueries: {
A: {
name: 'A',
query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
rawQuery: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
rawQuery: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
legend: '',
disabled: false,
},
@@ -117,8 +117,8 @@ export const traceAlertDefaults: AlertDef = {
chQueries: {
A: {
name: 'A',
rawQuery: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
rawQuery: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
legend: '',
disabled: false,
},
@@ -164,8 +164,8 @@ export const exceptionAlertDefaults: AlertDef = {
chQueries: {
A: {
name: 'A',
rawQuery: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
query: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
rawQuery: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.distributed_signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
query: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.distributed_signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
legend: '',
disabled: false,
},

View File

@@ -20,8 +20,8 @@ export const rawQueryToIChQuery = (
}
return {
rawQuery: rawQuery !== undefined ? rawQuery : src.rawQuery,
query: rawQuery !== undefined ? rawQuery : src.rawQuery,
rawQuery: rawQuery !== undefined ? rawQuery : src.query,
query: rawQuery !== undefined ? rawQuery : src.query,
legend: legend !== undefined ? legend : src.legend,
name: 'A',
disabled: false,

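The one-line fix above matters whenever no fresh rawQuery is passed in: both output fields must fall back to the current src.query rather than the stale src.rawQuery, otherwise edits made through the query field are silently reverted. A simplified sketch of the corrected mapping, with the IChQuery shape assumed:

interface IChQuery {
  name: string;
  rawQuery: string;
  query: string;
  legend: string;
  disabled: boolean;
}

// Sketch only: the real function has earlier guard branches not shown here.
const rawQueryToIChQuery = (
  src: IChQuery,
  rawQuery?: string,
  legend?: string,
): IChQuery => ({
  rawQuery: rawQuery !== undefined ? rawQuery : src.query,
  query: rawQuery !== undefined ? rawQuery : src.query,
  legend: legend !== undefined ? legend : src.legend,
  name: 'A',
  disabled: false,
});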
View File

@@ -1,14 +1,17 @@
import { notification } from 'antd';
import { notification, Space } from 'antd';
import getAll from 'api/alerts/getAll';
import ReleaseNote from 'components/ReleaseNote';
import Spinner from 'components/Spinner';
import React, { useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
import { useLocation } from 'react-router-dom';
import ListAlert from './ListAlert';
function ListAlertRules(): JSX.Element {
const { t } = useTranslation('common');
const location = useLocation();
const { data, isError, isLoading, refetch, status } = useQuery('allAlerts', {
queryFn: getAll,
cacheTime: 0,
@@ -45,12 +48,15 @@ function ListAlertRules(): JSX.Element {
}
return (
<ListAlert
{...{
allAlertRules: data.payload,
refetch,
}}
/>
<Space direction="vertical" size="large" style={{ width: '100%' }}>
<ReleaseNote path={location.pathname} />
<ListAlert
{...{
allAlertRules: data.payload,
refetch,
}}
/>
</Space>
);
}

View File

@@ -0,0 +1 @@
export const ITEMS_PER_PAGE_OPTIONS = [25, 50, 100, 200];

View File

@@ -4,41 +4,29 @@ import {
RightOutlined,
} from '@ant-design/icons';
import { Button, Divider, Select } from 'antd';
import React, { memo } from 'react';
import { connect, useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { getLogs } from 'store/actions/logs/getLogs';
import React, { memo, useMemo } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import {
GET_NEXT_LOG_LINES,
GET_PREVIOUS_LOG_LINES,
RESET_ID_START_AND_END,
SET_LOG_LINES_PER_PAGE,
} from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs';
import { ITEMS_PER_PAGE_OPTIONS } from './config';
import { Container } from './styles';
const { Option } = Select;
const ITEMS_PER_PAGE_OPTIONS = [25, 50, 100, 200];
interface LogControlsProps {
getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
}
function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
function LogControls(): JSX.Element | null {
const {
logLinesPerPage,
idStart,
idEnd,
liveTail,
searchFilter: { queryString },
isLoading: isLogsLoading,
isLoadingAggregate,
logs,
} = useSelector<AppState, ILogsReducer>((state) => state.logs);
const dispatch = useDispatch();
@@ -53,18 +41,6 @@ function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
dispatch({
type: RESET_ID_START_AND_END,
});
if (liveTail === 'STOPPED')
getLogs({
q: queryString,
limit: logLinesPerPage,
orderBy: 'timestamp',
order: 'desc',
timestampStart: minTime,
timestampEnd: maxTime,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
};
const handleNavigatePrevious = (): void => {
@@ -78,44 +54,61 @@ function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
});
};
const isLoading = isLogsLoading || isLoadingAggregate;
const isNextAndPreviousDisabled = useMemo(
() =>
isLoading ||
logLinesPerPage === 0 ||
logs.length === 0 ||
logs.length < logLinesPerPage,
[isLoading, logLinesPerPage, logs.length],
);
if (liveTail !== 'STOPPED') {
return null;
}
return (
<Container>
<Button size="small" type="link" onClick={handleGoToLatest}>
<Button
loading={isLoading}
size="small"
type="link"
onClick={handleGoToLatest}
>
<FastBackwardOutlined /> Go to latest
</Button>
<Divider type="vertical" />
<Button size="small" type="link" onClick={handleNavigatePrevious}>
<Button
loading={isLoading}
size="small"
type="link"
disabled={isNextAndPreviousDisabled}
onClick={handleNavigatePrevious}
>
<LeftOutlined /> Previous
</Button>
<Button size="small" type="link" onClick={handleNavigateNext}>
<Button
loading={isLoading}
size="small"
type="link"
disabled={isNextAndPreviousDisabled}
onClick={handleNavigateNext}
>
Next <RightOutlined />
</Button>
<Select
style={{ width: 120 }}
loading={isLoading}
value={logLinesPerPage}
onChange={handleLogLinesPerPageChange}
>
{ITEMS_PER_PAGE_OPTIONS.map((count) => {
return <Option key={count} value={count}>{`${count} / page`}</Option>;
})}
{ITEMS_PER_PAGE_OPTIONS.map((count) => (
<Option key={count} value={count}>{`${count} / page`}</Option>
))}
</Select>
</Container>
);
}
interface DispatchProps {
getLogs: (
props: Parameters<typeof getLogs>[0],
) => (dispatch: Dispatch<AppActions>) => void;
}
const mapDispatchToProps = (
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
getLogs: bindActionCreators(getLogs, dispatch),
});
export default connect(null, mapDispatchToProps)(memo(LogControls));
export default memo(LogControls);
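The new isNextAndPreviousDisabled memo encodes the paging guard: navigation is blocked while a fetch is in flight, and a page shorter than logLinesPerPage signals the end of the result set. A standalone sketch of the predicate:

// Standalone version of the guard, with the reducer fields passed in.
const isNextAndPreviousDisabled = (
  isLoading: boolean,
  logLinesPerPage: number,
  logsLength: number,
): boolean =>
  isLoading ||
  logLinesPerPage === 0 ||
  logsLength === 0 ||
  logsLength < logLinesPerPage;

console.log(isNextAndPreviousDisabled(false, 25, 10)); // true: short page, nothing further
console.log(isNextAndPreviousDisabled(false, 25, 25)); // false: a full page may have more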

View File

@@ -3,7 +3,7 @@ import Graph from 'components/Graph';
import Spinner from 'components/Spinner';
import dayjs from 'dayjs';
import getStep from 'lib/getStep';
import React, { memo, useEffect, useRef } from 'react';
import React, { memo, useEffect, useMemo, useRef } from 'react';
import { connect, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
@@ -77,6 +77,18 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [getLogsAggregate, maxTime, minTime, liveTail]);
const graphData = useMemo(() => {
return {
labels: logsAggregate.map((s) => new Date(s.timestamp / 1000000)),
datasets: [
{
data: logsAggregate.map((s) => s.value),
backgroundColor: blue[4],
},
],
};
}, [logsAggregate]);
return (
<Container>
{isLoadingAggregate ? (
@@ -84,15 +96,7 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
) : (
<Graph
name="usage"
data={{
labels: logsAggregate.map((s) => new Date(s.timestamp / 1000000)),
datasets: [
{
data: logsAggregate.map((s) => s.value),
backgroundColor: blue[4],
},
],
}}
data={graphData}
type="bar"
containerHeight="100%"
animate

View File

@@ -0,0 +1,36 @@
import { Button, Row } from 'antd';
import React from 'react';
import { QueryFields } from './utils';
interface SearchFieldsActionBarProps {
fieldsQuery: QueryFields[][];
applyUpdate: () => void;
clearFilters: () => void;
}
export function SearchFieldsActionBar({
fieldsQuery,
applyUpdate,
clearFilters,
}: SearchFieldsActionBarProps): JSX.Element | null {
if (fieldsQuery.length === 0) {
return null;
}
return (
<Row style={{ justifyContent: 'flex-end', paddingRight: '2.4rem' }}>
<Button
type="default"
onClick={clearFilters}
style={{ marginRight: '1rem' }}
>
Clear Filter
</Button>
<Button type="primary" onClick={applyUpdate}>
Apply
</Button>
</Row>
);
}
export default SearchFieldsActionBar;

View File

@@ -12,19 +12,15 @@ import {
QueryOperatorsMultiVal,
QueryOperatorsSingleVal,
} from 'lib/logql/tokens';
import { flatten } from 'lodash-es';
import React, { useEffect, useMemo, useRef, useState } from 'react';
import React, { useMemo } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ILogsReducer } from 'types/reducer/logs';
import { v4 } from 'uuid';
import { SearchFieldsProps } from '..';
import FieldKey from '../FieldKey';
import { QueryFieldContainer } from '../styles';
import { createParsedQueryStructure } from '../utils';
import { QueryFields } from '../utils';
import { Container, QueryWrapper } from './styles';
import { hashCode, parseQuery } from './utils';
const { Option } = Select;
@@ -68,7 +64,6 @@ function QueryField({
const {
fields: { selected },
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
const getFieldType = (inputKey: string): string => {
// eslint-disable-next-line no-restricted-syntax
for (const selectedField of selected) {
@@ -147,9 +142,12 @@ function QueryField({
/>
) : (
<Input
onChange={(e): void => handleChange(2, e.target.value)}
onChange={(e): void => {
handleChange(2, e.target.value);
}}
style={{ width: '100%' }}
defaultValue={query[2] && query[2].value}
value={query[2] && query[2].value}
/>
)}
</div>
@@ -165,85 +163,78 @@ function QueryField({
}
interface QueryConditionFieldProps {
query: { value: string | string[]; type: string }[];
query: QueryFields;
queryIndex: number;
onUpdate: (arg0: unknown, arg1: number) => void;
}
export type Query = { value: string | string[]; type: string }[];
export interface QueryBuilderProps {
keyPrefix: string;
onDropDownToggleHandler: (value: boolean) => VoidFunction;
fieldsQuery: QueryFields[][];
setFieldsQuery: (q: QueryFields[][]) => void;
}
function QueryBuilder({
updateParsedQuery,
keyPrefix,
fieldsQuery,
setFieldsQuery,
onDropDownToggleHandler,
}: SearchFieldsProps): JSX.Element {
const {
searchFilter: { parsedQuery },
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
const keyPrefixRef = useRef(hashCode(JSON.stringify(parsedQuery)));
const [keyPrefix, setKeyPrefix] = useState(keyPrefixRef.current);
const generatedQueryStructure = createParsedQueryStructure(
parsedQuery as never[],
);
useEffect(() => {
const incomingHashCode = hashCode(JSON.stringify(parsedQuery));
if (incomingHashCode !== keyPrefixRef.current) {
keyPrefixRef.current = incomingHashCode;
setKeyPrefix(incomingHashCode);
}
}, [parsedQuery]);
}: QueryBuilderProps): JSX.Element {
const handleUpdate = (query: Query, queryIndex: number): void => {
const updatedParsedQuery = generatedQueryStructure;
updatedParsedQuery[queryIndex] = parseQuery(query) as never;
const flatParsedQuery = flatten(updatedParsedQuery).filter((q) => q.value);
keyPrefixRef.current = hashCode(JSON.stringify(flatParsedQuery));
updateParsedQuery(flatParsedQuery);
const updated = [...fieldsQuery];
updated[queryIndex] = query as never;
setFieldsQuery(updated);
};
const handleDelete = (queryIndex: number): void => {
const updatedParsedQuery = generatedQueryStructure;
updatedParsedQuery.splice(queryIndex - 1, 2);
const updated = [...fieldsQuery];
if (queryIndex !== 0) updated.splice(queryIndex - 1, 2);
else updated.splice(queryIndex, 2);
const flatParsedQuery = flatten(updatedParsedQuery).filter((q) => q.value);
keyPrefixRef.current = v4();
updateParsedQuery(flatParsedQuery);
setFieldsQuery(updated);
};
const QueryUI = (): JSX.Element | JSX.Element[] =>
generatedQueryStructure.map((query, idx) => {
if (Array.isArray(query))
return (
const QueryUI = (
fieldsQuery: QueryFields[][],
): JSX.Element | JSX.Element[] => {
const result: JSX.Element[] = [];
fieldsQuery.forEach((query, idx) => {
if (Array.isArray(query) && query.length > 1) {
result.push(
<QueryField
key={keyPrefix + idx}
query={query as never}
queryIndex={idx}
onUpdate={handleUpdate as never}
onDelete={handleDelete}
/>
/>,
);
return (
<div key={keyPrefix + idx}>
<QueryConditionField
query={query}
queryIndex={idx}
onUpdate={handleUpdate as never}
/>
</div>
);
} else {
result.push(
<div key={keyPrefix + idx}>
<QueryConditionField
query={Array.isArray(query) ? query[0] : query}
queryIndex={idx}
onUpdate={handleUpdate as never}
/>
</div>,
);
}
});
return result;
};
return (
<>
<Container isMargin={generatedQueryStructure.length === 0}>
<Container isMargin={fieldsQuery.length === 0}>
<CategoryHeading>LOG QUERY BUILDER</CategoryHeading>
<CloseSquareOutlined onClick={onDropDownToggleHandler(false)} />
</Container>
<QueryWrapper>{QueryUI()}</QueryWrapper>
<QueryWrapper key={keyPrefix}>{QueryUI(fieldsQuery)}</QueryWrapper>
</>
);
}
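The splice arithmetic in handleDelete follows from the alternating layout of fieldsQuery: a key/operator/value group, then a single-element conditional-operator group, then the next key group. Deleting a group must therefore also drop one adjacent operator. A small sketch of the rule over simplified group types:

type Group = string[]; // e.g. ['status', 'IN', '200'] or ['AND']

const removeGroup = (groups: Group[], queryIndex: number): Group[] => {
  const updated = [...groups];
  if (queryIndex !== 0) updated.splice(queryIndex - 1, 2); // preceding operator + group
  else updated.splice(queryIndex, 2); // group + trailing operator
  return updated;
};

// removeGroup([['a', 'IN', '1'], ['AND'], ['b', 'IN', '2']], 2)
// => [['a', 'IN', '1']]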

View File

@@ -21,17 +21,3 @@ export const parseQuery = (queries: Query): Query => {
}
return queries;
};
export const hashCode = (s: string): string => {
if (!s) {
return '0';
}
return `${Math.abs(
s.split('').reduce((a, b) => {
// eslint-disable-next-line no-bitwise, no-param-reassign
a = (a << 5) - a + b.charCodeAt(0);
// eslint-disable-next-line no-bitwise
return a & a;
}, 0),
)}`;
};

View File

@@ -2,9 +2,9 @@ import { Button } from 'antd';
import CategoryHeading from 'components/Logs/CategoryHeading';
import map from 'lodash-es/map';
import React from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ADD_SEARCH_FIELD_QUERY_STRING } from 'types/actions/logs';
// import { ADD_SEARCH_FIELD_QUERY_STRING } from 'types/actions/logs';
import { ILogsReducer } from 'types/reducer/logs';
import FieldKey from './FieldKey';
@@ -12,15 +12,15 @@ import FieldKey from './FieldKey';
interface SuggestedItemProps {
name: string;
type: string;
applySuggestion: (name: string) => void;
}
function SuggestedItem({ name, type }: SuggestedItemProps): JSX.Element {
const dispatch = useDispatch();
function SuggestedItem({
name,
type,
applySuggestion,
}: SuggestedItemProps): JSX.Element {
const addSuggestedField = (): void => {
dispatch({
type: ADD_SEARCH_FIELD_QUERY_STRING,
payload: name,
});
applySuggestion(name);
};
return (
<Button
@@ -33,7 +33,11 @@ function SuggestedItem({ name, type }: SuggestedItemProps): JSX.Element {
);
}
function Suggestions(): JSX.Element {
interface SuggestionsProps {
applySuggestion: (name: string) => void;
}
function Suggestions({ applySuggestion }: SuggestionsProps): JSX.Element {
const {
fields: { selected },
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
@@ -47,6 +51,7 @@ function Suggestions(): JSX.Element {
key={JSON.stringify(field)}
name={field.name}
type={field.type}
applySuggestion={applySuggestion}
/>
))}
</div>

View File

@@ -1,8 +1,21 @@
import React from 'react';
import { notification } from 'antd';
import { flatten } from 'lodash-es';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ILogsReducer } from 'types/reducer/logs';
import { SearchFieldsActionBar } from './ActionBar';
import QueryBuilder from './QueryBuilder/QueryBuilder';
import Suggestions from './Suggestions';
import { QueryFields } from './utils';
import {
createParsedQueryStructure,
fieldsQueryIsvalid,
hashCode,
initQueryKOVPair,
prepareConditionOperator,
QueryFields,
} from './utils';
export interface SearchFieldsProps {
updateParsedQuery: (query: QueryFields[]) => void;
@@ -13,13 +26,85 @@ function SearchFields({
updateParsedQuery,
onDropDownToggleHandler,
}: SearchFieldsProps): JSX.Element {
const {
searchFilter: { parsedQuery },
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
const [fieldsQuery, setFieldsQuery] = useState(
createParsedQueryStructure([...parsedQuery] as never[]),
);
const keyPrefixRef = useRef(hashCode(JSON.stringify(fieldsQuery)));
useEffect(() => {
const updatedFieldsQuery = createParsedQueryStructure([
...parsedQuery,
] as never[]);
setFieldsQuery(updatedFieldsQuery);
const incomingHashCode = hashCode(JSON.stringify(updatedFieldsQuery));
if (incomingHashCode !== keyPrefixRef.current) {
keyPrefixRef.current = incomingHashCode;
}
}, [parsedQuery]);
const addSuggestedField = useCallback(
(name: string): void => {
if (!name) {
return;
}
const query = [...fieldsQuery];
if (fieldsQuery.length > 0) {
query.push([prepareConditionOperator()]);
}
const newField: QueryFields[] = [];
initQueryKOVPair(name).forEach((q) => newField.push(q));
query.push(newField);
keyPrefixRef.current = hashCode(JSON.stringify(query));
setFieldsQuery(query);
},
[fieldsQuery, setFieldsQuery],
);
const applyUpdate = useCallback((): void => {
const flatParsedQuery = flatten(fieldsQuery);
if (!fieldsQueryIsvalid(flatParsedQuery)) {
notification.error({
message: 'Please enter valid criteria for each of the selected fields',
});
return;
}
keyPrefixRef.current = hashCode(JSON.stringify(flatParsedQuery));
updateParsedQuery(flatParsedQuery);
onDropDownToggleHandler(false)();
}, [onDropDownToggleHandler, fieldsQuery, updateParsedQuery]);
const clearFilters = useCallback((): void => {
keyPrefixRef.current = hashCode(JSON.stringify([]));
updateParsedQuery([]);
onDropDownToggleHandler(false)();
}, [onDropDownToggleHandler, updateParsedQuery]);
return (
<>
<QueryBuilder
key={keyPrefixRef.current}
keyPrefix={keyPrefixRef.current}
onDropDownToggleHandler={onDropDownToggleHandler}
updateParsedQuery={updateParsedQuery}
fieldsQuery={fieldsQuery}
setFieldsQuery={setFieldsQuery}
/>
<Suggestions />
<SearchFieldsActionBar
applyUpdate={applyUpdate}
clearFilters={clearFilters}
fieldsQuery={fieldsQuery}
/>
<Suggestions applySuggestion={addSuggestedField} />
</>
);
}
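SearchFields now keeps a local draft in fieldsQuery and only publishes it on Apply, after validation. Appending a suggestion follows the same alternating layout as the builder: insert a conditional operator first whenever a query already exists. A short sketch of that step, using the helpers imported from ./utils (the state setter is omitted):

const appendSuggestion = (
  fieldsQuery: QueryFields[][],
  name: string,
): QueryFields[][] => {
  const query = [...fieldsQuery];
  if (query.length > 0) {
    query.push([prepareConditionOperator()]); // one-element 'AND' group
  }
  query.push(initQueryKOVPair(name)); // key/operator/value group, values still null
  return query;
};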

View File

@@ -2,11 +2,30 @@
// @ts-ignore
// @ts-nocheck
import { QueryTypes, QueryOperatorsSingleVal } from 'lib/logql/tokens';
import { QueryTypes, ConditionalOperators, ValidTypeSequence, ValidTypeValue } from 'lib/logql/tokens';
export interface QueryFields {
type: keyof typeof QueryTypes;
value: string;
value: string | string[];
}
export function fieldsQueryIsvalid(queryFields: QueryFields[]): boolean {
let lastOp: QueryFields | undefined;
let result = true;
queryFields.forEach((q, idx) => {
if (!q.value) result = false;
if (Array.isArray(q.value) && q.value.length === 0) result = false;
const nextOp = idx < queryFields.length - 1 ? queryFields[idx + 1] : undefined;
if (!ValidTypeSequence(lastOp?.type, q.type, nextOp?.type)) result = false;
if (!ValidTypeValue(lastOp?.value, q.value)) result = false;
lastOp = q;
});
return result;
}
export const queryKOVPair = (): QueryFields[] => [
@@ -23,6 +42,29 @@ export const queryKOVPair = (): QueryFields[] => [
value: null,
},
];
export const initQueryKOVPair = (name: string | null = null, op: string | null = null, value: string | string[] | null = null): QueryFields[] => [
{
type: QueryTypes.QUERY_KEY,
value: name,
},
{
type: QueryTypes.QUERY_OPERATOR,
value: op,
},
{
type: QueryTypes.QUERY_VALUE,
value: value,
},
];
export const prepareConditionOperator = (op: string = ConditionalOperators.AND): QueryFields => {
return {
type: QueryTypes.CONDITIONAL_OPERATOR,
value: op,
};
};
export const createParsedQueryStructure = (parsedQuery = []) => {
if (!parsedQuery.length) {
return parsedQuery;
@@ -64,3 +106,17 @@ export const createParsedQueryStructure = (parsedQuery = []) => {
});
return structuredArray;
};
export const hashCode = (s: string): string => {
if (!s) {
return '0';
}
return `${Math.abs(
s.split('').reduce((a, b) => {
// eslint-disable-next-line no-bitwise, no-param-reassign
a = (a << 5) - a + b.charCodeAt(0);
// eslint-disable-next-line no-bitwise
return a & a;
}, 0),
)}`;
};
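fieldsQueryIsvalid rejects empty values, values of the wrong kind for their operator, and tokens that appear out of order. A worked pair of checks, assuming the QueryTypes constants are the string literals defined in lib/logql/tokens:

const ok = fieldsQueryIsvalid([
  { type: 'QUERY_KEY', value: 'durationNano' },
  { type: 'QUERY_OPERATOR', value: 'GTE' },
  { type: 'QUERY_VALUE', value: '200' },
]); // true: key -> operator -> numeric value

const bad = fieldsQueryIsvalid([
  { type: 'QUERY_KEY', value: 'durationNano' },
  { type: 'QUERY_OPERATOR', value: 'GTE' },
  { type: 'QUERY_VALUE', value: 'fast' },
]); // false: GTE is a numeric operator, so ValidTypeValue fails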

View File

@@ -1,7 +1,14 @@
import { Input, InputRef, Popover } from 'antd';
import useUrlQuery from 'hooks/useUrlQuery';
import getStep from 'lib/getStep';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { debounce } from 'lodash-es';
import React, {
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from 'react';
import { connect, useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
@@ -9,7 +16,12 @@ import { getLogs } from 'store/actions/logs/getLogs';
import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { TOGGLE_LIVE_TAIL } from 'types/actions/logs';
import {
FLUSH_LOGS,
SET_LOADING,
SET_LOADING_AGGREGATE,
TOGGLE_LIVE_TAIL,
} from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs';
@@ -22,12 +34,31 @@ function SearchFilter({
getLogsAggregate,
}: SearchFilterProps): JSX.Element {
const {
queryString,
updateParsedQuery,
updateQueryString,
queryString,
} = useSearchParser();
const [searchText, setSearchText] = useState(queryString);
const [showDropDown, setShowDropDown] = useState(false);
const searchRef = useRef<InputRef>(null);
const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
AppState,
ILogsReducer
>((state) => state.logs);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const dispatch = useDispatch<Dispatch<AppActions>>();
// keep the local input in sync with the url query string
useEffect(() => {
setSearchText(queryString);
}, [queryString]);
const debouncedupdateQueryString = useMemo(
() => debounce(updateQueryString, 300),
[updateQueryString],
);
const onDropDownToggleHandler = useCallback(
(value: boolean) => (): void => {
@@ -36,17 +67,6 @@ function SearchFilter({
[],
);
const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
AppState,
ILogsReducer
>((state) => state.logs);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const dispatch = useDispatch<Dispatch<AppActions>>();
const handleSearch = useCallback(
(customQuery) => {
if (liveTail === 'PLAYING') {
@@ -54,6 +74,9 @@ function SearchFilter({
type: TOGGLE_LIVE_TAIL,
payload: 'PAUSED',
});
dispatch({
type: FLUSH_LOGS,
});
setTimeout(
() =>
dispatch({
@@ -103,9 +126,32 @@ function SearchFilter({
const urlQueryString = urlQuery.get('q');
useEffect(() => {
handleSearch(urlQueryString || '');
dispatch({
type: SET_LOADING,
payload: true,
});
dispatch({
type: SET_LOADING_AGGREGATE,
payload: true,
});
const debouncedHandleSearch = debounce(handleSearch, 600);
debouncedHandleSearch(urlQueryString || '');
return (): void => {
debouncedHandleSearch.cancel();
};
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [urlQueryString, maxTime, minTime]);
}, [
urlQueryString,
maxTime,
minTime,
idEnd,
idStart,
logLinesPerPage,
dispatch,
]);
return (
<Container>
@@ -132,12 +178,13 @@ function SearchFilter({
<Input.Search
ref={searchRef}
placeholder="Search Filter"
value={queryString}
value={searchText}
onChange={(e): void => {
updateQueryString(e.target.value);
const { value } = e.target;
setSearchText(value);
debouncedupdateQueryString(value);
}}
allowClear
onSearch={handleSearch}
/>
</Popover>
</Container>
@@ -145,12 +192,8 @@ function SearchFilter({
}
interface DispatchProps {
getLogs: (
props: Parameters<typeof getLogs>[0],
) => (dispatch: Dispatch<AppActions>) => void;
getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0],
) => (dispatch: Dispatch<AppActions>) => void;
getLogs: typeof getLogs;
getLogsAggregate: typeof getLogsAggregate;
}
type SearchFilterProps = DispatchProps;
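The filter input is now debounced at two levels: keystrokes land in local state immediately, the store and URL update trails by 300 ms, and the fetch effect trails by a further 600 ms and cancels on cleanup, so a burst of dependency changes collapses into a single request. A minimal sketch of the pattern, assuming lodash-es:

import { debounce } from 'lodash-es';

const updateQueryString = (q: string): void => {
  // the real component pushes q to the store and URL here
};
const debouncedUpdate = debounce(updateQueryString, 300);

const runSearch = (q: string): void => {
  // the real component dispatches getLogs / getLogsAggregate here
};
const debouncedSearch = debounce(runSearch, 600);

debouncedUpdate('status IN (200)');
debouncedSearch('status IN (200)');
debouncedSearch.cancel(); // what the effect cleanup calls on re-run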

View File

@@ -23,12 +23,10 @@ export function useSearchParser(): {
const updateQueryString = useCallback(
(updatedQueryString) => {
if (updatedQueryString) {
history.push({
pathname: history.location.pathname,
search: updatedQueryString ? `?q=${updatedQueryString}` : '',
});
}
history.replace({
pathname: history.location.pathname,
search: updatedQueryString ? `?q=${updatedQueryString}` : '',
});
dispatch({
type: SET_SEARCH_QUERY_STRING,

View File

@@ -31,7 +31,7 @@ export const externalCallErrorPercent = ({
const legendFormula = 'External Call Error Percentage';
const expression = 'A*100/B';
const disabled = false;
const disabled = true;
return getQueryBuilderQuerieswithAdditionalItems({
metricNameA,
metricNameB,
@@ -102,7 +102,7 @@ export const externalCallDurationByAddress = ({
const metricNameB = 'signoz_external_call_latency_count';
const expression = 'A/B';
const legendFormula = legend;
const disabled = false;
const disabled = true;
return getQueryBuilderQuerieswithFormula({
servicename,
legend,

View File

@@ -12,7 +12,7 @@ import AppReducer from 'types/reducer/app';
import { NameInput } from '../styles';
function UpdateName(): JSX.Element {
const { user, role, org } = useSelector<AppState, AppReducer>(
const { user, role, org, userFlags } = useSelector<AppState, AppReducer>(
(state) => state.app,
);
const { t } = useTranslation();
@@ -47,6 +47,7 @@ function UpdateName(): JSX.Element {
ROLE: role || 'ADMIN',
orgId: org[0].id,
orgName: org[0].name,
userFlags: userFlags || {},
},
});
} else {

View File

@@ -79,7 +79,7 @@ function DisplayName({
/>
<Button
onClick={onClickHandler}
disabled={isLoading}
disabled={isLoading || orgName === name}
loading={isLoading}
type="primary"
>

View File

@@ -89,12 +89,14 @@ function SideNav(): JSX.Element {
},
];
const currentMenu = menus.find((menu) => pathname.startsWith(menu.to));
return (
<Sider collapsible collapsed={collapsed} onCollapse={onCollapse} width={200}>
<Menu
theme="dark"
defaultSelectedKeys={[ROUTES.APPLICATION]}
selectedKeys={[pathname]}
selectedKeys={currentMenu ? [currentMenu?.to] : []}
mode="inline"
>
{menus.map(({ to, Icon, name, tags }) => (

View File

@@ -27,7 +27,7 @@ const menus: SidebarMenu[] = [
Icon: AlignLeftOutlined,
to: ROUTES.LOGS,
name: 'Logs',
tags: ['Beta'],
// tags: ['Beta'],
},
{
Icon: DashboardFilled,

View File

@@ -1,6 +1,4 @@
import { InfoCircleOutlined } from '@ant-design/icons';
import { Collapse, Popover, Space } from 'antd';
import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
import { Collapse } from 'antd';
import useThemeMode from 'hooks/useThemeMode';
import keys from 'lodash-es/keys';
import map from 'lodash-es/map';
@@ -9,6 +7,8 @@ import { ITraceTree } from 'types/api/trace/getTraceItem';
import EllipsedButton from '../EllipsedButton';
import { CustomSubText, CustomSubTitle } from '../styles';
import EventStartTime from './EventStartTime';
import RelativeStartTime from './RelativeStartTime';
const { Panel } = Collapse;
@@ -25,10 +25,6 @@ function ErrorTag({
{map(event, ({ attributeMap, name, timeUnixNano }) => {
const attributes = keys(attributeMap);
const { time, timeUnitName } = convertTimeToRelevantUnit(
timeUnixNano / 1e6 - firstSpanStartTime,
);
return (
<Collapse
key={`${name}${JSON.stringify(attributeMap)}`}
@@ -39,18 +35,14 @@ function ErrorTag({
header={name || attributeMap?.event}
key={name || attributeMap.event}
>
<Space direction="horizontal" align="center">
<CustomSubTitle style={{ margin: 0 }} ellipsis>
Event Start Time
</CustomSubTitle>
<Popover content="Relative to start of the full trace">
<InfoCircleOutlined />
</Popover>
</Space>
<CustomSubText isDarkMode={isDarkMode}>
{`${time.toFixed(2)} ${timeUnitName}`}
</CustomSubText>
{firstSpanStartTime ? (
<RelativeStartTime
firstSpanStartTime={firstSpanStartTime}
timeUnixNano={timeUnixNano}
/>
) : (
<EventStartTime timeUnixNano={timeUnixNano} />
)}
{map(attributes, (event) => {
const value = attributeMap[event];
@@ -93,7 +85,11 @@ interface ErrorTagProps {
event: ITraceTree['event'];
onToggleHandler: (isOpen: boolean) => void;
setText: (text: { subText: string; text: string }) => void;
firstSpanStartTime: number;
firstSpanStartTime?: number;
}
ErrorTag.defaultProps = {
firstSpanStartTime: undefined,
};
export default ErrorTag;

View File

@@ -0,0 +1,31 @@
import { Popover } from 'antd';
import dayjs from 'dayjs';
import useThemeMode from 'hooks/useThemeMode';
import React from 'react';
import { CustomSubText, CustomSubTitle } from '../styles';
function EventStartTime({ timeUnixNano }: EventStartTimeProps): JSX.Element {
const { isDarkMode } = useThemeMode();
const humanReadableTimeInDayJs = dayjs(timeUnixNano / 1e6).format(
'YYYY-MM-DD hh:mm:ss.SSS A',
);
return (
<>
<CustomSubTitle style={{ margin: 0 }}>Event Time</CustomSubTitle>
<CustomSubText ellipsis isDarkMode={isDarkMode}>
<Popover content={humanReadableTimeInDayJs}>
{humanReadableTimeInDayJs}
</Popover>
</CustomSubText>
</>
);
}
interface EventStartTimeProps {
timeUnixNano: number;
}
export default EventStartTime;

View File

@@ -0,0 +1,42 @@
import { InfoCircleOutlined } from '@ant-design/icons';
import { Popover, Space } from 'antd';
import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
import useThemeMode from 'hooks/useThemeMode';
import React from 'react';
import { CustomSubText, CustomSubTitle } from '../styles';
function StartTime({
firstSpanStartTime,
timeUnixNano,
}: StartTimeProps): JSX.Element {
const { isDarkMode } = useThemeMode();
const { time, timeUnitName } = convertTimeToRelevantUnit(
timeUnixNano / 1e6 - (firstSpanStartTime || 0),
);
return (
<>
<Space direction="horizontal" align="center">
<CustomSubTitle style={{ margin: 0 }} ellipsis>
Event Start Time
</CustomSubTitle>
<Popover content="Relative to start of the full trace">
<InfoCircleOutlined />
</Popover>
</Space>
<CustomSubText isDarkMode={isDarkMode}>
{`${time.toFixed(2)} ${timeUnitName}`}
</CustomSubText>
</>
);
}
interface StartTimeProps {
timeUnixNano: number;
firstSpanStartTime: number;
}
export default StartTime;

View File

@@ -76,7 +76,7 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
/* eslint-enable */
}, [treesData, spanServiceColors]);
const firstSpanStartTime = tree.spanTree[0].startTime;
const firstSpanStartTime = tree.spanTree[0]?.startTime;
const [globalTraceMetadata] = useState<ITraceMetaData>({
...traceMetaData,

View File

@@ -34,9 +34,10 @@ export const traceDateAndTimelineContainer = css`
export const traceDateTimeContainer = css`
display: flex;
aligh-items: center;
align-items: center;
justify-content: center;
`;
export const timelineContainer = css`
overflow: visible;
margin: 0 1rem 0 0;
@@ -48,7 +49,7 @@ export const ganttChartContainer = css`
position: relative;
flex: 1;
overflow-y: auto;
overflow-x: hidden;
overflow-x: scroll;
`;
export const selectedSpanDetailContainer = css`

View File

@@ -1,6 +1,6 @@
import { WarningFilled } from '@ant-design/icons';
import { Button, Card, Form, Space, Typography } from 'antd';
import React, { useCallback } from 'react';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -14,10 +14,6 @@ function Version(): JSX.Element {
const [form] = Form.useForm();
const { t } = useTranslation();
const onClickUpgradeHandler = useCallback((link: string) => {
window.open(link, '_blank');
}, []);
const {
currentVersion,
latestVersion,
@@ -60,9 +56,8 @@ function Version(): JSX.Element {
placeholder={t('latest_version')}
/>
<Button
onClick={(): void =>
onClickUpgradeHandler('https://github.com/SigNoz/signoz/releases')
}
href="https://github.com/SigNoz/signoz/releases"
target="_blank"
type="link"
>
{t('release_notes')}
@@ -94,11 +89,8 @@ function Version(): JSX.Element {
{!isError && !isLatestVersion && (
<Button
onClick={(): void =>
onClickUpgradeHandler(
'https://signoz.io/docs/operate/docker-standalone/#upgrade',
)
}
href="https://signoz.io/docs/operate/docker-standalone/#upgrade"
target="_blank"
>
{t('read_how_to_upgrade')}
</Button>

View File

@@ -2,20 +2,34 @@
// @ts-ignore
// @ts-nocheck
import { QueryTypes, StringTypeQueryOperators } from "./tokens";
export const reverseParser = (
parserQueryArr: { type: string; value: any }[] = [],
) => {
let queryString = '';
let lastToken: { type: string; value: any };
parserQueryArr.forEach((query) => {
if (queryString) {
queryString += ' ';
}
if (Array.isArray(query.value) && query.value.length > 0) {
// if the value is an array, spread it into ('a','b') form
queryString += `(${query.value.map((val) => `'${val}'`).join(',')})`;
} else {
queryString += query.value;
if (query.type === QueryTypes.QUERY_VALUE
&& lastToken.type === QueryTypes.QUERY_OPERATOR
&& Object.values(StringTypeQueryOperators).includes(lastToken.value) ) {
// for operators that take a string value, wrap it in single quotes;
// any single quotes inside the content are stripped, not escaped
queryString += `'${query.value?.replace(/'/g, '')}'`;
} else {
queryString += query.value;
}
}
lastToken = query;
});
// console.log(queryString);
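The new branch only quotes the value when the preceding token is a string-type operator, and it strips embedded single quotes rather than escaping them. A worked example, assuming reverseParser returns the assembled string:

reverseParser([
  { type: 'QUERY_KEY', value: 'message' },
  { type: 'QUERY_OPERATOR', value: 'CONTAINS' },
  { type: 'QUERY_VALUE', value: "user's error" },
]);
// => "message CONTAINS 'users error'"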

View File

@@ -7,6 +7,21 @@ export const QueryOperatorsSingleVal = {
NCONTAINS: 'NCONTAINS',
};
// list of operators that support only number values
export const NumTypeQueryOperators = {
GTE: 'GTE',
GT: 'GT',
LTE: 'LTE',
LT: 'LT',
};
// list of operators that support only string values
export const StringTypeQueryOperators = {
CONTAINS: 'CONTAINS',
NCONTAINS: 'NCONTAINS',
};
// list of operators that support array values
export const QueryOperatorsMultiVal = {
IN: 'IN',
NIN: 'NIN',
@@ -23,3 +38,46 @@ export const QueryTypes = {
QUERY_VALUE: 'QUERY_VALUE',
CONDITIONAL_OPERATOR: 'CONDITIONAL_OPERATOR',
};
export const ValidTypeValue = (
op: string,
value: string | string[],
): boolean => {
if (!op) return true;
if (Object.values(NumTypeQueryOperators).includes(op)) {
if (Array.isArray(value)) return false;
return !Number.isNaN(Number(value));
}
return true;
};
// ValidTypeSequence takes the prior, current and next token types and
// confirms they form a proper sequence. For example, QUERY_VALUE must
// sit between QUERY_OPERATOR and either nothing or CONDITIONAL_OPERATOR.
export const ValidTypeSequence = (
prior: string | undefined,
current: string | undefined,
next: string | undefined,
): boolean => {
switch (current) {
case QueryTypes.QUERY_KEY:
// query key can have an empty prior
if (!prior) return true;
return [QueryTypes.CONDITIONAL_OPERATOR].includes(prior);
case QueryTypes.QUERY_OPERATOR:
// empty prior is not allowed
if (!prior || ![QueryTypes.QUERY_KEY].includes(prior)) return false;
if (!next || ![QueryTypes.QUERY_VALUE].includes(next)) return false;
return true;
case QueryTypes.QUERY_VALUE:
// empty prior is not allowed
if (!prior) return false;
return [QueryTypes.QUERY_OPERATOR].includes(prior);
case QueryTypes.CONDITIONAL_OPERATOR:
// empty prior is not allowed
if (!next) return false;
return [QueryTypes.QUERY_KEY].includes(next);
default:
return false;
}
};
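A few worked calls make the sequence rules concrete:

ValidTypeSequence(undefined, QueryTypes.QUERY_KEY, QueryTypes.QUERY_OPERATOR); // true: a key may open the query
ValidTypeSequence(QueryTypes.QUERY_KEY, QueryTypes.QUERY_OPERATOR, QueryTypes.QUERY_VALUE); // true
ValidTypeSequence(QueryTypes.QUERY_KEY, QueryTypes.QUERY_VALUE, undefined); // false: a value must follow an operator
ValidTypeSequence(undefined, QueryTypes.CONDITIONAL_OPERATOR, QueryTypes.QUERY_KEY); // true: a conditional must be followed by a key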

View File

@@ -17,6 +17,10 @@ import SelectService from './SelectService';
import { getGraphData, getTooltip, getZoomPx, transformLabel } from './utils';
const Container = styled.div`
.force-graph-container {
overflow: scroll;
}
.force-graph-container .graph-tooltip {
background: black;
padding: 1px;

View File

@@ -1,17 +1,26 @@
import { Space } from 'antd';
import ReleaseNote from 'components/ReleaseNote';
import ListOfAllDashboard from 'container/ListOfDashboard';
import React, { useEffect } from 'react';
import { connect } from 'react-redux';
import { useLocation } from 'react-router-dom';
import { bindActionCreators } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { GetAllDashboards } from 'store/actions';
import AppActions from 'types/actions';
function Dashboard({ getAllDashboards }: DashboardProps): JSX.Element {
const location = useLocation();
useEffect(() => {
getAllDashboards();
}, [getAllDashboards]);
return <ListOfAllDashboard />;
return (
<Space direction="vertical" size="middle" style={{ width: '100%' }}>
<ReleaseNote path={location.pathname} />
<ListOfAllDashboard />
</Space>
);
}
interface DispatchProps {

View File

@@ -6,6 +6,7 @@ import LogsAggregate from 'container/LogsAggregate';
import LogsFilters from 'container/LogsFilters';
import LogsSearchFilter from 'container/LogsSearchFilter';
import LogsTable from 'container/LogsTable';
import useMountedState from 'hooks/useMountedState';
import useUrlQuery from 'hooks/useUrlQuery';
import React, { memo, useEffect } from 'react';
import { connect, useDispatch } from 'react-redux';
@@ -18,16 +19,21 @@ import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs';
import SpaceContainer from './styles';
function Logs({ getLogsFields }: LogsProps): JSX.Element {
const urlQuery = useUrlQuery();
const getMountedState = useMountedState();
const urlQuery = useUrlQuery();
const dispatch = useDispatch();
useEffect(() => {
dispatch({
type: SET_SEARCH_QUERY_STRING,
payload: urlQuery.get('q'),
});
}, [dispatch, urlQuery]);
const hasMounted = getMountedState();
if (!hasMounted) {
dispatch({
type: SET_SEARCH_QUERY_STRING,
payload: urlQuery.get('q'),
});
}
}, [dispatch, getMountedState, urlQuery]);
useEffect(() => {
getLogsFields();

View File

@@ -1,5 +1,6 @@
import { notification } from 'antd';
import { notification, Space } from 'antd';
import getLocalStorageKey from 'api/browser/localstorage/get';
import ReleaseNote from 'components/ReleaseNote';
import Spinner from 'components/Spinner';
import { SKIP_ONBOARDING } from 'constants/onboarding';
import ResourceAttributesFilter from 'container/MetricsApplication/ResourceAttributesFilter';
@@ -7,6 +8,7 @@ import MetricTable from 'container/MetricsTable';
import { convertRawQueriesToTraceSelectedTags } from 'lib/resourceAttributes';
import React, { useEffect, useMemo } from 'react';
import { connect, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { GetService, GetServiceProps } from 'store/actions/metrics';
@@ -21,6 +23,7 @@ function Metrics({ getService }: MetricsProps): JSX.Element {
AppState,
GlobalReducer
>((state) => state.globalTime);
const location = useLocation();
const {
services,
resourceAttributeQueries,
@@ -86,10 +89,12 @@ function Metrics({ getService }: MetricsProps): JSX.Element {
}
return (
<>
<Space direction="vertical" style={{ width: '100%' }}>
<ReleaseNote path={location.pathname} />
<ResourceAttributesFilter />
<MetricTable />
</>
</Space>
);
}

View File

@@ -6,10 +6,8 @@
*/
export const isPasswordValid = (value: string): boolean => {
// eslint-disable-next-line prefer-regex-literals
const pattern = new RegExp(
'^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[#?!@$%^&*-]).{8,}$',
);
const pattern = new RegExp('^.{8,}$');
return pattern.test(value);
};
export const isPasswordNotValidMessage = `Password must a have minimum of 8 characters with at least one lower case, one number ,one upper case and one special character`;
export const isPasswordNotValidMessage = `Password must have a minimum of 8 characters`;
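With the complexity classes dropped, length is now the only constraint the validator enforces:

isPasswordValid('short1!');    // false: fewer than 8 characters
isPasswordValid('longenough'); // true: no digit, case mix or symbol required any more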

View File

@@ -1 +1,4 @@
export const SPAN_DETAILS_LEFT_COL_WIDTH = 350;
export const noEventMessage =
'The requested trace id was not found. Sometimes this happens because of an insertion delay in trace data. Please try again after some time';

View File

@@ -1,5 +1,6 @@
import { Typography } from 'antd';
import getTraceItem from 'api/trace/getTraceItem';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import TraceDetailContainer from 'container/TraceDetail';
import useUrlQuery from 'hooks/useUrlQuery';
@@ -8,6 +9,8 @@ import { useQuery } from 'react-query';
import { useParams } from 'react-router-dom';
import { Props as TraceDetailProps } from 'types/api/trace/getTraceItem';
import { noEventMessage } from './constants';
function TraceDetail(): JSX.Element {
const { id } = useParams<TraceDetailProps>();
const urlQuery = useUrlQuery();
@@ -19,6 +22,7 @@ function TraceDetail(): JSX.Element {
}),
[urlQuery],
);
const { data: traceDetailResponse, error, isLoading, isError } = useQuery(
`getTraceItem/${id}`,
() => getTraceItem({ id, spanId, levelUp, levelDown }),
@@ -39,6 +43,10 @@ function TraceDetail(): JSX.Element {
return <Spinner tip="Loading.." />;
}
if (traceDetailResponse.payload[0].events.length === 0) {
return <NotFound text={noEventMessage} />;
}
return <TraceDetailContainer response={traceDetailResponse.payload} />;
}

View File

@@ -18,6 +18,7 @@ import {
UPDATE_ORG_NAME,
UPDATE_USER,
UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
UPDATE_USER_FLAG,
UPDATE_USER_IS_FETCH,
UPDATE_USER_ORG_ROLE,
} from 'types/actions/app';
@@ -58,6 +59,7 @@ const InitialValue: InitialValueTypes = {
org: null,
role: null,
configs: {},
userFlags: {},
};
const appReducer = (
@@ -153,6 +155,7 @@ const appReducer = (
ROLE,
orgId,
orgName,
userFlags,
} = action.payload;
const orgIndex = org.findIndex((e) => e.id === orgId);
@@ -179,6 +182,7 @@ const appReducer = (
},
org: [...updatedOrg],
role: ROLE,
userFlags,
};
}
@@ -219,6 +223,14 @@ const appReducer = (
};
}
case UPDATE_USER_FLAG: {
return {
...state,
userFlags: { ...state.userFlags, ...action.payload.flags },
};
}
default:
return state;
}
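Because the new case shallow-merges action.payload.flags into state.userFlags, a caller can set one flag without clobbering the rest. A hedged dispatch sketch (the 'Y' value is illustrative):

dispatch({
  type: UPDATE_USER_FLAG,
  payload: { flags: { ReleaseNote0120Hide: 'Y' } },
});
// reducer result: userFlags === { ...previousFlags, ReleaseNote0120Hide: 'Y' }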

View File

@@ -3,6 +3,7 @@ import {
Organization,
PayloadProps as OrgPayload,
} from 'types/api/user/getOrganization';
import { UserFlags } from 'types/api/user/setFlags';
import AppReducer, { User } from 'types/reducer/app';
import { ROLES } from 'types/roles';
@@ -24,6 +25,7 @@ export const UPDATE_ORG_NAME = 'UPDATE_ORG_NAME';
export const UPDATE_ORG = 'UPDATE_ORG';
export const UPDATE_FEATURE_FLAGS = 'UPDATE_FEATURE_FLAGS';
export const UPDATE_CONFIGS = 'UPDATE_CONFIGS';
export const UPDATE_USER_FLAG = 'UPDATE_USER_FLAG';
export interface SwitchDarkMode {
type: typeof SWITCH_DARK_MODE;
@@ -92,6 +94,7 @@ export interface UpdateUser {
orgName: Organization['name'];
ROLE: ROLES;
orgId: Organization['id'];
userFlags: UserFlags;
};
}
@@ -110,6 +113,13 @@ export interface UpdateOrgName {
};
}
export interface UpdateUserFlag {
type: typeof UPDATE_USER_FLAG;
payload: {
flags: UserFlags | null;
};
}
export interface UpdateOrg {
type: typeof UPDATE_ORG;
payload: {
@@ -137,4 +147,5 @@ export type AppAction =
| UpdateOrgName
| UpdateOrg
| UpdateFeatureFlags
| UpdateConfigs;
| UpdateConfigs
| UpdateUserFlag;

View File

@@ -15,6 +15,8 @@ export interface Props {
orderParam?: OrderBy;
limit?: number;
offset?: number;
exceptionType?: string;
serviceName?: string;
}
export interface Exception {

View File

@@ -3,6 +3,8 @@ import { GlobalTime } from 'types/actions/globalTime';
export type Props = {
start: GlobalTime['minTime'];
end: GlobalTime['minTime'];
exceptionType: string;
serviceName: string;
};
export type PayloadProps = number;

View File

@@ -1,3 +1,4 @@
import { UserFlags } from 'types/api/user/setFlags';
import { User } from 'types/reducer/app';
import { ROLES } from 'types/roles';
@@ -15,4 +16,5 @@ export interface PayloadProps {
profilePictureURL: string;
organization: string;
role: ROLES;
flags: UserFlags;
}

View File

@@ -0,0 +1,12 @@
import { User } from 'types/reducer/app';
export interface UserFlags {
ReleaseNote0120Hide?: string;
}
export type PayloadProps = UserFlags;
export interface Props {
userId: User['userId'];
flags: UserFlags;
}

View File

@@ -2,6 +2,7 @@ import { PayloadProps as ConfigPayload } from 'types/api/dynamicConfigs/getDynam
import { PayloadProps as FeatureFlagPayload } from 'types/api/features/getFeaturesFlags';
import { PayloadProps as OrgPayload } from 'types/api/user/getOrganization';
import { PayloadProps as UserPayload } from 'types/api/user/getUser';
import { UserFlags } from 'types/api/user/setFlags';
import { ROLES } from 'types/roles';
export interface User {
@@ -28,4 +29,5 @@ export default interface AppReducer {
org: OrgPayload | null;
featureFlags: null | FeatureFlagPayload;
configs: ConfigPayload;
userFlags: null | UserFlags;
}

View File

@@ -45,6 +45,7 @@ import (
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
@@ -1177,33 +1178,54 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": 0}
}
case constants.Duration:
finalQuery := fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano LIMIT 1"
var dBResponse []model.DBResponseTotal
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.S().Info(finalQuery)
err := r.featureFlags.CheckFeature(constants.DurationSort)
durationSortEnabled := err == nil
finalQuery := ""
if !durationSortEnabled {
// if duration sort is not enabled, we need to get the min and max duration from the index table
finalQuery = fmt.Sprintf("SELECT min(durationNano) as min, max(durationNano) as max FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
var dBResponse []model.DBResponseMinMax
err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration = map[string]uint64{"minDuration": dBResponse[0].Min, "maxDuration": dBResponse[0].Max}
}
} else {
// when duration sort is enabled, we need to get the min and max duration from the duration table
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano LIMIT 1"
var dBResponse []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano DESC LIMIT 1"
var dBResponse2 []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse2, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration["minDuration"] = dBResponse[0].NumTotal
}
if len(dBResponse2) > 0 {
traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano DESC LIMIT 1"
var dBResponse2 []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse2, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration["minDuration"] = dBResponse[0].NumTotal
}
if len(dBResponse2) > 0 {
traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal
}
}
case constants.RPCMethod:
finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
@@ -1379,18 +1401,13 @@ func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *mo
var getFilterSpansResponseItems []model.GetFilterSpansResponseItem
baseQuery := fmt.Sprintf("SELECT timestamp, spanID, traceID, serviceName, name, durationNano, httpCode, gRPCCode, gRPCMethod, httpMethod, rpcMethod, responseStatusCode FROM %s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryTable)
baseQuery := fmt.Sprintf("SELECT timestamp, spanID, traceID, serviceName, name, durationNano, httpMethod, rpcMethod, responseStatusCode FROM %s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryTable)
baseQuery += query
err := r.db.Select(ctx, &getFilterSpansResponseItems, baseQuery, args...)
// Fill status and method
for i, e := range getFilterSpansResponseItems {
if e.GRPCode != "" {
getFilterSpansResponseItems[i].StatusCode = e.GRPCode
} else {
getFilterSpansResponseItems[i].StatusCode = e.HttpCode
}
if e.GRPMethod != "" {
getFilterSpansResponseItems[i].Method = e.GRPMethod
if e.RPCMethod != "" {
getFilterSpansResponseItems[i].Method = e.RPCMethod
} else {
getFilterSpansResponseItems[i].Method = e.HttpMethod
}
@@ -2506,8 +2523,35 @@ func (r *ClickHouseReader) ListErrors(ctx context.Context, queryParams *model.Li
var getErrorResponses []model.Error
query := fmt.Sprintf("SELECT any(exceptionType) as exceptionType, any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, any(serviceName) as serviceName, groupID FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU GROUP BY groupID", r.TraceDB, r.errorTable)
query := "SELECT any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, groupID"
if len(queryParams.ServiceName) != 0 {
query = query + ", serviceName"
} else {
query = query + ", any(serviceName) as serviceName"
}
if len(queryParams.ExceptionType) != 0 {
query = query + ", exceptionType"
} else {
query = query + ", any(exceptionType) as exceptionType"
}
query += fmt.Sprintf(" FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.ServiceName) != 0 {
query = query + " AND serviceName ilike @serviceName"
args = append(args, clickhouse.Named("serviceName", "%"+queryParams.ServiceName+"%"))
}
if len(queryParams.ExceptionType) != 0 {
query = query + " AND exceptionType ilike @exceptionType"
args = append(args, clickhouse.Named("exceptionType", "%"+queryParams.ExceptionType+"%"))
}
query = query + " GROUP BY groupID"
if len(queryParams.ServiceName) != 0 {
query = query + ", serviceName"
}
if len(queryParams.ExceptionType) != 0 {
query = query + ", exceptionType"
}
if len(queryParams.OrderParam) != 0 {
if queryParams.Order == constants.Descending {
query = query + " ORDER BY " + queryParams.OrderParam + " DESC"
@@ -2542,7 +2586,14 @@ func (r *ClickHouseReader) CountErrors(ctx context.Context, queryParams *model.C
query := fmt.Sprintf("SELECT count(distinct(groupID)) FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.ServiceName) != 0 {
query = query + " AND serviceName ilike @serviceName"
args = append(args, clickhouse.Named("serviceName", "%"+queryParams.ServiceName+"%"))
}
if len(queryParams.ExceptionType) != 0 {
query = query + " AND exceptionType ilike @exceptionType"
args = append(args, clickhouse.Named("exceptionType", "%"+queryParams.ExceptionType+"%"))
}
err := r.db.QueryRow(ctx, query, args...).Scan(&errorCount)
zap.S().Info(query)
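
Worth noting why this shape is safe: the user-supplied serviceName and exceptionType values travel as clickhouse.Named parameters, and the SQL text only ever references @serviceName / @exceptionType, so a hostile value stays a literal ilike pattern instead of becoming part of the statement.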
@@ -3067,6 +3118,20 @@ func (r *ClickHouseReader) GetSamplesInfoInLastHeartBeatInterval(ctx context.Con
return totalSamples, nil
}
func (r *ClickHouseReader) GetDistributedInfoInLastHeartBeatInterval(ctx context.Context) (map[string]interface{}, error) {
clusterInfo := []model.ClusterInfo{}
queryStr := `SELECT shard_num, shard_weight, replica_num, errors_count, slowdowns_count, estimated_recovery_time FROM system.clusters where cluster='cluster';`
r.db.Select(ctx, &clusterInfo, queryStr)
if len(clusterInfo) == 1 {
return clusterInfo[0].GetMapFromStruct(), nil
}
return nil, nil
}
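
One flag on GetDistributedInfoInLastHeartBeatInterval: the Select error is discarded, so a failed read of system.clusters looks identical to a single-node deployment (both fall through to return nil, nil). A defensive variant (sketch):

if err := r.db.Select(ctx, &clusterInfo, queryStr); err != nil {
zap.S().Debug("error querying system.clusters: ", err)
return nil, err
}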
func (r *ClickHouseReader) GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error) {
var totalLogLines uint64
@@ -3173,7 +3238,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
return &model.ApiError{Err: err, Typ: model.ErrorInternal}
}
query = fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD COLUMN IF NOT EXISTS %s %s", r.logsDB, r.logsTable, cluster, field.Name, field.DataType)
query = fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD COLUMN IF NOT EXISTS %s %s MATERIALIZED -1", r.logsDB, r.logsTable, cluster, field.Name, field.DataType)
err = r.db.Exec(ctx, query)
if err != nil {
return &model.ApiError{Err: err, Typ: model.ErrorInternal}
@@ -3197,7 +3262,8 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
// remove index
query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s DROP INDEX IF EXISTS %s_idx", r.logsDB, r.logsLocalTable, cluster, field.Name)
err := r.db.Exec(ctx, query)
if err != nil {
// we are ignoring errors with code 341 as it is an error with updating old part https://github.com/SigNoz/engineering-pod/issues/919#issuecomment-1366344346
if err != nil && !strings.HasPrefix(err.Error(), "code: 341") {
return &model.ApiError{Err: err, Typ: model.ErrorInternal}
}
}
@@ -3212,11 +3278,18 @@ func (r *ClickHouseReader) GetLogs(ctx context.Context, params *model.LogsFilter
}
isPaginatePrev := logs.CheckIfPrevousPaginateAndModifyOrder(params)
filterSql, err := logs.GenerateSQLWhere(fields, params)
filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, params)
if err != nil {
return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
}
data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}
query := fmt.Sprintf("%s from %s.%s", constants.LogsSQLSelect, r.logsDB, r.logsTable)
if filterSql != "" {
@@ -3246,10 +3319,17 @@ func (r *ClickHouseReader) TailLogs(ctx context.Context, client *model.LogsTailC
return
}
filterSql, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
Query: client.Filter.Query,
})
data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}
if err != nil {
client.Error <- err
return
@@ -3326,13 +3406,20 @@ func (r *ClickHouseReader) AggregateLogs(ctx context.Context, params *model.Logs
return nil, apiErr
}
filterSql, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
Query: params.Query,
})
if err != nil {
return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
}
data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}
query := ""
if params.GroupBy != "" {
query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, toString(%s) as groupBy, "+


@@ -392,6 +392,8 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router) {
router.HandleFunc("/api/v1/user/{id}", SelfAccess(aH.editUser)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/user/{id}", AdminAccess(aH.deleteUser)).Methods(http.MethodDelete)
router.HandleFunc("/api/v1/user/{id}/flags", SelfAccess(aH.patchUserFlag)).Methods(http.MethodPatch)
router.HandleFunc("/api/v1/rbac/role/{id}", SelfAccess(aH.getRole)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/rbac/role/{id}", AdminAccess(aH.editRole)).Methods(http.MethodPut)
@@ -1157,6 +1159,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
RespondError(w, &model.ApiError{model.ErrorTimeout, res.Err}, nil)
}
RespondError(w, &model.ApiError{model.ErrorExec, res.Err}, nil)
return
}
response_data := &model.QueryData{
@@ -1330,6 +1333,9 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data)
if (data["number"] != 0) && (data["number"] != telemetry.DEFAULT_NUMBER_OF_SERVICES) {
telemetry.GetInstance().AddActiveTracesUser()
}
aH.WriteJSON(w, r, result)
}
@@ -1854,6 +1860,37 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
aH.WriteJSON(w, r, map[string]string{"data": "user deleted successfully"})
}
// patchUserFlag patches a user's flags with the changes
func (aH *APIHandler) patchUserFlag(w http.ResponseWriter, r *http.Request) {
// read user id from path var
userId := mux.Vars(r)["id"]
// read input into user flag
defer r.Body.Close()
b, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("failed read user flags from http request for userId ", userId, "with error: ", err)
RespondError(w, model.BadRequestStr("received user flags in invalid format"), nil)
return
}
flags := make(map[string]string, 0)
err = json.Unmarshal(b, &flags)
if err != nil {
zap.S().Errorf("failed parsing user flags for userId ", userId, "with error: ", err)
RespondError(w, model.BadRequestStr("received user flags in invalid format"), nil)
return
}
newflags, apiError := dao.DB().UpdateUserFlags(r.Context(), userId, flags)
if !apiError.IsNil() {
RespondError(w, apiError, nil)
return
}
aH.Respond(w, newflags)
}
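
A hypothetical invocation of the new endpoint (route registered earlier in this diff); the JSON body unmarshals into the map[string]string the handler expects, and the response is the merged flag set returned by dao.DB().UpdateUserFlags:

PATCH /api/v1/user/<id>/flags
{"welcome":"done"}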
func (aH *APIHandler) getRole(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
@@ -2157,6 +2194,8 @@ func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) {
RespondError(w, &err, "streaming is not supported")
return
}
// flush the headers
flusher.Flush()
for {
select {


@@ -36,7 +36,7 @@ const (
DESC = "desc"
)
var tokenRegex, _ = regexp.Compile(`(?i)(and( )*?|or( )*?)?(([\w.-]+ (in|nin) \([^(]+\))|([\w.]+ (gt|lt|gte|lte) (')?[\S]+(')?)|([\w.]+ (contains|ncontains)) [^\\]?'(.*?[^\\])')`)
var tokenRegex, _ = regexp.Compile(`(?i)(and( )*?|or( )*?)?(([\w.-]+( )+(in|nin)( )+\([^(]+\))|([\w.]+( )+(gt|lt|gte|lte)( )+(')?[\S]+(')?)|([\w.]+( )+(contains|ncontains))( )+[^\\]?'(.*?[^\\])')`)
var operatorRegex, _ = regexp.Compile(`(?i)(?: )(in|nin|gt|lt|gte|lte|contains|ncontains)(?: )`)
func ParseLogFilterParams(r *http.Request) (*model.LogsFilterParams, error) {
@@ -152,6 +152,7 @@ func ParseLogAggregateParams(r *http.Request) (*model.LogsAggregateParams, error
func parseLogQuery(query string) ([]string, error) {
sqlQueryTokens := []string{}
filterTokens := tokenRegex.FindAllString(query, -1)
if len(filterTokens) == 0 {
@@ -190,7 +191,13 @@ func parseLogQuery(query string) ([]string, error) {
sqlQueryTokens = append(sqlQueryTokens, f)
} else {
symbol := operatorMapping[strings.ToLower(op)]
sqlQueryTokens = append(sqlQueryTokens, strings.Replace(v, " "+op+" ", " "+symbol+" ", 1)+" ")
sqlExpr := strings.Replace(v, " "+op+" ", " "+symbol+" ", 1)
splittedExpr := strings.Split(sqlExpr, symbol)
if len(splittedExpr) != 2 {
return nil, fmt.Errorf("error while splitting expression: %s", sqlExpr)
}
trimmedSqlExpr := fmt.Sprintf("%s %s %s ", strings.Join(strings.Fields(splittedExpr[0]), " "), symbol, strings.TrimSpace(splittedExpr[1]))
sqlQueryTokens = append(sqlQueryTokens, trimmedSqlExpr)
}
}
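
Concretely, a token such as "and  length   gt  100" is first rewritten with the mapped symbol and then re-assembled via strings.Fields and strings.TrimSpace into "and length > 100 " (single internal spaces, one trailing space), which is what the new extra-space test cases below assert.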
@@ -198,8 +205,6 @@ func parseLogQuery(query string) ([]string, error) {
}
func parseColumn(s string) (*string, error) {
s = strings.ToLower(s)
colName := ""
// if has and/or as prefix
@@ -208,7 +213,8 @@ func parseColumn(s string) (*string, error) {
return nil, fmt.Errorf("incorrect filter")
}
if strings.HasPrefix(s, AND) || strings.HasPrefix(s, OR) {
first := strings.ToLower(filter[0])
if first == AND || first == OR {
colName = filter[1]
} else {
colName = filter[0]
@@ -231,27 +237,37 @@ func replaceInterestingFields(allFields *model.GetFieldsResponse, queryTokens []
interestingFieldLookup := arrayToMap(allFields.Interesting)
for index := 0; index < len(queryTokens); index++ {
queryToken := queryTokens[index]
col, err := parseColumn(queryToken)
result, err := replaceFieldInToken(queryTokens[index], selectedFieldsLookup, interestingFieldLookup)
if err != nil {
return nil, err
}
sqlColName := *col
if _, ok := selectedFieldsLookup[*col]; !ok && *col != "body" {
if field, ok := interestingFieldLookup[*col]; ok {
if field.Type != constants.Static {
sqlColName = fmt.Sprintf("%s_%s_value[indexOf(%s_%s_key, '%s')]", field.Type, strings.ToLower(field.DataType), field.Type, strings.ToLower(field.DataType), *col)
}
} else if strings.Compare(strings.ToLower(*col), "fulltext") != 0 && field.Type != constants.Static {
return nil, fmt.Errorf("field not found for filtering")
}
}
queryTokens[index] = strings.Replace(queryToken, *col, sqlColName, 1)
queryTokens[index] = result
}
return queryTokens, nil
}
func replaceFieldInToken(queryToken string, selectedFieldsLookup map[string]model.LogField, interestingFieldLookup map[string]model.LogField) (string, error) {
col, err := parseColumn(queryToken)
if err != nil {
return "", err
}
sqlColName := *col
lowerColName := strings.ToLower(*col)
if lowerColName != "body" {
if _, ok := selectedFieldsLookup[sqlColName]; !ok {
if field, ok := interestingFieldLookup[sqlColName]; ok {
if field.Type != constants.Static {
sqlColName = fmt.Sprintf("%s_%s_value[indexOf(%s_%s_key, '%s')]", field.Type, strings.ToLower(field.DataType), field.Type, strings.ToLower(field.DataType), field.Name)
}
} else if strings.Compare(strings.ToLower(*col), "fulltext") != 0 && field.Type != constants.Static {
return "", fmt.Errorf("field not found for filtering")
}
}
}
return strings.Replace(queryToken, *col, sqlColName, 1), nil
}
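
As the updated tests below exercise, the lookup in replaceFieldInToken is exact-case on the field name and the expansion uses field.Name rather than the token's spelling. So for an interesting int64 attribute named code, a token like code lte 500 expands to:

attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500

while a selected field such as Field2 passes through unchanged.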
func CheckIfPrevousPaginateAndModifyOrder(params *model.LogsFilterParams) (isPaginatePrevious bool) {
if params.IdGt != "" && params.OrderBy == TIMESTAMP && params.Order == DESC {
isPaginatePrevious = true
@@ -263,20 +279,23 @@ func CheckIfPrevousPaginateAndModifyOrder(params *model.LogsFilterParams) (isPag
return
}
func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilterParams) (string, error) {
func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilterParams) (string, int, error) {
var tokens []string
var err error
var sqlWhere string
var lenTokens = 0
if params.Query != "" {
tokens, err = parseLogQuery(params.Query)
if err != nil {
return sqlWhere, err
return sqlWhere, -1, err
}
lenTokens = len(tokens)
}
tokens, err = replaceInterestingFields(allFields, tokens)
if err != nil {
return sqlWhere, err
return sqlWhere, -1, err
}
filterTokens := []string{}
@@ -326,5 +345,5 @@ func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilt
sqlWhere = strings.Join(tokens, "")
return sqlWhere, nil
return sqlWhere, lenTokens, nil
}
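
A representative call site for the widened signature (mirroring the reader changes earlier in this diff); lenFilters == 0 means the query carried no parseable filter tokens:

filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, params)
if err != nil {
return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
}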


@@ -80,7 +80,17 @@ var correctQueriesTest = []struct {
{
`filters with extra spaces`,
`service IN ('name > 100') AND length gt 100`,
[]string{`service IN ('name > 100') `, `AND length > 100 `},
[]string{`service IN ('name > 100') `, `AND length > 100 `},
},
{
`Extra space within a filter expression`,
`service IN ('name > 100')`,
[]string{`service IN ('name > 100') `},
},
{
`Extra space between a query filter`,
`data contains 'hello world .'`,
[]string{`data ILIKE '%hello world .%' `},
},
{
`filters with special characters in key name`,
@@ -161,6 +171,26 @@ var parseCorrectColumns = []struct {
"and id_userid >= 50 ",
"id_userid",
},
{
"column starting with and",
"andor = 1",
"andor",
},
{
"column starting with and after an 'and'",
"and andor = 1",
"andor",
},
{
"column starting with And",
"Andor = 1",
"Andor",
},
{
"column starting with and after an 'and'",
"and Andor = 1",
"Andor",
},
{
"column with ilike",
`AND body ILIKE '%searchstring%' `,
@@ -279,7 +309,7 @@ var generateSQLQueryFields = model.GetFieldsResponse{
Type: "attributes",
},
{
Name: "field2",
Name: "Field2",
DataType: "double64",
Type: "attributes",
},
@@ -290,6 +320,11 @@ var generateSQLQueryFields = model.GetFieldsResponse{
},
},
Interesting: []model.LogField{
{
Name: "FielD1",
DataType: "int64",
Type: "attributes",
},
{
Name: "code",
DataType: "int64",
@@ -323,6 +358,15 @@ var generateSQLQueryTestCases = []struct {
},
SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( field1 < 100 and field1 > 50 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ",
},
{
Name: "generate case sensitive query",
Filter: model.LogsFilterParams{
Query: "field1 lt 100 and FielD1 gt 50 and Field2 gt 10 and code lte 500 and code gte 400",
TimestampStart: uint64(1657689292000),
TimestampEnd: uint64(1657689294000),
},
SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( field1 < 100 and attributes_int64_value[indexOf(attributes_int64_key, 'FielD1')] > 50 and Field2 > 10 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ",
},
}
func TestGenerateSQLQuery(t *testing.T) {


@@ -142,7 +142,7 @@ func BuildMetricsTimeSeriesFilterQuery(fs *model.FilterSet, groupTags []string,
}
}
filterSubQuery := fmt.Sprintf("SELECT %s fingerprint FROM %s.%s WHERE %s", selectLabels, constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME, queryString)
filterSubQuery := fmt.Sprintf("SELECT %s fingerprint FROM %s.%s WHERE %s", selectLabels, constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_TABLENAME, queryString)
return filterSubQuery, nil
}
@@ -166,7 +166,7 @@ func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, table
" toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
" %s as value" +
" FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
" INNER JOIN" +
" GLOBAL INNER JOIN" +
" (%s) as filtered_time_series" +
" USING fingerprint" +
" WHERE " + samplesTableTimeFilter +
@@ -228,7 +228,7 @@ func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, table
" toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
" any(value) as value" +
" FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
" INNER JOIN" +
" GLOBAL INNER JOIN" +
" (%s) as filtered_time_series" +
" USING fingerprint" +
" WHERE " + samplesTableTimeFilter +
@@ -371,7 +371,7 @@ func expressionToQuery(qp *model.QueryRangeParamsV2, varToQuery map[string]strin
joinUsing = strings.Join(groupTags, ",")
formulaSubQuery += fmt.Sprintf("(%s) as %s ", query, var_)
if idx < len(vars)-1 {
formulaSubQuery += "INNER JOIN"
formulaSubQuery += "GLOBAL INNER JOIN"
} else if len(vars) > 1 {
formulaSubQuery += fmt.Sprintf("USING (%s)", joinUsing)
}
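
Context for the GLOBAL change in all three places: with ClickHouse distributed tables, a plain INNER JOIN makes every shard evaluate the right-hand subquery against its own local data, so matches whose time-series fingerprints live on another shard are silently dropped; GLOBAL INNER JOIN evaluates the subquery once on the initiating node and broadcasts the result to all shards. Schematically:

SELECT ... FROM signoz_metrics.distributed_samples_v2
GLOBAL INNER JOIN (SELECT ... fingerprint FROM signoz_metrics.distributed_time_series_v2 WHERE ...) as filtered_time_series
USING fingerprint

This is also why the filter subquery above switches from the shard-local time-series table to the distributed one, matching the constants change later in the diff.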


@@ -480,14 +480,18 @@ func parseListErrorsRequest(r *http.Request) (*model.ListErrorsParams, error) {
if err != nil {
return nil, errors.New("offset param is not in correct format")
}
serviceName := r.URL.Query().Get("serviceName")
exceptionType := r.URL.Query().Get("exceptionType")
params := &model.ListErrorsParams{
Start: startTime,
End: endTime,
OrderParam: orderParam,
Order: order,
Limit: int64(limitInt),
Offset: int64(offsetInt),
Start: startTime,
End: endTime,
OrderParam: orderParam,
Order: order,
Limit: int64(limitInt),
Offset: int64(offsetInt),
ServiceName: serviceName,
ExceptionType: exceptionType,
}
return params, nil
@@ -503,10 +507,14 @@ func parseCountErrorsRequest(r *http.Request) (*model.CountErrorsParams, error)
if err != nil {
return nil, err
}
serviceName := r.URL.Query().Get("serviceName")
exceptionType := r.URL.Query().Get("exceptionType")
params := &model.CountErrorsParams{
Start: startTime,
End: endTime,
Start: startTime,
End: endTime,
ServiceName: serviceName,
ExceptionType: exceptionType,
}
return params, nil


@@ -1,8 +1,11 @@
package app
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net"
"net/http"
_ "net/http/pprof" // http profiler
@@ -235,21 +238,89 @@ func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush()
}
func extractDashboardMetaData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFrom := "/api/v2/metrics/query_range"
var requestBody map[string]interface{}
data := map[string]interface{}{}
if path == pathToExtractBodyFrom && (r.Method == "POST") {
bodyBytes, _ := ioutil.ReadAll(r.Body)
r.Body.Close() // must close
r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))
json.Unmarshal(bodyBytes, &requestBody)
} else {
return nil, false
}
compositeMetricQuery, compositeMetricQueryExists := requestBody["compositeMetricQuery"]
compositeMetricQueryMap := compositeMetricQuery.(map[string]interface{})
signozMetricFound := false
if compositeMetricQueryExists {
signozMetricFound = telemetry.GetInstance().CheckSigNozMetrics(compositeMetricQueryMap)
queryType, queryTypeExists := compositeMetricQueryMap["queryType"]
if queryTypeExists {
data["queryType"] = queryType
}
panelType, panelTypeExists := compositeMetricQueryMap["panelType"]
if panelTypeExists {
data["panelType"] = panelType
}
}
datasource, datasourceExists := requestBody["dataSource"]
if datasourceExists {
data["datasource"] = datasource
}
if !signozMetricFound {
telemetry.GetInstance().AddActiveMetricsUser()
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_DASHBOARDS_METADATA, data, true)
}
return data, true
}
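
One hazard here: requestBody["compositeMetricQuery"] is asserted to map[string]interface{} before compositeMetricQueryExists is consulted, so a POST body without that key (or with a non-object value) panics inside the middleware. A defensive variant (sketch):

// two-value assertion: a missing or non-object compositeMetricQuery is skipped rather than panicking
if cmq, ok := requestBody["compositeMetricQuery"].(map[string]interface{}); ok {
signozMetricFound = telemetry.GetInstance().CheckSigNozMetrics(cmq)
if queryType, ok := cmq["queryType"]; ok {
data["queryType"] = queryType
}
if panelType, ok := cmq["panelType"]; ok {
data["panelType"] = panelType
}
}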
func getActiveLogs(path string, r *http.Request) {
// if path == "/api/v1/dashboards/{uuid}" {
// telemetry.GetInstance().AddActiveMetricsUser()
// }
if path == "/api/v1/logs" {
hasFilters := len(r.URL.Query().Get("q"))
if hasFilters > 0 {
telemetry.GetInstance().AddActiveLogsUser()
}
}
}
func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
route := mux.CurrentRoute(r)
path, _ := route.GetPathTemplate()
dashboardMetadata, metadataExists := extractDashboardMetaData(path, r)
getActiveLogs(path, r)
lrw := NewLoggingResponseWriter(w)
next.ServeHTTP(lrw, r)
data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if telemetry.GetInstance().IsSampled() {
if _, ok := telemetry.IgnoredPaths()[path]; !ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)
if metadataExists {
for key, value := range dashboardMetadata {
data[key] = value
}
}
// if telemetry.GetInstance().IsSampled() {
if _, ok := telemetry.IgnoredPaths()[path]; !ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)
}
// }
})
}


@@ -12,6 +12,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/dao"
"go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
"golang.org/x/crypto/bcrypt"
@@ -386,6 +387,8 @@ func Login(ctx context.Context, request *model.LoginRequest) (*model.LoginRespon
return nil, err
}
telemetry.GetInstance().IdentifyUser(&user.User)
return &model.LoginResponse{
UserJwtObject: userjwt,
UserId: user.User.Id,


@@ -2,9 +2,7 @@ package auth
import (
"context"
"fmt"
"net/http"
"regexp"
"github.com/pkg/errors"
"go.signoz.io/signoz/pkg/query-service/constants"
@@ -74,21 +72,21 @@ func ValidatePassword(password string) error {
return errors.Errorf("Password should be atleast %d characters.", minimumPasswordLength)
}
num := `[0-9]{1}`
lower := `[a-z]{1}`
upper := `[A-Z]{1}`
symbol := `[!@#$&*]{1}`
if b, err := regexp.MatchString(num, password); !b || err != nil {
return fmt.Errorf("password should have atleast one number")
}
if b, err := regexp.MatchString(lower, password); !b || err != nil {
return fmt.Errorf("password should have atleast one lower case letter")
}
if b, err := regexp.MatchString(upper, password); !b || err != nil {
return fmt.Errorf("password should have atleast one upper case letter")
}
if b, err := regexp.MatchString(symbol, password); !b || err != nil {
return fmt.Errorf("password should have atleast one special character from !@#$&* ")
}
// num := `[0-9]{1}`
// lower := `[a-z]{1}`
// upper := `[A-Z]{1}`
// symbol := `[!@#$&*]{1}`
// if b, err := regexp.MatchString(num, password); !b || err != nil {
// return fmt.Errorf("password should have atleast one number")
// }
// if b, err := regexp.MatchString(lower, password); !b || err != nil {
// return fmt.Errorf("password should have atleast one lower case letter")
// }
// if b, err := regexp.MatchString(upper, password); !b || err != nil {
// return fmt.Errorf("password should have atleast one upper case letter")
// }
// if b, err := regexp.MatchString(symbol, password); !b || err != nil {
// return fmt.Errorf("password should have atleast one special character from !@#$&* ")
// }
return nil
}


@@ -110,10 +110,9 @@ const (
DefaultLogSkipIndexGranularity = 64
)
const (
SIGNOZ_METRIC_DBNAME = "signoz_metrics"
SIGNOZ_SAMPLES_TABLENAME = "distributed_samples_v2"
SIGNOZ_TIMESERIES_TABLENAME = "distributed_time_series_v2"
SIGNOZ_TIMESERIES_LOCAL_TABLENAME = "time_series_v2"
SIGNOZ_METRIC_DBNAME = "signoz_metrics"
SIGNOZ_SAMPLES_TABLENAME = "distributed_samples_v2"
SIGNOZ_TIMESERIES_TABLENAME = "distributed_time_series_v2"
)
var TimeoutExcludedRoutes = map[string]bool{


@@ -41,6 +41,8 @@ type Mutations interface {
EditUser(ctx context.Context, update *model.User) (*model.User, *model.ApiError)
DeleteUser(ctx context.Context, id string) *model.ApiError
UpdateUserFlags(ctx context.Context, userId string, flags map[string]string) (model.UserFlag, *model.ApiError)
CreateGroup(ctx context.Context, group *model.Group) (*model.Group, *model.ApiError)
DeleteGroup(ctx context.Context, id string) *model.ApiError


@@ -68,6 +68,11 @@ func InitDB(dataSourceName string) (*ModelDaoSqlite, error) {
token TEXT NOT NULL,
FOREIGN KEY(user_id) REFERENCES users(id)
);
CREATE TABLE IF NOT EXISTS user_flags (
user_id TEXT PRIMARY KEY,
flags TEXT,
FOREIGN KEY(user_id) REFERENCES users(id)
);
`
_, err = db.Exec(table_schema)
@@ -120,6 +125,13 @@ func (mds *ModelDaoSqlite) initializeOrgPreferences(ctx context.Context) error {
// set telemetry fields from userPreferences
telemetry.GetInstance().SetDistinctId(org.Id)
users, _ := mds.GetUsers(ctx)
countUsers := len(users)
telemetry.GetInstance().SetCountUsers(int8(countUsers))
if countUsers > 0 {
telemetry.GetInstance().SetCompanyDomain(users[countUsers-1].Email)
}
return nil
}


@@ -2,6 +2,7 @@ package sqlite
import (
"context"
"encoding/json"
"fmt"
"time"
@@ -163,6 +164,8 @@ func (mds *ModelDaoSqlite) EditOrg(ctx context.Context, org *model.Organization)
}
telemetry.GetInstance().SetTelemetryAnonymous(org.IsAnonymous)
telemetry.GetInstance().SetDistinctId(org.Id)
return nil
}
@@ -271,11 +274,14 @@ func (mds *ModelDaoSqlite) GetUser(ctx context.Context,
u.org_id,
u.group_id,
g.name as role,
o.name as organization
o.name as organization,
COALESCE((select uf.flags
from user_flags uf
where u.id = uf.user_id), '') as flags
from users u, groups g, organizations o
where
g.id=u.group_id and
o.id = u.org_id and
o.id = u.org_id and
u.id=?;`
if err := mds.db.Select(&users, query, id); err != nil {
@@ -291,6 +297,7 @@ func (mds *ModelDaoSqlite) GetUser(ctx context.Context,
if len(users) == 0 {
return nil, nil
}
return &users[0], nil
}
@@ -531,3 +538,53 @@ func (mds *ModelDaoSqlite) GetResetPasswordEntry(ctx context.Context,
}
return &entries[0], nil
}
// UpdateUserFlags merges the given flags into the user's existing flags
func (mds *ModelDaoSqlite) UpdateUserFlags(ctx context.Context, userId string, flags map[string]string) (model.UserFlag, *model.ApiError) {
if len(flags) == 0 {
// nothing to do as flags are empty. In this method, we only append the flags
// but not set them to empty
return flags, nil
}
// fetch existing flags
userPayload, apiError := mds.GetUser(ctx, userId)
if apiError != nil {
return nil, apiError
}
if userPayload.Flags != nil {
for k, v := range userPayload.Flags {
if _, ok := flags[k]; !ok {
// insert only missing keys as we want to retain the
// flags in the db that are not part of this request
flags[k] = v
}
}
}
// append existing flags with new ones
// write the updated flags
flagsBytes, err := json.Marshal(flags)
if err != nil {
return nil, model.InternalError(err)
}
if len(userPayload.Flags) == 0 {
q := `INSERT INTO user_flags (user_id, flags) VALUES (?, ?);`
if _, err := mds.db.ExecContext(ctx, q, userId, string(flagsBytes)); err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
}
} else {
q := `UPDATE user_flags SET flags = ? WHERE user_id = ?;`
if _, err := mds.db.ExecContext(ctx, q, userId, string(flagsBytes)); err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
}
}
return flags, nil
}
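
The merge is therefore additive: if the stored flags are {"welcome":"done"} and the request carries {"theme":"dark"}, the row ends up holding both keys; a key present in both keeps the incoming value, and an empty request body is a no-op rather than a reset.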


@@ -63,6 +63,7 @@ type Reader interface {
GetSamplesInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
GetTagsInfoInLastHeartBeatInterval(ctx context.Context) (*model.TagsInfo, error)
GetDistributedInfoInLastHeartBeatInterval(ctx context.Context) (map[string]interface{}, error)
// Logs
GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError)
UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError


@@ -1,5 +1,11 @@
package model
import (
"database/sql/driver"
"encoding/json"
"fmt"
)
type Organization struct {
Id string `json:"id" db:"id"`
Name string `json:"name" db:"name"`
@@ -30,10 +36,42 @@ type User struct {
GroupId string `json:"groupId,omitempty" db:"group_id"`
}
type UserFlag map[string]string
func (uf UserFlag) Value() (driver.Value, error) {
f := make(map[string]string, 0)
for k, v := range uf {
f[k] = v
}
return json.Marshal(f)
}
func (uf *UserFlag) Scan(value interface{}) error {
fmt.Println(" value:", value)
if value == "" {
return nil
}
b, ok := value.(string)
if !ok {
return fmt.Errorf("type assertion to []byte failed while scanning user flag")
}
f := make(map[string]string, 0)
if err := json.Unmarshal([]byte(b), &f); err != nil {
return err
}
*uf = make(UserFlag, len(f))
for k, v := range f {
(*uf)[k] = v
}
return nil
}
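
A quick round-trip of the Valuer/Scanner pair above (sketch):

uf := model.UserFlag{"welcome": "done"}
raw, _ := uf.Value()               // json.Marshal output: []byte(`{"welcome":"done"}`)
var out model.UserFlag
_ = out.Scan(string(raw.([]byte))) // out["welcome"] == "done"

Scan asserts the incoming value to string, which matches the COALESCE(..., '') as flags column selected in GetUser; a driver handing back []byte would land on the error path.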
type UserPayload struct {
User
Role string `json:"role"`
Organization string `json:"organization"`
Role string `json:"role"`
Organization string `json:"organization"`
Flags UserFlag `json:"flags"`
}
type Group struct {


@@ -296,17 +296,21 @@ type GetTTLParams struct {
}
type ListErrorsParams struct {
Start *time.Time
End *time.Time
Limit int64
OrderParam string
Order string
Offset int64
Start *time.Time
End *time.Time
Limit int64
OrderParam string
Order string
Offset int64
ServiceName string
ExceptionType string
}
type CountErrorsParams struct {
Start *time.Time
End *time.Time
Start *time.Time
End *time.Time
ServiceName string
ExceptionType string
}
type GetErrorParams struct {


@@ -72,6 +72,14 @@ func BadRequest(err error) *ApiError {
}
}
// BadRequestStr returns a ApiError object of bad request
func BadRequestStr(s string) *ApiError {
return &ApiError{
Typ: ErrorBadData,
Err: fmt.Errorf(s),
}
}
// InternalError returns a ApiError object of internal type
func InternalError(err error) *ApiError {
return &ApiError{
@@ -172,13 +180,9 @@ type GetFilterSpansResponseItem struct {
ServiceName string `ch:"serviceName" json:"serviceName"`
Operation string `ch:"name" json:"operation"`
DurationNano uint64 `ch:"durationNano" json:"durationNano"`
HttpCode string `ch:"httpCode"`
HttpMethod string `ch:"httpMethod"`
GRPCode string `ch:"gRPCCode"`
GRPMethod string `ch:"gRPCMethod"`
StatusCode string `json:"statusCode"`
Method string `json:"method"`
ResponseStatusCode string `ch:"responseStatusCode"`
ResponseStatusCode string `ch:"responseStatusCode" json:"statusCode"`
RPCMethod string `ch:"rpcMethod"`
}
@@ -391,6 +395,11 @@ type DBResponseTotal struct {
NumTotal uint64 `ch:"numTotal"`
}
type DBResponseMinMax struct {
Min uint64 `ch:"min"`
Max uint64 `ch:"max"`
}
type SpanFiltersResponse struct {
ServiceName map[string]uint64 `json:"serviceName"`
Status map[string]uint64 `json:"status"`
@@ -556,3 +565,19 @@ type TagTelemetryData struct {
Env string `json:"env" ch:"env"`
Language string `json:"language" ch:"language"`
}
type ClusterInfo struct {
ShardNum uint32 `json:"shard_num" ch:"shard_num"`
ShardWeight uint32 `json:"shard_weight" ch:"shard_weight"`
ReplicaNum uint32 `json:"replica_num" ch:"replica_num"`
ErrorsCount uint32 `json:"errors_count" ch:"errors_count"`
SlowdownsCount uint32 `json:"slowdowns_count" ch:"slowdowns_count"`
EstimatedRecoveryTime uint32 `json:"estimated_recovery_time" ch:"estimated_recovery_time"`
}
func (ci *ClusterInfo) GetMapFromStruct() map[string]interface{} {
var clusterInfoMap map[string]interface{}
data, _ := json.Marshal(*ci)
json.Unmarshal(data, &clusterInfoMap)
return clusterInfoMap
}
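
A caveat for consumers of GetMapFromStruct: since the struct round-trips through json.Marshal and json.Unmarshal, the map keys are the json tags and every numeric field comes back as float64 rather than uint32. For example, ClusterInfo{ShardNum: 1} yields a map with "shard_num" mapped to float64(1) and the remaining fields as float64(0).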

Some files were not shown because too many files have changed in this diff.