Compare commits

97 Commits:

8fde9008b2 · b35bdf01cc · 9b654143bb · 4841f150f4 · 16a49a8b04 · 1fd819b806 · cab9e04cdd · e8f341b850 · 1f6fcb9b8c · 1c7202b5bf · 24ac062bf5 · b776bf5b09 · 144076e029 · 835251b342 · ebbad5812f · 7b86022280 · da1fd4b0cd · 57d28be9f5 · 126c9238ba · 31a3bc09c8 · 6ba5c0ecad · 27cd514fa5 · f0e13784e5 · 742ceac32c · 545d46c39c · d134e4f4d9 · e03b0aa45f · 46e131698e · d1ee15c372 · 1e035be978 · 88a97fc4b8 · 2e58f6db7a · 1916fc87b0 · d8882acdd7 · 7f42b39684 · b11f79b4c7 · c717e39a1a · c3253687d0 · 895c721b37 · 35f5fb6957 · 40ec4517c2 · 48a6f536fa · 13a6d7f7c6 · 8b6ed0f951 · eef48c54f8 · aad962d07d · 18bbb3cf36 · a3455fb553 · ece2988d0d · db704b212d · 4b13b0a8a4 · 6f6499c267 · 3dcb44a758 · 0595cdc7af · 092c02762f · d1d2829d2b · ac446294e7 · 1cceab4d5e · 02898d14f9 · 09af6c262c · faeaeb61a0 · 9c80ba6b78 · dbba8b5b55 · 58ce838023 · 5260b152f5 · f2dd254d83 · 82d53fa45c · c38d1c150d · 16170eacc0 · 66ddbfc085 · 2715ab61a4 · 4d291e92b9 · 1b73649f8e · 0abae1c09c · 4d02603aed · c58e43a678 · b77bbe1e4f · d4eb241c04 · 98e1a77a43 · 498b04491b · 4e58414cc2 · 67943cfec0 · f170eb1b23 · 6931b18382 · 8a9d6f664a · 8affe8df31 · 1c8626e933 · 87932de668 · 1b52edb056 · 5a81557df7 · 8bb3eefeb5 · a46f074e22 · 88fa3b7699 · 7f77bcca2b · ab5311caac · 8aae9f53a9 · 18d80d47e5
**.github/workflows/remove-label.yaml** (2 changes, vendored)
```diff
@@ -11,6 +11,6 @@ jobs:
       - name: Remove label
         uses: buildsville/add-remove-label@v1
         with:
-          label: ok-to-test
+          label: ok-to-test,testing-deploy
           type: remove
           token: ${{ secrets.GITHUB_TOKEN }}
```
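Judging by this change, the action's `label` input takes a comma-separated list, so one step can now strip both the `ok-to-test` and `testing-deploy` labels when a PR closes. The resulting step, shown whole rather than as a diff:

```yaml
# Resulting step after the change above; `label` is a comma-separated list.
- name: Remove label
  uses: buildsville/add-remove-label@v1
  with:
    label: ok-to-test,testing-deploy   # both labels removed in one call
    type: remove
    token: ${{ secrets.GITHUB_TOKEN }}
```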
**.github/workflows/staging-deployment.yaml** (new file, 38 lines, vendored)
```yaml
name: staging-deployment

# Trigger deployment only on push to develop branch
on:
  push:
    branches:
      - develop

jobs:
  deploy:
    name: Deploy latest develop branch to staging
    runs-on: ubuntu-latest
    environment: staging
    steps:
      - name: Executing remote ssh commands using ssh key
        uses: appleboy/ssh-action@v0.1.6
        env:
          GITHUB_BRANCH: develop
          GITHUB_SHA: ${{ github.sha }}
        with:
          host: ${{ secrets.HOST_DNS }}
          username: ${{ secrets.USERNAME }}
          key: ${{ secrets.EC2_SSH_KEY }}
          envs: GITHUB_BRANCH,GITHUB_SHA
          command_timeout: 60m
          script: |
            echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
            echo "GITHUB_SHA: ${GITHUB_SHA}"
            export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
            docker system prune --force
            cd ~/signoz
            git status
            git add .
            git stash push -m "stashed on $(date --iso-8601=seconds)"
            git fetch origin
            git checkout ${GITHUB_BRANCH}
            git pull
            make build-ee-query-service-amd64
            make build-frontend-amd64
            make run-signoz
```
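Two details of this workflow are worth calling out. `${GITHUB_SHA:0:7}` is plain Bash substring expansion, taking the first seven characters of the commit SHA as the Docker tag, and `envs: GITHUB_BRANCH,GITHUB_SHA` is what makes those variables visible inside the remote `script`. A minimal sketch of the same pattern, assuming a hypothetical `deploy.sh` on the remote host:

```yaml
# Minimal sketch: forwarding the short commit SHA to a remote build step.
# Same appleboy/ssh-action inputs as above; deploy.sh is hypothetical.
- name: Tag image with short SHA
  uses: appleboy/ssh-action@v0.1.6
  env:
    GITHUB_SHA: ${{ github.sha }}
  with:
    host: ${{ secrets.HOST_DNS }}
    username: ${{ secrets.USERNAME }}
    key: ${{ secrets.EC2_SSH_KEY }}
    envs: GITHUB_SHA                 # comma-separated env vars passed to the script
    script: |
      export DOCKER_TAG="${GITHUB_SHA:0:7}"   # bash substring: first 7 chars
      ./deploy.sh "${DOCKER_TAG}"
```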
**.github/workflows/testing-deployment.yaml** (new file, 39 lines, vendored)
```yaml
name: testing-deployment

# Trigger deployment only on testing-deploy label on pull request
on:
  pull_request:
    types: [labeled]

jobs:
  deploy:
    name: Deploy PR branch to testing
    runs-on: ubuntu-latest
    environment: testing
    if: ${{ github.event.label.name == 'testing-deploy' }}
    steps:
      - name: Executing remote ssh commands using ssh key
        uses: appleboy/ssh-action@v0.1.6
        env:
          GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
          GITHUB_SHA: ${{ github.sha }}
        with:
          host: ${{ secrets.HOST_DNS }}
          username: ${{ secrets.USERNAME }}
          key: ${{ secrets.EC2_SSH_KEY }}
          envs: GITHUB_BRANCH,GITHUB_SHA
          command_timeout: 60m
          script: |
            echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
            echo "GITHUB_SHA: ${GITHUB_SHA}"
            export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
            export DEV_BUILD="1"
            docker system prune --force
            cd ~/signoz
            git status
            git add .
            git stash push -m "stashed on $(date --iso-8601=seconds)"
            git fetch origin
            git checkout ${GITHUB_BRANCH}
            git pull
            make build-ee-query-service-amd64
            make build-frontend-amd64
            make run-signoz
```
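The trigger here is the label-gated PR pattern: `types: [labeled]` fires the workflow every time any label is added, and the job-level `if:` filters it down to the `testing-deploy` label. `github.head_ref || github.ref_name` resolves to the PR's source branch on pull-request events and falls back to the pushed ref otherwise. A minimal sketch of the same gate, with a hypothetical job body:

```yaml
# Minimal sketch of a label-gated PR job (pattern used above).
on:
  pull_request:
    types: [labeled]            # fires once per label added to the PR
jobs:
  deploy:
    if: ${{ github.event.label.name == 'testing-deploy' }}   # only this label
    runs-on: ubuntu-latest
    steps:
      - run: echo "branch=${{ github.head_ref || github.ref_name }}"
```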
````diff
@@ -215,9 +215,26 @@ Please ping us in the [`#contributing`](https://signoz-community.slack.com/archi
 
 # 4. Contribute to Backend (Query-Service) 🌑
 
-[**https://github.com/SigNoz/signoz/tree/develop/pkg/query-service**](https://github.com/SigNoz/signoz/tree/develop/pkg/query-service)
+**Need to Update: [https://github.com/SigNoz/signoz/tree/develop/pkg/query-service](https://github.com/SigNoz/signoz/tree/develop/pkg/query-service)**
 
-## 4.1 To run ClickHouse setup (recommended for local development)
+## 4.1 Prerequisites
+
+### 4.1.1 Install SQLite3
+
+- Run `sqlite3` command to check if you already have SQLite3 installed on your machine.
+
+- If not installed already, Install using below command
+  - on Linux
+    - on Debian / Ubuntu
+      ```
+      sudo apt install sqlite3
+      ```
+    - on CentOS / Fedora / RedHat
+      ```
+      sudo yum install sqlite3
+      ```
+
+## 4.2 To run ClickHouse setup (recommended for local development)
 
 - Clone the SigNoz repository and cd into signoz directory,
 ```
````
**Makefile** (25 changes)
```diff
@@ -45,7 +45,7 @@ build-frontend-amd64:
 	@echo "--> Building frontend docker image for amd64"
 	@echo "------------------"
 	@cd $(FRONTEND_DIRECTORY) && \
-	docker build --file Dockerfile --no-cache -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \
+	docker build --file Dockerfile -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \
 	--build-arg TARGETPLATFORM="linux/amd64" .
 
 # Step to build and push docker image of frontend(used in push pipeline)
@@ -54,7 +54,7 @@ build-push-frontend:
 	@echo "--> Building and pushing frontend docker image"
 	@echo "------------------"
 	@cd $(FRONTEND_DIRECTORY) && \
-	docker buildx build --file Dockerfile --progress plane --no-cache --push --platform linux/amd64 \
+	docker buildx build --file Dockerfile --progress plane --push --platform linux/amd64 \
 	--tag $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) .
 
 # Steps to build and push docker image of query service
@@ -65,7 +65,7 @@ build-query-service-amd64:
 	@echo "--> Building query-service docker image for amd64"
 	@echo "------------------"
 	@docker build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile \
-	--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+	-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
 	--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="$(LD_FLAGS)" .
 
 # Step to build and push docker image of query in amd64 and arm64 (used in push pipeline)
@@ -73,7 +73,7 @@ build-push-query-service:
 	@echo "------------------"
 	@echo "--> Building and pushing query-service docker image"
 	@echo "------------------"
-	@docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plane --no-cache \
+	@docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plane \
 	--push --platform linux/arm64,linux/amd64 --build-arg LD_FLAGS="$(LD_FLAGS)" \
 	--tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
 
@@ -84,11 +84,11 @@ build-ee-query-service-amd64:
 	@echo "------------------"
 	@if [ $(DEV_BUILD) != "" ]; then \
 		docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
-		--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+		-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
 		--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="${LD_FLAGS} ${DEV_LD_FLAGS}" .; \
 	else \
 		docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
-		--no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+		-t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
 		--build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="$(LD_FLAGS)" .; \
 	fi
 
@@ -98,7 +98,7 @@ build-push-ee-query-service:
 	@echo "--> Building and pushing query-service docker image"
 	@echo "------------------"
 	@docker buildx build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
-	--progress plane --no-cache --push --platform linux/arm64,linux/amd64 \
+	--progress plane --push --platform linux/arm64,linux/amd64 \
 	--build-arg LD_FLAGS="$(LD_FLAGS)" --tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
 
 dev-setup:
@@ -119,16 +119,19 @@ down-local:
 	$(STANDALONE_DIRECTORY)/docker-compose-core.yaml -f $(STANDALONE_DIRECTORY)/docker-compose-local.yaml \
 	down -v
 
-run-x86:
+pull-signoz:
+	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml pull
+
+run-signoz:
 	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml up --build -d
 
-down-x86:
+down-signoz:
 	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml down -v
 
 clear-standalone-data:
 	@docker run --rm -v "$(PWD)/$(STANDALONE_DIRECTORY)/data:/pwd" busybox \
-	sh -c "cd /pwd && rm -rf alertmanager/* clickhouse/* signoz/*"
+	sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*"
 
 clear-swarm-data:
 	@docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \
-	sh -c "cd /pwd && rm -rf alertmanager/* clickhouse/* signoz/*"
+	sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*"
```
**README.md** (31 changes)
```diff
@@ -25,17 +25,25 @@
 
 SigNoz helps developers monitor applications and troubleshoot problems in their deployed applications. SigNoz uses distributed tracing to gain visibility into your software stack.
 
 👉 Visualise Metrics, Traces and Logs in a single pane of glass
 
 👉 You can see metrics like p99 latency, error rates for your services, external API calls and individual end points.
 
 👉 You can find the root cause of the problem by going to the exact traces which are causing the problem and see detailed flamegraphs of individual request traces.
 
 👉 Run aggregates on trace data to get business relevant metrics
 
-![screenshot](…)
+👉 Filter and query logs, build dashboards and alerts based on attributes in logs
+
+![screenshot](…)
+<br />
+![screenshot](…)
+![screenshot](…)
+<br />
+![screenshot](…)
+![screenshot](…)
+<br />
+![screenshot](…)
 
 <br /><br />
@@ -51,12 +59,12 @@ Come say Hi to us on [Slack](https://signoz.io/slack) 👋
 
 ## Features:
 
+- Unified UI for metrics, traces and logs. No need to switch from Prometheus to Jaeger to debug issues, or use a logs tool like Elastic separate from your metrics and traces stack.
 - Application overview metrics like RPS, 50th/90th/99th Percentile latencies, and Error Rate
 - Slowest endpoints in your application
 - See exact request trace to figure out issues in downstream services, slow DB queries, call to 3rd party services like payment gateways, etc
 - Filter traces by service name, operation, latency, error, tags/annotations.
 - Run aggregates on trace data (events/spans) to get business relevant metrics. e.g. You can get error rate and 99th percentile latency of `customer_type: gold` or `deployment_version: v2` or `external_call: paypal`
-- Unified UI for metrics and traces. No need to switch from Prometheus to Jaeger to debug issues.
 
 <br /><br />
@@ -129,6 +137,21 @@ Moreover, SigNoz has few more advanced features wrt Jaeger:
 - Jaegar UI doesn’t show any metrics on traces or on filtered traces
 - Jaeger can’t get aggregates on filtered traces. For example, p99 latency of requests which have tag - customer_type='premium'. This can be done easily on SigNoz
 
 <p>&nbsp;&nbsp;</p>
 
+### SigNoz vs Elastic
+
+- SigNoz Logs management are based on ClickHouse, a columnar OLAP datastore which makes aggregate log analytics queries much more efficient
+- 50% lower resource requirement compared to Elastic during ingestion
+
+<p>&nbsp;&nbsp;</p>
+
+### SigNoz vs Loki
+
+- SigNoz supports aggregations on high-cardinality data over a huge volume while loki doesn’t.
+- SigNoz supports indexes over high cardinality data and has no limitations on the number of indexes, while Loki reaches max streams with a few indexes added to it.
+- Searching over a huge volume of data is difficult and slow in Loki compared to SigNoz
+
 <br /><br />
 
 <img align="left" src="https://signoz-public.s3.us-east-2.amazonaws.com/Contributors.svg" width="50px" />
```
**deploy/docker-swarm/clickhouse-setup/clickhouse-cluster.xml** (new file, 75 lines)
```xml
<?xml version="1.0"?>
<clickhouse>
    <!-- ZooKeeper is used to store metadata about replicas, when using Replicated tables.
         Optional. If you don't use replicated tables, you could omit that.

         See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/
      -->
    <zookeeper>
        <node index="1">
            <host>zookeeper-1</host>
            <port>2181</port>
        </node>
        <!-- <node index="2">
            <host>zookeeper-2</host>
            <port>2181</port>
        </node>
        <node index="3">
            <host>zookeeper-3</host>
            <port>2181</port>
        </node> -->
    </zookeeper>

    <!-- Configuration of clusters that could be used in Distributed tables.
         https://clickhouse.com/docs/en/operations/table_engines/distributed/
      -->
    <remote_servers>
        <cluster>
            <!-- Inter-server per-cluster secret for Distributed queries
                 default: no secret (no authentication will be performed)

                 If set, then Distributed queries will be validated on shards, so at least:
                 - such cluster should exist on the shard,
                 - such cluster should have the same secret.

                 And also (and which is more important), the initial_user will
                 be used as current user for the query.

                 Right now the protocol is pretty simple and it only takes into account:
                 - cluster name
                 - query

                 Also it will be nice if the following will be implemented:
                 - source hostname (see interserver_http_host), but then it will depends from DNS,
                   it can use IP address instead, but then the you need to get correct on the initiator node.
                 - target hostname / ip address (same notes as for source hostname)
                 - time-based security tokens
            -->
            <!-- <secret></secret> -->
            <shard>
                <!-- Optional. Whether to write data to just one of the replicas. Default: false (write data to all replicas). -->
                <!-- <internal_replication>false</internal_replication> -->
                <!-- Optional. Shard weight when writing data. Default: 1. -->
                <!-- <weight>1</weight> -->
                <replica>
                    <host>clickhouse</host>
                    <port>9000</port>
                    <!-- Optional. Priority of the replica for load_balancing. Default: 1 (less value has more priority). -->
                    <!-- <priority>1</priority> -->
                </replica>
            </shard>
            <!-- <shard>
                <replica>
                    <host>clickhouse-2</host>
                    <port>9000</port>
                </replica>
            </shard>
            <shard>
                <replica>
                    <host>clickhouse-3</host>
                    <port>9000</port>
                </replica>
            </shard> -->
        </cluster>
    </remote_servers>
</clickhouse>
```
```diff
@@ -236,8 +236,8 @@
     <openSSL>
         <server> <!-- Used for https server AND secure tcp port -->
             <!-- openssl req -subj "/CN=localhost" -new -newkey rsa:2048 -days 365 -nodes -x509 -keyout /etc/clickhouse-server/server.key -out /etc/clickhouse-server/server.crt -->
-            <certificateFile>/etc/clickhouse-server/server.crt</certificateFile>
-            <privateKeyFile>/etc/clickhouse-server/server.key</privateKeyFile>
+            <!-- <certificateFile>/etc/clickhouse-server/server.crt</certificateFile> -->
+            <!-- <privateKeyFile>/etc/clickhouse-server/server.key</privateKeyFile> -->
             <!-- dhparams are optional. You can delete the <dhParamsFile> element.
                  To generate dhparams, use the following command:
                   openssl dhparam -out /etc/clickhouse-server/dhparam.pem 4096
@@ -618,148 +618,6 @@
     </jdbc_bridge>
     -->
 
-    <!-- Configuration of clusters that could be used in Distributed tables.
-         https://clickhouse.com/docs/en/operations/table_engines/distributed/
-      -->
-    <remote_servers>
-        <!-- Test only shard config for testing distributed storage -->
-        <test_shard_localhost>
-            <!-- Inter-server per-cluster secret for Distributed queries
-                 default: no secret (no authentication will be performed)
-
-                 If set, then Distributed queries will be validated on shards, so at least:
-                 - such cluster should exist on the shard,
-                 - such cluster should have the same secret.
-
-                 And also (and which is more important), the initial_user will
-                 be used as current user for the query.
-
-                 Right now the protocol is pretty simple and it only takes into account:
-                 - cluster name
-                 - query
-
-                 Also it will be nice if the following will be implemented:
-                 - source hostname (see interserver_http_host), but then it will depends from DNS,
-                   it can use IP address instead, but then the you need to get correct on the initiator node.
-                 - target hostname / ip address (same notes as for source hostname)
-                 - time-based security tokens
-            -->
-            <!-- <secret></secret> -->
-
-            <shard>
-                <!-- Optional. Whether to write data to just one of the replicas. Default: false (write data to all replicas). -->
-                <!-- <internal_replication>false</internal_replication> -->
-                <!-- Optional. Shard weight when writing data. Default: 1. -->
-                <!-- <weight>1</weight> -->
-                <replica>
-                    <host>localhost</host>
-                    <port>9000</port>
-                    <!-- Optional. Priority of the replica for load_balancing. Default: 1 (less value has more priority). -->
-                    <!-- <priority>1</priority> -->
-                </replica>
-            </shard>
-        </test_shard_localhost>
-        <test_cluster_one_shard_three_replicas_localhost>
-            <shard>
-                <internal_replication>false</internal_replication>
-                <replica>
-                    <host>127.0.0.1</host>
-                    <port>9000</port>
-                </replica>
-                <replica>
-                    <host>127.0.0.2</host>
-                    <port>9000</port>
-                </replica>
-                <replica>
-                    <host>127.0.0.3</host>
-                    <port>9000</port>
-                </replica>
-            </shard>
-            <!--shard>
-                <internal_replication>false</internal_replication>
-                <replica>
-                    <host>127.0.0.1</host>
-                    <port>9000</port>
-                </replica>
-                <replica>
-                    <host>127.0.0.2</host>
-                    <port>9000</port>
-                </replica>
-                <replica>
-                    <host>127.0.0.3</host>
-                    <port>9000</port>
-                </replica>
-            </shard-->
-        </test_cluster_one_shard_three_replicas_localhost>
-        <test_cluster_two_shards_localhost>
-            <shard>
-                <replica>
-                    <host>localhost</host>
-                    <port>9000</port>
-                </replica>
-            </shard>
-            <shard>
-                <replica>
-                    <host>localhost</host>
-                    <port>9000</port>
-                </replica>
-            </shard>
-        </test_cluster_two_shards_localhost>
-        <test_cluster_two_shards>
-            <shard>
-                <replica>
-                    <host>127.0.0.1</host>
-                    <port>9000</port>
-                </replica>
-            </shard>
-            <shard>
-                <replica>
-                    <host>127.0.0.2</host>
-                    <port>9000</port>
-                </replica>
-            </shard>
-        </test_cluster_two_shards>
-        <test_cluster_two_shards_internal_replication>
-            <shard>
-                <internal_replication>true</internal_replication>
-                <replica>
-                    <host>127.0.0.1</host>
-                    <port>9000</port>
-                </replica>
-            </shard>
-            <shard>
-                <internal_replication>true</internal_replication>
-                <replica>
-                    <host>127.0.0.2</host>
-                    <port>9000</port>
-                </replica>
-            </shard>
-        </test_cluster_two_shards_internal_replication>
-        <test_shard_localhost_secure>
-            <shard>
-                <replica>
-                    <host>localhost</host>
-                    <port>9440</port>
-                    <secure>1</secure>
-                </replica>
-            </shard>
-        </test_shard_localhost_secure>
-        <test_unavailable_shard>
-            <shard>
-                <replica>
-                    <host>localhost</host>
-                    <port>9000</port>
-                </replica>
-            </shard>
-            <shard>
-                <replica>
-                    <host>localhost</host>
-                    <port>1</port>
-                </replica>
-            </shard>
-        </test_unavailable_shard>
-    </remote_servers>
 
     <!-- The list of hosts allowed to use in URL-related storage engines and table functions.
          If this section is not present in configuration, all hosts are allowed.
       -->
@@ -786,29 +644,6 @@
          Values for substitutions are specified in /clickhouse/name_of_substitution elements in that file.
       -->
 
-    <!-- ZooKeeper is used to store metadata about replicas, when using Replicated tables.
-         Optional. If you don't use replicated tables, you could omit that.
-
-         See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/
-      -->
-
-    <!--
-    <zookeeper>
-        <node>
-            <host>example1</host>
-            <port>2181</port>
-        </node>
-        <node>
-            <host>example2</host>
-            <port>2181</port>
-        </node>
-        <node>
-            <host>example3</host>
-            <port>2181</port>
-        </node>
-    </zookeeper>
-    -->
-
     <!-- Substitutions for parameters of replicated tables.
          Optional. If you don't use replicated tables, you could omit that.
```
```diff
@@ -1,30 +1,127 @@
 version: "3.9"
 
+x-clickhouse-defaults: &clickhouse-defaults
+  image: clickhouse/clickhouse-server:22.8.8-alpine
+  tty: true
+  deploy:
+    restart_policy:
+      condition: on-failure
+  depends_on:
+    - zookeeper-1
+    # - zookeeper-2
+    # - zookeeper-3
+  logging:
+    options:
+      max-size: 50m
+      max-file: "3"
+  healthcheck:
+    # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
+    test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
+    interval: 30s
+    timeout: 5s
+    retries: 3
+  ulimits:
+    nproc: 65535
+    nofile:
+      soft: 262144
+      hard: 262144
+
+x-clickhouse-depend: &clickhouse-depend
+  depends_on:
+    - clickhouse
+    # - clickhouse-2
+    # - clickhouse-3
+
 services:
+  zookeeper-1:
+    image: bitnami/zookeeper:3.7.0
+    hostname: zookeeper-1
+    user: root
+    ports:
+      - "2181:2181"
+      - "2888:2888"
+      - "3888:3888"
+    volumes:
+      - ./data/zookeeper-1:/bitnami/zookeeper
+    environment:
+      - ZOO_SERVER_ID=1
+      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
+      - ALLOW_ANONYMOUS_LOGIN=yes
+      - ZOO_AUTOPURGE_INTERVAL=1
+
+  # zookeeper-2:
+  #   image: bitnami/zookeeper:3.7.0
+  #   hostname: zookeeper-2
+  #   user: root
+  #   ports:
+  #     - "2182:2181"
+  #     - "2889:2888"
+  #     - "3889:3888"
+  #   volumes:
+  #     - ./data/zookeeper-2:/bitnami/zookeeper
+  #   environment:
+  #     - ZOO_SERVER_ID=2
+  #     - ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888
+  #     - ALLOW_ANONYMOUS_LOGIN=yes
+  #     - ZOO_AUTOPURGE_INTERVAL=1
+
+  # zookeeper-3:
+  #   image: bitnami/zookeeper:3.7.0
+  #   hostname: zookeeper-3
+  #   user: root
+  #   ports:
+  #     - "2183:2181"
+  #     - "2890:2888"
+  #     - "3890:3888"
+  #   volumes:
+  #     - ./data/zookeeper-3:/bitnami/zookeeper
+  #   environment:
+  #     - ZOO_SERVER_ID=3
+  #     - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
+  #     - ALLOW_ANONYMOUS_LOGIN=yes
+  #     - ZOO_AUTOPURGE_INTERVAL=1
+
   clickhouse:
-    image: clickhouse/clickhouse-server:22.8.8-alpine
+    <<: *clickhouse-defaults
+    hostname: clickhouse
     # ports:
     #   - "9000:9000"
     #   - "8123:8123"
-    tty: true
+    #   - "9181:9181"
     volumes:
       - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
       - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+      - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
       # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
       - ./data/clickhouse/:/var/lib/clickhouse/
-    deploy:
-      restart_policy:
-        condition: on-failure
-    logging:
-      options:
-        max-size: 50m
-        max-file: "3"
-    healthcheck:
-      # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
-      test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
-      interval: 30s
-      timeout: 5s
-      retries: 3
+
+  # clickhouse-2:
+  #   <<: *clickhouse-defaults
+  #   hostname: clickhouse-2
+  #   ports:
+  #     - "9001:9000"
+  #     - "8124:8123"
+  #     - "9182:9181"
+  #   volumes:
+  #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
+  #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
+  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
+  #     - ./data/clickhouse-2/:/var/lib/clickhouse/
+
+  # clickhouse-3:
+  #   <<: *clickhouse-defaults
+  #   hostname: clickhouse-3
+  #   ports:
+  #     - "9002:9000"
+  #     - "8125:8123"
+  #     - "9183:9181"
+  #   volumes:
+  #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
+  #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
+  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
+  #     - ./data/clickhouse-3/:/var/lib/clickhouse/
 
   alertmanager:
     image: signoz/alertmanager:0.23.0-0.2
@@ -40,7 +137,7 @@ services:
         condition: on-failure
 
   query-service:
-    image: signoz/query-service:0.11.4
+    image: signoz/query-service:0.13.1
     command: ["-config=/root/config/prometheus.yml"]
     # ports:
    #   - "6060:6060"     # pprof port
@@ -66,11 +163,10 @@ services:
     deploy:
       restart_policy:
         condition: on-failure
-    depends_on:
-      - clickhouse
+    <<: *clickhouse-depend
 
   frontend:
-    image: signoz/frontend:0.11.4
+    image: signoz/frontend:0.13.1
     deploy:
       restart_policy:
         condition: on-failure
@@ -83,7 +179,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
 
   otel-collector:
-    image: signoz/signoz-otel-collector:0.63.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-config.yaml"]
     user: root # required for reading docker container logs
     volumes:
@@ -91,6 +187,7 @@ services:
       - /var/lib/docker/containers:/var/lib/docker/containers:ro
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}},dockerswarm.service.name={{.Service.Name}},dockerswarm.task.name={{.Task.Name}}
+      - DOCKER_MULTI_NODE_CLUSTER=false
     ports:
       # - "1777:1777"     # pprof extension
       - "4317:4317"       # OTLP gRPC receiver
@@ -107,11 +204,10 @@ services:
       mode: global
       restart_policy:
         condition: on-failure
-    depends_on:
-      - clickhouse
+    <<: *clickhouse-depend
 
   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.63.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
      - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
@@ -123,8 +219,7 @@ services:
     deploy:
       restart_policy:
         condition: on-failure
-    depends_on:
-      - clickhouse
+    <<: *clickhouse-depend
 
   hotrod:
     image: jaegertracing/example-hotrod:1.30
```
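The `x-clickhouse-defaults: &clickhouse-defaults` / `<<: *clickhouse-depend` pattern above is plain YAML rather than anything Compose-specific: top-level keys starting with `x-` are ignored by Compose, `&name` defines an anchor on a mapping, and `<<: *name` merges the anchored mapping into the current one. A minimal sketch of the same idea, with hypothetical service names:

```yaml
# Minimal sketch of Compose extension fields plus YAML anchors/merge keys.
# `x-` keys are ignored by Compose; `&defaults` names the mapping, and
# `<<: *defaults` splices it into each service. Service names are hypothetical.
x-service-defaults: &defaults
  restart: on-failure
  logging:
    options:
      max-size: 50m

services:
  app-a:
    <<: *defaults              # inherits restart + logging
    image: example/app-a:1.0
  app-b:
    <<: *defaults
    image: example/app-b:1.0
    restart: always            # a local key overrides the merged default
```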
```diff
@@ -64,7 +64,9 @@ receivers:
       - job_name: otel-collector
         static_configs:
         - targets:
-          - localhost:8888
+            - localhost:8888
+          labels:
+            job_name: otel-collector
 
 processors:
   batch:
@@ -78,7 +80,7 @@ processors:
   signozspanmetrics/prometheus:
     metrics_exporter: prometheus
     latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
-    dimensions_cache_size: 10000
+    dimensions_cache_size: 100000
     dimensions:
       - name: service.namespace
        default: default
@@ -103,15 +105,19 @@ processors:
 exporters:
   clickhousetraces:
     datasource: tcp://clickhouse:9000/?database=signoz_traces
+    docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
   clickhousemetricswrite:
     endpoint: tcp://clickhouse:9000/?database=signoz_metrics
     resource_to_telemetry_conversion:
       enabled: true
+  clickhousemetricswrite/prometheus:
+    endpoint: tcp://clickhouse:9000/?database=signoz_metrics
   prometheus:
     endpoint: 0.0.0.0:8889
   # logging: {}
   clickhouselogsexporter:
     dsn: tcp://clickhouse:9000/
+    docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
     timeout: 5s
     sending_queue:
       queue_size: 100
@@ -144,9 +150,13 @@ service:
       processors: [batch]
       exporters: [clickhousemetricswrite]
     metrics/generic:
-      receivers: [hostmetrics, prometheus]
+      receivers: [hostmetrics]
       processors: [resourcedetection, batch]
       exporters: [clickhousemetricswrite]
+    metrics/prometheus:
+      receivers: [prometheus]
+      processors: [batch]
+      exporters: [clickhousemetricswrite/prometheus]
     metrics/spanmetrics:
       receivers: [otlp/spanmetrics]
       exporters: [prometheus]
```
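The pipeline change here splits Prometheus-scraped metrics out of the generic host-metrics path. `clickhousemetricswrite/prometheus` uses the collector's `type/name` convention for a second instance of the same exporter, and, unlike the base `clickhousemetricswrite`, it is configured without `resource_to_telemetry_conversion`, so scraped series are written without resource attributes expanded into labels. A minimal sketch of the split, using the names from the diff:

```yaml
# Minimal sketch: two instances of one exporter type, one per pipeline.
exporters:
  clickhousemetricswrite:              # base instance: host metrics
    endpoint: tcp://clickhouse:9000/?database=signoz_metrics
    resource_to_telemetry_conversion:
      enabled: true                    # resource attrs become metric labels
  clickhousemetricswrite/prometheus:   # "type/name" second instance
    endpoint: tcp://clickhouse:9000/?database=signoz_metrics

service:
  pipelines:
    metrics/generic:
      receivers: [hostmetrics]
      exporters: [clickhousemetricswrite]
    metrics/prometheus:                # scraped metrics bypass the conversion
      receivers: [prometheus]
      exporters: [clickhousemetricswrite/prometheus]
```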
```diff
@@ -7,7 +7,9 @@ receivers:
           scrape_interval: 60s
           static_configs:
           - targets:
-            - localhost:8888
+              - localhost:8888
+            labels:
+              job_name: otel-collector-metrics
       # SigNoz span metrics
       - job_name: signozspanmetrics-collector
         scrape_interval: 60s
```
```diff
@@ -30,6 +30,8 @@ server {
 
     location /api {
         proxy_pass http://query-service:8080/api;
+        # connection will be closed if no data is read for 600s between successive read operations
+        proxy_read_timeout 600s;
     }
 
     # redirect server error pages to the static page /50x.html
```
**deploy/docker/clickhouse-setup/clickhouse-cluster.xml** (new file, 75 lines)
Identical to `deploy/docker-swarm/clickhouse-setup/clickhouse-cluster.xml` above.
The docker ClickHouse server config receives the same three hunks as the swarm config above: the TLS certificate paths are commented out (`@@ -236,8 +236,8`), the sample `remote_servers` test-cluster block is removed (`@@ -618,148 +618,6`), and the commented ZooKeeper example block is removed (`@@ -786,29 +644,6`).
```diff
@@ -41,7 +41,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: otel-collector
-    image: signoz/signoz-otel-collector:0.63.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-config.yaml"]
     # user: root # required for reading docker container logs
     volumes:
@@ -67,7 +67,7 @@ services:
 
   otel-collector-metrics:
     container_name: otel-collector-metrics
-    image: signoz/signoz-otel-collector:0.63.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
```
```diff
@@ -1,31 +1,138 @@
 version: "2.4"
 
+x-clickhouse-defaults: &clickhouse-defaults
+  restart: on-failure
+  image: clickhouse/clickhouse-server:22.8.8-alpine
+  tty: true
+  depends_on:
+    - zookeeper-1
+    # - zookeeper-2
+    # - zookeeper-3
+  logging:
+    options:
+      max-size: 50m
+      max-file: "3"
+  healthcheck:
+    # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
+    test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
+    interval: 30s
+    timeout: 5s
+    retries: 3
+  ulimits:
+    nproc: 65535
+    nofile:
+      soft: 262144
+      hard: 262144
+
+x-clickhouse-depend: &clickhouse-depend
+  depends_on:
+    clickhouse:
+      condition: service_healthy
+    # clickhouse-2:
+    #   condition: service_healthy
+    # clickhouse-3:
+    #   condition: service_healthy
+
 services:
+
+  zookeeper-1:
+    image: bitnami/zookeeper:3.7.0
+    container_name: zookeeper-1
+    hostname: zookeeper-1
+    user: root
+    ports:
+      - "2181:2181"
+      - "2888:2888"
+      - "3888:3888"
+    volumes:
+      - ./data/zookeeper-1:/bitnami/zookeeper
+    environment:
+      - ZOO_SERVER_ID=1
+      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
+      - ALLOW_ANONYMOUS_LOGIN=yes
+      - ZOO_AUTOPURGE_INTERVAL=1
+
+  # zookeeper-2:
+  #   image: bitnami/zookeeper:3.7.0
+  #   container_name: zookeeper-2
+  #   hostname: zookeeper-2
+  #   user: root
+  #   ports:
+  #     - "2182:2181"
+  #     - "2889:2888"
+  #     - "3889:3888"
+  #   volumes:
+  #     - ./data/zookeeper-2:/bitnami/zookeeper
+  #   environment:
+  #     - ZOO_SERVER_ID=2
+  #     - ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888
+  #     - ALLOW_ANONYMOUS_LOGIN=yes
+  #     - ZOO_AUTOPURGE_INTERVAL=1
+
+  # zookeeper-3:
+  #   image: bitnami/zookeeper:3.7.0
+  #   container_name: zookeeper-3
+  #   hostname: zookeeper-3
+  #   user: root
+  #   ports:
+  #     - "2183:2181"
+  #     - "2890:2888"
+  #     - "3890:3888"
+  #   volumes:
+  #     - ./data/zookeeper-3:/bitnami/zookeeper
+  #   environment:
+  #     - ZOO_SERVER_ID=3
+  #     - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
+  #     - ALLOW_ANONYMOUS_LOGIN=yes
+  #     - ZOO_AUTOPURGE_INTERVAL=1
+
   clickhouse:
-    image: clickhouse/clickhouse-server:22.8.8-alpine
-    # ports:
-    #   - "9000:9000"
-    #   - "8123:8123"
-    tty: true
+    <<: *clickhouse-defaults
+    container_name: clickhouse
+    hostname: clickhouse
+    ports:
+      - "9000:9000"
+      - "8123:8123"
+      - "9181:9181"
     volumes:
       - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
       - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+      - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
       # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
       - ./data/clickhouse/:/var/lib/clickhouse/
-    restart: on-failure
-    logging:
-      options:
-        max-size: 50m
-        max-file: "3"
-    healthcheck:
-      # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
-      test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
-      interval: 30s
-      timeout: 5s
-      retries: 3
+
+  # clickhouse-2:
+  #   <<: *clickhouse-defaults
+  #   container_name: clickhouse-2
+  #   hostname: clickhouse-2
+  #   ports:
+  #     - "9001:9000"
+  #     - "8124:8123"
+  #     - "9182:9181"
+  #   volumes:
+  #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
+  #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
+  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
+  #     - ./data/clickhouse-2/:/var/lib/clickhouse/
+
+  # clickhouse-3:
+  #   <<: *clickhouse-defaults
+  #   container_name: clickhouse-3
+  #   hostname: clickhouse-3
+  #   ports:
+  #     - "9002:9000"
+  #     - "8125:8123"
+  #     - "9183:9181"
+  #   volumes:
+  #     - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
+  #     - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
+  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
+  #     - ./data/clickhouse-3/:/var/lib/clickhouse/
 
   alertmanager:
-    image: signoz/alertmanager:0.23.0-0.2
+    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.0-0.2}
     volumes:
       - ./data/alertmanager:/data
     depends_on:
@@ -39,7 +146,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
 
   query-service:
-    image: signoz/query-service:0.11.4
+    image: signoz/query-service:${DOCKER_TAG:-0.13.1}
     container_name: query-service
     command: ["-config=/root/config/prometheus.yml"]
     # ports:
@@ -64,12 +171,10 @@ services:
       interval: 30s
       timeout: 5s
       retries: 3
-    depends_on:
-      clickhouse:
-        condition: service_healthy
+    <<: *clickhouse-depend
 
   frontend:
-    image: signoz/frontend:0.11.4
+    image: signoz/frontend:${DOCKER_TAG:-0.13.1}
     container_name: frontend
     restart: on-failure
     depends_on:
@@ -81,7 +186,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
 
   otel-collector:
-    image: signoz/signoz-otel-collector:0.63.0
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.66.1}
     command: ["--config=/etc/otel-collector-config.yaml"]
     user: root # required for reading docker container logs
     volumes:
@@ -89,6 +194,7 @@ services:
       - /var/lib/docker/containers:/var/lib/docker/containers:ro
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
+      - DOCKER_MULTI_NODE_CLUSTER=false
     ports:
       # - "1777:1777" # pprof extension
       - "4317:4317" # OTLP gRPC receiver
@@ -102,12 +208,10 @@ services:
       # - "55678:55678" # OpenCensus receiver
       # - "55679:55679" # zPages extension
     restart: on-failure
-    depends_on:
-      clickhouse:
-        condition: service_healthy
+    <<: *clickhouse-depend
 
   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.63.0
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.66.1}
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
@@ -117,9 +221,7 @@ services:
       # - "13133:13133" # Health check extension
       # - "55679:55679" # zPages extension
     restart: on-failure
-    depends_on:
-      clickhouse:
-        condition: service_healthy
+    <<: *clickhouse-depend
 
   hotrod:
     image: jaegertracing/example-hotrod:1.30
```
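Two Compose idioms carry most of this diff. Image tags like `${DOCKER_TAG:-0.13.1}` use shell-style defaulting, so the deploy workflows above can override the tag from the environment while a plain `docker-compose up` still gets the pinned release. And because this file uses the 2.4 format, `depends_on` can gate startup on a passing healthcheck rather than on mere container creation. A minimal sketch with hypothetical service and image names:

```yaml
# Minimal sketch: env-overridable image tags and health-gated startup
# (Compose 2.4 file format; service/image names are hypothetical).
services:
  db:
    image: example/db:1.0
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
      interval: 30s
  app:
    image: example/app:${DOCKER_TAG:-0.13.1}   # default used when DOCKER_TAG is unset
    depends_on:
      db:
        condition: service_healthy             # wait until the healthcheck passes
```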
The docker collector config receives the same four changes as the swarm collector config above — scrape-target labels, the larger `dimensions_cache_size`, the `docker_multi_node_cluster` exporter settings plus `clickhousemetricswrite/prometheus`, and the `metrics/generic` / `metrics/prometheus` pipeline split — at offsets `@@ -64,7 +64,10`, `@@ -74,7 +77,7`, `@@ -111,16 +114,20`, and `@@ -148,9 +155,13`.
The docker collector-metrics config receives the same scrape-target `labels` change as the swarm file above, at offset `@@ -11,7 +11,9`.
The docker deployment's nginx config receives the same `location /api` change as the swarm file above (`proxy_read_timeout 600s;` added, hunk `@@ -30,6 +30,8`).
```diff
@@ -93,6 +93,10 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router) {
 		baseapp.OpenAccess(ah.receiveSAML)).
 		Methods(http.MethodPost)
 
+	router.HandleFunc("/api/v1/complete/google",
+		baseapp.OpenAccess(ah.receiveGoogleAuth)).
+		Methods(http.MethodGet)
+
 	router.HandleFunc("/api/v1/orgs/{orgId}/domains",
 		baseapp.AdminAccess(ah.listDomainsByOrg)).
 		Methods(http.MethodGet)
```
@@ -8,9 +8,6 @@ import (
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/mux"
|
||||
"go.signoz.io/signoz/ee/query-service/constants"
|
||||
"go.signoz.io/signoz/ee/query-service/model"
|
||||
@@ -184,114 +181,152 @@ func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
ah.Respond(w, resp)
}

func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
// this is the source url that initiated the login request
func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
ssoError := []byte("Login failed. Please contact your system administrator")
dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
base64.StdEncoding.Encode(dst, ssoError)

http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther)
}

// receiveGoogleAuth completes google OAuth response and forwards a request
// to front-end to sign user in
func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) {
redirectUri := constants.GetDefaultSiteURL()
ctx := context.Background()

var apierr basemodel.BaseApiError

redirectOnError := func() {
ssoError := []byte("Login failed. Please contact your system administrator")
dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
base64.StdEncoding.Encode(dst, ssoError)

http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, string(dst)), http.StatusMovedPermanently)
}

if !ah.CheckFeature(model.SSO) {
zap.S().Errorf("[ReceiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
zap.S().Errorf("[receiveGoogleAuth] sso requested but feature unavailable %s in org domain %s", model.SSO)
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}

err := r.ParseForm()
if err != nil {
zap.S().Errorf("[ReceiveSAML] failed to process response - invalid response from IDP", err, r)
redirectOnError()
q := r.URL.Query()
if errType := q.Get("error"); errType != "" {
zap.S().Errorf("[receiveGoogleAuth] failed to login with google auth", q.Get("error_description"))
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
return
}

// the relay state is sent when a login request is submitted to
// Idp.
relayState := r.FormValue("RelayState")
zap.S().Debug("[ReceiveML] relay state", zap.String("relayState", relayState))
relayState := q.Get("state")
zap.S().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))

parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.S().Errorf("[ReceiveSAML] failed to process response - invalid response from IDP", err, r)
redirectOnError()
zap.S().Errorf("[receiveGoogleAuth] failed to process response - invalid response from IDP", err, r)
handleSsoError(w, r, redirectUri)
return
}

// upgrade redirect url from the relay state for better accuracy
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")

// derive domain id from relay state now
var domainIdStr string
for k, v := range parsedState.Query() {
if k == "domainId" && len(v) > 0 {
domainIdStr = strings.Replace(v[0], ":", "-", -1)
}
}

domainId, err := uuid.Parse(domainIdStr)
// fetch domain by parsing relay state.
domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
if err != nil {
zap.S().Errorf("[ReceiveSAML] failed to process request - failed to parse domain id from relay", zap.Error(err))
redirectOnError()
handleSsoError(w, r, redirectUri)
return
}

domain, apierr := ah.AppDao().GetDomain(ctx, domainId)
if (apierr != nil) || domain == nil {
zap.S().Errorf("[ReceiveSAML] failed to process request - invalid domain", domainIdStr, zap.Error(apierr))
redirectOnError()
// now that we have domain, use domain to fetch sso settings.
// prepare google callback handler using parsedState -
// which contains redirect URL (front-end endpoint)
callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState)

identity, err := callbackHandler.HandleCallback(r)
if err != nil {
zap.S().Errorf("[receiveGoogleAuth] failed to process HandleCallback ", domain.String(), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}

nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email)
if err != nil {
zap.S().Errorf("[receiveGoogleAuth] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}

http.Redirect(w, r, nextPage, http.StatusSeeOther)
}



// receiveSAML completes a SAML request and gets user logged in
func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
// this is the source url that initiated the login request
redirectUri := constants.GetDefaultSiteURL()
ctx := context.Background()


if !ah.CheckFeature(model.SSO) {
zap.S().Errorf("[receiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}

err := r.ParseForm()
if err != nil {
zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
handleSsoError(w, r, redirectUri)
return
}

// the relay state is sent when a login request is submitted to
// Idp.
relayState := r.FormValue("RelayState")
zap.S().Debug("[receiveSAML] relay state", zap.String("relayState", relayState))

parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
handleSsoError(w, r, redirectUri)
return
}

// upgrade redirect url from the relay state for better accuracy
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")

// fetch domain by parsing relay state.
domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
if err != nil {
handleSsoError(w, r, redirectUri)
return
}

sp, err := domain.PrepareSamlRequest(parsedState)
if err != nil {
zap.S().Errorf("[ReceiveSAML] failed to prepare saml request for domain (%s): %v", domainId, err)
redirectOnError()
zap.S().Errorf("[receiveSAML] failed to prepare saml request for domain (%s): %v", domain.String(), err)
handleSsoError(w, r, redirectUri)
return
}

assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
if err != nil {
zap.S().Errorf("[ReceiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domainId, err)
redirectOnError()
zap.S().Errorf("[receiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domain.String(), err)
handleSsoError(w, r, redirectUri)
return
}

if assertionInfo.WarningInfo.InvalidTime {
zap.S().Errorf("[ReceiveSAML] expired saml response for organization (%s): %v", domainId, err)
redirectOnError()
zap.S().Errorf("[receiveSAML] expired saml response for organization (%s): %v", domain.String(), err)
handleSsoError(w, r, redirectUri)
return
}

email := assertionInfo.NameID

// user email found, now start preparing jwt response
userPayload, baseapierr := ah.AppDao().GetUserByEmail(ctx, email)
if baseapierr != nil {
zap.S().Errorf("[ReceiveSAML] failed to find or register a new user for email %s and org %s", email, domainId, zap.Error(baseapierr.Err))
redirectOnError()
if email == "" {
zap.S().Errorf("[receiveSAML] invalid email in the SSO response (%s)", domain.String())
handleSsoError(w, r, redirectUri)
return
}

tokenStore, err := baseauth.GenerateJWTForUser(&userPayload.User)
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email)
if err != nil {
zap.S().Errorf("[ReceiveSAML] failed to generate access token for email %s and org %s", email, domainId, zap.Error(err))
redirectOnError()
zap.S().Errorf("[receiveSAML] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}

userID := userPayload.User.Id
nextPage := fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
redirectUri,
tokenStore.AccessJwt,
userID,
tokenStore.RefreshJwt)

http.Redirect(w, r, nextPage, http.StatusMovedPermanently)

http.Redirect(w, r, nextPage, http.StatusSeeOther)
}
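
Both handlers surface failures to the frontend through a base64-encoded `ssoerror` query parameter. A minimal sketch (not part of the diff) of that round trip; the login URL is a placeholder, and the `QueryEscape` step is an assumption added here to keep base64 padding characters URL-safe:

```go
package main

import (
	"encoding/base64"
	"fmt"
	"net/url"
)

func main() {
	msg := []byte("Login failed. Please contact your system administrator")

	// backend side: encode the message and attach it to the redirect
	encoded := base64.StdEncoding.EncodeToString(msg)
	redirect := fmt.Sprintf("https://signoz.example.com/login?ssoerror=%s", url.QueryEscape(encoded))

	// frontend side: pull the parameter back out and decode it
	u, _ := url.Parse(redirect)
	decoded, _ := base64.StdEncoding.DecodeString(u.Query().Get("ssoerror"))
	fmt.Println(string(decoded))
}
```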
@@ -1,8 +1,11 @@
package app

import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net"
"net/http"
_ "net/http/pprof" // http profiler
@@ -20,6 +23,7 @@ import (
"go.signoz.io/signoz/ee/query-service/dao"
"go.signoz.io/signoz/ee/query-service/interfaces"
licensepkg "go.signoz.io/signoz/ee/query-service/license"
"go.signoz.io/signoz/ee/query-service/usage"

"go.signoz.io/signoz/pkg/query-service/app/dashboards"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
@@ -117,6 +121,16 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
return nil, err
}

// start the usagemanager
usageManager, err := usage.New("sqlite", localDB, lm.GetRepo(), reader.GetConn())
if err != nil {
return nil, err
}
err = usageManager.Start()
if err != nil {
return nil, err
}

telemetry.GetInstance().SetReader(reader)

apiOpts := api.APIHandlerOptions{
@@ -255,15 +269,82 @@ func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush()
}

func extractDashboardMetaData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFrom := "/api/v2/metrics/query_range"
var requestBody map[string]interface{}
data := map[string]interface{}{}

if path == pathToExtractBodyFrom && (r.Method == "POST") {
bodyBytes, _ := ioutil.ReadAll(r.Body)
r.Body.Close() // must close
r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))

json.Unmarshal(bodyBytes, &requestBody)

} else {
return nil, false
}

compositeMetricQuery, compositeMetricQueryExists := requestBody["compositeMetricQuery"]
compositeMetricQueryMap := compositeMetricQuery.(map[string]interface{})
signozMetricFound := false

if compositeMetricQueryExists {
signozMetricFound = telemetry.GetInstance().CheckSigNozMetrics(compositeMetricQueryMap)
queryType, queryTypeExists := compositeMetricQueryMap["queryType"]
if queryTypeExists {
data["queryType"] = queryType
}
panelType, panelTypeExists := compositeMetricQueryMap["panelType"]
if panelTypeExists {
data["panelType"] = panelType
}
}

datasource, datasourceExists := requestBody["dataSource"]
if datasourceExists {
data["datasource"] = datasource
}

if !signozMetricFound {
telemetry.GetInstance().AddActiveMetricsUser()
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_DASHBOARDS_METADATA, data, true)
}

return data, true
}

func getActiveLogs(path string, r *http.Request) {
// if path == "/api/v1/dashboards/{uuid}" {
// telemetry.GetInstance().AddActiveMetricsUser()
// }
if path == "/api/v1/logs" {
hasFilters := len(r.URL.Query().Get("q"))
if hasFilters > 0 {
telemetry.GetInstance().AddActiveLogsUser()
}

}

}

func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
route := mux.CurrentRoute(r)
path, _ := route.GetPathTemplate()

dashboardMetadata, metadataExists := extractDashboardMetaData(path, r)
getActiveLogs(path, r)

lrw := NewLoggingResponseWriter(w)
next.ServeHTTP(lrw, r)

data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if metadataExists {
for key, value := range dashboardMetadata {
data[key] = value
}
}

if _, ok := telemetry.IgnoredPaths()[path]; !ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)

@@ -2,7 +2,7 @@ package dao

import (
"context"

"net/url"
"github.com/google/uuid"
"github.com/jmoiron/sqlx"
"go.signoz.io/signoz/ee/query-service/model"
@@ -22,7 +22,9 @@ type ModelDao interface {
// auth methods
PrecheckLogin(ctx context.Context, email, sourceUrl string) (*model.PrecheckResponse, basemodel.BaseApiError)
CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)

PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr basemodel.BaseApiError)
GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*model.OrgDomain, error)

// org domain (auth domains) CRUD ops
ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError)
GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError)

@@ -10,9 +10,33 @@ import (
"go.signoz.io/signoz/ee/query-service/model"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
"go.uber.org/zap"
)

// PrepareSsoRedirect prepares redirect page link after SSO response
// is successfully parsed (i.e. valid email is available)
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr basemodel.BaseApiError) {

userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.S().Errorf("failed to get user with email received from auth provider", apierr.Error())
return "", model.BadRequestStr("invalid user email received from the auth provider")
}

tokenStore, err := baseauth.GenerateJWTForUser(&userPayload.User)
if err != nil {
zap.S().Errorf("failed to generate token for SSO login user", err)
return "", model.InternalErrorStr("failed to generate token for the user")
}

return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
redirectUri,
tokenStore.AccessJwt,
userPayload.User.Id,
tokenStore.RefreshJwt), nil
}
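
For reference, the query string produced above can be unpacked with net/url. A minimal sketch with hypothetical token and user-id values:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// shape produced by PrepareSsoRedirect; the values are placeholders
	redirect := "https://signoz.example.com/login?jwt=ACCESS&usr=42&refreshjwt=REFRESH"

	u, err := url.Parse(redirect)
	if err != nil {
		panic(err)
	}

	q := u.Query()
	fmt.Println(q.Get("jwt"))        // ACCESS  - short-lived access token
	fmt.Println(q.Get("usr"))        // 42      - user id
	fmt.Println(q.Get("refreshjwt")) // REFRESH - refresh token
}
```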

func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {

@@ -4,6 +4,7 @@ import (
"context"
"database/sql"
"encoding/json"
"net/url"
"fmt"
"strings"
"time"
@@ -25,6 +26,34 @@ type StoredDomain struct {
UpdatedAt int64 `db:"updated_at"`
}

// GetDomainFromSsoResponse uses relay state received from IdP to fetch
// user domain. The domain is then used to validate the response.
// when sending login request to IdP we send relay state as URL (site url)
// with domainId as query parameter.
func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*model.OrgDomain, error) {
// derive domain id from relay state now
var domainIdStr string
for k, v := range relayState.Query() {
if k == "domainId" && len(v) > 0 {
domainIdStr = strings.Replace(v[0], ":", "-", -1)
}
}

domainId, err := uuid.Parse(domainIdStr)
if err != nil {
zap.S().Errorf("failed to parse domain id from relay state", err)
return nil, fmt.Errorf("failed to parse domain id from IdP response")
}

domain, err := m.GetDomain(ctx, domainId)
if (err != nil) || domain == nil {
zap.S().Errorf("failed to find domain received in IdP response", err.Error())
return nil, fmt.Errorf("invalid credentials")
}

return domain, nil
}
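
Both ends of the relay state apply a ":" / "-" substitution on the domain UUID (BuildSsoUrl encodes, GetDomainFromSsoResponse decodes). A minimal sketch of that round trip, with an arbitrary UUID:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/google/uuid"
)

func main() {
	domainId := uuid.MustParse("7a1b0f8e-3c4d-4e5f-8a9b-0c1d2e3f4a5b")

	// BuildSsoUrl side: hyphens become colons before the id is
	// embedded in the relay-state URL.
	wire := strings.Replace(domainId.String(), "-", ":", -1)
	fmt.Println(wire) // 7a1b0f8e:3c4d:4e5f:8a9b:0c1d2e3f4a5b

	// GetDomainFromSsoResponse side: colons become hyphens again,
	// restoring a parseable UUID.
	back := strings.Replace(wire, ":", "-", -1)
	fmt.Println(uuid.MustParse(back) == domainId) // true
}
```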

// GetDomain returns org domain for a given domain id
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError) {

@@ -127,7 +127,7 @@ func NewPostRequestWithCtx(ctx context.Context, url string, contentType string,
}

// SendUsage reports the usage of signoz to license server
func SendUsage(ctx context.Context, usage *model.UsagePayload) *model.ApiError {
func SendUsage(ctx context.Context, usage model.UsagePayload) *model.ApiError {
reqString, _ := json.Marshal(usage)
req, err := NewPostRequestWithCtx(ctx, C.Prefix+"/usage", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {

@@ -9,8 +9,10 @@ import (
"github.com/google/uuid"
"github.com/pkg/errors"
saml2 "github.com/russellhaering/gosaml2"
"go.signoz.io/signoz/ee/query-service/saml"
"go.signoz.io/signoz/ee/query-service/sso/saml"
"go.signoz.io/signoz/ee/query-service/sso"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)

type SSOType string
@@ -20,12 +22,6 @@ const (
GoogleAuth SSOType = "GOOGLE_AUTH"
)

type SamlConfig struct {
SamlEntity string `json:"samlEntity"`
SamlIdp string `json:"samlIdp"`
SamlCert string `json:"samlCert"`
}

// OrgDomain identify org owned web domains for auth and other purposes
type OrgDomain struct {
Id uuid.UUID `json:"id"`
@@ -33,10 +29,17 @@ type OrgDomain struct {
OrgId string `json:"orgId"`
SsoEnabled bool `json:"ssoEnabled"`
SsoType SSOType `json:"ssoType"`

SamlConfig *SamlConfig `json:"samlConfig"`
GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"`

Org *basemodel.Organization
}

func (od *OrgDomain) String() string {
return fmt.Sprintf("[%s]%s-%s ", od.Name, od.Id.String(), od.SsoType)
}

// Valid is used as a pipeline function to check if org domain
// loaded from db is valid
func (od *OrgDomain) Valid(err error) error {
@@ -97,6 +100,16 @@ func (od *OrgDomain) GetSAMLCert() string {
return ""
}

// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
// requesting OAuth and also used in processing response from google
func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {
if od.GoogleAuthConfig == nil {
return nil, fmt.Errorf("Google auth is not setup correctly for this domain")
}

return od.GoogleAuthConfig.GetProvider(od.Name, siteUrl)
}

// PrepareSamlRequest creates a request according to gosaml2
func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServiceProvider, error) {

@@ -124,19 +137,48 @@ func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServicePro
}

func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {

sp, err := od.PrepareSamlRequest(siteUrl)
if err != nil {
return "", err
}


fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1)

// build redirect url from window.location sent by frontend
redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path)

// prepare state that gets relayed back when the auth provider
// calls back our url. here we pass the app url (where signoz runs)
// and the domain Id. The domain Id helps in identifying sso config
// when the call back occurs and the app url is useful in redirecting user
// back to the right path.
// why do we need to pass app url? the callback typically is handled by backend
// and sometimes backend might run at a different port or is unaware of the frontend
// endpoint (unless SITE_URL param is set). hence, we receive this build sso request
// along with frontend window.location and use it to relay the information through
// auth provider to the backend (HandleCallback or HandleSSO method).
relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId)


switch (od.SsoType) {
case SAML:

sp, err := od.PrepareSamlRequest(siteUrl)
if err != nil {
return "", err
}

return sp.BuildAuthURL(relayState)

case GoogleAuth:

googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl)
if err != nil {
return "", err
}
return googleProvider.BuildAuthURL(relayState)

default:
zap.S().Errorf("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
return "", fmt.Errorf("unsupported SSO config for the domain")
}

relayState := fmt.Sprintf("%s://%s%s?domainId=%s",
siteUrl.Scheme,
siteUrl.Host,
siteUrl.Path,
fmtDomainId)

return sp.BuildAuthURL(relayState)
}

@@ -1,6 +1,7 @@
package model

import (
"fmt"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)

@@ -44,6 +45,14 @@ func BadRequest(err error) *ApiError {
}
}

// BadRequestStr returns an ApiError object of bad request type for string input
func BadRequestStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorBadData,
Err: fmt.Errorf(s),
}
}

// InternalError returns an ApiError object of internal type
func InternalError(err error) *ApiError {
return &ApiError{
@@ -52,6 +61,14 @@ func InternalError(err error) *ApiError {
}
}


// InternalErrorStr returns an ApiError object of internal type for string input
func InternalErrorStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorInternal,
Err: fmt.Errorf(s),
}
}
var (
ErrorNone basemodel.ErrorType = ""
ErrorTimeout basemodel.ErrorType = "timeout"

68
ee/query-service/model/sso.go
Normal file
@@ -0,0 +1,68 @@
package model

import (
"fmt"
"context"
"net/url"
"golang.org/x/oauth2"
"github.com/coreos/go-oidc/v3/oidc"
"go.signoz.io/signoz/ee/query-service/sso"
)

// SamlConfig contains SAML params to generate and respond to the requests
// from SAML provider
type SamlConfig struct {
SamlEntity string `json:"samlEntity"`
SamlIdp string `json:"samlIdp"`
SamlCert string `json:"samlCert"`
}

// GoogleOAuthConfig contains a generic config to support oauth
type GoogleOAuthConfig struct {
ClientID string `json:"clientId"`
ClientSecret string `json:"clientSecret"`
RedirectURI string `json:"redirectURI"`
}


const (
googleIssuerURL = "https://accounts.google.com"
)

func (g *GoogleOAuthConfig) GetProvider(domain string, siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {

ctx, cancel := context.WithCancel(context.Background())

provider, err := oidc.NewProvider(ctx, googleIssuerURL)
if err != nil {
cancel()
return nil, fmt.Errorf("failed to get provider: %v", err)
}

// default to the email scope as we just use google auth
// to verify identity and start a session.
scopes := []string{"email"}

// this is the url google will call after login completion
redirectURL := fmt.Sprintf("%s://%s/%s",
siteUrl.Scheme,
siteUrl.Host,
"api/v1/complete/google")

return &sso.GoogleOAuthProvider{
RedirectURI: g.RedirectURI,
OAuth2Config: &oauth2.Config{
ClientID: g.ClientID,
ClientSecret: g.ClientSecret,
Endpoint: provider.Endpoint(),
Scopes: scopes,
RedirectURL: redirectURL,
},
Verifier: provider.Verifier(
&oidc.Config{ClientID: g.ClientID},
),
Cancel: cancel,
HostedDomain: domain,
}, nil
}
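
A minimal usage sketch of this config: the client id/secret and site URL below are placeholders, and the state string stands in for the real relay state:

```go
package main

import (
	"fmt"
	"net/url"

	"go.signoz.io/signoz/ee/query-service/model"
)

func main() {
	cfg := &model.GoogleOAuthConfig{
		ClientID:     "client-id.apps.googleusercontent.com", // placeholder
		ClientSecret: "client-secret",                        // placeholder
	}

	siteUrl, _ := url.Parse("https://signoz.example.com/login")

	// provider wraps the oauth2 config plus an OIDC token verifier
	provider, err := cfg.GetProvider("example.com", siteUrl)
	if err != nil {
		panic(err)
	}

	// the state would normally carry the relay-state URL with domainId
	authURL, _ := provider.BuildAuthURL("https://signoz.example.com/login?domainId=placeholder")
	fmt.Println(authURL)
}
```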
@@ -6,30 +6,27 @@ import (
"github.com/google/uuid"
)

type UsageSnapshot struct {
CurrentLogSizeBytes uint64 `json:"currentLogSizeBytes"`
CurrentLogSizeBytesColdStorage uint64 `json:"currentLogSizeBytesColdStorage"`
CurrentSpansCount uint64 `json:"currentSpansCount"`
CurrentSpansCountColdStorage uint64 `json:"currentSpansCountColdStorage"`
CurrentSamplesCount uint64 `json:"currentSamplesCount"`
CurrentSamplesCountColdStorage uint64 `json:"currentSamplesCountColdStorage"`
}

type UsageBase struct {
Id uuid.UUID `json:"id" db:"id"`
InstallationId uuid.UUID `json:"installationId" db:"installation_id"`
ActivationId uuid.UUID `json:"activationId" db:"activation_id"`
CreatedAt time.Time `json:"createdAt" db:"created_at"`
FailedSyncRequest int `json:"failedSyncRequest" db:"failed_sync_request_count"`
}

type UsagePayload struct {
UsageBase
Metrics UsageSnapshot `json:"metrics"`
SnapshotDate time.Time `json:"snapshotDate"`
InstallationId uuid.UUID `json:"installationId"`
LicenseKey uuid.UUID `json:"licenseKey"`
Usage []Usage `json:"usage"`
}

type Usage struct {
UsageBase
Snapshot string `db:"snapshot"`
CollectorID string `json:"collectorId"`
ExporterID string `json:"exporterId"`
Type string `json:"type"`
Tenant string `json:"tenant"`
TimeStamp time.Time `json:"timestamp"`
Count int64 `json:"count"`
Size int64 `json:"size"`
}

type UsageDB struct {
CollectorID string `ch:"collector_id" json:"collectorId"`
ExporterID string `ch:"exporter_id" json:"exporterId"`
Type string `ch:"-" json:"type"`
TimeStamp time.Time `ch:"timestamp" json:"timestamp"`
Tenant string `ch:"tenant" json:"tenant"`
Data string `ch:"data" json:"data"`
}
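
To see the wire shape these structs produce, a small sketch marshaling a one-entry payload; all ids and counts below are made up:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/google/uuid"

	"go.signoz.io/signoz/ee/query-service/model"
)

func main() {
	payload := model.UsagePayload{
		LicenseKey: uuid.New(),
		Usage: []model.Usage{{
			CollectorID: "collector-1", // made-up ids
			ExporterID:  "exporter-1",
			Type:        "signoz_logs",
			Tenant:      "default",
			TimeStamp:   time.Now(),
			Count:       1000,
			Size:        4096,
		}},
	}

	// prints licenseKey plus the usage array (UsageBase fields included
	// via embedding)
	out, _ := json.MarshalIndent(payload, "", "  ")
	fmt.Println(string(out))
}
```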

92
ee/query-service/sso/google.go
Normal file
@@ -0,0 +1,92 @@
package sso

import (
"fmt"
"errors"
"context"
"net/http"
"github.com/coreos/go-oidc/v3/oidc"
"golang.org/x/oauth2"
)

type GoogleOAuthProvider struct {
RedirectURI string
OAuth2Config *oauth2.Config
Verifier *oidc.IDTokenVerifier
Cancel context.CancelFunc
HostedDomain string
}


func (g *GoogleOAuthProvider) BuildAuthURL(state string) (string, error) {
var opts []oauth2.AuthCodeOption

// set hosted domain. google supports multiple hosted domains but in our case
// we have one config per host domain.
opts = append(opts, oauth2.SetAuthURLParam("hd", g.HostedDomain))

return g.OAuth2Config.AuthCodeURL(state, opts...), nil
}

type oauth2Error struct{
error string
errorDescription string
}

func (e *oauth2Error) Error() string {
if e.errorDescription == "" {
return e.error
}
return e.error + ": " + e.errorDescription
}

func (g *GoogleOAuthProvider) HandleCallback(r *http.Request) (identity *SSOIdentity, err error) {
q := r.URL.Query()
if errType := q.Get("error"); errType != "" {
return identity, &oauth2Error{errType, q.Get("error_description")}
}

token, err := g.OAuth2Config.Exchange(r.Context(), q.Get("code"))
if err != nil {
return identity, fmt.Errorf("google: failed to get token: %v", err)
}

return g.createIdentity(r.Context(), token)
}


func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2.Token) (identity *SSOIdentity, err error) {
rawIDToken, ok := token.Extra("id_token").(string)
if !ok {
return identity, errors.New("google: no id_token in token response")
}
idToken, err := g.Verifier.Verify(ctx, rawIDToken)
if err != nil {
return identity, fmt.Errorf("google: failed to verify ID Token: %v", err)
}

var claims struct {
Username string `json:"name"`
Email string `json:"email"`
EmailVerified bool `json:"email_verified"`
HostedDomain string `json:"hd"`
}
if err := idToken.Claims(&claims); err != nil {
return identity, fmt.Errorf("oidc: failed to decode claims: %v", err)
}

if claims.HostedDomain != g.HostedDomain {
return identity, fmt.Errorf("oidc: unexpected hd claim %v", claims.HostedDomain)
}

identity = &SSOIdentity{
UserID: idToken.Subject,
Username: claims.Username,
Email: claims.Email,
EmailVerified: claims.EmailVerified,
ConnectorData: []byte(token.RefreshToken),
}

return identity, nil
}

31
ee/query-service/sso/model.go
Normal file
@@ -0,0 +1,31 @@
package sso

import (
"net/http"
)

// SSOIdentity contains details of user received from SSO provider
type SSOIdentity struct {
UserID string
Username string
PreferredUsername string
Email string
EmailVerified bool
ConnectorData []byte
}

// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth
// style redirect flow to determine user information.
type OAuthCallbackProvider interface {
// The initial URL the user would be redirected to.
// OAuth2 implementations support various scopes, but we only need basic
// identity scopes since the roles are still managed in SigNoz.
BuildAuthURL(state string) (string, error)

// Handle the callback to the server (after login at the oauth provider site)
// and return an email identity.
// At the moment we don't support an auto signup flow (based on domain), so
// the full identity (including name, group etc) is not required outside of the
// connector
HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}
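
Because the interface is small, a stub is easy to write for tests. A hedged sketch; the fixed URL and identity below are fabricated test data:

```go
package sso_test

import (
	"net/http"

	"go.signoz.io/signoz/ee/query-service/sso"
)

// fakeProvider satisfies sso.OAuthCallbackProvider for unit tests.
type fakeProvider struct{}

func (f *fakeProvider) BuildAuthURL(state string) (string, error) {
	// echo the state back so tests can assert on it
	return "https://idp.example.com/auth?state=" + state, nil
}

func (f *fakeProvider) HandleCallback(r *http.Request) (*sso.SSOIdentity, error) {
	return &sso.SSOIdentity{
		UserID:        "test-user",
		Email:         "user@example.com",
		EmailVerified: true,
	}, nil
}

// compile-time check that the stub implements the interface
var _ sso.OAuthCallbackProvider = (*fakeProvider)(nil)
```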

@@ -4,18 +4,19 @@ import (
"context"
"encoding/json"
"fmt"
"strings"
"sync/atomic"
"time"

"github.com/ClickHouse/clickhouse-go/v2"
"github.com/google/uuid"
"github.com/jmoiron/sqlx"

"go.uber.org/zap"

licenseserver "go.signoz.io/signoz/ee/query-service/integrations/signozio"
"go.signoz.io/signoz/ee/query-service/license"
"go.signoz.io/signoz/ee/query-service/model"
"go.signoz.io/signoz/ee/query-service/usage/repository"
"go.signoz.io/signoz/pkg/query-service/utils/encryption"
)

@@ -27,9 +28,6 @@ const (
)

var (
// collect usage every hour
collectionFrequency = 1 * time.Hour

// send usage every 24 hour
uploadFrequency = 24 * time.Hour

@@ -37,8 +35,6 @@ var (
)

type Manager struct {
repository *repository.Repository

clickhouseConn clickhouse.Conn

licenseRepo *license.Repo
@@ -52,15 +48,9 @@ type Manager struct {
}

func New(dbType string, db *sqlx.DB, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn) (*Manager, error) {
repo := repository.New(db)

err := repo.Init(dbType)
if err != nil {
return nil, fmt.Errorf("failed to initiate usage repo: %v", err)
}

m := &Manager{
repository: repo,
// repository: repo,
clickhouseConn: clickhouseConn,
licenseRepo: licenseRepo,
}
@@ -74,6 +64,28 @@ func (lm *Manager) Start() error {
return fmt.Errorf("usage exporter is locked")
}

go lm.UsageExporter(context.Background())

return nil
}

func (lm *Manager) UsageExporter(ctx context.Context) {
defer close(lm.terminated)

uploadTicker := time.NewTicker(uploadFrequency)
defer uploadTicker.Stop()

for {
select {
case <-lm.done:
return
case <-uploadTicker.C:
lm.UploadUsage(ctx)
}
}
}

func (lm *Manager) UploadUsage(ctx context.Context) error {
// check if license is present or not
license, err := lm.licenseRepo.GetActiveLicense(context.Background())
if err != nil {
@@ -85,203 +97,81 @@ func (lm *Manager) Start() error {
return nil
}

// upload previous snapshots if any
err = lm.UploadUsage(context.Background())
if err != nil {
return err
}

// collect a snapshot in case it wasn't collected in (t - collectionFrequency)
err = lm.CollectCurrentUsage(context.Background())
if err != nil {
return err
}

go lm.UsageExporter(context.Background())

return nil
}

// CollectCurrentUsage checks if usage data needs to be collected
func (lm *Manager) CollectCurrentUsage(ctx context.Context) error {
// check the DB if anything exists where timestamp > t - collectionFrequency
ts := time.Now().Add(-collectionFrequency)
alreadyCreated, err := lm.repository.CheckSnapshotGtCreatedAt(ctx, ts)
if err != nil {
return err
}
if !alreadyCreated {
zap.S().Info("Collecting current usage")
exportError := lm.CollectAndStoreUsage(ctx)
if exportError != nil {
return exportError
}
} else {
zap.S().Info("Nothing to collect")
}
return nil
}
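
The gate above is a generic "at most once per window" pattern. A standalone sketch of the same idea, where an in-memory timestamp stands in for the snapshot table:

```go
package main

import (
	"fmt"
	"time"
)

// collectGate remembers the last collection time and only lets a
// new collection through once the window has elapsed.
type collectGate struct {
	last   time.Time
	window time.Duration
}

func (g *collectGate) shouldCollect(now time.Time) bool {
	if now.Sub(g.last) < g.window {
		return false // something was already collected inside the window
	}
	g.last = now
	return true
}

func main() {
	g := &collectGate{window: time.Hour}
	now := time.Now()

	fmt.Println(g.shouldCollect(now))                       // true: first run collects
	fmt.Println(g.shouldCollect(now.Add(10 * time.Minute))) // false: inside the window
	fmt.Println(g.shouldCollect(now.Add(2 * time.Hour)))    // true: window elapsed
}
```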

func (lm *Manager) UsageExporter(ctx context.Context) {
defer close(lm.terminated)

collectionTicker := time.NewTicker(collectionFrequency)
defer collectionTicker.Stop()

uploadTicker := time.NewTicker(uploadFrequency)
defer uploadTicker.Stop()

for {
select {
case <-lm.done:
return
case <-collectionTicker.C:
lm.CollectAndStoreUsage(ctx)
case <-uploadTicker.C:
lm.UploadUsage(ctx)
// remove the old snapshots
lm.repository.DropOldSnapshots(ctx)
}
}
}

type TableSize struct {
Table string `ch:"table"`
DiskName string `ch:"disk_name"`
Rows uint64 `ch:"rows"`
UncompressedBytes uint64 `ch:"uncompressed_bytes"`
}

func (lm *Manager) CollectAndStoreUsage(ctx context.Context) error {
snap, err := lm.GetUsageFromClickHouse(ctx)
if err != nil {
return err
}

license, err := lm.licenseRepo.GetActiveLicense(ctx)
if err != nil {
return err
}

activationId, _ := uuid.Parse(license.ActivationId)
// TODO (nitya) : Add installation ID in the payload
payload := model.UsagePayload{
UsageBase: model.UsageBase{
ActivationId: activationId,
FailedSyncRequest: 0,
},
Metrics: *snap,
SnapshotDate: time.Now(),
}

err = lm.repository.InsertSnapshot(ctx, &payload)
if err != nil {
return err
}

return nil
}

func (lm *Manager) GetUsageFromClickHouse(ctx context.Context) (*model.UsageSnapshot, error) {
tableSizes := []TableSize{}
snap := model.UsageSnapshot{}
usages := []model.UsageDB{}

// get usage from clickhouse
dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"}
query := `
SELECT
table,
disk_name,
sum(rows) as rows,
sum(data_uncompressed_bytes) AS uncompressed_bytes
FROM system.parts
WHERE active AND (database in ('signoz_logs', 'signoz_metrics', 'signoz_traces')) AND (table in ('logs','samples_v2', 'signoz_index_v2'))
GROUP BY
table,
disk_name
ORDER BY table
SELECT tenant, collector_id, exporter_id, timestamp, data
FROM %s.distributed_usage as u1
GLOBAL INNER JOIN
(SELECT
tenant, collector_id, exporter_id, MAX(timestamp) as ts
FROM %s.distributed_usage as u2
where timestamp >= $1
GROUP BY tenant, collector_id, exporter_id
) as t1
ON
u1.tenant = t1.tenant AND u1.collector_id = t1.collector_id AND u1.exporter_id = t1.exporter_id and u1.timestamp = t1.ts
order by timestamp
`
err := lm.clickhouseConn.Select(ctx, &tableSizes, query)
if err != nil {
return nil, err
}

for _, val := range tableSizes {
switch val.Table {
case "logs":
if val.DiskName == "default" {
snap.CurrentLogSizeBytes = val.UncompressedBytes
} else {
snap.CurrentLogSizeBytesColdStorage = val.UncompressedBytes
}
case "samples_v2":
if val.DiskName == "default" {
snap.CurrentSamplesCount = val.Rows
} else {
snap.CurrentSamplesCountColdStorage = val.Rows
}
case "signoz_index_v2":
if val.DiskName == "default" {
snap.CurrentSpansCount = val.Rows
} else {
snap.CurrentSpansCountColdStorage = val.Rows
}
for _, db := range dbs {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
return err
}
for _, u := range dbusages {
u.Type = db
usages = append(usages, u)
}
}

return &snap, nil
}

func (lm *Manager) UploadUsage(ctx context.Context) error {
snapshots, err := lm.repository.GetSnapshotsNotSynced(ctx)
if err != nil {
return err
}

if len(snapshots) <= 0 {
if len(usages) <= 0 {
zap.S().Info("no snapshots to upload, skipping.")
return nil
}

zap.S().Info("uploading snapshots")
for _, snap := range snapshots {
metricsBytes, err := encryption.Decrypt([]byte(snap.ActivationId.String()[:32]), []byte(snap.Snapshot))
zap.S().Info("uploading usage data")

usagesPayload := []model.Usage{}
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
return err
}

metrics := model.UsageSnapshot{}
err = json.Unmarshal(metricsBytes, &metrics)
usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
return err
}

err = lm.UploadUsageWithExponentalBackOff(ctx, model.UsagePayload{
UsageBase: model.UsageBase{
Id: snap.Id,
InstallationId: snap.InstallationId,
ActivationId: snap.ActivationId,
FailedSyncRequest: snap.FailedSyncRequest,
},
SnapshotDate: snap.CreatedAt,
Metrics: metrics,
})
if err != nil {
return err
}
usageData.CollectorID = usage.CollectorID
usageData.ExporterID = usage.ExporterID
usageData.Type = usage.Type
usageData.Tenant = usage.Tenant
usagesPayload = append(usagesPayload, usageData)
}

key, _ := uuid.Parse(license.Key)
payload := model.UsagePayload{
LicenseKey: key,
Usage: usagesPayload,
}
err = lm.UploadUsageWithExponentalBackOff(ctx, payload)
if err != nil {
return err
}
return nil
}
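
The usage rows travel encrypted, keyed by the first 32 characters of an id, mirroring usage.ExporterID[:32] above. A minimal round-trip sketch assuming the repo's encryption helpers behave as used in this diff (key and plaintext in, bytes out); the exporter id below is a fabricated example value:

```go
package main

import (
	"fmt"

	"go.signoz.io/signoz/pkg/query-service/utils/encryption"
)

func main() {
	// fabricated id; only its first 32 characters act as the key
	exporterID := "0123456789abcdef0123456789abcdef-exporter"
	key := []byte(exporterID[:32])

	plaintext := []byte(`{"count":1000,"size":4096}`)

	ciphertext, err := encryption.Encrypt(key, plaintext)
	if err != nil {
		panic(err)
	}

	decrypted, err := encryption.Decrypt(key, ciphertext)
	if err != nil {
		panic(err)
	}

	fmt.Println(string(decrypted)) // {"count":1000,"size":4096}
}
```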

func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload model.UsagePayload) error {
for i := 1; i <= MaxRetries; i++ {
apiErr := licenseserver.SendUsage(ctx, &payload)
apiErr := licenseserver.SendUsage(ctx, payload)
if apiErr != nil && i == MaxRetries {
err := lm.repository.IncrementFailedRequestCount(ctx, payload.Id)
if err != nil {
zap.S().Errorf("failed to update the failure count for snapshot in DB : ", zap.Error(err))
return err
}
zap.S().Errorf("retries stopped : %v", zap.Error(err))
zap.S().Errorf("retries stopped : %v", zap.Error(apiErr))
// not returning error here since it is captured in the failed count
return nil
} else if apiErr != nil {
@@ -289,24 +179,10 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
sleepDuration := RetryInterval * time.Duration(i)
zap.S().Errorf("failed to upload snapshot retrying after %v secs : %v", sleepDuration.Seconds(), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)

// update the failed request count
err := lm.repository.IncrementFailedRequestCount(ctx, payload.Id)
if err != nil {
zap.S().Errorf("failed to update the failure count for snapshot in DB : %v", zap.Error(err))
return err
}
} else {
break
}
}

// update the database that it is synced
err := lm.repository.MoveToSynced(ctx, payload.Id)
if err != nil {
return err
}

return nil
}
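
Despite its name, the loop above waits RetryInterval * i between attempts, which is a linear schedule. For comparison, a self-contained sketch of an actually exponential backoff:

```go
package main

import (
	"fmt"
	"time"
)

// retryWithExponentialBackoff doubles the wait after every failed
// attempt (base, 2*base, 4*base, ...), unlike the linear
// RetryInterval*i schedule used above.
func retryWithExponentialBackoff(attempts int, base time.Duration, send func() error) error {
	var err error
	for i := 0; i < attempts; i++ {
		if err = send(); err == nil {
			return nil
		}
		sleep := base << uint(i) // base * 2^i
		fmt.Printf("attempt %d failed, retrying in %v\n", i+1, sleep)
		time.Sleep(sleep)
	}
	return err
}

func main() {
	calls := 0
	_ = retryWithExponentialBackoff(4, 100*time.Millisecond, func() error {
		calls++
		if calls < 3 {
			return fmt.Errorf("transient failure")
		}
		return nil
	})
}
```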

@@ -1,139 +0,0 @@
package repository

import (
"context"
"encoding/json"
"fmt"
"time"

"github.com/google/uuid"
"github.com/jmoiron/sqlx"
"go.uber.org/zap"

"go.signoz.io/signoz/ee/query-service/model"
"go.signoz.io/signoz/ee/query-service/usage/sqlite"
"go.signoz.io/signoz/pkg/query-service/utils/encryption"
)

const (
MaxFailedSyncCount = 9 // a snapshot will be ignored if the max failed count is greater than or equal to 9
SnapShotLife = 3 * 24 * time.Hour
)

// Repository is usage Repository which stores usage snapshot in a secured DB
type Repository struct {
db *sqlx.DB
}

// New initiates a new usage Repository
func New(db *sqlx.DB) *Repository {
return &Repository{
db: db,
}
}

func (r *Repository) Init(engine string) error {
switch engine {
case "sqlite3", "sqlite":
return sqlite.InitDB(r.db)
default:
return fmt.Errorf("unsupported db")
}
}

func (r *Repository) InsertSnapshot(ctx context.Context, usage *model.UsagePayload) error {

snapshotBytes, err := json.Marshal(usage.Metrics)
if err != nil {
return err
}

usage.Id = uuid.New()

encryptedSnapshot, err := encryption.Encrypt([]byte(usage.ActivationId.String()[:32]), snapshotBytes)
if err != nil {
return err
}

query := `INSERT INTO usage(id, activation_id, snapshot)
VALUES ($1, $2, $3)`
_, err = r.db.ExecContext(ctx,
query,
usage.Id,
usage.ActivationId,
string(encryptedSnapshot),
)
if err != nil {
zap.S().Errorf("error inserting usage data: %v", zap.Error(err))
return fmt.Errorf("failed to insert usage in db: %v", err)
}
return nil
}

func (r *Repository) MoveToSynced(ctx context.Context, id uuid.UUID) error {

query := `UPDATE usage
SET synced = 'true',
synced_at = $1
WHERE id = $2`

_, err := r.db.ExecContext(ctx, query, time.Now(), id)

if err != nil {
zap.S().Errorf("error in updating usage: %v", zap.Error(err))
return fmt.Errorf("failed to update usage in db: %v", err)
}

return nil
}

func (r *Repository) IncrementFailedRequestCount(ctx context.Context, id uuid.UUID) error {

query := `UPDATE usage SET failed_sync_request_count = failed_sync_request_count + 1 WHERE id = $1`
_, err := r.db.ExecContext(ctx, query, id)
if err != nil {
zap.S().Errorf("error in updating usage: %v", zap.Error(err))
return fmt.Errorf("failed to update usage in db: %v", err)
}

return nil
}

func (r *Repository) GetSnapshotsNotSynced(ctx context.Context) ([]*model.Usage, error) {
snapshots := []*model.Usage{}

query := `SELECT id,created_at, activation_id, snapshot, failed_sync_request_count from usage where synced!='true' and failed_sync_request_count < $1 order by created_at asc `

err := r.db.SelectContext(ctx, &snapshots, query, MaxFailedSyncCount)
if err != nil {
return nil, err
}

return snapshots, nil
}

func (r *Repository) DropOldSnapshots(ctx context.Context) error {
query := `delete from usage where created_at <= $1`

_, err := r.db.ExecContext(ctx, query, time.Now().Add(-(SnapShotLife)))
if err != nil {
zap.S().Errorf("failed to remove old snapshots from db: %v", zap.Error(err))
return err
}

return nil
}

// CheckSnapshotGtCreatedAt checks if there is any snapshot created after the provided timestamp
func (r *Repository) CheckSnapshotGtCreatedAt(ctx context.Context, ts time.Time) (bool, error) {

var snapshots uint64
query := `SELECT count() from usage where created_at > '$1'`

err := r.db.QueryRowContext(ctx, query, ts).Scan(&snapshots)
if err != nil {
return false, err
}

return snapshots > 0, err
}
@@ -1,32 +0,0 @@
package sqlite

import (
"fmt"

"github.com/jmoiron/sqlx"
)

func InitDB(db *sqlx.DB) error {
var err error
if db == nil {
return fmt.Errorf("invalid db connection")
}

table_schema := `CREATE TABLE IF NOT EXISTS usage(
id UUID PRIMARY KEY,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
activation_id UUID,
snapshot TEXT,
synced BOOLEAN DEFAULT 'false',
synced_at TIMESTAMP,
failed_sync_request_count INTEGER DEFAULT 0
);
`

_, err = db.Exec(table_schema)
if err != nil {
return fmt.Errorf("error in creating usage table: %v", err.Error())
}
return nil
}
3
frontend/public/locales/en-GB/traceDetails.json
Normal file
@@ -0,0 +1,3 @@
{
"search_tags": "Search Tag Names"
}

3
frontend/public/locales/en/traceDetails.json
Normal file
@@ -0,0 +1,3 @@
{
"search_tags": "Search Tag Names"
}
@@ -6,7 +6,7 @@
"release_notes": "Release Notes",
"read_how_to_upgrade": "Read instructions on how to upgrade",
"latest_version_signoz": "You are running the latest version of SigNoz.",
"stale_version": "You are on an older version and may be losing out on the latest features we have shipped. We recommend to upgrade to the latest version",
"stale_version": "You are on an older version and may be missing out on the latest features we have shipped. We recommend upgrading to the latest version",
"oops_something_went_wrong_version": "Oops.. facing issues with fetching updated version information",
"n_a": "N/A",
"routes": {

@@ -57,6 +57,7 @@ const afterLogin = async (
profilePictureURL: payload.profilePictureURL,
userId: payload.id,
orgId: payload.orgId,
userFlags: payload.flags,
},
});


@@ -4,14 +4,16 @@ import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import { EventSourcePolyfill } from 'event-source-polyfill';

export const LiveTail = (queryParams: string): EventSourcePolyfill => {
const dict = {
headers: {
Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
},
};
return new EventSourcePolyfill(
// 10 min in ms
const TIMEOUT_IN_MS = 10 * 60 * 1000;

export const LiveTail = (queryParams: string): EventSourcePolyfill =>
new EventSourcePolyfill(
`${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`,
dict,
{
headers: {
Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
},
heartbeatTimeout: TIMEOUT_IN_MS,
},
);
};

26
frontend/src/api/user/setFlags.ts
Normal file
@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/user/setFlags';

const setFlags = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.patch(`/user/${props.userId}/flags`, {
...props.flags,
});

return {
statusCode: 200,
error: null,
message: response.data?.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default setFlags;
@@ -41,6 +41,7 @@ export const Logout = (): void => {
orgName: '',
profilePictureURL: '',
userId: '',
userFlags: {},
},
});


@@ -62,6 +62,8 @@ export const legend = (id: string, isLonger: boolean): Plugin<ChartType> => {
li.style.marginTop = '5px';

li.onclick = (): void => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const { type } = chart.config;
if (type === 'pie' || type === 'doughnut') {
// Pie and doughnut charts only have a single dataset and visibility is per item

@@ -1,46 +1,21 @@
import { Button, Popover } from 'antd';
import getStep from 'lib/getStep';
import { generateFilterQuery } from 'lib/logs/generateFilterQuery';
import React, { memo, useCallback, useMemo } from 'react';
import { connect, useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { getLogs } from 'store/actions/logs/getLogs';
import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
import { useDispatch, useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { SET_SEARCH_QUERY_STRING, TOGGLE_LIVE_TAIL } from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime';
import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs';
import { ILogsReducer } from 'types/reducer/logs';

interface AddToQueryHOCProps {
fieldKey: string;
fieldValue: string;
children: React.ReactNode;
getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0],
) => ReturnType<typeof getLogsAggregate>;
}
function AddToQueryHOC({
fieldKey,
fieldValue,
children,
getLogs,
getLogsAggregate,
}: AddToQueryHOCProps): JSX.Element {
const {
searchFilter: { queryString },
logLinesPerPage,
idStart,
idEnd,
liveTail,
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
const dispatch = useDispatch();

const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const generatedQuery = useMemo(
() => generateFilterQuery({ fieldKey, fieldValue, type: 'IN' }),
[fieldKey, fieldValue],
@@ -58,69 +33,14 @@ function AddToQueryHOC({
type: SET_SEARCH_QUERY_STRING,
payload: updatedQueryString,
});
if (liveTail === 'STOPPED') {
getLogs({
q: updatedQueryString,
limit: logLinesPerPage,
orderBy: 'timestamp',
order: 'desc',
timestampStart: minTime,
timestampEnd: maxTime,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
getLogsAggregate({
timestampStart: minTime,
timestampEnd: maxTime,
step: getStep({
start: minTime,
end: maxTime,
inputFormat: 'ns',
}),
q: updatedQueryString,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
} else if (liveTail === 'PLAYING') {
dispatch({
type: TOGGLE_LIVE_TAIL,
payload: 'PAUSED',
});
setTimeout(
() =>
dispatch({
type: TOGGLE_LIVE_TAIL,
payload: liveTail,
}),
0,
);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
dispatch,
generatedQuery,
getLogs,
idEnd,
idStart,
logLinesPerPage,
maxTime,
minTime,
queryString,
}, [dispatch, generatedQuery, queryString]);

const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
fieldKey,
]);

const popOverContent = (
<span style={{ fontSize: '0.9rem' }}>Add to query: {fieldKey}</span>
);
return (
<Button
size="small"
type="text"
style={{
margin: 0,
padding: 0,
}}
onClick={handleQueryAdd}
>
<Button size="small" type="text" onClick={handleQueryAdd}>
<Popover placement="top" content={popOverContent}>
{children}
</Popover>
@@ -128,20 +48,10 @@ function AddToQueryHOC({
);
}

interface DispatchProps {
getLogs: (
props: Parameters<typeof getLogs>[0],
) => (dispatch: Dispatch<AppActions>) => void;
getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0],
) => (dispatch: Dispatch<AppActions>) => void;
interface AddToQueryHOCProps {
fieldKey: string;
fieldValue: string;
children: React.ReactNode;
}

const mapDispatchToProps = (
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
getLogs: bindActionCreators(getLogs, dispatch),
getLogsAggregate: bindActionCreators(getLogsAggregate, dispatch),
});

export default connect(null, mapDispatchToProps)(memo(AddToQueryHOC));
export default memo(AddToQueryHOC);

27
frontend/src/components/MessageTip/index.tsx
Normal file
@@ -0,0 +1,27 @@
import React from 'react';

import { StyledAlert } from './styles';

interface MessageTipProps {
show?: boolean;
message: React.ReactNode | string;
action: React.ReactNode | undefined;
}

function MessageTip({
show,
message,
action,
}: MessageTipProps): JSX.Element | null {
if (!show) return null;

return (
<StyledAlert showIcon description={message} type="info" action={action} />
);
}

MessageTip.defaultProps = {
show: false,
};

export default MessageTip;

6
frontend/src/components/MessageTip/styles.ts
Normal file
@@ -0,0 +1,6 @@
import { Alert } from 'antd';
import styled from 'styled-components';

export const StyledAlert = styled(Alert)`
align-items: center;
`;
1
frontend/src/components/NotFound/constant.ts
Normal file
@@ -0,0 +1 @@
export const defaultText = 'Ah, seems like we reached a dead end!';
@@ -2,45 +2,52 @@ import getLocalStorageKey from 'api/browser/localstorage/get';
import NotFoundImage from 'assets/NotFound';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import React from 'react';
import React, { useCallback } from 'react';
import { useDispatch } from 'react-redux';
import { Dispatch } from 'redux';
import AppActions from 'types/actions';
import { LOGGED_IN } from 'types/actions/app';

import { defaultText } from './constant';
import { Button, Container, Text, TextContainer } from './styles';

function NotFound(): JSX.Element {
function NotFound({ text = defaultText }: Props): JSX.Element {
  const dispatch = useDispatch<Dispatch<AppActions>>();
  const isLoggedIn = getLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN);

  const onClickHandler = useCallback(() => {
    if (isLoggedIn) {
      dispatch({
        type: LOGGED_IN,
        payload: {
          isLoggedIn: true,
        },
      });
    }
  }, [dispatch, isLoggedIn]);

  return (
    <Container>
      <NotFoundImage />

      <TextContainer>
        <Text>Ah, seems like we reached a dead end!</Text>
        <Text>{text}</Text>
        <Text>Page Not Found</Text>
      </TextContainer>

      <Button
        onClick={(): void => {
          if (isLoggedIn) {
            dispatch({
              type: LOGGED_IN,
              payload: {
                isLoggedIn: true,
              },
            });
          }
        }}
        to={ROUTES.APPLICATION}
        tabIndex={0}
      >
      <Button onClick={onClickHandler} to={ROUTES.APPLICATION} tabIndex={0}>
        Return To Services Page
      </Button>
    </Container>
  );
}

interface Props {
  text?: string;
}

NotFound.defaultProps = {
  text: defaultText,
};

export default NotFound;
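The new optional `text` prop makes the 404 layout reusable for other dead ends. A minimal sketch (the wrapper component and its message are illustrative):

```tsx
import NotFound from 'components/NotFound';
import React from 'react';

// Renders the 404 layout with a custom message; omitting `text`
// falls back to defaultText from './constant'.
function DashboardMissing(): JSX.Element {
  return <NotFound text="This dashboard does not exist or was deleted." />;
}
```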
4 frontend/src/components/ReleaseNote/ReleaseNoteProps.ts Normal file
@@ -0,0 +1,4 @@
export default interface ReleaseNoteProps {
  path?: string;
  release?: string;
}
@@ -0,0 +1,73 @@
import { Button, Space } from 'antd';
import setFlags from 'api/user/setFlags';
import MessageTip from 'components/MessageTip';
import React, { useCallback } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { UPDATE_USER_FLAG } from 'types/actions/app';
import { UserFlags } from 'types/api/user/setFlags';
import AppReducer from 'types/reducer/app';

import ReleaseNoteProps from '../ReleaseNoteProps';

export default function ReleaseNote0120({
  release,
}: ReleaseNoteProps): JSX.Element | null {
  const { user } = useSelector<AppState, AppReducer>((state) => state.app);

  const dispatch = useDispatch<Dispatch<AppActions>>();

  const handleDontShow = useCallback(async (): Promise<void> => {
    const flags: UserFlags = { ReleaseNote0120Hide: 'Y' };

    try {
      dispatch({
        type: UPDATE_USER_FLAG,
        payload: {
          flags,
        },
      });
      if (!user) {
        // no user is set, so escape the routine
        return;
      }

      const response = await setFlags({ userId: user?.userId, flags });

      if (response.statusCode !== 200) {
        console.log('failed to complete do not show status', response.error);
      }
    } catch (e) {
      // we do nothing here, as the cost of this error is minor;
      // the user can toggle the do-not-show option again in the future.
      console.log('unexpected error: failed to complete do not show status', e);
    }
  }, [dispatch, user]);

  return (
    <MessageTip
      show
      message={
        <div>
          You are using {release} of SigNoz. We have introduced a distributed setup
          in the v0.12.0 release. If you use or plan to use ClickHouse queries in
          dashboards or alerts, you might want to read about querying the new
          distributed tables{' '}
          <a
            href="https://signoz.io/docs/operate/migration/upgrade-0.12/#querying-distributed-tables"
            target="_blank"
            rel="noreferrer"
          >
            here
          </a>
        </div>
      }
      action={
        <Space>
          <Button onClick={handleDontShow}>Do not show again</Button>
        </Space>
      }
    />
  );
}
66 frontend/src/components/ReleaseNote/index.tsx Normal file
@@ -0,0 +1,66 @@
import ReleaseNoteProps from 'components/ReleaseNote/ReleaseNoteProps';
import ReleaseNote0120 from 'components/ReleaseNote/Releases/ReleaseNote0120';
import ROUTES from 'constants/routes';
import React from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { UserFlags } from 'types/api/user/setFlags';
import AppReducer from 'types/reducer/app';

interface ComponentMapType {
  match: (
    path: string | undefined,
    version: string,
    userFlags: UserFlags | null,
  ) => boolean;
  component: ({ path, release }: ReleaseNoteProps) => JSX.Element | null;
}

const allComponentMap: ComponentMapType[] = [
  {
    match: (
      path: string | undefined,
      version: string,
      userFlags: UserFlags | null,
    ): boolean => {
      if (!path) {
        return false;
      }
      const allowedPaths = [
        ROUTES.LIST_ALL_ALERT,
        ROUTES.APPLICATION,
        ROUTES.ALL_DASHBOARD,
      ];
      return (
        userFlags?.ReleaseNote0120Hide !== 'Y' &&
        allowedPaths.includes(path) &&
        version.startsWith('v0.12')
      );
    },
    component: ReleaseNote0120,
  },
];

// ReleaseNote prints release-specific warnings and notes that
// the user needs to be aware of before using the upgraded version.
function ReleaseNote({ path }: ReleaseNoteProps): JSX.Element | null {
  const { userFlags, currentVersion } = useSelector<AppState, AppReducer>(
    (state) => state.app,
  );

  const c = allComponentMap.find((item) => {
    return item.match(path, currentVersion, userFlags);
  });

  if (!c) {
    return null;
  }

  return <c.component path={path} release={currentVersion} />;
}

ReleaseNote.defaultProps = {
  path: '',
};

export default ReleaseNote;
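The `allComponentMap` list makes release notes pluggable: each entry pairs a `match` predicate (current path, running version, user flags) with the component to render, and the first matching entry wins. A hedged sketch of what a future entry could look like; `ReleaseNote0130` and the `ReleaseNote0130Hide` flag are hypothetical and do not exist in this PR:

```tsx
// Hypothetical entry for a future release, following the same shape as
// the v0.12 entry above.
const releaseNote0130Entry: ComponentMapType = {
  match: (path, version, userFlags): boolean =>
    Boolean(path) &&
    userFlags?.ReleaseNote0130Hide !== 'Y' &&
    version.startsWith('v0.13'),
  component: ReleaseNote0130,
};
```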
9 frontend/src/container/AllError/constant.ts Normal file
@@ -0,0 +1,9 @@
const DEFAULT_FILTER_VALUE = '';
const EXCEPTION_TYPE_FILTER_NAME = 'exceptionType';
const SERVICE_NAME_FILTER_NAME = 'serviceName';

export {
  DEFAULT_FILTER_VALUE,
  EXCEPTION_TYPE_FILTER_NAME,
  SERVICE_NAME_FILTER_NAME,
};
@@ -17,6 +17,7 @@ import getAll from 'api/errors/getAll';
import getErrorCounts from 'api/errors/getErrorCounts';
import ROUTES from 'constants/routes';
import dayjs from 'dayjs';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import React, { useCallback, useEffect, useMemo } from 'react';
@@ -30,7 +31,11 @@ import { Exception, PayloadProps } from 'types/api/errors/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';

import {
  extractFilterValues,
  getDefaultFilterValue,
  getDefaultOrder,
  getFilterString,
  getFilterValues,
  getNanoSeconds,
  getOffSet,
  getOrder,
@@ -43,15 +48,27 @@ function AllErrors(): JSX.Element {
  const { maxTime, minTime, loading } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );
  const { search, pathname } = useLocation();
  const params = useMemo(() => new URLSearchParams(search), [search]);

  const { pathname } = useLocation();
  const params = useUrlQuery();
  const { t } = useTranslation(['common']);

  const updatedOrder = getOrder(params.get(urlKey.order));
  const getUpdatedOffset = getOffSet(params.get(urlKey.offset));
  const getUpdatedParams = getOrderParams(params.get(urlKey.orderParam));
  const getUpdatedPageSize = getUpdatePageSize(params.get(urlKey.pageSize));
  const {
    updatedOrder,
    getUpdatedOffset,
    getUpdatedParams,
    getUpdatedPageSize,
    getUpdatedExceptionType,
    getUpdatedServiceName,
  } = useMemo(
    () => ({
      updatedOrder: getOrder(params.get(urlKey.order)),
      getUpdatedOffset: getOffSet(params.get(urlKey.offset)),
      getUpdatedParams: getOrderParams(params.get(urlKey.orderParam)),
      getUpdatedPageSize: getUpdatePageSize(params.get(urlKey.pageSize)),
      getUpdatedExceptionType: getFilterString(params.get(urlKey.exceptionType)),
      getUpdatedServiceName: getFilterString(params.get(urlKey.serviceName)),
    }),
    [params],
  );

  const updatedPath = useMemo(
    () =>
@@ -60,6 +77,8 @@ function AllErrors(): JSX.Element {
        offset: getUpdatedOffset,
        orderParam: getUpdatedParams,
        pageSize: getUpdatedPageSize,
        exceptionType: getUpdatedExceptionType,
        serviceName: getUpdatedServiceName,
      })}`,
    [
      pathname,
@@ -67,6 +86,8 @@ function AllErrors(): JSX.Element {
      getUpdatedOffset,
      getUpdatedParams,
      getUpdatedPageSize,
      getUpdatedExceptionType,
      getUpdatedServiceName,
    ],
  );

@@ -81,16 +102,27 @@ function AllErrors(): JSX.Element {
          limit: getUpdatedPageSize,
          offset: getUpdatedOffset,
          orderParam: getUpdatedParams,
          exceptionType: getUpdatedExceptionType,
          serviceName: getUpdatedServiceName,
        }),
      enabled: !loading,
    },
    {
      queryKey: ['getErrorCounts', maxTime, minTime],
      queryKey: [
        'getErrorCounts',
        maxTime,
        minTime,
        getUpdatedExceptionType,
        getUpdatedServiceName,
      ],
      queryFn: (): Promise<ErrorResponse | SuccessResponse<number>> =>
        getErrorCounts({
          end: maxTime,
          start: minTime,
          exceptionType: getUpdatedExceptionType,
          serviceName: getUpdatedServiceName,
        }),
      enabled: !loading,
    },
  ]);
@@ -108,14 +140,43 @@ function AllErrors(): JSX.Element {

  const filterIcon = useCallback(() => <SearchOutlined />, []);

  const handleSearch = (
    confirm: (param?: FilterConfirmProps) => void,
  ): VoidFunction => (): void => {
    confirm();
  };
  const handleSearch = useCallback(
    (
      confirm: (param?: FilterConfirmProps) => void,
      filterValue: string,
      filterKey: string,
    ): VoidFunction => (): void => {
      const { exceptionFilterValue, serviceFilterValue } = getFilterValues(
        getUpdatedServiceName || '',
        getUpdatedExceptionType || '',
        filterKey,
        filterValue || '',
      );
      history.replace(
        `${pathname}?${createQueryParams({
          order: updatedOrder,
          offset: getUpdatedOffset,
          orderParam: getUpdatedParams,
          pageSize: getUpdatedPageSize,
          exceptionType: exceptionFilterValue,
          serviceName: serviceFilterValue,
        })}`,
      );
      confirm();
    },
    [
      getUpdatedExceptionType,
      getUpdatedOffset,
      getUpdatedPageSize,
      getUpdatedParams,
      getUpdatedServiceName,
      pathname,
      updatedOrder,
    ],
  );

  const filterDropdownWrapper = useCallback(
    ({ setSelectedKeys, selectedKeys, confirm, placeholder }) => {
    ({ setSelectedKeys, selectedKeys, confirm, placeholder, filterKey }) => {
      return (
        <Card size="small">
          <Space align="start" direction="vertical">
@@ -126,11 +187,16 @@ function AllErrors(): JSX.Element {
                setSelectedKeys(e.target.value ? [e.target.value] : [])
              }
              allowClear
              onPressEnter={handleSearch(confirm)}
              defaultValue={getDefaultFilterValue(
                filterKey,
                getUpdatedServiceName,
                getUpdatedExceptionType,
              )}
              onPressEnter={handleSearch(confirm, selectedKeys[0], filterKey)}
            />
            <Button
              type="primary"
              onClick={handleSearch(confirm)}
              onClick={handleSearch(confirm, selectedKeys[0], filterKey)}
              icon={<SearchOutlined />}
              size="small"
            >
@@ -140,7 +206,7 @@ function AllErrors(): JSX.Element {
        </Card>
      );
    },
    [],
    [getUpdatedExceptionType, getUpdatedServiceName, handleSearch],
  );

  const onExceptionTypeFilter = useCallback(
@@ -167,6 +233,7 @@ function AllErrors(): JSX.Element {
    (
      onFilter: ColumnType<Exception>['onFilter'],
      placeholder: string,
      filterKey: string,
    ): ColumnType<Exception> => ({
      onFilter,
      filterIcon,
@@ -176,6 +243,7 @@ function AllErrors(): JSX.Element {
        selectedKeys,
        confirm,
        placeholder,
        filterKey,
      }),
    }),
    [filterIcon, filterDropdownWrapper],
@@ -186,7 +254,7 @@ function AllErrors(): JSX.Element {
      title: 'Exception Type',
      dataIndex: 'exceptionType',
      key: 'exceptionType',
      ...getFilter(onExceptionTypeFilter, 'Search By Exception'),
      ...getFilter(onExceptionTypeFilter, 'Search By Exception', 'exceptionType'),
      render: (value, record): JSX.Element => (
        <Tooltip overlay={(): JSX.Element => value}>
          <Link
@@ -266,30 +334,39 @@ function AllErrors(): JSX.Element {
        updatedOrder,
        'serviceName',
      ),
      ...getFilter(onApplicationTypeFilter, 'Search By Application'),
      ...getFilter(
        onApplicationTypeFilter,
        'Search By Application',
        'serviceName',
      ),
    },
  ];

  const onChangeHandler: TableProps<Exception>['onChange'] = (
    paginations,
    _,
    sorter,
  ) => {
    if (!Array.isArray(sorter)) {
      const { pageSize = 0, current = 0 } = paginations;
      const { columnKey = '', order } = sorter;
      const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';

      history.replace(
        `${pathname}?${createQueryParams({
          order: updatedOrder,
          offset: (current - 1) * pageSize,
          orderParam: columnKey,
          pageSize,
        })}`,
      );
    }
  };
  const onChangeHandler: TableProps<Exception>['onChange'] = useCallback(
    (paginations, filters, sorter) => {
      if (!Array.isArray(sorter)) {
        const { pageSize = 0, current = 0 } = paginations;
        const { columnKey = '', order } = sorter;
        const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';
        const params = new URLSearchParams(window.location.search);
        const { exceptionType, serviceName } = extractFilterValues(filters, {
          serviceName: getFilterString(params.get(urlKey.serviceName)),
          exceptionType: getFilterString(params.get(urlKey.exceptionType)),
        });
        history.replace(
          `${pathname}?${createQueryParams({
            order: updatedOrder,
            offset: (current - 1) * pageSize,
            orderParam: columnKey,
            pageSize,
            exceptionType,
            serviceName,
          })}`,
        );
      }
    },
    [pathname],
  );

  return (
    <Table
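Both `handleSearch` and `onChangeHandler` now treat the URL as the single source of truth: every filter, sort, or pagination change is serialized with `createQueryParams` and written via `history.replace`, and the memoized read of `params` re-derives the table state. A minimal sketch of the pattern in isolation (the state values are illustrative; in `AllErrors` they come from the memoized URL params):

```ts
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';

// Illustrative next state for the errors table.
const nextState = {
  order: 'ascending',
  offset: 0,
  orderParam: 'exceptionType',
  pageSize: 10,
  exceptionType: 'OSError',
  serviceName: 'frontend',
};

// Writing the state into the URL (rather than component state) means a
// reload or shared link reproduces the exact table view.
history.replace(`${window.location.pathname}?${createQueryParams(nextState)}`);
```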
@@ -1,7 +1,13 @@
import { SortOrder } from 'antd/lib/table/interface';
import { FilterValue, SortOrder } from 'antd/lib/table/interface';
import Timestamp from 'timestamp-nano';
import { Order, OrderBy } from 'types/api/errors/getAll';

import {
  DEFAULT_FILTER_VALUE,
  EXCEPTION_TYPE_FILTER_NAME,
  SERVICE_NAME_FILTER_NAME,
} from './constant';

export const isOrder = (order: string | null): order is Order =>
  !!(order === 'ascending' || order === 'descending');

@@ -10,6 +16,8 @@ export const urlKey = {
  offset: 'offset',
  orderParam: 'orderParam',
  pageSize: 'pageSize',
  exceptionType: 'exceptionType',
  serviceName: 'serviceName',
};

export const isOrderParams = (orderBy: string | null): orderBy is OrderBy => {
@@ -87,3 +95,94 @@ export const getUpdatePageSize = (pageSize: string | null): number => {
  }
  return 10;
};

export const getFilterString = (filter: string | null): string => {
  if (filter) {
    return filter;
  }
  return '';
};

export const getDefaultFilterValue = (
  filterKey: string | null,
  serviceName: string,
  exceptionType: string,
): string | undefined => {
  let defaultValue: string | undefined;
  switch (filterKey) {
    case SERVICE_NAME_FILTER_NAME:
      defaultValue = serviceName;
      break;
    case EXCEPTION_TYPE_FILTER_NAME:
      defaultValue = exceptionType;
      break;
    default:
      break;
  }
  return defaultValue;
};

export const getFilterValues = (
  serviceName: string,
  exceptionType: string,
  filterKey: string,
  filterValue: string,
): { exceptionFilterValue: string; serviceFilterValue: string } => {
  let serviceFilterValue = serviceName;
  let exceptionFilterValue = exceptionType;
  switch (filterKey) {
    case EXCEPTION_TYPE_FILTER_NAME:
      exceptionFilterValue = filterValue;
      break;
    case SERVICE_NAME_FILTER_NAME:
      serviceFilterValue = filterValue;
      break;
    default:
      break;
  }
  return { exceptionFilterValue, serviceFilterValue };
};

type FilterValues = { exceptionType: string; serviceName: string };

const extractSingleFilterValue = (
  filterName: string,
  filters: Filter,
): string => {
  const filterValues = filters[filterName];

  if (
    !filterValues ||
    !Array.isArray(filterValues) ||
    filterValues.length === 0
  ) {
    return DEFAULT_FILTER_VALUE;
  }

  return String(filterValues[0]);
};

type Filter = Record<string, FilterValue | null>;

export const extractFilterValues = (
  filters: Filter,
  prefilledFilters: FilterValues,
): FilterValues => {
  const filterValues: FilterValues = {
    exceptionType: prefilledFilters.exceptionType,
    serviceName: prefilledFilters.serviceName,
  };
  if (filters[EXCEPTION_TYPE_FILTER_NAME]) {
    filterValues.exceptionType = extractSingleFilterValue(
      EXCEPTION_TYPE_FILTER_NAME,
      filters,
    );
  }
  if (filters[SERVICE_NAME_FILTER_NAME]) {
    filterValues.serviceName = extractSingleFilterValue(
      SERVICE_NAME_FILTER_NAME,
      filters,
    );
  }
  return filterValues;
};
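To make the merge semantics of `extractFilterValues` concrete, a small hedged example: filters that are active in the table override the prefilled (URL-derived) values, while absent or cleared ones fall through. The inputs below mirror what antd's `onChange` hands over but are invented for illustration:

```ts
// Hypothetical inputs: one active column filter, one cleared.
const filters = { exceptionType: ['ValueError'], serviceName: null };
const prefilled = { exceptionType: 'OSError', serviceName: 'frontend' };

const result = extractFilterValues(filters, prefilled);
// result.exceptionType === 'ValueError'  (taken from the active filter)
// result.serviceName === 'frontend'      (kept from the URL prefill)
```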
@@ -70,8 +70,8 @@ export const logAlertDefaults: AlertDef = {
  chQueries: {
    A: {
      name: 'A',
      query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
      rawQuery: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
      query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
      rawQuery: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
      legend: '',
      disabled: false,
    },
@@ -117,8 +117,8 @@ export const traceAlertDefaults: AlertDef = {
  chQueries: {
    A: {
      name: 'A',
      rawQuery: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
      query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
      rawQuery: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
      query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
      legend: '',
      disabled: false,
    },
@@ -164,8 +164,8 @@ export const exceptionAlertDefaults: AlertDef = {
  chQueries: {
    A: {
      name: 'A',
      rawQuery: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
      query: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
      rawQuery: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.distributed_signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
      query: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.distributed_signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
      legend: '',
      disabled: false,
    },
@@ -20,8 +20,8 @@ export const rawQueryToIChQuery = (
  }

  return {
    rawQuery: rawQuery !== undefined ? rawQuery : src.rawQuery,
    query: rawQuery !== undefined ? rawQuery : src.rawQuery,
    rawQuery: rawQuery !== undefined ? rawQuery : src.query,
    query: rawQuery !== undefined ? rawQuery : src.query,
    legend: legend !== undefined ? legend : src.legend,
    name: 'A',
    disabled: false,
|
||||
import { notification } from 'antd';
|
||||
import { notification, Space } from 'antd';
|
||||
import getAll from 'api/alerts/getAll';
|
||||
import ReleaseNote from 'components/ReleaseNote';
|
||||
import Spinner from 'components/Spinner';
|
||||
import React, { useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
|
||||
import ListAlert from './ListAlert';
|
||||
|
||||
function ListAlertRules(): JSX.Element {
|
||||
const { t } = useTranslation('common');
|
||||
const location = useLocation();
|
||||
const { data, isError, isLoading, refetch, status } = useQuery('allAlerts', {
|
||||
queryFn: getAll,
|
||||
cacheTime: 0,
|
||||
@@ -45,12 +48,15 @@ function ListAlertRules(): JSX.Element {
|
||||
}
|
||||
|
||||
return (
|
||||
<ListAlert
|
||||
{...{
|
||||
allAlertRules: data.payload,
|
||||
refetch,
|
||||
}}
|
||||
/>
|
||||
<Space direction="vertical" size="large" style={{ width: '100%' }}>
|
||||
<ReleaseNote path={location.pathname} />
|
||||
<ListAlert
|
||||
{...{
|
||||
allAlertRules: data.payload,
|
||||
refetch,
|
||||
}}
|
||||
/>
|
||||
</Space>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
1 frontend/src/container/LogControls/config.ts Normal file
@@ -0,0 +1 @@
export const ITEMS_PER_PAGE_OPTIONS = [25, 50, 100, 200];
@@ -4,41 +4,29 @@ import {
  RightOutlined,
} from '@ant-design/icons';
import { Button, Divider, Select } from 'antd';
import React, { memo } from 'react';
import { connect, useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { getLogs } from 'store/actions/logs/getLogs';
import React, { memo, useMemo } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import {
  GET_NEXT_LOG_LINES,
  GET_PREVIOUS_LOG_LINES,
  RESET_ID_START_AND_END,
  SET_LOG_LINES_PER_PAGE,
} from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs';

import { ITEMS_PER_PAGE_OPTIONS } from './config';
import { Container } from './styles';

const { Option } = Select;

const ITEMS_PER_PAGE_OPTIONS = [25, 50, 100, 200];

interface LogControlsProps {
  getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
}
function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );
function LogControls(): JSX.Element | null {
  const {
    logLinesPerPage,
    idStart,
    idEnd,
    liveTail,
    searchFilter: { queryString },
    isLoading: isLogsLoading,
    isLoadingAggregate,
    logs,
  } = useSelector<AppState, ILogsReducer>((state) => state.logs);
  const dispatch = useDispatch();

@@ -53,18 +41,6 @@ function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
    dispatch({
      type: RESET_ID_START_AND_END,
    });

    if (liveTail === 'STOPPED')
      getLogs({
        q: queryString,
        limit: logLinesPerPage,
        orderBy: 'timestamp',
        order: 'desc',
        timestampStart: minTime,
        timestampEnd: maxTime,
        ...(idStart ? { idGt: idStart } : {}),
        ...(idEnd ? { idLt: idEnd } : {}),
      });
  };

  const handleNavigatePrevious = (): void => {
@@ -78,44 +54,61 @@ function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
    });
  };

  const isLoading = isLogsLoading || isLoadingAggregate;

  const isNextAndPreviousDisabled = useMemo(
    () =>
      isLoading ||
      logLinesPerPage === 0 ||
      logs.length === 0 ||
      logs.length < logLinesPerPage,
    [isLoading, logLinesPerPage, logs.length],
  );

  if (liveTail !== 'STOPPED') {
    return null;
  }

  return (
    <Container>
      <Button size="small" type="link" onClick={handleGoToLatest}>
      <Button
        loading={isLoading}
        size="small"
        type="link"
        onClick={handleGoToLatest}
      >
        <FastBackwardOutlined /> Go to latest
      </Button>
      <Divider type="vertical" />
      <Button size="small" type="link" onClick={handleNavigatePrevious}>
      <Button
        loading={isLoading}
        size="small"
        type="link"
        disabled={isNextAndPreviousDisabled}
        onClick={handleNavigatePrevious}
      >
        <LeftOutlined /> Previous
      </Button>
      <Button size="small" type="link" onClick={handleNavigateNext}>
      <Button
        loading={isLoading}
        size="small"
        type="link"
        disabled={isNextAndPreviousDisabled}
        onClick={handleNavigateNext}
      >
        Next <RightOutlined />
      </Button>
      <Select
        style={{ width: 120 }}
        loading={isLoading}
        value={logLinesPerPage}
        onChange={handleLogLinesPerPageChange}
      >
        {ITEMS_PER_PAGE_OPTIONS.map((count) => {
          return <Option key={count} value={count}>{`${count} / page`}</Option>;
        })}
        {ITEMS_PER_PAGE_OPTIONS.map((count) => (
          <Option key={count} value={count}>{`${count} / page`}</Option>
        ))}
      </Select>
    </Container>
  );
}

interface DispatchProps {
  getLogs: (
    props: Parameters<typeof getLogs>[0],
  ) => (dispatch: Dispatch<AppActions>) => void;
}

const mapDispatchToProps = (
  dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
  getLogs: bindActionCreators(getLogs, dispatch),
});

export default connect(null, mapDispatchToProps)(memo(LogControls));
export default memo(LogControls);
@@ -3,7 +3,7 @@ import Graph from 'components/Graph';
|
||||
import Spinner from 'components/Spinner';
|
||||
import dayjs from 'dayjs';
|
||||
import getStep from 'lib/getStep';
|
||||
import React, { memo, useEffect, useRef } from 'react';
|
||||
import React, { memo, useEffect, useMemo, useRef } from 'react';
|
||||
import { connect, useSelector } from 'react-redux';
|
||||
import { bindActionCreators, Dispatch } from 'redux';
|
||||
import { ThunkDispatch } from 'redux-thunk';
|
||||
@@ -77,6 +77,18 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [getLogsAggregate, maxTime, minTime, liveTail]);
|
||||
|
||||
const graphData = useMemo(() => {
|
||||
return {
|
||||
labels: logsAggregate.map((s) => new Date(s.timestamp / 1000000)),
|
||||
datasets: [
|
||||
{
|
||||
data: logsAggregate.map((s) => s.value),
|
||||
backgroundColor: blue[4],
|
||||
},
|
||||
],
|
||||
};
|
||||
}, [logsAggregate]);
|
||||
|
||||
return (
|
||||
<Container>
|
||||
{isLoadingAggregate ? (
|
||||
@@ -84,15 +96,7 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
|
||||
) : (
|
||||
<Graph
|
||||
name="usage"
|
||||
data={{
|
||||
labels: logsAggregate.map((s) => new Date(s.timestamp / 1000000)),
|
||||
datasets: [
|
||||
{
|
||||
data: logsAggregate.map((s) => s.value),
|
||||
backgroundColor: blue[4],
|
||||
},
|
||||
],
|
||||
}}
|
||||
data={graphData}
|
||||
type="bar"
|
||||
containerHeight="100%"
|
||||
animate
|
||||
|
||||
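Hoisting the `labels`/`datasets` construction into `useMemo` gives `<Graph>` a referentially stable `data` object, so the chart only re-renders when `logsAggregate` actually changes. The division by 1,000,000 converts nanosecond timestamps to the milliseconds `Date` expects; a small sketch of that conversion:

```ts
// logsAggregate timestamps arrive in nanoseconds; Date expects milliseconds.
const NS_PER_MS = 1_000_000;

const toLabel = (timestampNs: number): Date => new Date(timestampNs / NS_PER_MS);

// e.g. toLabel(1_672_531_200_000_000_000) -> 2023-01-01T00:00:00.000Z
```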
@@ -0,0 +1,36 @@
import { Button, Row } from 'antd';
import React from 'react';

import { QueryFields } from './utils';

interface SearchFieldsActionBarProps {
  fieldsQuery: QueryFields[][];
  applyUpdate: () => void;
  clearFilters: () => void;
}

export function SearchFieldsActionBar({
  fieldsQuery,
  applyUpdate,
  clearFilters,
}: SearchFieldsActionBarProps): JSX.Element | null {
  if (fieldsQuery.length === 0) {
    return null;
  }

  return (
    <Row style={{ justifyContent: 'flex-end', paddingRight: '2.4rem' }}>
      <Button
        type="default"
        onClick={clearFilters}
        style={{ marginRight: '1rem' }}
      >
        Clear Filter
      </Button>
      <Button type="primary" onClick={applyUpdate}>
        Apply
      </Button>
    </Row>
  );
}
export default SearchFieldsActionBar;
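`SearchFieldsActionBar` renders nothing until at least one field row exists, so the Apply/Clear buttons never appear over an empty builder. A hedged usage sketch wiring it up with placeholder handlers (in `SearchFields` these come from `useCallback` hooks):

```tsx
import React from 'react';

import { SearchFieldsActionBar } from './ActionBar';
import { QueryFields } from './utils';

// Placeholder handlers; the console.log calls stand in for the real
// applyUpdate/clearFilters callbacks.
function Example({ fieldsQuery }: { fieldsQuery: QueryFields[][] }): JSX.Element {
  return (
    <SearchFieldsActionBar
      fieldsQuery={fieldsQuery}
      applyUpdate={(): void => console.log('apply')}
      clearFilters={(): void => console.log('clear')}
    />
  );
}
```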
@@ -12,19 +12,15 @@ import {
  QueryOperatorsMultiVal,
  QueryOperatorsSingleVal,
} from 'lib/logql/tokens';
import { flatten } from 'lodash-es';
import React, { useEffect, useMemo, useRef, useState } from 'react';
import React, { useMemo } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ILogsReducer } from 'types/reducer/logs';
import { v4 } from 'uuid';

import { SearchFieldsProps } from '..';
import FieldKey from '../FieldKey';
import { QueryFieldContainer } from '../styles';
import { createParsedQueryStructure } from '../utils';
import { QueryFields } from '../utils';
import { Container, QueryWrapper } from './styles';
import { hashCode, parseQuery } from './utils';

const { Option } = Select;

@@ -68,7 +64,6 @@ function QueryField({
  const {
    fields: { selected },
  } = useSelector<AppState, ILogsReducer>((store) => store.logs);

  const getFieldType = (inputKey: string): string => {
    // eslint-disable-next-line no-restricted-syntax
    for (const selectedField of selected) {
@@ -147,9 +142,12 @@ function QueryField({
        />
      ) : (
        <Input
          onChange={(e): void => handleChange(2, e.target.value)}
          onChange={(e): void => {
            handleChange(2, e.target.value);
          }}
          style={{ width: '100%' }}
          defaultValue={query[2] && query[2].value}
          value={query[2] && query[2].value}
        />
      )}
    </div>
@@ -165,85 +163,78 @@ function QueryField({
}

interface QueryConditionFieldProps {
  query: { value: string | string[]; type: string }[];
  query: QueryFields;
  queryIndex: number;
  onUpdate: (arg0: unknown, arg1: number) => void;
}

export type Query = { value: string | string[]; type: string }[];

export interface QueryBuilderProps {
  keyPrefix: string;
  onDropDownToggleHandler: (value: boolean) => VoidFunction;
  fieldsQuery: QueryFields[][];
  setFieldsQuery: (q: QueryFields[][]) => void;
}

function QueryBuilder({
  updateParsedQuery,
  keyPrefix,
  fieldsQuery,
  setFieldsQuery,
  onDropDownToggleHandler,
}: SearchFieldsProps): JSX.Element {
  const {
    searchFilter: { parsedQuery },
  } = useSelector<AppState, ILogsReducer>((store) => store.logs);

  const keyPrefixRef = useRef(hashCode(JSON.stringify(parsedQuery)));
  const [keyPrefix, setKeyPrefix] = useState(keyPrefixRef.current);
  const generatedQueryStructure = createParsedQueryStructure(
    parsedQuery as never[],
  );

  useEffect(() => {
    const incomingHashCode = hashCode(JSON.stringify(parsedQuery));
    if (incomingHashCode !== keyPrefixRef.current) {
      keyPrefixRef.current = incomingHashCode;
      setKeyPrefix(incomingHashCode);
    }
  }, [parsedQuery]);

}: QueryBuilderProps): JSX.Element {
  const handleUpdate = (query: Query, queryIndex: number): void => {
    const updatedParsedQuery = generatedQueryStructure;
    updatedParsedQuery[queryIndex] = parseQuery(query) as never;

    const flatParsedQuery = flatten(updatedParsedQuery).filter((q) => q.value);
    keyPrefixRef.current = hashCode(JSON.stringify(flatParsedQuery));
    updateParsedQuery(flatParsedQuery);
    const updated = [...fieldsQuery];
    updated[queryIndex] = query as never; // parseQuery(query) as never;
    setFieldsQuery(updated);
  };

  const handleDelete = (queryIndex: number): void => {
    const updatedParsedQuery = generatedQueryStructure;
    updatedParsedQuery.splice(queryIndex - 1, 2);
    const updated = [...fieldsQuery];
    if (queryIndex !== 0) updated.splice(queryIndex - 1, 2);
    else updated.splice(queryIndex, 2);

    const flatParsedQuery = flatten(updatedParsedQuery).filter((q) => q.value);
    keyPrefixRef.current = v4();
    updateParsedQuery(flatParsedQuery);
    setFieldsQuery(updated);
  };

  const QueryUI = (): JSX.Element | JSX.Element[] =>
    generatedQueryStructure.map((query, idx) => {
      if (Array.isArray(query))
        return (
  const QueryUI = (
    fieldsQuery: QueryFields[][],
  ): JSX.Element | JSX.Element[] => {
    const result: JSX.Element[] = [];
    fieldsQuery.forEach((query, idx) => {
      if (Array.isArray(query) && query.length > 1) {
        result.push(
          <QueryField
            key={keyPrefix + idx}
            query={query as never}
            queryIndex={idx}
            onUpdate={handleUpdate as never}
            onDelete={handleDelete}
          />,
        );

        return (
          <div key={keyPrefix + idx}>
            <QueryConditionField
              query={query}
              queryIndex={idx}
              onUpdate={handleUpdate as never}
            />
          </div>
        );
      } else {
        result.push(
          <div key={keyPrefix + idx}>
            <QueryConditionField
              query={Array.isArray(query) ? query[0] : query}
              queryIndex={idx}
              onUpdate={handleUpdate as never}
            />
          </div>,
        );
      }
    });
    return result;
  };

  return (
    <>
      <Container isMargin={generatedQueryStructure.length === 0}>
      <Container isMargin={fieldsQuery.length === 0}>
        <CategoryHeading>LOG QUERY BUILDER</CategoryHeading>
        <CloseSquareOutlined onClick={onDropDownToggleHandler(false)} />
      </Container>

      <QueryWrapper>{QueryUI()}</QueryWrapper>
      <QueryWrapper key={keyPrefix}>{QueryUI(fieldsQuery)}</QueryWrapper>
    </>
  );
}
@@ -21,17 +21,3 @@ export const parseQuery = (queries: Query): Query => {
  }
  return queries;
};

export const hashCode = (s: string): string => {
  if (!s) {
    return '0';
  }
  return `${Math.abs(
    s.split('').reduce((a, b) => {
      // eslint-disable-next-line no-bitwise, no-param-reassign
      a = (a << 5) - a + b.charCodeAt(0);
      // eslint-disable-next-line no-bitwise
      return a & a;
    }, 0),
  )}`;
};
@@ -2,9 +2,9 @@ import { Button } from 'antd';
import CategoryHeading from 'components/Logs/CategoryHeading';
import map from 'lodash-es/map';
import React from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ADD_SEARCH_FIELD_QUERY_STRING } from 'types/actions/logs';
// import { ADD_SEARCH_FIELD_QUERY_STRING } from 'types/actions/logs';
import { ILogsReducer } from 'types/reducer/logs';

import FieldKey from './FieldKey';
@@ -12,15 +12,15 @@ import FieldKey from './FieldKey';
interface SuggestedItemProps {
  name: string;
  type: string;
  applySuggestion: (name: string) => void;
}
function SuggestedItem({ name, type }: SuggestedItemProps): JSX.Element {
  const dispatch = useDispatch();

function SuggestedItem({
  name,
  type,
  applySuggestion,
}: SuggestedItemProps): JSX.Element {
  const addSuggestedField = (): void => {
    dispatch({
      type: ADD_SEARCH_FIELD_QUERY_STRING,
      payload: name,
    });
    applySuggestion(name);
  };
  return (
    <Button
@@ -33,7 +33,11 @@ function SuggestedItem({ name, type }: SuggestedItemProps): JSX.Element {
  );
}

function Suggestions(): JSX.Element {
interface SuggestionsProps {
  applySuggestion: (name: string) => void;
}

function Suggestions({ applySuggestion }: SuggestionsProps): JSX.Element {
  const {
    fields: { selected },
  } = useSelector<AppState, ILogsReducer>((store) => store.logs);
@@ -47,6 +51,7 @@ function Suggestions(): JSX.Element {
        key={JSON.stringify(field)}
        name={field.name}
        type={field.type}
        applySuggestion={applySuggestion}
      />
    ))}
  </div>
@@ -1,8 +1,21 @@
import React from 'react';
import { notification } from 'antd';
import { flatten } from 'lodash-es';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ILogsReducer } from 'types/reducer/logs';

import { SearchFieldsActionBar } from './ActionBar';
import QueryBuilder from './QueryBuilder/QueryBuilder';
import Suggestions from './Suggestions';
import { QueryFields } from './utils';
import {
  createParsedQueryStructure,
  fieldsQueryIsvalid,
  hashCode,
  initQueryKOVPair,
  prepareConditionOperator,
  QueryFields,
} from './utils';

export interface SearchFieldsProps {
  updateParsedQuery: (query: QueryFields[]) => void;
@@ -13,13 +26,85 @@ function SearchFields({
  updateParsedQuery,
  onDropDownToggleHandler,
}: SearchFieldsProps): JSX.Element {
  const {
    searchFilter: { parsedQuery },
  } = useSelector<AppState, ILogsReducer>((store) => store.logs);

  const [fieldsQuery, setFieldsQuery] = useState(
    createParsedQueryStructure([...parsedQuery] as never[]),
  );

  const keyPrefixRef = useRef(hashCode(JSON.stringify(fieldsQuery)));

  useEffect(() => {
    const updatedFieldsQuery = createParsedQueryStructure([
      ...parsedQuery,
    ] as never[]);
    setFieldsQuery(updatedFieldsQuery);
    const incomingHashCode = hashCode(JSON.stringify(updatedFieldsQuery));
    if (incomingHashCode !== keyPrefixRef.current) {
      keyPrefixRef.current = incomingHashCode;
    }
  }, [parsedQuery]);

  const addSuggestedField = useCallback(
    (name: string): void => {
      if (!name) {
        return;
      }

      const query = [...fieldsQuery];

      if (fieldsQuery.length > 0) {
        query.push([prepareConditionOperator()]);
      }

      const newField: QueryFields[] = [];
      initQueryKOVPair(name).forEach((q) => newField.push(q));

      query.push(newField);
      keyPrefixRef.current = hashCode(JSON.stringify(query));
      setFieldsQuery(query);
    },
    [fieldsQuery, setFieldsQuery],
  );

  const applyUpdate = useCallback((): void => {
    const flatParsedQuery = flatten(fieldsQuery);

    if (!fieldsQueryIsvalid(flatParsedQuery)) {
      notification.error({
        message: 'Please enter valid criteria for each of the selected fields',
      });
      return;
    }

    keyPrefixRef.current = hashCode(JSON.stringify(flatParsedQuery));
    updateParsedQuery(flatParsedQuery);
    onDropDownToggleHandler(false)();
  }, [onDropDownToggleHandler, fieldsQuery, updateParsedQuery]);

  const clearFilters = useCallback((): void => {
    keyPrefixRef.current = hashCode(JSON.stringify([]));
    updateParsedQuery([]);
    onDropDownToggleHandler(false)();
  }, [onDropDownToggleHandler, updateParsedQuery]);

  return (
    <>
      <QueryBuilder
        key={keyPrefixRef.current}
        keyPrefix={keyPrefixRef.current}
        onDropDownToggleHandler={onDropDownToggleHandler}
        updateParsedQuery={updateParsedQuery}
        fieldsQuery={fieldsQuery}
        setFieldsQuery={setFieldsQuery}
      />
      <Suggestions />
      <SearchFieldsActionBar
        applyUpdate={applyUpdate}
        clearFilters={clearFilters}
        fieldsQuery={fieldsQuery}
      />
      <Suggestions applySuggestion={addSuggestedField} />
    </>
  );
}
@@ -2,11 +2,30 @@
// @ts-ignore
// @ts-nocheck

import { QueryTypes, QueryOperatorsSingleVal } from 'lib/logql/tokens';
import {
  ConditionalOperators,
  QueryTypes,
  ValidTypeSequence,
  ValidTypeValue,
} from 'lib/logql/tokens';

export interface QueryFields {
  type: keyof typeof QueryTypes;
  value: string;
  value: string | string[];
}

export function fieldsQueryIsvalid(queryFields: QueryFields[]): boolean {
  let lastOp: QueryFields | undefined;
  let result = true;
  queryFields.forEach((q, idx) => {
    if (!q.value || q.value === null || q.value === '') result = false;

    if (Array.isArray(q.value) && q.value.length === 0) result = false;

    const nextOp = idx < queryFields.length - 1 ? queryFields[idx + 1] : undefined;
    if (!ValidTypeSequence(lastOp?.type, q?.type, nextOp?.type)) result = false;

    if (!ValidTypeValue(lastOp?.value, q.value)) result = false;
    lastOp = q;
  });
  return result;
}

export const queryKOVPair = (): QueryFields[] => [
@@ -23,6 +42,29 @@ export const queryKOVPair = (): QueryFields[] => [
    value: null,
  },
];

export const initQueryKOVPair = (
  name: string | null = null,
  op: string | null = null,
  value: string | string[] | null = null,
): QueryFields[] => [
  {
    type: QueryTypes.QUERY_KEY,
    value: name,
  },
  {
    type: QueryTypes.QUERY_OPERATOR,
    value: op,
  },
  {
    type: QueryTypes.QUERY_VALUE,
    value,
  },
];

export const prepareConditionOperator = (
  op: string = ConditionalOperators.AND,
): QueryFields => {
  return {
    type: QueryTypes.CONDITIONAL_OPERATOR,
    value: op,
  };
};

export const createParsedQueryStructure = (parsedQuery = []) => {
  if (!parsedQuery.length) {
    return parsedQuery;
@@ -64,3 +106,17 @@ export const createParsedQueryStructure = (parsedQuery = []) => {
  });
  return structuredArray;
};

export const hashCode = (s: string): string => {
  if (!s) {
    return '0';
  }
  return `${Math.abs(
    s.split('').reduce((a, b) => {
      // eslint-disable-next-line no-bitwise, no-param-reassign
      a = (a << 5) - a + b.charCodeAt(0);
      // eslint-disable-next-line no-bitwise
      return a & a;
    }, 0),
  )}`;
};
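The relocated `hashCode` is the classic 32-bit string hash (`h = h * 31 + c`, expressed via shifts); here it derives a stable React `key` for `QueryBuilder`, so the builder subtree remounts exactly when the serialized query content changes. A small sketch of that behaviour (the field objects are illustrative):

```ts
// Same input -> same key, so React keeps the subtree mounted.
const a = hashCode(JSON.stringify([{ type: 'QUERY_KEY', value: 'status' }]));
const b = hashCode(JSON.stringify([{ type: 'QUERY_KEY', value: 'status' }]));
// a === b

// Different content -> (almost always) a different key, forcing a clean
// remount; hash collisions are theoretically possible but harmless here.
const c = hashCode(JSON.stringify([{ type: 'QUERY_KEY', value: 'method' }]));
// c !== a
```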
@@ -1,7 +1,14 @@
import { Input, InputRef, Popover } from 'antd';
import useUrlQuery from 'hooks/useUrlQuery';
import getStep from 'lib/getStep';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { debounce } from 'lodash-es';
import React, {
  useCallback,
  useEffect,
  useMemo,
  useRef,
  useState,
} from 'react';
import { connect, useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
@@ -9,7 +16,12 @@ import { getLogs } from 'store/actions/logs/getLogs';
import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { TOGGLE_LIVE_TAIL } from 'types/actions/logs';
import {
  FLUSH_LOGS,
  SET_LOADING,
  SET_LOADING_AGGREGATE,
  TOGGLE_LIVE_TAIL,
} from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs';

@@ -22,12 +34,31 @@ function SearchFilter({
  getLogsAggregate,
}: SearchFilterProps): JSX.Element {
  const {
    queryString,
    updateParsedQuery,
    updateQueryString,
    queryString,
  } = useSearchParser();
  const [searchText, setSearchText] = useState(queryString);
  const [showDropDown, setShowDropDown] = useState(false);
  const searchRef = useRef<InputRef>(null);
  const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
    AppState,
    ILogsReducer
  >((state) => state.logs);
  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );
  const dispatch = useDispatch<Dispatch<AppActions>>();

  // keep sync with url queryString
  useEffect(() => {
    setSearchText(queryString);
  }, [queryString]);

  const debouncedupdateQueryString = useMemo(
    () => debounce(updateQueryString, 300),
    [updateQueryString],
  );

  const onDropDownToggleHandler = useCallback(
    (value: boolean) => (): void => {
@@ -36,17 +67,6 @@ function SearchFilter({
    [],
  );

  const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
    AppState,
    ILogsReducer
  >((state) => state.logs);

  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );

  const dispatch = useDispatch<Dispatch<AppActions>>();

  const handleSearch = useCallback(
    (customQuery) => {
      if (liveTail === 'PLAYING') {
@@ -54,6 +74,9 @@ function SearchFilter({
          type: TOGGLE_LIVE_TAIL,
          payload: 'PAUSED',
        });
        dispatch({
          type: FLUSH_LOGS,
        });
        setTimeout(
          () =>
            dispatch({
@@ -103,9 +126,32 @@ function SearchFilter({
  const urlQueryString = urlQuery.get('q');

  useEffect(() => {
    handleSearch(urlQueryString || '');
    dispatch({
      type: SET_LOADING,
      payload: true,
    });
    dispatch({
      type: SET_LOADING_AGGREGATE,
      payload: true,
    });

    const debouncedHandleSearch = debounce(handleSearch, 600);

    debouncedHandleSearch(urlQueryString || '');

    return (): void => {
      debouncedHandleSearch.cancel();
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [urlQueryString, maxTime, minTime]);
  }, [
    urlQueryString,
    maxTime,
    minTime,
    idEnd,
    idStart,
    logLinesPerPage,
    dispatch,
  ]);

  return (
    <Container>
@@ -132,12 +178,13 @@ function SearchFilter({
        <Input.Search
          ref={searchRef}
          placeholder="Search Filter"
          value={queryString}
          value={searchText}
          onChange={(e): void => {
            updateQueryString(e.target.value);
            const { value } = e.target;
            setSearchText(value);
            debouncedupdateQueryString(value);
          }}
          allowClear
          onSearch={handleSearch}
        />
      </Popover>
    </Container>
@@ -145,12 +192,8 @@ function SearchFilter({
}

interface DispatchProps {
  getLogs: (
    props: Parameters<typeof getLogs>[0],
  ) => (dispatch: Dispatch<AppActions>) => void;
  getLogsAggregate: (
    props: Parameters<typeof getLogsAggregate>[0],
  ) => (dispatch: Dispatch<AppActions>) => void;
  getLogs: typeof getLogs;
  getLogsAggregate: typeof getLogsAggregate;
}

type SearchFilterProps = DispatchProps;
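Two separate debounces are at work here: keystrokes update local `searchText` immediately but push into the URL-backed query string only after 300 ms of silence, while the effect that re-runs the search on URL or time-range changes is itself debounced by 600 ms and cancelled on cleanup. A minimal sketch of the cancel-on-cleanup pattern, assuming lodash's `debounce` (the `runSearch` trigger stands in for `handleSearch`):

```ts
import { debounce } from 'lodash-es';

// Hypothetical search trigger standing in for handleSearch.
const runSearch = (q: string): void => console.log('searching', q);

const debouncedSearch = debounce(runSearch, 600);
debouncedSearch('status:error');

// On unmount (or before the effect re-runs), drop any pending call so a
// stale search cannot fire after the dependencies have already changed.
debouncedSearch.cancel();
```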
@@ -23,12 +23,10 @@ export function useSearchParser(): {

  const updateQueryString = useCallback(
    (updatedQueryString) => {
      if (updatedQueryString) {
        history.push({
          pathname: history.location.pathname,
          search: updatedQueryString ? `?q=${updatedQueryString}` : '',
        });
      }
      history.replace({
        pathname: history.location.pathname,
        search: updatedQueryString ? `?q=${updatedQueryString}` : '',
      });

      dispatch({
        type: SET_SEARCH_QUERY_STRING,
@@ -0,0 +1,65 @@
import {
  IMetricsBuilderFormula,
  IMetricsBuilderQuery,
  IQueryBuilderTagFilterItems,
} from 'types/api/dashboard/getAll';

import {
  getQueryBuilderQueries,
  getQueryBuilderQuerieswithFormula,
} from './MetricsPageQueriesFactory';

export const databaseCallsRPS = ({
  servicename,
  legend,
  tagFilterItems,
}: DatabaseCallsRPSProps): {
  formulas: IMetricsBuilderFormula[];
  queryBuilder: IMetricsBuilderQuery[];
} => {
  const metricName = 'signoz_db_latency_count';
  const groupBy = ['db_system'];

  return getQueryBuilderQueries({
    metricName,
    legend,
    groupBy,
    servicename,
    tagFilterItems,
  });
};

export const databaseCallsAvgDuration = ({
  servicename,
  tagFilterItems,
}: DatabaseCallProps): {
  formulas: IMetricsBuilderFormula[];
  queryBuilder: IMetricsBuilderQuery[];
} => {
  const metricNameA = 'signoz_db_latency_sum';
  const metricNameB = 'signoz_db_latency_count';
  const expression = 'A/B';
  const legendFormula = '';
  const legend = '';
  const disabled = true;

  return getQueryBuilderQuerieswithFormula({
    servicename,
    legend,
    disabled,
    tagFilterItems,
    metricNameA,
    metricNameB,
    expression,
    legendFormula,
  });
};

interface DatabaseCallsRPSProps extends DatabaseCallProps {
  legend: '{{db_system}}';
}

interface DatabaseCallProps {
  servicename: string | undefined;
  tagFilterItems: IQueryBuilderTagFilterItems[] | [];
}
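Both helpers return the `{ formulas, queryBuilder }` shape the dashboard widgets consume; `databaseCallsAvgDuration` divides latency sum by count (`A/B`) with both inputs disabled so only the formula is plotted. A hedged usage sketch; the import path and the input values are illustrative, not confirmed by this PR:

```ts
import {
  databaseCallsAvgDuration,
  databaseCallsRPS,
} from 'container/MetricsApplication/MetricsPageQueries/DBCall';

// In the metrics pages, tagFilterItems would come from the page's
// resource-attribute filters; an empty list means no extra filtering.
const rpsWidget = databaseCallsRPS({
  servicename: 'frontend',
  legend: '{{db_system}}',
  tagFilterItems: [],
});

const avgDurationWidget = databaseCallsAvgDuration({
  servicename: 'frontend',
  tagFilterItems: [],
});
```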
@@ -0,0 +1,126 @@
+import {
+	IMetricsBuilderFormula,
+	IMetricsBuilderQuery,
+	IQueryBuilderTagFilterItems,
+} from 'types/api/dashboard/getAll';
+
+import {
+	getQueryBuilderQueries,
+	getQueryBuilderQuerieswithAdditionalItems,
+	getQueryBuilderQuerieswithFormula,
+} from './MetricsPageQueriesFactory';
+
+const groupBy = ['address'];
+
+export const externalCallErrorPercent = ({
+	servicename,
+	legend,
+	tagFilterItems,
+}: ExternalCallDurationByAddressProps): {
+	formulas: IMetricsBuilderFormula[];
+	queryBuilder: IMetricsBuilderQuery[];
+} => {
+	const metricNameA = 'signoz_external_call_latency_count';
+	const metricNameB = 'signoz_external_call_latency_count';
+	const additionalItems = {
+		id: '',
+		key: 'status_code',
+		op: 'IN',
+		value: ['STATUS_CODE_ERROR'],
+	};
+
+	const legendFormula = 'External Call Error Percentage';
+	const expression = 'A*100/B';
+	const disabled = true;
+	return getQueryBuilderQuerieswithAdditionalItems({
+		metricNameA,
+		metricNameB,
+		additionalItems,
+		servicename,
+		legend,
+		groupBy,
+		disabled,
+		tagFilterItems,
+		expression,
+		legendFormula,
+	});
+};
+
+export const externalCallDuration = ({
+	servicename,
+	tagFilterItems,
+}: ExternalCallProps): {
+	formulas: IMetricsBuilderFormula[];
+	queryBuilder: IMetricsBuilderQuery[];
+} => {
+	const metricNameA = 'signoz_external_call_latency_sum';
+	const metricNameB = 'signoz_external_call_latency_count';
+	const expression = 'A/B';
+	const legendFormula = 'Average Duration';
+	const legend = '';
+	const disabled = true;
+
+	return getQueryBuilderQuerieswithFormula({
+		servicename,
+		legend,
+		disabled,
+		tagFilterItems,
+		metricNameA,
+		metricNameB,
+		expression,
+		legendFormula,
+	});
+};
+
+export const externalCallRpsByAddress = ({
+	servicename,
+	legend,
+	tagFilterItems,
+}: ExternalCallDurationByAddressProps): {
+	formulas: IMetricsBuilderFormula[];
+	queryBuilder: IMetricsBuilderQuery[];
+} => {
+	const metricName = 'signoz_external_call_latency_count';
+	return getQueryBuilderQueries({
+		servicename,
+		legend,
+		tagFilterItems,
+		metricName,
+		groupBy,
+	});
+};
+
+export const externalCallDurationByAddress = ({
+	servicename,
+	legend,
+	tagFilterItems,
+}: ExternalCallDurationByAddressProps): {
+	formulas: IMetricsBuilderFormula[];
+	queryBuilder: IMetricsBuilderQuery[];
+} => {
+	const metricNameA = 'signoz_external_call_latency_sum';
+	const metricNameB = 'signoz_external_call_latency_count';
+	const expression = 'A/B';
+	const legendFormula = legend;
+	const disabled = true;
+	return getQueryBuilderQuerieswithFormula({
+		servicename,
+		legend,
+		disabled,
+		tagFilterItems,
+		metricNameA,
+		metricNameB,
+		expression,
+		legendFormula,
+		groupBy,
+	});
+};
+
+interface ExternalCallDurationByAddressProps extends ExternalCallProps {
+	legend: '{{address}}';
+}
+
+export interface ExternalCallProps {
+	servicename: string | undefined;
+	tagFilterItems: IQueryBuilderTagFilterItems[];
+}
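The error-percentage helper above is the interesting one: query A reuses the same latency-count metric as query B but pins an extra status_code filter, so the formula A*100/B yields the share of failed external calls. A usage sketch, under the same import-path assumption as the earlier example:

import { externalCallErrorPercent } from 'container/MetricsApplication/MetricsPageQueries/ExternalQueries';

const errorPercent = externalCallErrorPercent({
	servicename: 'frontend',
	legend: '{{address}}',
	tagFilterItems: [],
});

// A counts only spans with status_code IN ['STATUS_CODE_ERROR'];
// B counts every external-call span, so F1 = A*100/B is a percentage.
console.log(errorPercent.formulas[0].expression); // 'A*100/B'
console.log(errorPercent.queryBuilder.length); // 2 (queries A and B)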
@@ -1,238 +0,0 @@
-import {
-	IMetricsBuilderFormula,
-	IMetricsBuilderQuery,
-	IQueryBuilderTagFilterItems,
-} from 'types/api/dashboard/getAll';
-
-export const externalCallErrorPercent = ({
-	servicename,
-	legend,
-	tagFilterItems,
-}: ExternalCallDurationByAddressProps): {
-	formulas: IMetricsBuilderFormula[];
-	queryBuilder: IMetricsBuilderQuery[];
-} => ({
-	formulas: [
-		{
-			name: 'F1',
-			expression: 'A*100/B',
-			disabled: false,
-			legend: 'External Call Error Percentage',
-		},
-	],
-	queryBuilder: [
-		{
-			name: 'A',
-			aggregateOperator: 18,
-			metricName: 'signoz_external_call_latency_count',
-			tagFilters: {
-				items: [
-					{
-						id: '',
-						key: 'service_name',
-						op: 'IN',
-						value: [`${servicename}`],
-					},
-					{
-						id: '',
-						key: 'status_code',
-						op: 'IN',
-						value: ['STATUS_CODE_ERROR'],
-					},
-					...tagFilterItems,
-				],
-				op: 'AND',
-			},
-			groupBy: ['address'],
-			legend,
-			disabled: false,
-		},
-		{
-			name: 'B',
-			aggregateOperator: 18,
-			metricName: 'signoz_external_call_latency_count',
-			tagFilters: {
-				items: [
-					{
-						id: '',
-						key: 'service_name',
-						op: 'IN',
-						value: [`${servicename}`],
-					},
-					...tagFilterItems,
-				],
-				op: 'AND',
-			},
-			groupBy: ['address'],
-			legend,
-			disabled: false,
-		},
-	],
-});
-
-export const externalCallDuration = ({
-	servicename,
-	tagFilterItems,
-}: ExternalCallProps): {
-	formulas: IMetricsBuilderFormula[];
-	queryBuilder: IMetricsBuilderQuery[];
-} => ({
-	formulas: [
-		{
-			disabled: false,
-			expression: 'A/B',
-			name: 'F1',
-			legend: 'Average Duration',
-		},
-	],
-	queryBuilder: [
-		{
-			aggregateOperator: 18,
-			disabled: true,
-			groupBy: [],
-			legend: '',
-			metricName: 'signoz_external_call_latency_sum',
-			name: 'A',
-			reduceTo: 1,
-			tagFilters: {
-				items: [
-					{
-						id: '',
-						key: 'service_name',
-						op: 'IN',
-						value: [`${servicename}`],
-					},
-					...tagFilterItems,
-				],
-				op: 'AND',
-			},
-		},
-		{
-			aggregateOperator: 18,
-			disabled: true,
-			groupBy: [],
-			legend: '',
-			metricName: 'signoz_external_call_latency_count',
-			name: 'B',
-			reduceTo: 1,
-			tagFilters: {
-				items: [
-					{
-						id: '',
-						key: 'service_name',
-						op: 'IN',
-						value: [`${servicename}`],
-					},
-					...tagFilterItems,
-				],
-				op: 'AND',
-			},
-		},
-	],
-});
-
-export const externalCallRpsByAddress = ({
-	servicename,
-	legend,
-	tagFilterItems,
-}: ExternalCallDurationByAddressProps): {
-	formulas: IMetricsBuilderFormula[];
-	queryBuilder: IMetricsBuilderQuery[];
-} => ({
-	formulas: [],
-	queryBuilder: [
-		{
-			aggregateOperator: 18,
-			disabled: false,
-			groupBy: ['address'],
-			legend,
-			metricName: 'signoz_external_call_latency_count',
-			name: 'A',
-			reduceTo: 1,
-			tagFilters: {
-				items: [
-					{
-						id: '',
-						key: 'service_name',
-						op: 'IN',
-						value: [`${servicename}`],
-					},
-					...tagFilterItems,
-				],
-				op: 'AND',
-			},
-		},
-	],
-});
-
-export const externalCallDurationByAddress = ({
-	servicename,
-	legend,
-	tagFilterItems,
-}: ExternalCallDurationByAddressProps): {
-	formulas: IMetricsBuilderFormula[];
-	queryBuilder: IMetricsBuilderQuery[];
-} => ({
-	formulas: [
-		{
-			disabled: false,
-			expression: 'A/B',
-			name: 'F1',
-			legend,
-		},
-	],
-	queryBuilder: [
-		{
-			aggregateOperator: 18,
-			disabled: false,
-			groupBy: ['address'],
-			legend,
-			metricName: 'signoz_external_call_latency_sum',
-			name: 'A',
-			reduceTo: 1,
-			tagFilters: {
-				items: [
-					{
-						id: '',
-						key: 'service_name',
-						op: 'IN',
-						value: [`${servicename}`],
-					},
-					...tagFilterItems,
-				],
-				op: 'AND',
-			},
-		},
-		{
-			aggregateOperator: 18,
-			disabled: false,
-			groupBy: ['address'],
-			legend,
-			metricName: 'signoz_external_call_latency_count',
-			name: 'B',
-			reduceTo: 1,
-			tagFilters: {
-				items: [
-					{
-						id: '',
-						key: 'service_name',
-						op: 'IN',
-						value: [`${servicename}`],
-					},
-					...tagFilterItems,
-				],
-				op: 'AND',
-			},
-		},
-	],
-});
-
-interface ExternalCallDurationByAddressProps extends ExternalCallProps {
-	legend: '{{address}}';
-}
-
-interface ExternalCallProps {
-	servicename: string | undefined;
-	tagFilterItems: IQueryBuilderTagFilterItems[] | [];
-}
@@ -0,0 +1,206 @@
+import {
+	IMetricsBuilderFormula,
+	IMetricsBuilderQuery,
+	IQueryBuilderTagFilterItems,
+} from 'types/api/dashboard/getAll';
+
+import { ExternalCallProps } from './ExternalQueries';
+
+export const getQueryBuilderQueries = ({
+	metricName,
+	groupBy,
+	servicename,
+	legend,
+	tagFilterItems,
+}: BuilderQueriesProps): {
+	formulas: IMetricsBuilderFormula[];
+	queryBuilder: IMetricsBuilderQuery[];
+} => ({
+	formulas: [],
+	queryBuilder: [
+		{
+			aggregateOperator: 18,
+			disabled: false,
+			groupBy,
+			legend,
+			metricName,
+			name: 'A',
+			reduceTo: 1,
+			tagFilters: {
+				items: [
+					{
+						id: '',
+						key: 'service_name',
+						op: 'IN',
+						value: [`${servicename}`],
+					},
+					...tagFilterItems,
+				],
+				op: 'AND',
+			},
+		},
+	],
+});
+
+export const getQueryBuilderQuerieswithFormula = ({
+	servicename,
+	legend,
+	disabled,
+	tagFilterItems,
+	metricNameA,
+	metricNameB,
+	groupBy,
+	expression,
+	legendFormula,
+}: BuilderQuerieswithFormulaProps): {
+	formulas: IMetricsBuilderFormula[];
+	queryBuilder: IMetricsBuilderQuery[];
+} => {
+	return {
+		formulas: [
+			{
+				disabled: false,
+				expression,
+				name: 'F1',
+				legend: legendFormula,
+			},
+		],
+		queryBuilder: [
+			{
+				aggregateOperator: 18,
+				disabled,
+				groupBy,
+				legend,
+				metricName: metricNameA,
+				name: 'A',
+				reduceTo: 1,
+				tagFilters: {
+					items: [
+						{
+							id: '',
+							key: 'service_name',
+							op: 'IN',
+							value: [`${servicename}`],
+						},
+						...tagFilterItems,
+					],
+					op: 'AND',
+				},
+			},
+			{
+				aggregateOperator: 18,
+				disabled,
+				groupBy,
+				legend,
+				metricName: metricNameB,
+				name: 'B',
+				reduceTo: 1,
+				tagFilters: {
+					items: [
+						{
+							id: '',
+							key: 'service_name',
+							op: 'IN',
+							value: [`${servicename}`],
+						},
+						...tagFilterItems,
+					],
+					op: 'AND',
+				},
+			},
+		],
+	};
+};
+
+export const getQueryBuilderQuerieswithAdditionalItems = ({
+	servicename,
+	legend,
+	disabled,
+	tagFilterItems,
+	metricNameA,
+	metricNameB,
+	groupBy,
+	expression,
+	legendFormula,
+	additionalItems,
+}: BuilderQuerieswithAdditionalItems): {
+	formulas: IMetricsBuilderFormula[];
+	queryBuilder: IMetricsBuilderQuery[];
+} => ({
+	formulas: [
+		{
+			disabled: false,
+			expression,
+			name: 'F1',
+			legend: legendFormula,
+		},
+	],
+	queryBuilder: [
+		{
+			aggregateOperator: 18,
+			disabled,
+			groupBy,
+			legend,
+			metricName: metricNameA,
+			name: 'A',
+			reduceTo: 1,
+			tagFilters: {
+				items: [
+					{
+						id: '',
+						key: 'service_name',
+						op: 'IN',
+						value: [`${servicename}`],
+					},
+					additionalItems,
+					...tagFilterItems,
+				],
+				op: 'AND',
+			},
+		},
+		{
+			aggregateOperator: 18,
+			disabled,
+			groupBy,
+			legend,
+			metricName: metricNameB,
+			name: 'B',
+			reduceTo: 1,
+			tagFilters: {
+				items: [
+					{
+						id: '',
+						key: 'service_name',
+						op: 'IN',
+						value: [`${servicename}`],
+					},
+					...tagFilterItems,
+				],
+				op: 'AND',
+			},
+		},
+	],
+});
+
+interface BuilderQueriesProps extends ExternalCallProps {
+	metricName: string;
+	groupBy?: string[];
+	legend: string;
+}
+
+interface BuilderQuerieswithFormulaProps extends ExternalCallProps {
+	metricNameA: string;
+	metricNameB: string;
+	legend: string;
+	disabled: boolean;
+	groupBy?: string[];
+	expression: string;
+	legendFormula: string;
+}
+
+interface BuilderQuerieswithAdditionalItems
+	extends BuilderQuerieswithFormulaProps {
+	additionalItems: IQueryBuilderTagFilterItems;
+}
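Every query the factory emits pins service_name IN [servicename] as its first tag filter and spreads the caller's tagFilterItems after it; the withFormula variant emits queries A and B plus a single formula F1. A sketch using the argument values databaseCallsAvgDuration passes:

import { getQueryBuilderQuerieswithFormula } from 'container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory';

const { formulas, queryBuilder } = getQueryBuilderQuerieswithFormula({
	servicename: 'frontend',
	legend: '',
	disabled: true, // hides A and B so only the formula F1 is plotted
	tagFilterItems: [],
	metricNameA: 'signoz_db_latency_sum',
	metricNameB: 'signoz_db_latency_count',
	expression: 'A/B',
	legendFormula: 'Average Duration',
});

console.log(queryBuilder.map((q) => q.name)); // ['A', 'B']
// The first filter on each query is always the service pin:
console.log(queryBuilder[0].tagFilters.items[0]);
// { id: '', key: 'service_name', op: 'IN', value: ['frontend'] }
console.log(formulas[0].name); // 'F1'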
@@ -1,19 +1,30 @@
 import { Col } from 'antd';
-import FullView from 'container/GridGraphLayout/Graph/FullView';
-import React from 'react';
+import FullView from 'container/GridGraphLayout/Graph/FullView/index.metricsBuilder';
+import {
+	databaseCallsAvgDuration,
+	databaseCallsRPS,
+} from 'container/MetricsApplication/MetricsPageQueries/DBCallQueries';
+import { resourceAttributesToTagFilterItems } from 'lib/resourceAttributes';
+import React, { useMemo } from 'react';
 import { useSelector } from 'react-redux';
 import { useParams } from 'react-router-dom';
 import { AppState } from 'store/reducers';
-import { PromQLWidgets } from 'types/api/dashboard/getAll';
+import { Widgets } from 'types/api/dashboard/getAll';
 import MetricReducer from 'types/reducer/metrics';
 
 import { Card, GraphContainer, GraphTitle, Row } from '../styles';
 
-function DBCall({ getWidget }: DBCallProps): JSX.Element {
+function DBCall({ getWidgetQueryBuilder }: DBCallProps): JSX.Element {
 	const { servicename } = useParams<{ servicename?: string }>();
-	const { resourceAttributePromQLQuery } = useSelector<AppState, MetricReducer>(
+	const { resourceAttributeQueries } = useSelector<AppState, MetricReducer>(
 		(state) => state.metrics,
 	);
+	const tagFilterItems = useMemo(
+		() => resourceAttributesToTagFilterItems(resourceAttributeQueries) || [],
+		[resourceAttributeQueries],
+	);
+	const legend = '{{db_system}}';
+
 	return (
 		<Row gutter={24}>
 			<Col span={12}>
@@ -23,12 +34,16 @@ function DBCall({ getWidget }: DBCallProps): JSX.Element {
 					<FullView
 						name="database_call_rps"
 						fullViewOptions={false}
-						widget={getWidget([
-							{
-								query: `sum(rate(signoz_db_latency_count{service_name="${servicename}"${resourceAttributePromQLQuery}}[5m])) by (db_system)`,
-								legend: '{{db_system}}',
-							},
-						])}
+						widget={getWidgetQueryBuilder({
+							queryType: 1,
+							promQL: [],
+							metricsBuilder: databaseCallsRPS({
+								servicename,
+								legend,
+								tagFilterItems,
+							}),
+							clickHouse: [],
+						})}
 						yAxisUnit="reqps"
 					/>
 				</GraphContainer>
@@ -42,12 +57,15 @@ function DBCall({ getWidget }: DBCallProps): JSX.Element {
 					<FullView
 						name="database_call_avg_duration"
 						fullViewOptions={false}
-						widget={getWidget([
-							{
-								query: `sum(rate(signoz_db_latency_sum{service_name="${servicename}"${resourceAttributePromQLQuery}}[5m]))/sum(rate(signoz_db_latency_count{service_name="${servicename}"${resourceAttributePromQLQuery}}[5m]))`,
-								legend: '',
-							},
-						])}
+						widget={getWidgetQueryBuilder({
+							queryType: 1,
+							promQL: [],
+							metricsBuilder: databaseCallsAvgDuration({
+								servicename,
+								tagFilterItems,
+							}),
+							clickHouse: [],
+						})}
 						yAxisUnit="ms"
 					/>
 				</GraphContainer>
@@ -58,7 +76,7 @@ function DBCall({ getWidget }: DBCallProps): JSX.Element {
 }
 
 interface DBCallProps {
-	getWidget: (query: PromQLWidgets['query']) => PromQLWidgets;
+	getWidgetQueryBuilder: (query: Widgets['query']) => Widgets;
 }
 
 export default DBCall;
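The widget payload handed to getWidgetQueryBuilder bundles all three query flavours and populates only the metrics-builder slot here; queryType: 1 is read off this diff and assumed to select the query builder. A sketch of the shape:

import { databaseCallsRPS } from 'container/MetricsApplication/MetricsPageQueries/DBCallQueries';

const widgetQuery = {
	queryType: 1, // value taken from this diff; assumed to mean "query builder"
	promQL: [], // the PromQL and ClickHouse slots stay empty
	metricsBuilder: databaseCallsRPS({
		servicename: 'frontend',
		legend: '{{db_system}}',
		tagFilterItems: [],
	}),
	clickHouse: [],
};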
@@ -58,7 +58,7 @@ function OverViewTab(): JSX.Element {
 }
 
 function DbCallTab(): JSX.Element {
-	return <DBCall getWidget={getWidget} />;
+	return <DBCall getWidgetQueryBuilder={getWidgetQueryBuilder} />;
 }
 
 function ExternalTab(): JSX.Element {
@@ -12,7 +12,7 @@ import AppReducer from 'types/reducer/app';
 import { NameInput } from '../styles';
 
 function UpdateName(): JSX.Element {
-	const { user, role, org } = useSelector<AppState, AppReducer>(
+	const { user, role, org, userFlags } = useSelector<AppState, AppReducer>(
 		(state) => state.app,
 	);
 	const { t } = useTranslation();
@@ -47,6 +47,7 @@ function UpdateName(): JSX.Element {
 					ROLE: role || 'ADMIN',
 					orgId: org[0].id,
 					orgName: org[0].name,
+					userFlags: userFlags || {},
 				},
 			});
 		} else {
@@ -48,9 +48,8 @@ function AddTags({ tags, setTags }: AddTagsProps): JSX.Element {
 				{tags.map((tag, index) => {
 					if (editInputIndex === index) {
 						return (
-							<Col lg={4}>
+							<Col key={tag} lg={4}>
 								<Input
-									key={tag}
 									size="small"
 									value={editInputValue}
 									onChangeHandler={(event): void =>
@@ -1,7 +1,7 @@
 import { Button, Space, Typography } from 'antd';
 import React from 'react';
 
-import { IconContainer, TitleContainer } from './styles';
+import { IconContainer, TitleContainer, TitleText } from './styles';
 
 function Row({
 	onClickHandler,
@@ -16,8 +16,8 @@ function Row({
 			<IconContainer>{Icon}</IconContainer>
 
 			<TitleContainer>
-				<Typography>{title}</Typography>
-				<Typography.Text italic>{subTitle}</Typography.Text>
+				<TitleText>{title}</TitleText>
+				<Typography.Text>{subTitle}</Typography.Text>
 			</TitleContainer>
 
 			<Button disabled={isDisabled} onClick={onClickHandler} type="primary">
@@ -1,3 +1,4 @@
+import { Typography } from 'antd';
 import styled from 'styled-components';
 
 export const TitleContainer = styled.div`
@@ -9,3 +10,7 @@ export const TitleContainer = styled.div`
 export const IconContainer = styled.div`
 	min-width: 70px;
 `;
+
+export const TitleText = styled(Typography)`
+	font-weight: bold;
+`;
@@ -1,34 +1,51 @@
 import { GoogleSquareFilled, KeyOutlined } from '@ant-design/icons';
-import { Space, Typography } from 'antd';
-import React, { useCallback } from 'react';
+import { Typography } from 'antd';
+import React, { useCallback, useMemo } from 'react';
+import { AuthDomain, GOOGLE_AUTH, SAML } from 'types/api/SAML/listDomain';
 
 import Row, { RowProps } from './Row';
-import { RowContainer } from './styles';
+import { RowContainer, RowSpace } from './styles';
 
 function Create({
+	ssoMethod,
+	assignSsoMethod,
 	setIsSettingsOpen,
 	setIsEditModalOpen,
 }: CreateProps): JSX.Element {
-	const onConfigureClickHandler = useCallback(() => {
-		console.log('Configure Clicked');
-	}, []);
-
-	const onEditSAMLHandler = useCallback(() => {
+	const onGoogleAuthClickHandler = useCallback(() => {
+		assignSsoMethod(GOOGLE_AUTH);
 		setIsSettingsOpen(false);
 		setIsEditModalOpen(true);
-	}, [setIsSettingsOpen, setIsEditModalOpen]);
+	}, [assignSsoMethod, setIsSettingsOpen, setIsEditModalOpen]);
+
+	const onEditSAMLHandler = useCallback(() => {
+		assignSsoMethod(SAML);
+		setIsSettingsOpen(false);
+		setIsEditModalOpen(true);
+	}, [assignSsoMethod, setIsSettingsOpen, setIsEditModalOpen]);
+
+	const ConfigureButtonText = useMemo(() => {
+		switch (ssoMethod) {
+			case GOOGLE_AUTH:
+				return 'Edit Google Auth';
+			case SAML:
+				return 'Edit SAML';
+			default:
+				return 'Get Started';
+		}
+	}, [ssoMethod]);
 
 	const data: RowProps[] = [
 		{
-			buttonText: 'Configure',
+			buttonText: ConfigureButtonText,
 			Icon: <GoogleSquareFilled style={{ fontSize: '37px' }} />,
 			title: 'Google Apps Authentication',
 			subTitle: 'Let members sign-in with a Google account',
-			onClickHandler: onConfigureClickHandler,
-			isDisabled: true,
+			onClickHandler: onGoogleAuthClickHandler,
+			isDisabled: false,
 		},
 		{
-			buttonText: 'Edit SAML',
+			buttonText: ConfigureButtonText,
 			Icon: <KeyOutlined style={{ fontSize: '37px' }} />,
 			onClickHandler: onEditSAMLHandler,
 			subTitle: 'Azure, Active Directory, Okta or your custom SAML 2.0 solution',
@@ -45,7 +62,7 @@ function Create({
 			</Typography.Text>
 
 			<RowContainer>
-				<Space direction="vertical">
+				<RowSpace direction="vertical">
 					{data.map((rowData) => (
 						<Row
 							Icon={rowData.Icon}
@@ -57,13 +74,15 @@ function Create({
 							isDisabled={rowData.isDisabled}
 						/>
 					))}
-				</Space>
+				</RowSpace>
 			</RowContainer>
 		</div>
 	);
 }
 
 interface CreateProps {
+	ssoMethod: AuthDomain['ssoType'];
+	assignSsoMethod: (value: AuthDomain['ssoType']) => void;
 	setIsSettingsOpen: (value: boolean) => void;
 	setIsEditModalOpen: (value: boolean) => void;
 }
@@ -1,3 +1,4 @@
+import { Space } from 'antd';
 import styled from 'styled-components';
 
 export const RowContainer = styled.div`
@@ -5,3 +6,9 @@ export const RowContainer = styled.div`
 	flex-direction: column;
 	margin-top: 1rem;
 `;
+
+export const RowSpace = styled(Space)`
+	&&& {
+		row-gap: 1.5rem !important;
+	}
+`;
@@ -0,0 +1,50 @@
+import { InfoCircleFilled } from '@ant-design/icons';
+import { Card, Form, Input, Space, Typography } from 'antd';
+import React from 'react';
+
+function EditGoogleAuth(): JSX.Element {
+	return (
+		<>
+			<Typography.Paragraph>
+				Enter OAuth 2.0 credentials obtained from the Google API Console below. Read
+				the{' '}
+				<a
+					href="https://signoz.io/docs/userguide/sso-authentication"
+					target="_blank"
+					rel="noreferrer"
+				>
+					docs
+				</a>{' '}
+				for more information.
+			</Typography.Paragraph>
+			<Form.Item
+				label="Client ID"
+				name={['googleAuthConfig', 'clientId']}
+				rules={[{ required: true, message: 'Please input Google Auth Client ID!' }]}
+			>
+				<Input />
+			</Form.Item>
+
+			<Form.Item
+				label="Client Secret"
+				name={['googleAuthConfig', 'clientSecret']}
+				rules={[
+					{ required: true, message: 'Please input Google Auth Client Secret!' },
+				]}
+			>
+				<Input />
+			</Form.Item>
+
+			<Card style={{ marginBottom: '1rem' }}>
+				<Space>
+					<InfoCircleFilled />
+					<Typography>
+						Google OAuth2 won’t be enabled unless you enter all the attributes above
+					</Typography>
+				</Space>
+			</Card>
+		</>
+	);
+}
+
+export default EditGoogleAuth;
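Worth noting in the form above: the array-style Form.Item names make antd nest the submitted values, so they arrive shaped like AuthDomain's googleAuthConfig, which is what lets the parent form pass initialValues={record} directly. A minimal standalone sketch (the Demo component is hypothetical):

import { Form, Input } from 'antd';
import React from 'react';

function Demo(): JSX.Element {
	return (
		<Form
			onFinish={(values: { googleAuthConfig?: { clientId?: string } }): void => {
				// values arrive as { googleAuthConfig: { clientId: '...' } }
				console.log(values.googleAuthConfig?.clientId);
			}}
		>
			<Form.Item label="Client ID" name={['googleAuthConfig', 'clientId']}>
				<Input />
			</Form.Item>
		</Form>
	);
}

export default Demo;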
@@ -0,0 +1,44 @@
+import { InfoCircleFilled } from '@ant-design/icons';
+import { Card, Form, Input, Space, Typography } from 'antd';
+import React from 'react';
+
+function EditSAML(): JSX.Element {
+	return (
+		<>
+			<Form.Item
+				label="SAML ACS URL"
+				name={['samlConfig', 'samlIdp']}
+				rules={[{ required: true, message: 'Please input your ACS URL!' }]}
+			>
+				<Input />
+			</Form.Item>
+
+			<Form.Item
+				label="SAML Entity ID"
+				name={['samlConfig', 'samlEntity']}
+				rules={[{ required: true, message: 'Please input your Entity Id!' }]}
+			>
+				<Input />
+			</Form.Item>
+
+			<Form.Item
+				rules={[{ required: true, message: 'Please input your Certificate!' }]}
+				label="SAML X.509 Certificate"
+				name={['samlConfig', 'samlCert']}
+			>
+				<Input.TextArea rows={4} />
+			</Form.Item>
+
+			<Card style={{ marginBottom: '1rem' }}>
+				<Space>
+					<InfoCircleFilled />
+					<Typography>
+						SAML won’t be enabled unless you enter all the attributes above
+					</Typography>
+				</Space>
+			</Card>
+		</>
+	);
+}
+
+export default EditSAML;
@@ -0,0 +1,40 @@
+import {
+	AuthDomain,
+	GOOGLE_AUTH,
+	GoogleAuthConfig,
+	isGoogleAuthConfig,
+	isSAMLConfig,
+	SAML,
+	SAMLConfig,
+} from 'types/api/SAML/listDomain';
+
+export function parseSamlForm(
+	current: AuthDomain,
+	formValues: AuthDomain,
+): SAMLConfig | undefined {
+	if (current?.ssoType === SAML && isSAMLConfig(formValues?.samlConfig)) {
+		return {
+			...current.samlConfig,
+			...formValues?.samlConfig,
+		};
+	}
+
+	return current.samlConfig;
+}
+
+export function parseGoogleAuthForm(
+	current: AuthDomain,
+	formValues: AuthDomain,
+): GoogleAuthConfig | undefined {
+	if (
+		current?.ssoType === GOOGLE_AUTH &&
+		isGoogleAuthConfig(formValues?.googleAuthConfig)
+	) {
+		return {
+			...current.googleAuthConfig,
+			...formValues?.googleAuthConfig,
+		};
+	}
+
+	return current.googleAuthConfig;
+}
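Both parsers follow one rule: merge the form section over the stored config only when the record's ssoType matches and the section passes its type guard, and otherwise hand back the stored config untouched. A sketch of the SAML path, with the record values invented for illustration:

import { AuthDomain, SAML } from 'types/api/SAML/listDomain';

import { parseSamlForm } from './helpers';

const current = {
	id: 'd1',
	name: 'example.com',
	orgId: 'o1',
	ssoEnabled: true,
	ssoType: SAML,
	samlConfig: { samlCert: 'old-cert', samlEntity: 'entity', samlIdp: 'idp' },
} as AuthDomain;

const formValues = {
	...current,
	samlConfig: { samlCert: 'new-cert', samlEntity: 'entity', samlIdp: 'idp' },
} as AuthDomain;

// ssoType matches and samlConfig passes isSAMLConfig, so the fields merge:
console.log(parseSamlForm(current, formValues)?.samlCert); // 'new-cert'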
@@ -1,27 +1,33 @@
-import { InfoCircleFilled } from '@ant-design/icons';
-import {
-	Button,
-	Card,
-	Form,
-	Input,
-	notification,
-	Space,
-	Typography,
-} from 'antd';
+import { Button, Form, notification, Space } from 'antd';
 import { useForm } from 'antd/lib/form/Form';
 import React, { useCallback } from 'react';
 import { useTranslation } from 'react-i18next';
-import { SAMLDomain } from 'types/api/SAML/listDomain';
+import { AuthDomain, GOOGLE_AUTH, SAML } from 'types/api/SAML/listDomain';
 
-function EditSaml({
-	certificate,
-	entityId,
-	url,
+import EditGoogleAuth from './EditGoogleAuth';
+import EditSAML from './EditSAML';
+import { parseGoogleAuthForm, parseSamlForm } from './helpers';
+
+// renderFormInputs selectively renders form fields depending upon
+// sso type
+const renderFormInputs = (
+	record: AuthDomain | undefined,
+): JSX.Element | undefined => {
+	switch (record?.ssoType) {
+		case GOOGLE_AUTH:
+			return <EditGoogleAuth />;
+		case SAML:
+		default:
+			return <EditSAML />;
+	}
+};
+
+function EditSSO({
 	onRecordUpdateHandler,
 	record,
 	setEditModalOpen,
 }: EditFormProps): JSX.Element {
-	const [form] = useForm<EditFormProps>();
+	const [form] = useForm<AuthDomain>();
 
 	const { t } = useTranslation(['common']);
 
@@ -32,12 +38,9 @@ function EditSaml({
 			await onRecordUpdateHandler({
 				...record,
 				ssoEnabled: true,
-				samlConfig: {
-					...record.samlConfig,
-					samlCert: values.certificate,
-					samlEntity: values.entityId,
-					samlIdp: values.url,
-				},
+				ssoType: record.ssoType,
+				samlConfig: parseSamlForm(record, values),
+				googleAuthConfig: parseGoogleAuthForm(record, values),
 			});
 		})
 		.catch(() => {
@@ -55,7 +58,7 @@ function EditSaml({
 	return (
 		<Form
 			name="basic"
-			initialValues={{ certificate, entityId, url }}
+			initialValues={record}
 			onFinishFailed={(error): void => {
 				error.errorFields.forEach(({ errors }) => {
 					notification.error({
@@ -70,39 +73,7 @@ function EditSaml({
 			autoComplete="off"
 			form={form}
 		>
-			<Form.Item
-				label="SAML ACS URL"
-				name="url"
-				rules={[{ required: true, message: 'Please input your ACS URL!' }]}
-			>
-				<Input />
-			</Form.Item>
-
-			<Form.Item
-				label="SAML Entity ID"
-				name="entityId"
-				rules={[{ required: true, message: 'Please input your Entity Id!' }]}
-			>
-				<Input />
-			</Form.Item>
-
-			<Form.Item
-				rules={[{ required: true, message: 'Please input your Certificate!' }]}
-				label="SAML X.509 Certificate"
-				name="certificate"
-			>
-				<Input.TextArea rows={4} />
-			</Form.Item>
-
-			<Card style={{ marginBottom: '1rem' }}>
-				<Space>
-					<InfoCircleFilled />
-					<Typography>
-						SAML won’t be enabled unless you enter all the attributes above
-					</Typography>
-				</Space>
-			</Card>
-
+			{renderFormInputs(record)}
 			<Space
 				style={{ width: '100%', justifyContent: 'flex-end' }}
 				align="end"
@@ -120,12 +91,9 @@ function EditSaml({
 }
 
 interface EditFormProps {
-	url: string;
-	entityId: string;
-	certificate: string;
-	onRecordUpdateHandler: (record: SAMLDomain) => Promise<boolean>;
-	record: SAMLDomain;
+	onRecordUpdateHandler: (record: AuthDomain) => Promise<boolean>;
+	record: AuthDomain;
 	setEditModalOpen: (open: boolean) => void;
 }
 
-export default EditSaml;
+export default EditSSO;
@@ -1,8 +1,8 @@
 import { Switch } from 'antd';
 import React, { useMemo, useState } from 'react';
-import { SAMLDomain } from 'types/api/SAML/listDomain';
+import { AuthDomain } from 'types/api/SAML/listDomain';
 
-import { getIsValidCertificate } from '../utils';
+import { isSSOConfigValid } from '../helpers';
 
 function SwitchComponent({
 	isDefaultChecked,
@@ -25,10 +25,7 @@ function SwitchComponent({
 		setIsLoading(false);
 	};
 
-	const isInValidVerificate = useMemo(
-		() => !getIsValidCertificate(record?.samlConfig),
-		[record],
-	);
+	const isInValidVerificate = useMemo(() => !isSSOConfigValid(record), [record]);
 
 	return (
 		<Switch
@@ -42,8 +39,8 @@ function SwitchComponent({
 
 interface SwitchComponentProps {
 	isDefaultChecked: boolean;
-	onRecordUpdateHandler: (record: SAMLDomain) => Promise<boolean>;
-	record: SAMLDomain;
+	onRecordUpdateHandler: (record: AuthDomain) => Promise<boolean>;
+	record: AuthDomain;
 }
 
 export default SwitchComponent;
@@ -0,0 +1,74 @@
+import { AuthDomain, SAML } from 'types/api/SAML/listDomain';
+
+import { isSSOConfigValid } from './helpers';
+
+const inValidCase: AuthDomain['samlConfig'][] = [
+	{
+		samlCert: '',
+		samlEntity: '',
+		samlIdp: '',
+	},
+	{
+		samlCert: '',
+		samlEntity: '',
+		samlIdp: 'asd',
+	},
+	{
+		samlCert: 'sample certificate',
+		samlEntity: '',
+		samlIdp: '',
+	},
+	{
+		samlCert: 'sample cert',
+		samlEntity: 'sample entity',
+		samlIdp: '',
+	},
+];
+
+const validCase: AuthDomain['samlConfig'][] = [
+	{
+		samlCert: 'sample cert',
+		samlEntity: 'sample entity',
+		samlIdp: 'sample idp',
+	},
+];
+
+describe('Utils', () => {
+	inValidCase.forEach((config) => {
+		it('should return invalid saml config', () => {
+			expect(
+				isSSOConfigValid({
+					id: 'test-0',
+					name: 'test',
+					orgId: '32ed234',
+					ssoEnabled: true,
+					ssoType: SAML,
+					samlConfig: {
+						samlCert: config?.samlCert || '',
+						samlEntity: config?.samlEntity || '',
+						samlIdp: config?.samlIdp || '',
+					},
+				}),
+			).toBe(false);
+		});
+	});
+
+	validCase.forEach((config) => {
+		it('should return valid saml config', () => {
+			expect(
+				isSSOConfigValid({
+					id: 'test-0',
+					name: 'test',
+					orgId: '32ed234',
+					ssoEnabled: true,
+					ssoType: SAML,
+					samlConfig: {
+						samlCert: config?.samlCert || '',
+						samlEntity: config?.samlEntity || '',
+						samlIdp: config?.samlIdp || '',
+					},
+				}),
+			).toBe(true);
+		});
+	});
+});
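The two forEach loops generate one it() per fixture; Jest's it.each would express the same table a little more directly. A sketch, assuming the suite runs on Jest:

it.each(inValidCase)('flags case %# as invalid', (config) => {
	expect(
		isSSOConfigValid({
			id: 'test-0',
			name: 'test',
			orgId: '32ed234',
			ssoEnabled: true,
			ssoType: SAML,
			samlConfig: {
				samlCert: config?.samlCert || '',
				samlEntity: config?.samlEntity || '',
				samlIdp: config?.samlIdp || '',
			},
		}),
	).toBe(false);
});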
@@ -0,0 +1,45 @@
+import { AuthDomain, GOOGLE_AUTH, SAML } from 'types/api/SAML/listDomain';
+
+export const ConfigureSsoButtonText = (
+	ssoType: AuthDomain['ssoType'],
+): string => {
+	switch (ssoType) {
+		case SAML:
+			return 'Edit SAML';
+		case GOOGLE_AUTH:
+			return 'Edit Google Auth';
+		default:
+			return 'Configure SSO';
+	}
+};
+
+export const EditModalTitleText = (
+	ssoType: AuthDomain['ssoType'] | undefined,
+): string => {
+	switch (ssoType) {
+		case SAML:
+			return 'Edit SAML Configuration';
+		case GOOGLE_AUTH:
+			return 'Edit Google Authentication';
+		default:
+			return 'Configure SSO';
+	}
+};
+
+export const isSSOConfigValid = (domain: AuthDomain): boolean => {
+	switch (domain.ssoType) {
+		case SAML:
+			return (
+				domain.samlConfig?.samlCert?.length !== 0 &&
+				domain.samlConfig?.samlEntity?.length !== 0 &&
+				domain.samlConfig?.samlIdp?.length !== 0
+			);
+		case GOOGLE_AUTH:
+			return (
+				domain.googleAuthConfig?.clientId?.length !== 0 &&
+				domain.googleAuthConfig?.clientSecret?.length !== 0
+			);
+		default:
+			return false;
+	}
+};
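A usage sketch for the validity check, with the domain values invented. One caveat worth flagging: the comparisons use ?.length !== 0, so a missing config object short-circuits to undefined !== 0, which is true; a SAML domain with no samlConfig at all would pass, and only empty strings are rejected.

import { AuthDomain, GOOGLE_AUTH, SAML } from 'types/api/SAML/listDomain';

import { isSSOConfigValid } from './helpers';

const samlDomain = {
	id: 'd1',
	name: 'example.com',
	orgId: 'o1',
	ssoEnabled: true,
	ssoType: SAML,
	samlConfig: { samlCert: 'cert', samlEntity: 'entity', samlIdp: 'https://idp' },
} as AuthDomain;

console.log(isSSOConfigValid(samlDomain)); // true: all three fields non-empty

const googleDomain = {
	...samlDomain,
	ssoType: GOOGLE_AUTH,
	googleAuthConfig: { clientId: '', clientSecret: 'secret' },
} as AuthDomain;

console.log(isSSOConfigValid(googleDomain)); // false: clientId is empty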
@@ -1,9 +1,10 @@
 import { LockTwoTone } from '@ant-design/icons';
-import { Button, Modal, notification, Space, Table } from 'antd';
+import { Button, Modal, notification, Space, Table, Typography } from 'antd';
 import { ColumnsType } from 'antd/lib/table';
 import deleteDomain from 'api/SAML/deleteDomain';
 import listAllDomain from 'api/SAML/listAllDomain';
 import updateDomain from 'api/SAML/updateDomain';
+import TextToolTip from 'components/TextToolTip';
 import { SIGNOZ_UPGRADE_PLAN_URL } from 'constants/app';
 import { FeatureKeys } from 'constants/featureKeys';
 import useFeatureFlag from 'hooks/useFeatureFlag';
@@ -12,25 +13,27 @@ import { useTranslation } from 'react-i18next';
 import { useQuery } from 'react-query';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
-import { SAMLDomain } from 'types/api/SAML/listDomain';
+import { AuthDomain } from 'types/api/SAML/listDomain';
 import AppReducer from 'types/reducer/app';
 import { v4 } from 'uuid';
 
 import AddDomain from './AddDomain';
 import Create from './Create';
-import EditSaml from './Edit';
+import EditSSO from './Edit';
+import { ConfigureSsoButtonText, EditModalTitleText } from './helpers';
+import { ColumnWithTooltip } from './styles';
 import SwitchComponent from './Switch';
 
 function AuthDomains(): JSX.Element {
 	const { t } = useTranslation(['common', 'organizationsettings']);
 	const [isSettingsOpen, setIsSettingsOpen] = useState<boolean>(false);
 	const { org } = useSelector<AppState, AppReducer>((state) => state.app);
-	const [currentDomain, setCurrentDomain] = useState<SAMLDomain>();
+	const [currentDomain, setCurrentDomain] = useState<AuthDomain>();
 	const [isEditModalOpen, setIsEditModalOpen] = useState(false);
 
 	const SSOFlag = useFeatureFlag(FeatureKeys.SSO);
 
-	const notEntripriseData: SAMLDomain[] = [
+	const notEntripriseData: AuthDomain[] = [
 		{
 			id: v4(),
 			name: '',
@@ -53,6 +56,13 @@ function AuthDomains(): JSX.Element {
 		enabled: org !== null,
 	});
 
+	const assignSsoMethod = useCallback(
+		(typ: AuthDomain['ssoType']): void => {
+			setCurrentDomain({ ...currentDomain, ssoType: typ } as AuthDomain);
+		},
+		[currentDomain, setCurrentDomain],
+	);
+
 	const onCloseHandler = useCallback(
 		(func: React.Dispatch<React.SetStateAction<boolean>>) => (): void => {
 			func(false);
@@ -61,7 +71,7 @@ function AuthDomains(): JSX.Element {
 	);
 
 	const onRecordUpdateHandler = useCallback(
-		async (record: SAMLDomain): Promise<boolean> => {
+		async (record: AuthDomain): Promise<boolean> => {
 			try {
 				const response = await updateDomain(record);
 
@@ -104,15 +114,20 @@ function AuthDomains(): JSX.Element {
 	);
 
 	const onEditHandler = useCallback(
-		(record: SAMLDomain) => (): void => {
-			onOpenHandler(setIsEditModalOpen)();
+		(record: AuthDomain) => (): void => {
+			if (!record.ssoType) {
+				onOpenHandler(setIsSettingsOpen)();
+			} else {
+				onOpenHandler(setIsEditModalOpen)();
+			}
+
 			setCurrentDomain(record);
 		},
 		[onOpenHandler],
 	);
 
 	const onDeleteHandler = useCallback(
-		(record: SAMLDomain) => (): void => {
+		(record: AuthDomain) => (): void => {
 			Modal.confirm({
 				centered: true,
 				title: t('delete_domain', {
@@ -146,17 +161,27 @@ function AuthDomains(): JSX.Element {
 		window.open(SIGNOZ_UPGRADE_PLAN_URL);
 	}, []);
 
-	const columns: ColumnsType<SAMLDomain> = [
+	const columns: ColumnsType<AuthDomain> = [
 		{
 			title: 'Domain',
 			dataIndex: 'name',
 			key: 'name',
 		},
 		{
-			title: 'Enforce SSO',
+			title: (
+				<ColumnWithTooltip>
+					<Typography>Enforce SSO</Typography>
+					<TextToolTip
+						{...{
+							text: `When enabled, this option restricts users to SSO based authentication. For more information, click `,
+							url: 'https://signoz.io/docs/userguide/sso-authentication/',
+						}}
+					/>{' '}
+				</ColumnWithTooltip>
+			),
 			dataIndex: 'ssoEnabled',
 			key: 'ssoEnabled',
-			render: (value: boolean, record: SAMLDomain): JSX.Element => {
+			render: (value: boolean, record: AuthDomain): JSX.Element => {
 				if (!SSOFlag) {
 					return (
 						<Button
@@ -182,7 +207,7 @@ function AuthDomains(): JSX.Element {
 			title: '',
 			dataIndex: 'description',
 			key: 'description',
-			render: (_, record: SAMLDomain): JSX.Element => {
+			render: (_, record: AuthDomain): JSX.Element => {
 				if (!SSOFlag) {
 					return (
 						<Button
@@ -197,7 +222,7 @@ function AuthDomains(): JSX.Element {
 
 				return (
 					<Button type="link" onClick={onEditHandler(record)}>
-						Edit SSO
+						{ConfigureSsoButtonText(record.ssoType)}
 					</Button>
 				);
 			},
@@ -235,12 +260,14 @@ function AuthDomains(): JSX.Element {
 				footer={null}
 			>
 				<Create
+					ssoMethod={currentDomain?.ssoType as AuthDomain['ssoType']}
+					assignSsoMethod={assignSsoMethod}
 					setIsEditModalOpen={setIsEditModalOpen}
 					setIsSettingsOpen={setIsSettingsOpen}
 				/>
 			</Modal>
 			<Table
-				rowKey={(record: SAMLDomain): string => record.name + v4()}
+				rowKey={(record: AuthDomain): string => record.name + v4()}
 				dataSource={!SSOFlag ? notEntripriseData : []}
 				columns={columns}
 				tableLayout="fixed"
@@ -262,6 +289,8 @@ function AuthDomains(): JSX.Element {
 				footer={null}
 			>
 				<Create
+					ssoMethod={currentDomain?.ssoType as AuthDomain['ssoType']}
+					assignSsoMethod={assignSsoMethod}
 					setIsSettingsOpen={setIsSettingsOpen}
 					setIsEditModalOpen={setIsEditModalOpen}
 				/>
@@ -270,18 +299,15 @@ function AuthDomains(): JSX.Element {
 			<Modal
 				visible={isEditModalOpen}
 				centered
-				title="Configure SAML"
+				title={EditModalTitleText(currentDomain?.ssoType)}
 				onCancel={onCloseHandler(setIsEditModalOpen)}
 				destroyOnClose
 				style={{ minWidth: '600px' }}
 				footer={null}
 			>
-				<EditSaml
-					certificate={currentDomain?.samlConfig?.samlCert || ''}
-					entityId={currentDomain?.samlConfig?.samlEntity || ''}
-					url={currentDomain?.samlConfig?.samlIdp || ''}
+				<EditSSO
 					onRecordUpdateHandler={onRecordUpdateHandler}
-					record={currentDomain as SAMLDomain}
+					record={currentDomain as AuthDomain}
 					setEditModalOpen={setIsEditModalOpen}
 				/>
 			</Modal>
@@ -294,7 +320,7 @@ function AuthDomains(): JSX.Element {
 				loading={isLoading}
 				columns={columns}
 				tableLayout="fixed"
-				rowKey={(record: SAMLDomain): string => record.name + v4()}
+				rowKey={(record: AuthDomain): string => record.name + v4()}
 			/>
 		</Space>
 	</>
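One detail to watch in the table setup above: rowKey returns record.name + v4(), so every render mints a fresh key and React remounts each row instead of reconciling it. A stable key, sketched here on the assumption that domain ids are unique, avoids that:

import { AuthDomain } from 'types/api/SAML/listDomain';

// Stable across renders, so antd's Table can reconcile rows in place.
const rowKey = (record: AuthDomain): string => record.id;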
@@ -1,3 +1,4 @@
+import { Row } from 'antd';
 import styled from 'styled-components';
 
 export const Container = styled.div`
@@ -5,3 +6,9 @@ export const Container = styled.div`
 	justify-content: space-between;
 	align-items: center;
 `;
+
+export const ColumnWithTooltip = styled(Row)`
+	&&& > article {
+		margin-right: 0.5rem;
+	}
+`;
Some files were not shown because too many files have changed in this diff.