Compare commits

Comparing `v0.69.0-cl` ... `feat/timez`: 21 commits.

| Author | SHA1 | Date |
|---|---|---|
| | 1b48eb81c6 | |
| | 504811546e | |
| | e012f10395 | |
| | 676e32ea09 | |
| | 28045772b8 | |
| | b1120c7d16 | |
| | 55c9205aad | |
| | 5b4f423f9f | |
| | cc376ce6a8 | |
| | 20e00c597a | |
| | ff7da5c05b | |
| | 14ccadaeb5 | |
| | 984f3829dd | |
| | 31a9ead2fc | |
| | 65ce8eaf14 | |
| | daec491c79 | |
| | 49e29567f4 | |
| | 8edd5fe7d6 | |
| | 178a3153dd | |
| | e7f1b27a5b | |
| | dbf0f236be | |
.github/workflows/build.yaml (vendored, 1 changed line)

@@ -3,6 +3,7 @@ name: build-pipeline
on:
  pull_request:
    branches:
      - develop
      - main
      - release/v*
.github/workflows/docs.yml (vendored, 2 changed lines)

@@ -3,7 +3,7 @@ name: "Update PR labels and Block PR until related docs are shipped for the feat
on:
  pull_request:
    branches:
      - main
      - develop
    types: [opened, edited, labeled, unlabeled]

permissions:
.github/workflows/e2e-k3s.yaml (vendored, 2 changed lines)

@@ -42,7 +42,7 @@ jobs:
          kubectl create ns sample-application

          # apply hotrod k8s manifest file
          kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/main/sample-apps/hotrod/hotrod.yaml
          kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/develop/sample-apps/hotrod/hotrod.yaml

          # wait for all deployments in sample-application namespace to be READY
          kubectl -n sample-application get deploy --output name | xargs -r -n1 -t kubectl -n sample-application rollout status --timeout=300s
.github/workflows/jest-coverage-changes.yml (vendored, 5 changed lines)

@@ -2,8 +2,7 @@ name: Jest Coverage - changed files

on:
  pull_request:
    branches:
      - main
    branches: develop

jobs:
  build:
@@ -12,7 +11,7 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: "refs/heads/main"
          ref: "refs/heads/develop"
          token: ${{ secrets.GITHUB_TOKEN }} # Provide the GitHub token for authentication

      - name: Fetch branch
.github/workflows/prereleaser.yaml (vendored, 36 changed lines)

@@ -1,36 +0,0 @@
name: prereleaser

on:
  # schedule every wednesday 9:30 AM UTC (3pm IST)
  schedule:
    - cron: '30 9 * * 3'

  # allow manual triggering of the workflow by a maintainer
  workflow_dispatch:
    inputs:
      release_type:
        description: "Type of the release"
        type: choice
        required: true
        options:
          - 'patch'
          - 'minor'
          - 'major'

jobs:
  verify:
    uses: signoz/primus.workflows/.github/workflows/github-verify.yaml@main
    secrets: inherit
    with:
      PRIMUS_REF: main
      GITHUB_TEAM_NAME: releaser
      GITHUB_MEMBER_NAME: ${{ github.actor }}
  signoz:
    if: ${{ always() && (needs.verify.result == 'success' || github.event.name == 'schedule') }}
    uses: signoz/primus.workflows/.github/workflows/releaser.yaml@main
    secrets: inherit
    needs: [verify]
    with:
      PRIMUS_REF: main
      PROJECT_NAME: signoz
      RELEASE_TYPE: ${{ inputs.release_type || 'minor' }}
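For reference, the `cron: '30 9 * * 3'` expression in this workflow fires at 09:30 UTC every Wednesday, and the `workflow_dispatch` path takes a `release_type` input. A minimal sketch of triggering it by hand, assuming an authenticated GitHub CLI; the repository name here is only illustrative:

```bash
# Cron field order: minute hour day-of-month month day-of-week
# '30 9 * * 3'  ->  09:30 UTC, Wednesdays only (day-of-week 3)

# Manually trigger the workflow_dispatch path with a chosen release_type
gh workflow run prereleaser.yaml --repo SigNoz/signoz -f release_type=patch

# Inspect the most recent runs of that workflow
gh run list --workflow=prereleaser.yaml --repo SigNoz/signoz --limit 5
```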
.github/workflows/push.yaml (vendored, 12 changed lines)

@@ -4,6 +4,7 @@ on:
  push:
    branches:
      - main
      - develop
    tags:
      - v*

@@ -57,17 +58,6 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Create .env file
        run: |
          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
      - name: Setup golang
        uses: actions/setup-go@v4
        with:
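The removed "Create .env file" step builds `frontend/.env` with a truncating `>` redirect for the first line and appending `>>` redirects for the rest. A small sketch of the same pattern with placeholder values instead of repository secrets:

```bash
# First write truncates (or creates) the file, later writes append to it
echo 'INTERCOM_APP_ID="example-app-id"' > frontend/.env
echo 'SEGMENT_ID="example-segment-id"' >> frontend/.env
cat frontend/.env
```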
.github/workflows/releaser.yaml (vendored, 32 changed lines)

@@ -1,32 +0,0 @@
name: releaser

on:
  # trigger on new latest release
  release:
    types: [published]

jobs:
  detect:
    runs-on: ubuntu-latest
    outputs:
      release_type: ${{ steps.find.outputs.release_type }}
    steps:
      - id: find
        name: find
        run: |
          release_tag=${{ github.event.release.tag_name }}
          patch_number=$(echo $release_tag | awk -F. '{print $3}')
          release_type="minor"
          if [[ $patch_number -ne 0 ]]; then
            release_type="patch"
          fi
          echo "release_type=${release_type}" >> "$GITHUB_OUTPUT"
  charts:
    uses: signoz/primus.workflows/.github/workflows/github-trigger.yaml@main
    secrets: inherit
    needs: [detect]
    with:
      PRIMUS_REF: main
      GITHUB_REPOSITORY_NAME: charts
      GITHUB_EVENT_NAME: prereleaser
      GITHUB_EVENT_PAYLOAD: "{\"release_type\": \"${{ needs.detect.outputs.release_type }}\"}"
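The `detect` job above derives the release type from the published tag: a non-zero patch component means a patch release, otherwise minor. The same logic can be exercised locally; the tag below is just an example value:

```bash
release_tag="v0.69.1"                                  # example tag
patch_number=$(echo "$release_tag" | awk -F. '{print $3}')
release_type="minor"
if [[ $patch_number -ne 0 ]]; then
  release_type="patch"
fi
echo "release_type=${release_type}"                    # -> release_type=patch
```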
.github/workflows/sonar.yml (vendored, 1 changed line)

@@ -3,6 +3,7 @@ on:
  pull_request:
    branches:
      - main
      - develop
    paths:
      - 'frontend/**'
defaults:
.github/workflows/staging-deployment.yaml (vendored, 6 changed lines)

@@ -1,12 +1,12 @@
name: staging-deployment
# Trigger deployment only on push to main branch
# Trigger deployment only on push to develop branch
on:
  push:
    branches:
      - main
      - develop
jobs:
  deploy:
    name: Deploy latest main branch to staging
    name: Deploy latest develop branch to staging
    runs-on: ubuntu-latest
    environment: staging
    permissions:
.github/workflows/testing-deployment.yaml (vendored, 2 changed lines)

@@ -44,7 +44,7 @@ jobs:
          git add .
          git stash push -m "stashed on $(date --iso-8601=seconds)"
          git fetch origin
          git checkout main
          git checkout develop
          git pull
          # This is added to include the scenerio when new commit in PR is force-pushed
          git branch -D ${GITHUB_BRANCH}
@@ -339,7 +339,7 @@ to make SigNoz UI available at [localhost:3301](http://localhost:3301)
**5.1.1 To install the HotROD sample app:**

```bash
curl -sL https://github.com/SigNoz/signoz/raw/main/sample-apps/hotrod/hotrod-install.sh \
curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-install.sh \
  | HELM_RELEASE=my-release SIGNOZ_NAMESPACE=platform bash
```

@@ -362,7 +362,7 @@ kubectl -n sample-application run strzal --image=djbingham/curl \
**5.1.4 To delete the HotROD sample app:**

```bash
curl -sL https://github.com/SigNoz/signoz/raw/main/sample-apps/hotrod/hotrod-delete.sh \
curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-delete.sh \
  | HOTROD_NAMESPACE=sample-application bash
```
Makefile (10 changed lines)

@@ -8,7 +8,6 @@ BUILD_HASH ?= $(shell git rev-parse --short HEAD)
BUILD_TIME ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
BUILD_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
DEV_LICENSE_SIGNOZ_IO ?= https://staging-license.signoz.io/api/v1
ZEUS_URL ?= https://api.signoz.cloud
DEV_BUILD ?= "" # set to any non-empty value to enable dev build

# Internal variables or constants.
@@ -34,9 +33,8 @@ buildHash=${PACKAGE}/pkg/query-service/version.buildHash
buildTime=${PACKAGE}/pkg/query-service/version.buildTime
gitBranch=${PACKAGE}/pkg/query-service/version.gitBranch
licenseSignozIo=${PACKAGE}/ee/query-service/constants.LicenseSignozIo
zeusURL=${PACKAGE}/ee/query-service/constants.ZeusURL

LD_FLAGS=-X ${buildHash}=${BUILD_HASH} -X ${buildTime}=${BUILD_TIME} -X ${buildVersion}=${BUILD_VERSION} -X ${gitBranch}=${BUILD_BRANCH} -X ${zeusURL}=${ZEUS_URL}
LD_FLAGS=-X ${buildHash}=${BUILD_HASH} -X ${buildTime}=${BUILD_TIME} -X ${buildVersion}=${BUILD_VERSION} -X ${gitBranch}=${BUILD_BRANCH}
DEV_LD_FLAGS=-X ${licenseSignozIo}=${DEV_LICENSE_SIGNOZ_IO}

all: build-push-frontend build-push-query-service
@@ -98,12 +96,12 @@ build-query-service-static-arm64:

# Steps to build static binary of query service for all platforms
.PHONY: build-query-service-static-all
build-query-service-static-all: build-query-service-static-amd64 build-query-service-static-arm64 build-frontend-static
build-query-service-static-all: build-query-service-static-amd64 build-query-service-static-arm64

# Steps to build and push docker image of query service
.PHONY: build-query-service-amd64 build-push-query-service
# Step to build docker image of query service in amd64 (used in build pipeline)
build-query-service-amd64: build-query-service-static-amd64 build-frontend-static
build-query-service-amd64: build-query-service-static-amd64
	@echo "------------------"
	@echo "--> Building query-service docker image for amd64"
	@echo "------------------"
@@ -190,4 +188,4 @@ check-no-ee-references:
	fi

test:
	go test ./pkg/...
	go test ./pkg/query-service/...
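The `LD_FLAGS` change drops the `-X ${zeusURL}=${ZEUS_URL}` injection. For context, each `-X` flag sets a package-level string variable at link time. A minimal sketch of the same mechanism, assuming the module path `go.signoz.io/signoz` used by the imports in this diff; the output path and main package are only illustrative:

```bash
BUILD_HASH=$(git rev-parse --short HEAD)
BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

# Each -X importpath.variable=value overrides a string var in that package at link time
go build -ldflags "\
  -X go.signoz.io/signoz/pkg/query-service/version.buildHash=${BUILD_HASH} \
  -X go.signoz.io/signoz/pkg/query-service/version.buildTime=${BUILD_TIME}" \
  -o bin/query-service ./ee/query-service
```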
@@ -1,70 +0,0 @@
##################### SigNoz Configuration Example #####################
#
# Do not modify this file
#

##################### Instrumentation #####################
instrumentation:
  logs:
    level: info
    enabled: false
    processors:
      batch:
        exporter:
          otlp:
            endpoint: localhost:4317
  traces:
    enabled: false
    processors:
      batch:
        exporter:
          otlp:
            endpoint: localhost:4317
  metrics:
    enabled: true
    readers:
      pull:
        exporter:
          prometheus:
            host: "0.0.0.0"
            port: 9090

##################### Web #####################
web:
  # Whether to enable the web frontend
  enabled: true
  # The prefix to serve web on
  prefix: /
  # The directory containing the static build files.
  directory: /etc/signoz/web

##################### Cache #####################
cache:
  # specifies the caching provider to use.
  provider: memory
  # memory: Uses in-memory caching.
  memory:
    # Time-to-live for cache entries in memory. Specify the duration in ns
    ttl: 60000000000
    # The interval at which the cache will be cleaned up
    cleanupInterval: 1m
  # redis: Uses Redis as the caching backend.
  redis:
    # The hostname or IP address of the Redis server.
    host: localhost
    # The port on which the Redis server is running. Default is usually 6379.
    port: 6379
    # The password for authenticating with the Redis server, if required.
    password:
    # The Redis database number to use
    db: 0

##################### SQLStore #####################
sqlstore:
  # specifies the SQLStore provider to use.
  provider: sqlite
  # The maximum number of open connections to the database.
  max_open_conns: 100
  sqlite:
    # The path to the SQLite database file.
    path: /var/lib/signoz/signoz.db
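In the configuration example above, the in-memory cache `ttl` is expressed in nanoseconds, so `60000000000` corresponds to 60 seconds. A one-line check of that arithmetic:

```bash
echo $((60 * 10**9))   # prints 60000000000, i.e. 60 s expressed in ns
```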
@@ -58,7 +58,7 @@ from the HotROD application, you should see the data generated from hotrod in Si
```sh
kubectl create ns sample-application

kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/main/sample-apps/hotrod/hotrod.yaml
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/develop/sample-apps/hotrod/hotrod.yaml
```

To generate load:
@@ -1,4 +1,5 @@
version: "3.9"

x-clickhouse-defaults: &clickhouse-defaults
  image: clickhouse/clickhouse-server:24.1.2-alpine
  tty: true
@@ -15,7 +16,14 @@ x-clickhouse-defaults: &clickhouse-defaults
      max-file: "3"
  healthcheck:
    # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
    test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
    test:
      [
        "CMD",
        "wget",
        "--spider",
        "-q",
        "0.0.0.0:8123/ping"
      ]
    interval: 30s
    timeout: 5s
    retries: 3
@@ -24,12 +32,15 @@ x-clickhouse-defaults: &clickhouse-defaults
    nofile:
      soft: 262144
      hard: 262144

x-db-depend: &db-depend
  depends_on:
    - clickhouse
    - otel-collector-migrator
    # - clickhouse-2
    # - clickhouse-3

services:
  zookeeper-1:
    image: bitnami/zookeeper:3.7.1
@@ -46,6 +57,7 @@ services:
      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
      - ALLOW_ANONYMOUS_LOGIN=yes
      - ZOO_AUTOPURGE_INTERVAL=1

  # zookeeper-2:
  #   image: bitnami/zookeeper:3.7.0
  #   hostname: zookeeper-2
@@ -77,8 +89,9 @@ services:
  #     - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
  #     - ALLOW_ANONYMOUS_LOGIN=yes
  #     - ZOO_AUTOPURGE_INTERVAL=1

  clickhouse:
    !!merge <<: *clickhouse-defaults
    <<: *clickhouse-defaults
    hostname: clickhouse
    # ports:
    #   - "9000:9000"
@@ -90,6 +103,7 @@ services:
      - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
      # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
      - ./data/clickhouse/:/var/lib/clickhouse/

  # clickhouse-2:
  #   <<: *clickhouse-defaults
  #   hostname: clickhouse-2
@@ -117,6 +131,7 @@ services:
  #     - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
  #     - ./data/clickhouse-3/:/var/lib/clickhouse/

  alertmanager:
    image: signoz/alertmanager:0.23.7
    volumes:
@@ -129,9 +144,14 @@ services:
    deploy:
      restart_policy:
        condition: on-failure

  query-service:
    image: signoz/query-service:0.68.0
    command: ["-config=/root/config/prometheus.yml", "--use-logs-new-schema=true", "--use-trace-new-schema=true"]
    image: signoz/query-service:0.56.0
    command:
      [
        "-config=/root/config/prometheus.yml",
        "--use-logs-new-schema=true"
      ]
    # ports:
    #   - "6060:6060"     # pprof port
    #   - "8080:8080"     # query-service port
@@ -149,16 +169,24 @@ services:
      - TELEMETRY_ENABLED=true
      - DEPLOYMENT_TYPE=docker-swarm
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
      test:
        [
          "CMD",
          "wget",
          "--spider",
          "-q",
          "localhost:8080/api/v1/health"
        ]
      interval: 30s
      timeout: 5s
      retries: 3
    deploy:
      restart_policy:
        condition: on-failure
    !!merge <<: *db-depend
    <<: *db-depend

  frontend:
    image: signoz/frontend:0.68.0
    image: signoz/frontend:0.56.0
    deploy:
      restart_policy:
        condition: on-failure
@@ -169,9 +197,15 @@ services:
      - "3301:3301"
    volumes:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

  otel-collector:
    image: signoz/signoz-otel-collector:0.111.23
    command: ["--config=/etc/otel-collector-config.yaml", "--manager-config=/etc/manager-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
    image: signoz/signoz-otel-collector:0.111.5
    command:
      [
        "--config=/etc/otel-collector-config.yaml",
        "--manager-config=/etc/manager-config.yaml",
        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
      ]
    user: root # required for reading docker container logs
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -201,20 +235,20 @@ services:
      - clickhouse
      - otel-collector-migrator
      - query-service

  otel-collector-migrator:
    image: signoz/signoz-schema-migrator:0.111.23
    deploy:
      restart_policy:
        condition: on-failure
        delay: 5s
    command:
      - "sync"
      - "--dsn=tcp://clickhouse:9000"
      - "--up="
    depends_on:
      - clickhouse
      # - clickhouse-2
      # - clickhouse-3
    image: signoz/signoz-schema-migrator:0.111.5
    deploy:
      restart_policy:
        condition: on-failure
        delay: 5s
    command:
      - "--dsn=tcp://clickhouse:9000"
    depends_on:
      - clickhouse
      # - clickhouse-2
      # - clickhouse-3

  logspout:
    image: "gliderlabs/logspout:v3.2.14"
    volumes:
@@ -227,15 +261,17 @@ services:
      mode: global
      restart_policy:
        condition: on-failure

  hotrod:
    image: jaegertracing/example-hotrod:1.30
    command: ["all"]
    command: [ "all" ]
    environment:
      - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
    logging:
      options:
        max-size: 50m
        max-file: "3"

  load-hotrod:
    image: "signoz/locust:1.2.3"
    hostname: load-hotrod
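Several services in this file swap the explicit `!!merge <<:` tag for the plain `<<:` merge key on the `*clickhouse-defaults` and `*db-depend` anchors. One way to confirm the anchors still expand as intended is to render the fully resolved file; the path below is illustrative:

```bash
# Prints the compose file with anchors/merge keys expanded and variables substituted
docker compose -f deploy/docker-swarm/clickhouse-setup/docker-compose.yaml config
```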
@@ -110,7 +110,6 @@ exporters:
  clickhousetraces:
    datasource: tcp://clickhouse:9000/signoz_traces
    low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
    use_new_schema: true
  clickhousemetricswrite:
    endpoint: tcp://clickhouse:9000/signoz_metrics
    resource_to_telemetry_conversion:
@@ -1,6 +1,8 @@
version: "2.4"

include:
  - test-app-docker-compose.yaml

services:
  zookeeper-1:
    image: bitnami/zookeeper:3.7.1
@@ -18,6 +20,7 @@ services:
      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
      - ALLOW_ANONYMOUS_LOGIN=yes
      - ZOO_AUTOPURGE_INTERVAL=1

  clickhouse:
    image: clickhouse/clickhouse-server:24.1.2-alpine
    container_name: signoz-clickhouse
@@ -40,10 +43,18 @@ services:
        max-file: "3"
    healthcheck:
      # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
      test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
      test:
        [
          "CMD",
          "wget",
          "--spider",
          "-q",
          "0.0.0.0:8123/ping"
        ]
      interval: 30s
      timeout: 5s
      retries: 3

  alertmanager:
    container_name: signoz-alertmanager
    image: signoz/alertmanager:0.23.7
@@ -56,25 +67,31 @@ services:
    command:
      - --queryService.url=http://query-service:8085
      - --storage.path=/data

  otel-collector-migrator:
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.23}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.5}
    container_name: otel-migrator
    command:
      - "sync"
      - "--dsn=tcp://clickhouse:9000"
      - "--up="
    depends_on:
      clickhouse:
        condition: service_healthy
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy

  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
  otel-collector:
    container_name: signoz-otel-collector
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.23}
    command: ["--config=/etc/otel-collector-config.yaml", "--manager-config=/etc/manager-config.yaml", "--copy-path=/var/tmp/collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
    image: signoz/signoz-otel-collector:0.111.5
    command:
      [
        "--config=/etc/otel-collector-config.yaml",
        "--manager-config=/etc/manager-config.yaml",
        "--copy-path=/var/tmp/collector-config.yaml",
        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
      ]
    # user: root # required for reading docker container logs
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -103,6 +120,7 @@ services:
        condition: service_completed_successfully
      query-service:
        condition: service_healthy

  logspout:
    image: "gliderlabs/logspout:v3.2.14"
    container_name: signoz-logspout
@@ -25,8 +25,7 @@ services:
    command:
      [
        "-config=/root/config/prometheus.yml",
        "--use-logs-new-schema=true",
        "--use-trace-new-schema=true"
        "--use-logs-new-schema=true"
      ]
    ports:
      - "6060:6060"
@@ -13,7 +13,14 @@ x-clickhouse-defaults: &clickhouse-defaults
      max-file: "3"
  healthcheck:
    # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
    test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
    test:
      [
        "CMD",
        "wget",
        "--spider",
        "-q",
        "0.0.0.0:8123/ping"
      ]
    interval: 30s
    timeout: 5s
    retries: 3
@@ -22,17 +29,20 @@ x-clickhouse-defaults: &clickhouse-defaults
    nofile:
      soft: 262144
      hard: 262144

x-db-depend: &db-depend
  depends_on:
    clickhouse:
      condition: service_healthy
    otel-collector-migrator-sync:
      condition: service_completed_successfully
    # clickhouse-2:
    #   condition: service_healthy
    # clickhouse-3:
    #   condition: service_healthy
    # clickhouse-2:
    #   condition: service_healthy
    # clickhouse-3:
    #   condition: service_healthy

services:

  zookeeper-1:
    image: bitnami/zookeeper:3.7.1
    container_name: signoz-zookeeper-1
@@ -49,6 +59,7 @@ services:
      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
      - ALLOW_ANONYMOUS_LOGIN=yes
      - ZOO_AUTOPURGE_INTERVAL=1

  # zookeeper-2:
  #   image: bitnami/zookeeper:3.7.0
  #   container_name: signoz-zookeeper-2
@@ -82,8 +93,9 @@ services:
  #     - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
  #     - ALLOW_ANONYMOUS_LOGIN=yes
  #     - ZOO_AUTOPURGE_INTERVAL=1

  clickhouse:
    !!merge <<: *clickhouse-defaults
    <<: *clickhouse-defaults
    container_name: signoz-clickhouse
    hostname: clickhouse
    ports:
@@ -98,6 +110,7 @@ services:
      # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
      - ./data/clickhouse/:/var/lib/clickhouse/
      - ./user_scripts:/var/lib/clickhouse/user_scripts/

  # clickhouse-2:
  #   <<: *clickhouse-defaults
  #   container_name: signoz-clickhouse-2
@@ -115,6 +128,7 @@ services:
  #     - ./data/clickhouse-2/:/var/lib/clickhouse/
  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/

  # clickhouse-3:
  #   <<: *clickhouse-defaults
  #   container_name: signoz-clickhouse-3
@@ -131,6 +145,7 @@ services:
  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
  #     - ./data/clickhouse-3/:/var/lib/clickhouse/
  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/

  alertmanager:
    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.7}
    container_name: signoz-alertmanager
@@ -143,11 +158,17 @@ services:
    command:
      - --queryService.url=http://query-service:8085
      - --storage.path=/data

  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

  query-service:
    image: signoz/query-service:${DOCKER_TAG:-0.68.0}
    image: signoz/query-service:${DOCKER_TAG:-0.56.0}
    container_name: signoz-query-service
    command: ["-config=/root/config/prometheus.yml", "--use-logs-new-schema=true", "--use-trace-new-schema=true"]
    command:
      [
        "-config=/root/config/prometheus.yml",
        "--use-logs-new-schema=true"
      ]
    # ports:
    #   - "6060:6060"     # pprof port
    #   - "8080:8080"     # query-service port
@@ -166,13 +187,21 @@ services:
      - DEPLOYMENT_TYPE=docker-standalone-amd
    restart: on-failure
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
      test:
        [
          "CMD",
          "wget",
          "--spider",
          "-q",
          "localhost:8080/api/v1/health"
        ]
      interval: 30s
      timeout: 5s
      retries: 3
    !!merge <<: *db-depend
    <<: *db-depend

  frontend:
    image: signoz/frontend:${DOCKER_TAG:-0.68.0}
    image: signoz/frontend:${DOCKER_TAG:-0.56.0}
    container_name: signoz-frontend
    restart: on-failure
    depends_on:
@@ -182,8 +211,9 @@ services:
      - "3301:3301"
    volumes:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

  otel-collector-migrator-sync:
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.23}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.5}
    container_name: otel-migrator-sync
    command:
      - "sync"
@@ -192,12 +222,13 @@ services:
    depends_on:
      clickhouse:
        condition: service_healthy
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy

  otel-collector-migrator-async:
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.23}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.5}
    container_name: otel-migrator-async
    command:
      - "async"
@@ -208,14 +239,21 @@ services:
        condition: service_healthy
      otel-collector-migrator-sync:
        condition: service_completed_successfully
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy

  otel-collector:
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.23}
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.5}
    container_name: signoz-otel-collector
    command: ["--config=/etc/otel-collector-config.yaml", "--manager-config=/etc/manager-config.yaml", "--copy-path=/var/tmp/collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
    command:
      [
        "--config=/etc/otel-collector-config.yaml",
        "--manager-config=/etc/manager-config.yaml",
        "--copy-path=/var/tmp/collector-config.yaml",
        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
      ]
    user: root # required for reading docker container logs
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -245,6 +283,7 @@ services:
        condition: service_completed_successfully
      query-service:
        condition: service_healthy

  logspout:
    image: "gliderlabs/logspout:v3.2.14"
    container_name: signoz-logspout
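The query-service healthcheck above probes `localhost:8080/api/v1/health` with `wget --spider`, which only checks that the URL responds without downloading a body. The same probe can be run by hand against a local deployment:

```bash
# --spider checks the URL without saving output; -q suppresses wget's own logging
wget --spider -q localhost:8080/api/v1/health && echo "healthy" || echo "unhealthy"
```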
@@ -1,6 +1,8 @@
version: "2.4"

include:
  - test-app-docker-compose.yaml

x-clickhouse-defaults: &clickhouse-defaults
  restart: on-failure
  # addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
@@ -16,7 +18,14 @@ x-clickhouse-defaults: &clickhouse-defaults
      max-file: "3"
  healthcheck:
    # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
    test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
    test:
      [
        "CMD",
        "wget",
        "--spider",
        "-q",
        "0.0.0.0:8123/ping"
      ]
    interval: 30s
    timeout: 5s
    retries: 3
@@ -25,17 +34,20 @@ x-clickhouse-defaults: &clickhouse-defaults
    nofile:
      soft: 262144
      hard: 262144

x-db-depend: &db-depend
  depends_on:
    clickhouse:
      condition: service_healthy
    otel-collector-migrator:
      condition: service_completed_successfully
    # clickhouse-2:
    #   condition: service_healthy
    # clickhouse-3:
    #   condition: service_healthy
    # clickhouse-2:
    #   condition: service_healthy
    # clickhouse-3:
    #   condition: service_healthy

services:

  zookeeper-1:
    image: bitnami/zookeeper:3.7.1
    container_name: signoz-zookeeper-1
@@ -52,6 +64,7 @@ services:
      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
      - ALLOW_ANONYMOUS_LOGIN=yes
      - ZOO_AUTOPURGE_INTERVAL=1

  # zookeeper-2:
  #   image: bitnami/zookeeper:3.7.0
  #   container_name: signoz-zookeeper-2
@@ -85,8 +98,9 @@ services:
  #     - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
  #     - ALLOW_ANONYMOUS_LOGIN=yes
  #     - ZOO_AUTOPURGE_INTERVAL=1

  clickhouse:
    !!merge <<: *clickhouse-defaults
    <<: *clickhouse-defaults
    container_name: signoz-clickhouse
    hostname: clickhouse
    ports:
@@ -101,6 +115,7 @@ services:
      # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
      - ./data/clickhouse/:/var/lib/clickhouse/
      - ./user_scripts:/var/lib/clickhouse/user_scripts/

  # clickhouse-2:
  #   <<: *clickhouse-defaults
  #   container_name: signoz-clickhouse-2
@@ -118,6 +133,7 @@ services:
  #     - ./data/clickhouse-2/:/var/lib/clickhouse/
  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/

  # clickhouse-3:
  #   <<: *clickhouse-defaults
  #   container_name: signoz-clickhouse-3
@@ -134,6 +150,7 @@ services:
  #     # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
  #     - ./data/clickhouse-3/:/var/lib/clickhouse/
  #     - ./user_scripts:/var/lib/clickhouse/user_scripts/

  alertmanager:
    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.7}
    container_name: signoz-alertmanager
@@ -146,11 +163,18 @@ services:
    command:
      - --queryService.url=http://query-service:8085
      - --storage.path=/data

  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

  query-service:
    image: signoz/query-service:${DOCKER_TAG:-0.68.0}
    image: signoz/query-service:${DOCKER_TAG:-0.56.0}
    container_name: signoz-query-service
    command: ["-config=/root/config/prometheus.yml", "-gateway-url=https://api.staging.signoz.cloud", "--use-logs-new-schema=true", "--use-trace-new-schema=true"]
    command:
      [
        "-config=/root/config/prometheus.yml",
        "-gateway-url=https://api.staging.signoz.cloud",
        "--use-logs-new-schema=true"
      ]
    # ports:
    #   - "6060:6060"     # pprof port
    #   - "8080:8080"     # query-service port
@@ -170,13 +194,21 @@ services:
      - KAFKA_SPAN_EVAL=${KAFKA_SPAN_EVAL:-false}
    restart: on-failure
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
      test:
        [
          "CMD",
          "wget",
          "--spider",
          "-q",
          "localhost:8080/api/v1/health"
        ]
      interval: 30s
      timeout: 5s
      retries: 3
    !!merge <<: *db-depend
    <<: *db-depend

  frontend:
    image: signoz/frontend:${DOCKER_TAG:-0.68.0}
    image: signoz/frontend:${DOCKER_TAG:-0.56.0}
    container_name: signoz-frontend
    restart: on-failure
    depends_on:
@@ -186,22 +218,31 @@ services:
      - "3301:3301"
    volumes:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

  otel-collector-migrator:
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.23}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.5}
    container_name: otel-migrator
    command:
      - "--dsn=tcp://clickhouse:9000"
    depends_on:
      clickhouse:
        condition: service_healthy
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy
      # clickhouse-2:
      #   condition: service_healthy
      # clickhouse-3:
      #   condition: service_healthy

  otel-collector:
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.23}
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.5}
    container_name: signoz-otel-collector
    command: ["--config=/etc/otel-collector-config.yaml", "--manager-config=/etc/manager-config.yaml", "--copy-path=/var/tmp/collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
    command:
      [
        "--config=/etc/otel-collector-config.yaml",
        "--manager-config=/etc/manager-config.yaml",
        "--copy-path=/var/tmp/collector-config.yaml",
        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
      ]
    user: root # required for reading docker container logs
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -231,6 +272,7 @@ services:
        condition: service_completed_successfully
      query-service:
        condition: service_healthy

  logspout:
    image: "gliderlabs/logspout:v3.2.14"
    container_name: signoz-logspout
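Image tags in this file use the `${OTELCOL_TAG:-0.111.5}` / `${DOCKER_TAG:-0.56.0}` form, i.e. shell-style parameter expansion with a default value that Compose resolves from the environment. A quick illustration of the fallback behaviour:

```bash
unset OTELCOL_TAG
echo "signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.5}"   # default tag applies
OTELCOL_TAG=0.111.23
echo "signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.5}"   # explicit tag wins
```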
@@ -119,7 +119,6 @@ exporters:
  clickhousetraces:
    datasource: tcp://clickhouse:9000/signoz_traces
    low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
    use_new_schema: true
  clickhousemetricswrite:
    endpoint: tcp://clickhouse:9000/signoz_metrics
    resource_to_telemetry_conversion:
@@ -32,11 +32,6 @@ has_cmd() {
    command -v "$1" > /dev/null 2>&1
}

# Check if docker compose plugin is present
has_docker_compose_plugin() {
    docker compose version > /dev/null 2>&1
}

is_mac() {
    [[ $OSTYPE == darwin* ]]
}
@@ -188,7 +183,9 @@ install_docker() {
        $sudo_cmd yum-config-manager --add-repo https://download.docker.com/linux/$os/docker-ce.repo
        echo "Installing docker"
        $yum_cmd install docker-ce docker-ce-cli containerd.io

    fi

}

compose_version () {
@@ -230,6 +227,12 @@ start_docker() {
        echo "Starting docker service"
        $sudo_cmd systemctl start docker.service
    fi
    # if [[ -z $sudo_cmd ]]; then
    #     docker ps > /dev/null && true
    #     if [[ $? -ne 0 ]]; then
    #         request_sudo
    #     fi
    # fi
    if [[ -z $sudo_cmd ]]; then
        if ! docker ps > /dev/null && true; then
            request_sudo
@@ -262,7 +265,7 @@ bye() {  # Prints a friendly good bye message and exits the script.

    echo "🔴 The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:"
    echo ""
    echo -e "$sudo_cmd $docker_compose_cmd -f ./docker/clickhouse-setup/docker-compose.yaml ps -a"
    echo -e "$sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml ps -a"

    echo "Please read our troubleshooting guide https://signoz.io/docs/install/troubleshooting/"
    echo "or reach us for support in #help channel in our Slack Community https://signoz.io/slack"
@@ -293,6 +296,11 @@ request_sudo() {
    if (( $EUID != 0 )); then
        sudo_cmd="sudo"
        echo -e "Please enter your sudo password, if prompted."
        # $sudo_cmd -l | grep -e "NOPASSWD: ALL" > /dev/null
        # if [[ $? -ne 0 ]] && ! $sudo_cmd -v; then
        #     echo "Need sudo privileges to proceed with the installation."
        #     exit 1;
        # fi
        if ! $sudo_cmd -l | grep -e "NOPASSWD: ALL" > /dev/null && ! $sudo_cmd -v; then
            echo "Need sudo privileges to proceed with the installation."
            exit 1;
@@ -309,7 +317,6 @@ echo -e "👋 Thank you for trying out SigNoz! "
echo ""

sudo_cmd=""
docker_compose_cmd=""

# Check sudo permissions
if (( $EUID != 0 )); then
@@ -355,8 +362,28 @@ else
    SIGNOZ_INSTALLATION_ID=$(echo "$sysinfo" | $digest_cmd | grep -E -o '[a-zA-Z0-9]{64}')
fi

# echo ""

# echo -e "👉 ${RED}Two ways to go forward\n"
# echo -e "${RED}1) ClickHouse as database (default)\n"
# read -p "⚙️ Enter your preference (1/2):" choice_setup

# while [[ $choice_setup != "1" && $choice_setup != "2" && $choice_setup != "" ]]
# do
#     # echo $choice_setup
#     echo -e "\n❌ ${CYAN}Please enter either 1 or 2"
#     read -p "⚙️ Enter your preference (1/2): " choice_setup
#     # echo $choice_setup
# done

# if [[ $choice_setup == "1" || $choice_setup == "" ]];then
#     setup_type='clickhouse'
# fi

setup_type='clickhouse'

# echo -e "\n✅ ${CYAN}You have chosen: ${setup_type} setup\n"

# Run bye if failure happens
trap bye EXIT

@@ -428,6 +455,8 @@ if [[ $desired_os -eq 0 ]]; then
    send_event "os_not_supported"
fi

# check_ports_occupied

# Check is Docker daemon is installed and available. If not, the install & start Docker for Linux machines. We cannot automatically install Docker Desktop on Mac OS
if ! is_command_present docker; then

@@ -457,39 +486,27 @@ if ! is_command_present docker; then
    fi
fi

if has_docker_compose_plugin; then
    echo "docker compose plugin is present, using it"
    docker_compose_cmd="docker compose"
# Install docker-compose
else
    docker_compose_cmd="docker-compose"
    if ! is_command_present docker-compose; then
        request_sudo
        install_docker_compose
    fi
if ! is_command_present docker-compose; then
    request_sudo
    install_docker_compose
fi

start_docker

# check for open ports, if signoz is not installed
if is_command_present docker-compose; then
    if $sudo_cmd $docker_compose_cmd -f ./docker/clickhouse-setup/docker-compose.yaml ps | grep "signoz-query-service" | grep -q "healthy" > /dev/null 2>&1; then
        echo "SigNoz already installed, skipping the occupied ports check"
    else
        check_ports_occupied
    fi
fi
# $sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml up -d --remove-orphans || true

echo ""
echo -e "\n🟡 Pulling the latest container images for SigNoz.\n"
$sudo_cmd $docker_compose_cmd -f ./docker/clickhouse-setup/docker-compose.yaml pull
$sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml pull

echo ""
echo "🟡 Starting the SigNoz containers. It may take a few minutes ..."
echo
# The $docker_compose_cmd command does some nasty stuff for the `--detach` functionality. So we add a `|| true` so that the
# The docker-compose command does some nasty stuff for the `--detach` functionality. So we add a `|| true` so that the
# script doesn't exit because this command looks like it failed to do it's thing.
$sudo_cmd $docker_compose_cmd -f ./docker/clickhouse-setup/docker-compose.yaml up --detach --remove-orphans || true
$sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml up --detach --remove-orphans || true

wait_for_containers_start 60
echo ""
@@ -499,7 +516,7 @@ if [[ $status_code -ne 200 ]]; then
    echo "🔴 The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:"
    echo ""

    echo -e "$sudo_cmd $docker_compose_cmd -f ./docker/clickhouse-setup/docker-compose.yaml ps -a"
    echo -e "$sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml ps -a"

    echo "Please read our troubleshooting guide https://signoz.io/docs/install/troubleshooting/"
    echo "or reach us on SigNoz for support https://signoz.io/slack"
@@ -520,7 +537,7 @@ else
    echo "ℹ️ By default, retention period is set to 15 days for logs and traces, and 30 days for metrics."
    echo -e "To change this, navigate to the General tab on the Settings page of SigNoz UI. For more details, refer to https://signoz.io/docs/userguide/retention-period \n"

    echo "ℹ️ To bring down SigNoz and clean volumes : $sudo_cmd $docker_compose_cmd -f ./docker/clickhouse-setup/docker-compose.yaml down -v"
    echo "ℹ️ To bring down SigNoz and clean volumes : $sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml down -v"

    echo ""
    echo "+++++++++++++++++++++++++++++++++++++++++++++++++"
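The `has_docker_compose_plugin` helper in this diff prefers the Compose v2 plugin (`docker compose`) and only falls back to the standalone `docker-compose` binary when the plugin is absent. The detection can be reproduced on its own:

```bash
if docker compose version > /dev/null 2>&1; then
  docker_compose_cmd="docker compose"        # Compose v2 plugin
elif command -v docker-compose > /dev/null 2>&1; then
  docker_compose_cmd="docker-compose"        # legacy standalone binary
else
  echo "No Docker Compose found" >&2
fi
echo "Using: ${docker_compose_cmd:-none}"
```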
@@ -23,9 +23,6 @@ COPY pkg/query-service/templates /root/templates
# Make query-service executable for non-root users
RUN chmod 755 /root /root/query-service

# Copy frontend
COPY frontend/build/ /etc/signoz/web/

# run the binary
ENTRYPOINT ["./query-service"]
@@ -12,7 +12,6 @@ import (
	"go.signoz.io/signoz/ee/query-service/license"
	"go.signoz.io/signoz/ee/query-service/usage"
	baseapp "go.signoz.io/signoz/pkg/query-service/app"
	"go.signoz.io/signoz/pkg/query-service/app/cloudintegrations"
	"go.signoz.io/signoz/pkg/query-service/app/integrations"
	"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
	"go.signoz.io/signoz/pkg/query-service/cache"
@@ -35,14 +34,13 @@ type APIHandlerOptions struct {
	FeatureFlags                  baseint.FeatureLookup
	LicenseManager                *license.Manager
	IntegrationsController        *integrations.Controller
	CloudIntegrationsController   *cloudintegrations.Controller
	LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
	Cache                         cache.Cache
	Gateway                       *httputil.ReverseProxy
	// Querier Influx Interval
	FluxInterval      time.Duration
	UseLogsNewSchema  bool
	UseTraceNewSchema bool
	FluxInterval     time.Duration
	UseLogsNewSchema bool
	UseLicensesV3    bool
}

type APIHandler struct {
@@ -64,12 +62,11 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
		RuleManager:                   opts.RulesManager,
		FeatureFlags:                  opts.FeatureFlags,
		IntegrationsController:        opts.IntegrationsController,
		CloudIntegrationsController:   opts.CloudIntegrationsController,
		LogsParsingPipelineController: opts.LogsParsingPipelineController,
		Cache:                         opts.Cache,
		FluxInterval:                  opts.FluxInterval,
		UseLogsNewSchema:              opts.UseLogsNewSchema,
		UseTraceNewSchema:             opts.UseTraceNewSchema,
		UseLicensesV3:                 opts.UseLicensesV3,
	})

	if err != nil {
@@ -184,16 +181,23 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
		Methods(http.MethodGet)

	// v3
	router.HandleFunc("/api/v3/licenses", am.ViewAccess(ah.listLicensesV3)).Methods(http.MethodGet)
	router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.applyLicenseV3)).Methods(http.MethodPost)
	router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.refreshLicensesV3)).Methods(http.MethodPut)
	router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.getActiveLicenseV3)).Methods(http.MethodGet)
	router.HandleFunc("/api/v3/licenses",
		am.ViewAccess(ah.listLicensesV3)).
		Methods(http.MethodGet)

	router.HandleFunc("/api/v3/licenses",
		am.AdminAccess(ah.applyLicenseV3)).
		Methods(http.MethodPost)

	router.HandleFunc("/api/v3/licenses",
		am.AdminAccess(ah.refreshLicensesV3)).
		Methods(http.MethodPut)

	// v4
	router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

	// Gateway
	router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
	router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.AdminAccess(ah.ServeGatewayHTTP))

	ah.APIHandler.RegisterRoutes(router, am)
@@ -95,7 +95,7 @@ func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) {
		RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
		return
	}
	license, apiError := ah.LM().ActivateV3(r.Context(), l.Key)
	license, apiError := ah.LM().Activate(r.Context(), l.Key)
	if apiError != nil {
		RespondError(w, apiError, nil)
		return
@@ -115,23 +115,6 @@ func (ah *APIHandler) listLicensesV3(w http.ResponseWriter, r *http.Request) {
	ah.Respond(w, convertLicenseV3ToListLicenseResponse(licenses))
}

func (ah *APIHandler) getActiveLicenseV3(w http.ResponseWriter, r *http.Request) {
	activeLicense, err := ah.LM().GetRepo().GetActiveLicenseV3(r.Context())
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
		return
	}
	// return 404 not found if there is no active license
	if activeLicense == nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no active license found")}, nil)
		return
	}

	// TODO deprecate this when we move away from key for stripe
	activeLicense.Data["key"] = activeLicense.Key
	render.Success(w, http.StatusOK, activeLicense.Data)
}

// this function is called by zeus when inserting licenses in the query-service
func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
	var licenseKey ApplyLicenseRequest
@@ -235,10 +218,6 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
	licensesV2 := []model.License{}
	for _, l := range licenses {
		planKeyFromPlanName, ok := model.MapOldPlanKeyToNewPlanName[l.PlanName]
		if !ok {
			planKeyFromPlanName = model.Basic
		}
		licenseV2 := model.License{
			Key:          l.Key,
			ActivationId: "",
@@ -247,7 +226,7 @@ func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
			ValidationMessage: "",
			IsCurrent:         l.IsCurrent,
			LicensePlan: model.LicensePlan{
				PlanKey:    planKeyFromPlanName,
				PlanKey:    l.PlanName,
				ValidFrom:  l.ValidFrom,
				ValidUntil: l.ValidUntil,
				Status:     l.Status},
@@ -258,12 +237,24 @@ func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
}

func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
	licensesV3, apierr := ah.LM().GetLicensesV3(r.Context())
	if apierr != nil {
		RespondError(w, apierr, nil)
		return

	var licenses []model.License

	if ah.UseLicensesV3 {
		licensesV3, err := ah.LM().GetLicensesV3(r.Context())
		if err != nil {
			RespondError(w, err, nil)
			return
		}
		licenses = convertLicenseV3ToLicenseV2(licensesV3)
	} else {
		_licenses, apiError := ah.LM().GetLicenses(r.Context())
		if apiError != nil {
			RespondError(w, apiError, nil)
			return
		}
		licenses = _licenses
	}
	licenses := convertLicenseV3ToLicenseV2(licensesV3)

	resp := model.Licenses{
		TrialStart: -1,
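The route changes above register `GET/POST/PUT /api/v3/licenses` and `GET /api/v3/licenses/active` on the query-service. A hedged example of exercising the read endpoints from a shell, assuming the query-service listens on port 8080 (as in the compose files) and `$TOKEN` holds a valid JWT for an authorized user:

```bash
# List licenses (ViewAccess)
curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8080/api/v3/licenses

# Fetch the active license; the handler responds 404 when none is active
curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8080/api/v3/licenses/active
```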
@@ -26,9 +26,8 @@ func NewDataConnector(
	dialTimeout time.Duration,
	cluster string,
	useLogsNewSchema bool,
	useTraceNewSchema bool,
) *ClickhouseReader {
	ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster, useLogsNewSchema, useTraceNewSchema)
	ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster, useLogsNewSchema)
	return &ClickhouseReader{
		conn:  ch.GetConn(),
		appdb: localDB,
@@ -30,16 +30,14 @@ import (
|
||||
"go.signoz.io/signoz/ee/query-service/interfaces"
|
||||
"go.signoz.io/signoz/ee/query-service/rules"
|
||||
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
|
||||
"go.signoz.io/signoz/pkg/query-service/migrate"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
"go.signoz.io/signoz/pkg/signoz"
|
||||
"go.signoz.io/signoz/pkg/web"
|
||||
|
||||
licensepkg "go.signoz.io/signoz/ee/query-service/license"
|
||||
"go.signoz.io/signoz/ee/query-service/usage"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/agentConf"
|
||||
baseapp "go.signoz.io/signoz/pkg/query-service/app"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
|
||||
baseexplorer "go.signoz.io/signoz/pkg/query-service/app/explorer"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/integrations"
|
||||
@@ -63,7 +61,6 @@ import (
|
||||
const AppDbEngine = "sqlite"
|
||||
|
||||
type ServerOptions struct {
|
||||
SigNoz *signoz.SigNoz
|
||||
PromConfigPath string
|
||||
SkipTopLvlOpsPath string
|
||||
HTTPHostPort string
|
||||
@@ -80,7 +77,7 @@ type ServerOptions struct {
|
||||
Cluster string
|
||||
GatewayUrl string
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
UseLicensesV3 bool
|
||||
}
|
||||
|
||||
// Server runs HTTP api service
|
||||
@@ -137,7 +134,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
// initiate license manager
|
||||
lm, err := licensepkg.StartManager("sqlite", localDB)
|
||||
lm, err := licensepkg.StartManager("sqlite", localDB, serverOptions.UseLicensesV3)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -159,7 +156,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
serverOptions.DialTimeout,
|
||||
serverOptions.Cluster,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
serverOptions.UseTraceNewSchema,
|
||||
)
|
||||
go qb.Start(readerReady)
|
||||
reader = qb
|
||||
@@ -193,13 +189,19 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
serverOptions.DisableRules,
|
||||
lm,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
serverOptions.UseTraceNewSchema,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
go func() {
|
||||
err = migrate.ClickHouseMigrate(reader.GetConn(), serverOptions.Cluster)
|
||||
if err != nil {
|
||||
zap.L().Error("error while running clickhouse migrations", zap.Error(err))
|
||||
}
|
||||
}()
|
||||
|
||||
// initiate opamp
|
||||
_, err = opAmpModel.InitDB(localDB)
|
||||
if err != nil {
|
||||
@@ -213,13 +215,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
)
|
||||
}
|
||||
|
||||
cloudIntegrationsController, err := cloudintegrations.NewController(localDB)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf(
|
||||
"couldn't create cloud provider integrations controller: %w", err,
|
||||
)
|
||||
}
|
||||
|
||||
// ingestion pipelines manager
|
||||
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
|
||||
localDB, "sqlite", integrationsController.GetPipelinesForInstalledIntegrations,
|
||||
@@ -270,13 +265,12 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
FeatureFlags: lm,
|
||||
LicenseManager: lm,
|
||||
IntegrationsController: integrationsController,
|
||||
CloudIntegrationsController: cloudIntegrationsController,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
Cache: c,
|
||||
FluxInterval: fluxInterval,
|
||||
Gateway: gatewayProxy,
|
||||
UseLogsNewSchema: serverOptions.UseLogsNewSchema,
|
||||
UseTraceNewSchema: serverOptions.UseTraceNewSchema,
|
||||
UseLicensesV3: serverOptions.UseLicensesV3,
|
||||
}
|
||||
|
||||
apiHandler, err := api.NewAPIHandler(apiOpts)
|
||||
@@ -293,7 +287,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
usageManager: usageManager,
|
||||
}
|
||||
|
||||
httpServer, err := s.createPublicServer(apiHandler, serverOptions.SigNoz.Web)
|
||||
httpServer, err := s.createPublicServer(apiHandler)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -319,10 +313,10 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
|
||||
|
||||
r := baseapp.NewRouter()
|
||||
|
||||
r.Use(baseapp.LogCommentEnricher)
|
||||
r.Use(setTimeoutMiddleware)
|
||||
r.Use(s.analyticsMiddleware)
|
||||
r.Use(loggingMiddlewarePrivate)
|
||||
r.Use(baseapp.LogCommentEnricher)
|
||||
|
||||
apiHandler.RegisterPrivateRoutes(r)
|
||||
|
||||
@@ -342,7 +336,7 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
|
||||
func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, error) {
|
||||
|
||||
r := baseapp.NewRouter()
|
||||
|
||||
@@ -362,15 +356,14 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
}
|
||||
am := baseapp.NewAuthMiddleware(getUserFromRequest)
|
||||
|
||||
r.Use(baseapp.LogCommentEnricher)
|
||||
r.Use(setTimeoutMiddleware)
|
||||
r.Use(s.analyticsMiddleware)
|
||||
r.Use(loggingMiddleware)
|
||||
r.Use(baseapp.LogCommentEnricher)
|
||||
|
||||
apiHandler.RegisterRoutes(r, am)
|
||||
apiHandler.RegisterLogsRoutes(r, am)
|
||||
apiHandler.RegisterIntegrationRoutes(r, am)
|
||||
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV3Routes(r, am)
|
||||
apiHandler.RegisterInfraMetricsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV4Routes(r, am)
|
||||
@@ -387,11 +380,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
|
||||
handler = handlers.CompressHandler(handler)
|
||||
|
||||
err := web.AddToRouter(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &http.Server{
|
||||
Handler: handler,
|
||||
}, nil
|
||||
@@ -501,29 +489,32 @@ func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}
|
||||
zap.L().Error("error while matching the trace explorer: ", zap.Error(err))
|
||||
}
|
||||
|
||||
queryInfoResult := telemetry.GetInstance().CheckQueryInfo(postData)
|
||||
signozMetricsUsed := false
|
||||
signozLogsUsed := false
|
||||
signozTracesUsed := false
|
||||
if postData != nil {
|
||||
|
||||
if (queryInfoResult.MetricsUsed || queryInfoResult.LogsUsed || queryInfoResult.TracesUsed) && (queryInfoResult.FilterApplied) {
|
||||
if queryInfoResult.MetricsUsed {
|
||||
if postData.CompositeQuery != nil {
|
||||
data["queryType"] = postData.CompositeQuery.QueryType
|
||||
data["panelType"] = postData.CompositeQuery.PanelType
|
||||
|
||||
signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData)
|
||||
}
|
||||
}
|
||||
|
||||
if signozMetricsUsed || signozLogsUsed || signozTracesUsed {
|
||||
if signozMetricsUsed {
|
||||
telemetry.GetInstance().AddActiveMetricsUser()
|
||||
}
|
||||
if queryInfoResult.LogsUsed {
|
||||
if signozLogsUsed {
|
||||
telemetry.GetInstance().AddActiveLogsUser()
|
||||
}
|
||||
if queryInfoResult.TracesUsed {
|
||||
if signozTracesUsed {
|
||||
telemetry.GetInstance().AddActiveTracesUser()
|
||||
}
|
||||
data["metricsUsed"] = queryInfoResult.MetricsUsed
|
||||
data["logsUsed"] = queryInfoResult.LogsUsed
|
||||
data["tracesUsed"] = queryInfoResult.TracesUsed
|
||||
data["filterApplied"] = queryInfoResult.FilterApplied
|
||||
data["groupByApplied"] = queryInfoResult.GroupByApplied
|
||||
data["aggregateOperator"] = queryInfoResult.AggregateOperator
|
||||
data["aggregateAttributeKey"] = queryInfoResult.AggregateAttributeKey
|
||||
data["numberOfQueries"] = queryInfoResult.NumberOfQueries
|
||||
data["queryType"] = queryInfoResult.QueryType
|
||||
data["panelType"] = queryInfoResult.PanelType
|
||||
|
||||
data["metricsUsed"] = signozMetricsUsed
|
||||
data["logsUsed"] = signozLogsUsed
|
||||
data["tracesUsed"] = signozTracesUsed
|
||||
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
|
||||
if err == nil {
|
||||
// switch case to set data["screen"] based on the referrer
|
||||
@@ -746,8 +737,7 @@ func makeRulesManager(
|
||||
cache cache.Cache,
|
||||
disableRules bool,
|
||||
fm baseint.FeatureLookup,
|
||||
useLogsNewSchema bool,
|
||||
useTraceNewSchema bool) (*baserules.Manager, error) {
|
||||
useLogsNewSchema bool) (*baserules.Manager, error) {
|
||||
|
||||
// create engine
|
||||
pqle, err := pqle.FromConfigPath(promConfigPath)
|
||||
@@ -777,9 +767,8 @@ func makeRulesManager(
|
||||
EvalDelay: baseconst.GetEvalDelay(),
|
||||
|
||||
PrepareTaskFunc: rules.PrepareTaskFunc,
|
||||
UseLogsNewSchema: useLogsNewSchema,
|
||||
UseTraceNewSchema: useTraceNewSchema,
|
||||
PrepareTestRuleFunc: rules.TestNotification,
|
||||
UseLogsNewSchema: useLogsNewSchema,
|
||||
}
|
||||
|
||||
// create Manager
|
||||
|
||||
@@ -13,9 +13,7 @@ var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")

// this is set via build time variable
var ZeusURL = "https://api.signoz.cloud"
var ZeusURL = GetOrDefaultEnv("ZEUS_URL", "ZeusURL")

func GetOrDefaultEnv(key string, fallback string) string {
	v := os.Getenv(key)
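The tail of GetOrDefaultEnv is cut off in the hunk above; a minimal sketch of how such a helper typically completes, assuming the usual fall-back-on-empty convention (a sketch, not the file's actual tail):

package constants

import "os"

// GetOrDefaultEnv returns the environment variable `key`, or `fallback`
// when the variable is unset or empty.
func GetOrDefaultEnv(key string, fallback string) string {
	v := os.Getenv(key)
	if len(v) == 0 {
		return fallback
	}
	return v
}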
@@ -2,6 +2,18 @@ package signozio
|
||||
|
||||
type status string
|
||||
|
||||
type ActivationResult struct {
|
||||
Status status `json:"status"`
|
||||
Data *ActivationResponse `json:"data,omitempty"`
|
||||
ErrorType string `json:"errorType,omitempty"`
|
||||
Error string `json:"error,omitempty"`
|
||||
}
|
||||
|
||||
type ActivationResponse struct {
|
||||
ActivationId string `json:"ActivationId"`
|
||||
PlanDetails string `json:"PlanDetails"`
|
||||
}
|
||||
|
||||
type ValidateLicenseResponse struct {
|
||||
Status status `json:"status"`
|
||||
Data map[string]interface{} `json:"data"`
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"go.signoz.io/signoz/ee/query-service/constants"
|
||||
"go.signoz.io/signoz/ee/query-service/model"
|
||||
@@ -38,6 +39,86 @@ func init() {
|
||||
C = New()
|
||||
}
|
||||
|
||||
// ActivateLicense sends key to license.signoz.io and gets activation data
|
||||
func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) {
|
||||
licenseReq := map[string]string{
|
||||
"key": key,
|
||||
"siteId": siteId,
|
||||
}
|
||||
|
||||
reqString, _ := json.Marshal(licenseReq)
|
||||
httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString))
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to connect to license.signoz.io", zap.Error(err))
|
||||
return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
|
||||
}
|
||||
|
||||
httpBody, err := io.ReadAll(httpResponse.Body)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err))
|
||||
return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
|
||||
}
|
||||
|
||||
defer httpResponse.Body.Close()
|
||||
|
||||
// read api request result
|
||||
result := ActivationResult{}
|
||||
err = json.Unmarshal(httpBody, &result)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err))
|
||||
return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
|
||||
}
|
||||
|
||||
switch httpResponse.StatusCode {
|
||||
case 200, 201:
|
||||
return result.Data, nil
|
||||
case 400, 401:
|
||||
return nil, model.BadRequest(fmt.Errorf("failed to activate: %s", result.Error))
|
||||
default:
|
||||
return nil, model.InternalError(fmt.Errorf("failed to activate: %s", result.Error))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
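For orientation, a hedged sketch of a caller driving ActivateLicense as declared above; the import path and the wrapper function are assumptions for illustration, though the empty siteId mirrors how the manager's Activate in this same changeset calls it:

package example

import (
	// path assumed; the diff only shows that these functions live in package signozio
	"go.signoz.io/signoz/ee/query-service/integrations/signozio"
	"go.uber.org/zap"
)

// activate is a hypothetical caller of the activation endpoint wrapper.
func activate(licenseKey string) error {
	resp, apiErr := signozio.ActivateLicense(licenseKey, "")
	if apiErr != nil {
		// apiErr is a *model.ApiError; surface its underlying error
		return apiErr.Err
	}
	zap.L().Info("license activated", zap.String("activationId", resp.ActivationId))
	return nil
}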
// ValidateLicense validates the license key
|
||||
func ValidateLicense(activationId string) (*ActivationResponse, *model.ApiError) {
|
||||
validReq := map[string]string{
|
||||
"activationId": activationId,
|
||||
}
|
||||
|
||||
reqString, _ := json.Marshal(validReq)
|
||||
response, err := http.Post(C.Prefix+"/licenses/validate", APPLICATION_JSON, bytes.NewBuffer(reqString))
|
||||
|
||||
if err != nil {
|
||||
return nil, model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(response.Body)
|
||||
if err != nil {
|
||||
return nil, model.BadRequest(errors.Wrap(err, "failed to read validation response from license.signoz.io"))
|
||||
}
|
||||
|
||||
defer response.Body.Close()
|
||||
|
||||
switch response.StatusCode {
|
||||
case 200, 201:
|
||||
a := ActivationResult{}
|
||||
err = json.Unmarshal(body, &a)
|
||||
if err != nil {
|
||||
return nil, model.BadRequest(errors.Wrap(err, "failed to marshal license validation response"))
|
||||
}
|
||||
return a.Data, nil
|
||||
case 400, 401:
|
||||
return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
|
||||
"bad request error received from license.signoz.io"))
|
||||
default:
|
||||
return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
|
||||
"internal error received from license.signoz.io"))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func ValidateLicenseV3(licenseKey string) (*model.LicenseV3, *model.ApiError) {
|
||||
|
||||
// Creating an HTTP client with a timeout for better control
|
||||
|
||||
@@ -78,7 +78,9 @@ func (r *Repo) GetLicensesV3(ctx context.Context) ([]*model.LicenseV3, error) {
|
||||
return licenseV3Data, nil
|
||||
}
|
||||
|
||||
func (r *Repo) GetActiveLicenseV2(ctx context.Context) (*model.License, *basemodel.ApiError) {
|
||||
// GetActiveLicense fetches the latest active license from DB.
|
||||
// If the license is not present, expect a nil license and a nil error in the output.
|
||||
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
|
||||
var err error
|
||||
licenses := []model.License{}
|
||||
|
||||
@@ -107,21 +109,6 @@ func (r *Repo) GetActiveLicenseV2(ctx context.Context) (*model.License, *basemod
|
||||
return active, nil
|
||||
}
|
||||
|
||||
// GetActiveLicense fetches the latest active license from DB.
|
||||
// If the license is not present, expect a nil license and a nil error in the output.
|
||||
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
|
||||
activeLicenseV3, err := r.GetActiveLicenseV3(ctx)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
|
||||
}
|
||||
|
||||
if activeLicenseV3 == nil {
|
||||
return nil, nil
|
||||
}
|
||||
activeLicenseV2 := model.ConvertLicenseV3ToLicenseV2(activeLicenseV3)
|
||||
return activeLicenseV2, nil
|
||||
}
|
||||
|
||||
func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) {
|
||||
var err error
|
||||
licenses := []model.LicenseDB{}
|
||||
|
||||
@@ -51,7 +51,7 @@ type Manager struct {
|
||||
activeFeatures basemodel.FeatureSet
|
||||
}
|
||||
|
||||
func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*Manager, error) {
|
||||
func StartManager(dbType string, db *sqlx.DB, useLicensesV3 bool, features ...basemodel.Feature) (*Manager, error) {
|
||||
if LM != nil {
|
||||
return LM, nil
|
||||
}
|
||||
@@ -67,7 +67,31 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M
|
||||
repo: &repo,
|
||||
}
|
||||
|
||||
if err := m.start(features...); err != nil {
|
||||
if useLicensesV3 {
|
||||
// get active license from the db
|
||||
active, err := m.repo.GetActiveLicense(context.Background())
|
||||
if err != nil {
|
||||
return m, err
|
||||
}
|
||||
|
||||
// if we have an active license then need to fetch the complete details
|
||||
if active != nil {
|
||||
// fetch the new license structure from control plane
|
||||
licenseV3, apiError := validate.ValidateLicenseV3(active.Key)
|
||||
if apiError != nil {
|
||||
return m, apiError
|
||||
}
|
||||
|
||||
// insert the licenseV3 in sqlite db
|
||||
apiError = m.repo.InsertLicenseV3(context.Background(), licenseV3)
|
||||
// if the license already exists move ahead.
|
||||
if apiError != nil && apiError.Typ != model.ErrorConflict {
|
||||
return m, apiError
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := m.start(useLicensesV3, features...); err != nil {
|
||||
return m, err
|
||||
}
|
||||
LM = m
|
||||
@@ -75,8 +99,16 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M
|
||||
}
|
||||
|
||||
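With the signature change above, a call site now has to pass the useLicensesV3 flag explicitly; a hedged sketch of such a call (package path and variable names are assumed for illustration, not taken from this diff):

package example

import (
	"fmt"

	"github.com/jmoiron/sqlx"
	// package path assumed for illustration
	license "go.signoz.io/signoz/ee/query-service/license"
)

// bootstrapLicenses shows a call against the new StartManager signature.
func bootstrapLicenses(db *sqlx.DB, useLicensesV3 bool) error {
	lm, err := license.StartManager("sqlite", db, useLicensesV3)
	if err != nil {
		return fmt.Errorf("couldn't start license manager: %w", err)
	}
	_ = lm // typically wired into the API handler as FeatureFlags / LicenseManager
	return nil
}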
// start loads active license in memory and initiates validator
|
||||
func (lm *Manager) start(features ...basemodel.Feature) error {
|
||||
return lm.LoadActiveLicenseV3(features...)
|
||||
func (lm *Manager) start(useLicensesV3 bool, features ...basemodel.Feature) error {
|
||||
|
||||
var err error
|
||||
if useLicensesV3 {
|
||||
err = lm.LoadActiveLicenseV3(features...)
|
||||
} else {
|
||||
err = lm.LoadActiveLicense(features...)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (lm *Manager) Stop() {
|
||||
@@ -84,6 +116,31 @@ func (lm *Manager) Stop() {
|
||||
<-lm.terminated
|
||||
}
|
||||
|
||||
func (lm *Manager) SetActive(l *model.License, features ...basemodel.Feature) {
|
||||
lm.mutex.Lock()
|
||||
defer lm.mutex.Unlock()
|
||||
|
||||
if l == nil {
|
||||
return
|
||||
}
|
||||
|
||||
lm.activeLicense = l
|
||||
lm.activeFeatures = append(l.FeatureSet, features...)
|
||||
// set default features
|
||||
setDefaultFeatures(lm)
|
||||
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.L().Panic("Couldn't activate features", zap.Error(err))
|
||||
}
|
||||
if !lm.validatorRunning {
|
||||
// we want to make sure only one validator runs,
|
||||
// we already have lock() so good to go
|
||||
lm.validatorRunning = true
|
||||
go lm.Validator(context.Background())
|
||||
}
|
||||
|
||||
}
|
||||
func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) {
|
||||
lm.mutex.Lock()
|
||||
defer lm.mutex.Unlock()
|
||||
@@ -114,6 +171,29 @@ func setDefaultFeatures(lm *Manager) {
|
||||
lm.activeFeatures = append(lm.activeFeatures, baseconstants.DEFAULT_FEATURE_SET...)
|
||||
}
|
||||
|
||||
// LoadActiveLicense loads the most recent active license
|
||||
func (lm *Manager) LoadActiveLicense(features ...basemodel.Feature) error {
|
||||
active, err := lm.repo.GetActiveLicense(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if active != nil {
|
||||
lm.SetActive(active, features...)
|
||||
} else {
|
||||
zap.L().Info("No active license found, defaulting to basic plan")
|
||||
// if no active license is found, we default to basic(free) plan with all default features
|
||||
lm.activeFeatures = model.BasicPlan
|
||||
setDefaultFeatures(lm)
|
||||
err := lm.InitFeatures(lm.activeFeatures)
|
||||
if err != nil {
|
||||
zap.L().Error("Couldn't initialize features", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error {
|
||||
active, err := lm.repo.GetActiveLicenseV3(context.Background())
|
||||
if err != nil {
|
||||
@@ -173,20 +253,38 @@ func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.License
|
||||
if lm.activeLicenseV3 != nil && l.Key == lm.activeLicenseV3.Key {
|
||||
l.IsCurrent = true
|
||||
}
|
||||
if l.ValidUntil == -1 {
|
||||
// for subscriptions, there is no end date as such,
// but to show the user some validity we default to a one-year timespan
|
||||
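// 31556926 seconds is about 365.24 days, i.e. roughly one year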
l.ValidUntil = l.ValidFrom + 31556926
|
||||
}
|
||||
response = append(response, l)
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Validator validates license after an epoch of time
|
||||
func (lm *Manager) Validator(ctx context.Context) {
|
||||
defer close(lm.terminated)
|
||||
tick := time.NewTicker(validationFrequency)
|
||||
defer tick.Stop()
|
||||
|
||||
lm.Validate(ctx)
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-lm.done:
|
||||
return
|
||||
default:
|
||||
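// outer non-blocking check lets a pending shutdown win before blocking on the ticker below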
select {
|
||||
case <-lm.done:
|
||||
return
|
||||
case <-tick.C:
|
||||
lm.Validate(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
// Validator validates license after an epoch of time
|
||||
func (lm *Manager) ValidatorV3(ctx context.Context) {
|
||||
zap.L().Info("ValidatorV3 started!")
|
||||
defer close(lm.terminated)
|
||||
tick := time.NewTicker(validationFrequency)
|
||||
defer tick.Stop()
|
||||
@@ -209,6 +307,74 @@ func (lm *Manager) ValidatorV3(ctx context.Context) {
|
||||
}
|
||||
}
|
||||
|
||||
// Validate validates the current active license
|
||||
func (lm *Manager) Validate(ctx context.Context) (reterr error) {
|
||||
zap.L().Info("License validation started")
|
||||
if lm.activeLicense == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
defer func() {
|
||||
lm.mutex.Lock()
|
||||
|
||||
lm.lastValidated = time.Now().Unix()
|
||||
if reterr != nil {
|
||||
zap.L().Error("License validation completed with error", zap.Error(reterr))
|
||||
atomic.AddUint64(&lm.failedAttempts, 1)
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
|
||||
map[string]interface{}{"err": reterr.Error()}, "", true, false)
|
||||
} else {
|
||||
zap.L().Info("License validation completed with no errors")
|
||||
}
|
||||
|
||||
lm.mutex.Unlock()
|
||||
}()
|
||||
|
||||
response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
|
||||
if apiError != nil {
|
||||
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
|
||||
return apiError.Err
|
||||
}
|
||||
|
||||
if response.PlanDetails == lm.activeLicense.PlanDetails {
|
||||
// license plan hasn't changed, nothing to do
|
||||
return nil
|
||||
}
|
||||
|
||||
if response.PlanDetails != "" {
|
||||
|
||||
// copy and replace the active license record
|
||||
l := model.License{
|
||||
Key: lm.activeLicense.Key,
|
||||
CreatedAt: lm.activeLicense.CreatedAt,
|
||||
PlanDetails: response.PlanDetails,
|
||||
ValidationMessage: lm.activeLicense.ValidationMessage,
|
||||
ActivationId: lm.activeLicense.ActivationId,
|
||||
}
|
||||
|
||||
if err := l.ParsePlan(); err != nil {
|
||||
zap.L().Error("failed to parse updated license", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
|
||||
// updated plan is parsable, check if plan has changed
|
||||
if lm.activeLicense.PlanDetails != response.PlanDetails {
|
||||
err := lm.repo.UpdatePlanDetails(ctx, lm.activeLicense.Key, response.PlanDetails)
|
||||
if err != nil {
|
||||
// unexpected db write issue but we can let the user continue
|
||||
// and wait for update to work in next cycle.
|
||||
zap.L().Error("failed to validate license", zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
// activate the update license plan
|
||||
lm.SetActive(&l)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// todo[vikrantgupta25]: check the comparison here between old and new license!
|
||||
func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {
|
||||
|
||||
license, apiError := validate.ValidateLicenseV3(lm.activeLicenseV3.Key)
|
||||
@@ -256,6 +422,50 @@ func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Activate activates a license key with signoz server
|
||||
func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *model.ApiError) {
|
||||
defer func() {
|
||||
if errResponse != nil {
|
||||
userEmail, err := auth.GetEmailFromJwt(ctx)
|
||||
if err == nil {
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
|
||||
map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
response, apiError := validate.ActivateLicense(key, "")
|
||||
if apiError != nil {
|
||||
zap.L().Error("failed to activate license", zap.Error(apiError.Err))
|
||||
return nil, apiError
|
||||
}
|
||||
|
||||
l := &model.License{
|
||||
Key: key,
|
||||
ActivationId: response.ActivationId,
|
||||
PlanDetails: response.PlanDetails,
|
||||
}
|
||||
|
||||
// parse validity and features from the plan details
|
||||
err := l.ParsePlan()
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to activate license", zap.Error(err))
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
// store the license before activating it
|
||||
err = lm.repo.InsertLicense(ctx, l)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to activate license", zap.Error(err))
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
// license is valid, activate it
|
||||
lm.SetActive(l)
|
||||
return l, nil
|
||||
}
|
||||
|
||||
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseResponse *model.LicenseV3, errResponse *model.ApiError) {
|
||||
defer func() {
|
||||
if errResponse != nil {
|
||||
|
||||
@@ -13,14 +13,10 @@ import (
|
||||
"go.opentelemetry.io/otel/sdk/resource"
|
||||
semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
|
||||
"go.signoz.io/signoz/ee/query-service/app"
|
||||
"go.signoz.io/signoz/pkg/config"
|
||||
"go.signoz.io/signoz/pkg/config/envprovider"
|
||||
"go.signoz.io/signoz/pkg/config/fileprovider"
|
||||
"go.signoz.io/signoz/pkg/query-service/auth"
|
||||
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
|
||||
"go.signoz.io/signoz/pkg/query-service/migrate"
|
||||
"go.signoz.io/signoz/pkg/query-service/version"
|
||||
"go.signoz.io/signoz/pkg/signoz"
|
||||
"google.golang.org/grpc"
|
||||
"google.golang.org/grpc/credentials/insecure"
|
||||
|
||||
@@ -98,7 +94,7 @@ func main() {
|
||||
var cluster string
|
||||
|
||||
var useLogsNewSchema bool
|
||||
var useTraceNewSchema bool
|
||||
var useLicensesV3 bool
|
||||
var cacheConfigPath, fluxInterval string
|
||||
var enableQueryServiceLogOTLPExport bool
|
||||
var preferSpanMetrics bool
|
||||
@@ -107,10 +103,9 @@ func main() {
|
||||
var maxOpenConns int
|
||||
var dialTimeout time.Duration
|
||||
var gatewayUrl string
|
||||
var useLicensesV3 bool
|
||||
|
||||
flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
|
||||
flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces")
|
||||
flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
|
||||
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
|
||||
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
|
||||
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
|
||||
@@ -124,7 +119,7 @@ func main() {
|
||||
flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
|
||||
flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
|
||||
flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
|
||||
flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
loggerMgr := initZapLog(enableQueryServiceLogOTLPExport)
|
||||
@@ -134,24 +129,7 @@ func main() {
|
||||
|
||||
version.PrintVersion()
|
||||
|
||||
config, err := signoz.NewConfig(context.Background(), config.ResolverConfig{
|
||||
Uris: []string{"env:"},
|
||||
ProviderFactories: []config.ProviderFactory{
|
||||
envprovider.NewFactory(),
|
||||
fileprovider.NewFactory(),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
zap.L().Fatal("Failed to create config", zap.Error(err))
|
||||
}
|
||||
|
||||
signoz, err := signoz.New(context.Background(), config, signoz.NewProviderConfig())
|
||||
if err != nil {
|
||||
zap.L().Fatal("Failed to create signoz struct", zap.Error(err))
|
||||
}
|
||||
|
||||
serverOptions := &app.ServerOptions{
|
||||
SigNoz: signoz,
|
||||
HTTPHostPort: baseconst.HTTPHostPort,
|
||||
PromConfigPath: promConfigPath,
|
||||
SkipTopLvlOpsPath: skipTopLvlOpsPath,
|
||||
@@ -167,7 +145,7 @@ func main() {
|
||||
Cluster: cluster,
|
||||
GatewayUrl: gatewayUrl,
|
||||
UseLogsNewSchema: useLogsNewSchema,
|
||||
UseTraceNewSchema: useTraceNewSchema,
|
||||
UseLicensesV3: useLicensesV3,
|
||||
}
|
||||
|
||||
// Read the jwt secret key
|
||||
|
||||
@@ -247,24 +247,3 @@ func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}
|
||||
licenseDataWithIdAndKey["key"] = key
|
||||
return NewLicenseV3(licenseDataWithIdAndKey)
|
||||
}
|
||||
|
||||
func ConvertLicenseV3ToLicenseV2(l *LicenseV3) *License {
|
||||
planKeyFromPlanName, ok := MapOldPlanKeyToNewPlanName[l.PlanName]
|
||||
if !ok {
|
||||
planKeyFromPlanName = Basic
|
||||
}
|
||||
return &License{
|
||||
Key: l.Key,
|
||||
ActivationId: "",
|
||||
PlanDetails: "",
|
||||
FeatureSet: l.Features,
|
||||
ValidationMessage: "",
|
||||
IsCurrent: l.IsCurrent,
|
||||
LicensePlan: LicensePlan{
|
||||
PlanKey: planKeyFromPlanName,
|
||||
ValidFrom: l.ValidFrom,
|
||||
ValidUntil: l.ValidUntil,
|
||||
Status: l.Status},
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -16,10 +16,6 @@ var (
|
||||
PlanNameBasic = "BASIC"
|
||||
)
|
||||
|
||||
var (
|
||||
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameTeams: Pro, PlanNameEnterprise: Enterprise}
|
||||
)
|
||||
|
||||
var (
|
||||
LicenseStatusInactive = "INACTIVE"
|
||||
)
|
||||
|
||||
@@ -26,7 +26,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
opts.FF,
|
||||
opts.Reader,
|
||||
opts.UseLogsNewSchema,
|
||||
opts.UseTraceNewSchema,
|
||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||
)
|
||||
|
||||
@@ -123,7 +122,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
opts.FF,
|
||||
opts.Reader,
|
||||
opts.UseLogsNewSchema,
|
||||
opts.UseTraceNewSchema,
|
||||
baserules.WithSendAlways(),
|
||||
baserules.WithSendUnmatched(),
|
||||
)
|
||||
|
||||
@@ -13,3 +13,8 @@ if [ "$branch" = "main" ]; then
|
||||
echo "${color_red}${bold}You can't commit directly to the main branch${reset}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$branch" = "develop" ]; then
|
||||
echo "${color_red}${bold}You can't commit directly to the develop branch${reset}"
|
||||
exit 1
|
||||
fi
|
||||
@@ -24,7 +24,7 @@ const config: Config.InitialOptions = {
|
||||
'^.+\\.(js|jsx)$': 'babel-jest',
|
||||
},
|
||||
transformIgnorePatterns: [
|
||||
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color|api)/)',
|
||||
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color)/)',
|
||||
],
|
||||
setupFilesAfterEnv: ['<rootDir>jest.setup.ts'],
|
||||
testPathIgnorePatterns: ['/node_modules/', '/public/'],
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
|
||||
"commitlint": "commitlint --edit $1",
|
||||
"test": "jest --coverage",
|
||||
"test:changedsince": "jest --changedSince=main --coverage --silent"
|
||||
"test:changedsince": "jest --changedSince=develop --coverage --silent"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.15.0"
|
||||
@@ -40,10 +40,9 @@
|
||||
"@monaco-editor/react": "^4.3.1",
|
||||
"@radix-ui/react-tabs": "1.0.4",
|
||||
"@radix-ui/react-tooltip": "1.0.7",
|
||||
"@sentry/react": "8.41.0",
|
||||
"@sentry/webpack-plugin": "2.22.6",
|
||||
"@sentry/react": "7.102.1",
|
||||
"@sentry/webpack-plugin": "2.16.0",
|
||||
"@signozhq/design-tokens": "1.1.4",
|
||||
"@tanstack/react-table": "8.20.6",
|
||||
"@uiw/react-md-editor": "3.23.5",
|
||||
"@visx/group": "3.3.0",
|
||||
"@visx/shape": "3.5.0",
|
||||
@@ -77,7 +76,7 @@
|
||||
"fontfaceobserver": "2.3.0",
|
||||
"history": "4.10.1",
|
||||
"html-webpack-plugin": "5.5.0",
|
||||
"http-proxy-middleware": "3.0.3",
|
||||
"http-proxy-middleware": "2.0.7",
|
||||
"i18next": "^21.6.12",
|
||||
"i18next-browser-languagedetector": "^6.1.3",
|
||||
"i18next-http-backend": "^1.3.2",
|
||||
@@ -129,7 +128,7 @@
|
||||
"uuid": "^8.3.2",
|
||||
"web-vitals": "^0.2.4",
|
||||
"webpack": "5.94.0",
|
||||
"webpack-dev-server": "^4.15.2",
|
||||
"webpack-dev-server": "^4.15.1",
|
||||
"webpack-retry-chunk-load-plugin": "3.1.1",
|
||||
"xstate": "^4.31.0"
|
||||
},
|
||||
@@ -154,7 +153,6 @@
|
||||
"@babel/preset-typescript": "^7.21.4",
|
||||
"@commitlint/cli": "^16.3.0",
|
||||
"@commitlint/config-conventional": "^16.2.4",
|
||||
"@faker-js/faker": "9.3.0",
|
||||
"@jest/globals": "^27.5.1",
|
||||
"@playwright/test": "^1.22.0",
|
||||
"@testing-library/jest-dom": "5.16.5",
|
||||
@@ -188,6 +186,7 @@
|
||||
"@types/webpack-dev-server": "^4.7.2",
|
||||
"@typescript-eslint/eslint-plugin": "^4.33.0",
|
||||
"@typescript-eslint/parser": "^4.33.0",
|
||||
"@vvo/tzdb": "6.149.0",
|
||||
"autoprefixer": "10.4.19",
|
||||
"babel-plugin-styled-components": "^1.12.0",
|
||||
"compression-webpack-plugin": "9.0.0",
|
||||
@@ -243,9 +242,6 @@
|
||||
"semver": "7.5.4",
|
||||
"xml2js": "0.5.0",
|
||||
"phin": "^3.7.1",
|
||||
"body-parser": "1.20.3",
|
||||
"http-proxy-middleware": "3.0.3",
|
||||
"cross-spawn": "7.0.5",
|
||||
"cookie": "^0.7.1"
|
||||
"body-parser": "1.20.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +0,0 @@
[deleted SVG icon asset, 2.9 KiB]
@@ -1 +0,0 @@
[deleted SVG icon asset, 3.2 KiB]
[deleted image asset, 408 KiB; file diff suppressed because one or more lines are too long]
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"workspaceSuspended": "Your workspace is locked",
|
||||
"gotQuestions": "Got Questions?",
|
||||
"contactUs": "Contact Us",
|
||||
"actionHeader": "Pay to continue",
|
||||
"actionDescription": "Pay now to keep enjoying all the great features you’ve been using.",
|
||||
"yourDataIsSafe": "Your data is safe with us until",
|
||||
"actNow": "Act now to avoid any disruptions and continue where you left off.",
|
||||
"contactAdmin": "Contact your admin to proceed with the upgrade.",
|
||||
"continueMyJourney": "Settle your bill to continue",
|
||||
"somethingWentWrong": "Something went wrong"
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"containers_visualization_message": "The ability to visualise containers is in active development and should be available to you soon.",
|
||||
"processes_visualization_message": "The ability to visualise processes is in active development and should be available to you soon.",
|
||||
"working_message": "We're working to extend infrastructure monitoring to take care of a bunch of different cases. Thank you for your patience.",
|
||||
"waitlist_message": "Join the waitlist for early access.",
|
||||
"waitlist_success_message": "We have received your request for early access. We will get back to you as soon as we launch the feature.",
|
||||
"contact_support": "Contact Support"
|
||||
}
|
||||
@@ -3,10 +3,9 @@
|
||||
"alert_channels": "Alert Channels",
|
||||
"organization_settings": "Organization Settings",
|
||||
"ingestion_settings": "Ingestion Settings",
|
||||
"api_keys": "API Keys",
|
||||
"api_keys": "Access Tokens",
|
||||
"my_settings": "My Settings",
|
||||
"overview_metrics": "Overview Metrics",
|
||||
"custom_domain_settings": "Custom Domain Settings",
|
||||
"dbcall_metrics": "Database Calls",
|
||||
"external_metrics": "External Calls",
|
||||
"pipeline": "Pipeline",
|
||||
|
||||
@@ -26,8 +26,7 @@
|
||||
"MY_SETTINGS": "SigNoz | My Settings",
|
||||
"ORG_SETTINGS": "SigNoz | Organization Settings",
|
||||
"INGESTION_SETTINGS": "SigNoz | Ingestion Settings",
|
||||
"API_KEYS": "SigNoz | API Keys",
|
||||
"CUSTOM_DOMAIN_SETTINGS": "SigNoz | Custom Domain Settings",
|
||||
"API_KEYS": "SigNoz | Access Tokens",
|
||||
"SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong",
|
||||
"UN_AUTHORIZED": "SigNoz | Unauthorized",
|
||||
"NOT_FOUND": "SigNoz | Page Not Found",
|
||||
@@ -38,11 +37,8 @@
|
||||
"PASSWORD_RESET": "SigNoz | Password Reset",
|
||||
"LIST_LICENSES": "SigNoz | List of Licenses",
|
||||
"WORKSPACE_LOCKED": "SigNoz | Workspace Locked",
|
||||
"WORKSPACE_SUSPENDED": "SigNoz | Workspace Suspended",
|
||||
"SUPPORT": "SigNoz | Support",
|
||||
"DEFAULT": "Open source Observability Platform | SigNoz",
|
||||
"ALERT_HISTORY": "SigNoz | Alert Rule History",
|
||||
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
|
||||
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
|
||||
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring"
|
||||
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview"
|
||||
}
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"delete_confirm_message": "Are you sure you want to delete {{keyName}} key? Deleting a key is irreversible and cannot be undone."
|
||||
"delete_confirm_message": "Are you sure you want to delete {{keyName}} token? Deleting a token is irreversible and cannot be undone."
|
||||
}
|
||||
|
||||
@@ -3,9 +3,7 @@
|
||||
"billing": "Billing",
|
||||
"manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.",
|
||||
"enterprise_cloud": "Enterprise Cloud",
|
||||
"teams_cloud": "Teams Cloud",
|
||||
"enterprise": "Enterprise",
|
||||
"teams": "Teams",
|
||||
"card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.",
|
||||
"upgrade_plan": "Upgrade Plan",
|
||||
"manage_billing": "Manage Billing",
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"save": "Save",
|
||||
"edit": "Edit",
|
||||
"logged_in": "Logged In",
|
||||
"pending_data_placeholder": "Retrieving your {{dataSource}}!"
|
||||
"pending_data_placeholder": "Just a bit of patience, just a little bit’s enough ⎯ we’re getting your {{dataSource}}!"
|
||||
}
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"workspaceSuspended": "Your workspace is locked",
|
||||
"gotQuestions": "Got Questions?",
|
||||
"contactUs": "Contact Us",
|
||||
"actionHeader": "Pay to continue",
|
||||
"actionDescription": "Pay now to keep enjoying all the great features you’ve been using.",
|
||||
"yourDataIsSafe": "Your data is safe with us until",
|
||||
"actNow": "Act now to avoid any disruptions and continue where you left off.",
|
||||
"contactAdmin": "Contact your admin to proceed with the upgrade.",
|
||||
"continueMyJourney": "Settle your bill to continue",
|
||||
"somethingWentWrong": "Something went wrong"
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"containers_visualization_message": "The ability to visualise containers is in active development and should be available to you soon.",
|
||||
"processes_visualization_message": "The ability to visualise processes is in active development and should be available to you soon.",
|
||||
"working_message": "We're working to extend infrastructure monitoring to take care of a bunch of different cases. Thank you for your patience.",
|
||||
"waitlist_message": "Join the waitlist for early access.",
|
||||
"waitlist_success_message": "We have received your request for early access. We will get back to you as soon as we launch the feature.",
|
||||
"contact_support": "Contact Support"
|
||||
}
|
||||
@@ -3,10 +3,9 @@
|
||||
"alert_channels": "Alert Channels",
|
||||
"organization_settings": "Organization Settings",
|
||||
"ingestion_settings": "Ingestion Settings",
|
||||
"api_keys": "API Keys",
|
||||
"api_keys": "Access Tokens",
|
||||
"my_settings": "My Settings",
|
||||
"overview_metrics": "Overview Metrics",
|
||||
"custom_domain_settings": "Custom Domain Settings",
|
||||
"dbcall_metrics": "Database Calls",
|
||||
"external_metrics": "External Calls",
|
||||
"pipeline": "Pipeline",
|
||||
|
||||
@@ -32,8 +32,7 @@
|
||||
"MY_SETTINGS": "SigNoz | My Settings",
|
||||
"ORG_SETTINGS": "SigNoz | Organization Settings",
|
||||
"INGESTION_SETTINGS": "SigNoz | Ingestion Settings",
|
||||
"API_KEYS": "SigNoz | API Keys",
|
||||
"CUSTOM_DOMAIN_SETTINGS": "SigNoz | Custom Domain Settings",
|
||||
"API_KEYS": "SigNoz | Access Tokens",
|
||||
"SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong",
|
||||
"UN_AUTHORIZED": "SigNoz | Unauthorized",
|
||||
"NOT_FOUND": "SigNoz | Page Not Found",
|
||||
@@ -46,7 +45,6 @@
|
||||
"PASSWORD_RESET": "SigNoz | Password Reset",
|
||||
"LIST_LICENSES": "SigNoz | List of Licenses",
|
||||
"WORKSPACE_LOCKED": "SigNoz | Workspace Locked",
|
||||
"WORKSPACE_SUSPENDED": "SigNoz | Workspace Suspended",
|
||||
"SUPPORT": "SigNoz | Support",
|
||||
"LOGS_SAVE_VIEWS": "SigNoz | Logs Saved Views",
|
||||
"TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views",
|
||||
@@ -55,7 +53,5 @@
|
||||
"INTEGRATIONS": "SigNoz | Integrations",
|
||||
"ALERT_HISTORY": "SigNoz | Alert Rule History",
|
||||
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
|
||||
"MESSAGING_QUEUES": "SigNoz | Messaging Queues",
|
||||
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
|
||||
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring"
|
||||
"MESSAGING_QUEUES": "SigNoz | Messaging Queues"
|
||||
}
|
||||
|
||||
@@ -1,17 +1,27 @@
|
||||
/* eslint-disable react-hooks/exhaustive-deps */
|
||||
import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
import getOrgUser from 'api/user/getOrgUser';
|
||||
import loginApi from 'api/user/login';
|
||||
import { Logout } from 'api/utils';
|
||||
import Spinner from 'components/Spinner';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import ROUTES from 'constants/routes';
|
||||
import useLicense from 'hooks/useLicense';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import history from 'lib/history';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { isEmpty, isNull } from 'lodash-es';
|
||||
import { ReactChild, useEffect, useMemo, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useQuery } from 'react-query';
|
||||
import { matchPath, useLocation } from 'react-router-dom';
|
||||
import { LicenseState, LicenseStatus } from 'types/api/licensesV3/getActive';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { matchPath, Redirect, useLocation } from 'react-router-dom';
|
||||
import { Dispatch } from 'redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { getInitialUserTokenRefreshToken } from 'store/utils';
|
||||
import AppActions from 'types/actions';
|
||||
import { UPDATE_USER_IS_FETCH } from 'types/actions/app';
|
||||
import { Organization } from 'types/api/user/getOrganization';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
import AppReducer from 'types/reducer/app';
|
||||
import { isCloudUser } from 'utils/app';
|
||||
import { routePermission } from 'utils/permission';
|
||||
|
||||
@@ -19,30 +29,30 @@ import routes, {
|
||||
LIST_LICENSES,
|
||||
oldNewRoutesMapping,
|
||||
oldRoutes,
|
||||
ROUTES_NOT_TO_BE_OVERRIDEN,
|
||||
SUPPORT_ROUTE,
|
||||
} from './routes';
|
||||
import afterLogin from './utils';
|
||||
|
||||
function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
const location = useLocation();
|
||||
const { pathname } = location;
|
||||
|
||||
const [isLoading, setIsLoading] = useState<boolean>(true);
|
||||
|
||||
const {
|
||||
org,
|
||||
orgPreferences,
|
||||
user,
|
||||
role,
|
||||
isUserFetching,
|
||||
isUserFetchingError,
|
||||
isLoggedIn: isLoggedInState,
|
||||
isFetchingOrgPreferences,
|
||||
licenses,
|
||||
isFetchingLicenses,
|
||||
activeLicenseV3,
|
||||
isFetchingActiveLicenseV3,
|
||||
} = useAppContext();
|
||||
} = useSelector<AppState, AppReducer>((state) => state.app);
|
||||
|
||||
const isAdmin = user.role === USER_ROLES.ADMIN;
|
||||
const mapRoutes = useMemo(
|
||||
() =>
|
||||
new Map(
|
||||
[...routes, LIST_LICENSES, SUPPORT_ROUTE].map((e) => {
|
||||
[...routes, LIST_LICENSES].map((e) => {
|
||||
const currentPath = matchPath(pathname, {
|
||||
path: e.path,
|
||||
});
|
||||
@@ -51,13 +61,52 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
),
|
||||
[pathname],
|
||||
);
|
||||
const isOldRoute = oldRoutes.indexOf(pathname) > -1;
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
|
||||
const isOnboardingComplete = useMemo(
|
||||
() =>
|
||||
orgPreferences?.find(
|
||||
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
|
||||
)?.value,
|
||||
[orgPreferences],
|
||||
);
|
||||
|
||||
const {
|
||||
data: licensesData,
|
||||
isFetching: isFetchingLicensesData,
|
||||
} = useLicense();
|
||||
|
||||
const { t } = useTranslation(['common']);
|
||||
|
||||
const isCloudUserVal = isCloudUser();
|
||||
|
||||
const localStorageUserAuthToken = getInitialUserTokenRefreshToken();
|
||||
|
||||
const dispatch = useDispatch<Dispatch<AppActions>>();
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
|
||||
const isOldRoute = oldRoutes.indexOf(pathname) > -1;
|
||||
|
||||
const [orgData, setOrgData] = useState<Organization | undefined>(undefined);
|
||||
|
||||
const { data: orgUsers, isFetching: isFetchingOrgUsers } = useQuery({
|
||||
const isLocalStorageLoggedIn =
|
||||
getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true';
|
||||
|
||||
const navigateToLoginIfNotLoggedIn = (isLoggedIn = isLoggedInState): void => {
|
||||
dispatch({
|
||||
type: UPDATE_USER_IS_FETCH,
|
||||
payload: {
|
||||
isUserFetching: false,
|
||||
},
|
||||
});
|
||||
if (!isLoggedIn) {
|
||||
history.push(ROUTES.LOGIN, { from: pathname });
|
||||
}
|
||||
};
|
||||
|
||||
const { data: orgUsers, isLoading: isLoadingOrgUsers } = useQuery({
|
||||
queryFn: () => {
|
||||
if (orgData && orgData.id !== undefined) {
|
||||
return getOrgUser({
|
||||
@@ -67,10 +116,10 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
return undefined;
|
||||
},
|
||||
queryKey: ['getOrgUser'],
|
||||
enabled: !isEmpty(orgData) && user.role === 'ADMIN',
|
||||
enabled: !isEmpty(orgData),
|
||||
});
|
||||
|
||||
const checkFirstTimeUser = useCallback((): boolean => {
|
||||
const checkFirstTimeUser = (): boolean => {
|
||||
const users = orgUsers?.payload || [];
|
||||
|
||||
const remainingUsers = users.filter(
|
||||
@@ -78,91 +127,127 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
);
|
||||
|
||||
return remainingUsers.length === 1;
|
||||
}, [orgUsers?.payload]);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
// Check if the onboarding should be shown based on the org users and onboarding completion status, wait for org users and preferences to load
|
||||
const shouldShowOnboarding = (): boolean => {
|
||||
// Only run this effect if the org users and preferences are loaded
|
||||
|
||||
if (!isLoadingOrgUsers && !isFetchingOrgPreferences) {
|
||||
const isFirstUser = checkFirstTimeUser();
|
||||
|
||||
// Redirect to get started if it's not the first user or if the onboarding is complete
|
||||
return isFirstUser && !isOnboardingComplete;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
const handleRedirectForOrgOnboarding = (key: string): void => {
|
||||
if (
|
||||
isLoggedInState &&
|
||||
isCloudUserVal &&
|
||||
!isFetchingOrgPreferences &&
|
||||
orgPreferences &&
|
||||
!isFetchingOrgUsers &&
|
||||
orgUsers &&
|
||||
orgUsers.payload
|
||||
!isLoadingOrgUsers &&
|
||||
!isEmpty(orgUsers?.payload) &&
|
||||
!isNull(orgPreferences)
|
||||
) {
|
||||
const isOnboardingComplete = orgPreferences?.find(
|
||||
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
|
||||
)?.value;
|
||||
if (key === 'ONBOARDING' && isOnboardingComplete) {
|
||||
history.push(ROUTES.APPLICATION);
|
||||
}
|
||||
|
||||
const isFirstUser = checkFirstTimeUser();
|
||||
if (
|
||||
isFirstUser &&
|
||||
!isOnboardingComplete &&
|
||||
// if the current route is allowed to be overriden by org onboarding then only do the same
|
||||
!ROUTES_NOT_TO_BE_OVERRIDEN.includes(pathname)
|
||||
) {
|
||||
const isFirstTimeUser = checkFirstTimeUser();
|
||||
|
||||
if (isFirstTimeUser && !isOnboardingComplete) {
|
||||
history.push(ROUTES.ONBOARDING);
|
||||
}
|
||||
}
|
||||
}, [
|
||||
checkFirstTimeUser,
|
||||
isCloudUserVal,
|
||||
isFetchingOrgPreferences,
|
||||
isFetchingOrgUsers,
|
||||
orgPreferences,
|
||||
orgUsers,
|
||||
pathname,
|
||||
]);
|
||||
|
||||
if (!isCloudUserVal && key === 'ONBOARDING') {
|
||||
history.push(ROUTES.APPLICATION);
|
||||
}
|
||||
};
|
||||
|
||||
const handleUserLoginIfTokenPresent = async (
|
||||
key: keyof typeof ROUTES,
|
||||
): Promise<void> => {
|
||||
if (localStorageUserAuthToken?.refreshJwt) {
|
||||
// localstorage token is present
|
||||
|
||||
// renew web access token
|
||||
const response = await loginApi({
|
||||
refreshToken: localStorageUserAuthToken?.refreshJwt,
|
||||
});
|
||||
|
||||
if (response.statusCode === 200) {
|
||||
const route = routePermission[key];
|
||||
|
||||
// get all resource and put it over redux
|
||||
const userResponse = await afterLogin(
|
||||
response.payload.userId,
|
||||
response.payload.accessJwt,
|
||||
response.payload.refreshJwt,
|
||||
);
|
||||
|
||||
handleRedirectForOrgOnboarding(key);
|
||||
|
||||
if (
|
||||
userResponse &&
|
||||
route &&
|
||||
route.find((e) => e === userResponse.payload.role) === undefined
|
||||
) {
|
||||
history.push(ROUTES.UN_AUTHORIZED);
|
||||
}
|
||||
} else {
|
||||
Logout();
|
||||
|
||||
notifications.error({
|
||||
message: response.error || t('something_went_wrong'),
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handlePrivateRoutes = async (
|
||||
key: keyof typeof ROUTES,
|
||||
): Promise<void> => {
|
||||
if (
|
||||
localStorageUserAuthToken &&
|
||||
localStorageUserAuthToken.refreshJwt &&
|
||||
isUserFetching
|
||||
) {
|
||||
handleUserLoginIfTokenPresent(key);
|
||||
} else {
|
||||
handleRedirectForOrgOnboarding(key);
|
||||
|
||||
navigateToLoginIfNotLoggedIn(isLocalStorageLoggedIn);
|
||||
}
|
||||
};
|
||||
|
||||
const navigateToWorkSpaceBlocked = (route: any): void => {
|
||||
const { path } = route;
|
||||
|
||||
const isRouteEnabledForWorkspaceBlockedState =
|
||||
isAdmin &&
|
||||
(path === ROUTES.ORG_SETTINGS ||
|
||||
path === ROUTES.BILLING ||
|
||||
path === ROUTES.MY_SETTINGS);
|
||||
|
||||
if (
|
||||
path &&
|
||||
path !== ROUTES.WORKSPACE_LOCKED &&
|
||||
!isRouteEnabledForWorkspaceBlockedState
|
||||
) {
|
||||
if (path && path !== ROUTES.WORKSPACE_LOCKED) {
|
||||
history.push(ROUTES.WORKSPACE_LOCKED);
|
||||
|
||||
dispatch({
|
||||
type: UPDATE_USER_IS_FETCH,
|
||||
payload: {
|
||||
isUserFetching: false,
|
||||
},
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingLicenses) {
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
const shouldBlockWorkspace = licenses?.workSpaceBlock;
|
||||
if (!isFetchingLicensesData) {
|
||||
const shouldBlockWorkspace = licensesData?.payload?.workSpaceBlock;
|
||||
|
||||
if (shouldBlockWorkspace && currentRoute) {
|
||||
if (shouldBlockWorkspace) {
|
||||
navigateToWorkSpaceBlocked(currentRoute);
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isFetchingLicenses, licenses?.workSpaceBlock, mapRoutes, pathname]);
|
||||
|
||||
const navigateToWorkSpaceSuspended = (route: any): void => {
|
||||
const { path } = route;
|
||||
|
||||
if (path && path !== ROUTES.WORKSPACE_SUSPENDED) {
|
||||
history.push(ROUTES.WORKSPACE_SUSPENDED);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
|
||||
const currentRoute = mapRoutes.get('current');
|
||||
const shouldSuspendWorkspace =
|
||||
activeLicenseV3.status === LicenseStatus.SUSPENDED &&
|
||||
activeLicenseV3.state === LicenseState.PAYMENT_FAILED;
|
||||
|
||||
if (shouldSuspendWorkspace && currentRoute) {
|
||||
navigateToWorkSpaceSuspended(currentRoute);
|
||||
}
|
||||
}
|
||||
}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
|
||||
}, [isFetchingLicensesData]);
|
||||
|
||||
useEffect(() => {
|
||||
if (org && org.length > 0 && org[0].id !== undefined) {
|
||||
@@ -170,70 +255,103 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
}
|
||||
}, [org]);
|
||||
|
||||
const handleRouting = (): void => {
|
||||
const showOrgOnboarding = shouldShowOnboarding();
|
||||
|
||||
if (showOrgOnboarding && !isOnboardingComplete && isCloudUserVal) {
|
history.push(ROUTES.ONBOARDING);
} else {
history.push(ROUTES.APPLICATION);
}
};

useEffect(() => {
const { isPrivate } = currentRoute || {
isPrivate: false,
};

if (isLoggedInState && role && role !== 'ADMIN') {
setIsLoading(false);
}

if (!isPrivate) {
setIsLoading(false);
}

if (
!isEmpty(user) &&
!isFetchingOrgPreferences &&
!isEmpty(orgUsers?.payload) &&
!isNull(orgPreferences)
) {
setIsLoading(false);
}
}, [currentRoute, user, role, orgUsers, orgPreferences]);

// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
// if it is an old route navigate to the new route
if (isOldRoute) {
const redirectUrl = oldNewRoutesMapping[pathname];
(async (): Promise<void> => {
try {
if (isOldRoute) {
const redirectUrl = oldNewRoutesMapping[pathname];

const newLocation = {
...location,
pathname: redirectUrl,
};
history.replace(newLocation);
return;
}
// if the current route
if (currentRoute) {
const { isPrivate, key } = currentRoute;
if (isPrivate) {
if (isLoggedInState) {
const route = routePermission[key];
if (route && route.find((e) => e === user.role) === undefined) {
history.push(ROUTES.UN_AUTHORIZED);
const newLocation = {
...location,
pathname: redirectUrl,
};
history.replace(newLocation);
}

if (currentRoute) {
const { isPrivate, key } = currentRoute;

if (isPrivate && key !== String(ROUTES.WORKSPACE_LOCKED)) {
handlePrivateRoutes(key);
} else {
// no need to fetch the user and make user fetching false
if (getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true') {
handleRouting();
}
dispatch({
type: UPDATE_USER_IS_FETCH,
payload: {
isUserFetching: false,
},
});
}
} else if (pathname === ROUTES.HOME_PAGE) {
// routing to application page over root page
if (isLoggedInState) {
handleRouting();
} else {
navigateToLoginIfNotLoggedIn();
}
} else {
setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
history.push(ROUTES.LOGIN);
// not found
navigateToLoginIfNotLoggedIn(isLocalStorageLoggedIn);
}
} else if (isLoggedInState) {
const fromPathname = getLocalStorageApi(
LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
);
if (fromPathname) {
history.push(fromPathname);
setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
} else if (pathname !== ROUTES.SOMETHING_WENT_WRONG) {
history.push(ROUTES.APPLICATION);
}
} else {
// do nothing as the unauthenticated routes are LOGIN and SIGNUP and the LOGIN container takes care of routing to signup if
// setup is not completed
} catch (error) {
// something went wrong
history.push(ROUTES.SOMETHING_WENT_WRONG);
}
} else if (isLoggedInState) {
const fromPathname = getLocalStorageApi(
LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
);
if (fromPathname) {
history.push(fromPathname);
setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
} else {
history.push(ROUTES.APPLICATION);
}
} else {
setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
history.push(ROUTES.LOGIN);
}
})();
}, [
licenses,
dispatch,
isLoggedInState,
pathname,
user,
isOldRoute,
currentRoute,
location,
licensesData,
orgUsers,
orgPreferences,
]);

if (isUserFetchingError) {
return <Redirect to={ROUTES.SOMETHING_WENT_WRONG} />;
}

if (isUserFetching || isLoading) {
return <Spinner tip="Loading..." />;
}

// NOTE: disabling this rule as there is no need to have div
// eslint-disable-next-line react/jsx-no-useless-fragment
return <>{children}</>;

@@ -1,6 +1,8 @@
import { ConfigProvider } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import getAllOrgPreferences from 'api/preferences/getAllOrgPreferences';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import { FeatureKeys } from 'constants/features';
@@ -9,21 +11,34 @@ import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
import useAnalytics from 'hooks/analytics/useAnalytics';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import useFeatureFlags from 'hooks/useFeatureFlag';
import useGetFeatureFlag from 'hooks/useGetFeatureFlag';
import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history';
import { identity, pickBy } from 'lodash-es';
import { identity, pick, pickBy } from 'lodash-es';
import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Suspense, useEffect, useState } from 'react';
import { useQuery } from 'react-query';
import { useDispatch, useSelector } from 'react-redux';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import {
UPDATE_FEATURE_FLAG_RESPONSE,
UPDATE_IS_FETCHING_ORG_PREFERENCES,
UPDATE_ORG_PREFERENCES,
} from 'types/actions/app';
import AppReducer, { User } from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import { extractDomain, isCloudUser, isEECloudUser } from 'utils/app';

import PrivateRoute from './Private';
@@ -35,20 +50,14 @@ import defaultRoutes, {

function App(): JSX.Element {
const themeConfig = useThemeConfig();
const {
licenses,
user,
isFetchingUser,
isFetchingLicenses,
isFetchingFeatureFlags,
userFetchError,
licensesFetchError,
featureFlagsFetchError,
isLoggedIn: isLoggedInState,
featureFlags,
org,
} = useAppContext();
const { data: licenseData } = useLicense();
const [routes, setRoutes] = useState<AppRoutes[]>(defaultRoutes);
const { role, isLoggedIn: isLoggedInState, user, org } = useSelector<
AppState,
AppReducer
>((state) => state.app);

const dispatch = useDispatch<Dispatch<AppActions>>();

const { trackPageView } = useAnalytics();

@@ -56,114 +65,164 @@ function App(): JSX.Element {

const isCloudUserVal = isCloudUser();

const enableAnalytics = useCallback(
(user: IUser): void => {
// wait for the required data to be loaded before doing init for anything!
if (!isFetchingLicenses && licenses && org) {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].name : '';
const isDarkMode = useIsDarkMode();

const { name, email, role } = user;
const isChatSupportEnabled =
useFeatureFlags(FeatureKeys.CHAT_SUPPORT)?.active || false;

const identifyPayload = {
email,
name,
company_name: orgName,
role,
source: 'signoz-ui',
};
const isPremiumSupportEnabled =
useFeatureFlags(FeatureKeys.PREMIUM_SUPPORT)?.active || false;

const sanitizedIdentifyPayload = pickBy(identifyPayload, identity);
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
const { data: orgPreferences, isLoading: isLoadingOrgPreferences } = useQuery({
queryFn: () => getAllOrgPreferences(),
queryKey: ['getOrgPreferences'],
enabled: isLoggedInState && role === USER_ROLES.ADMIN,
});

const groupTraits = {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
};

window.analytics.identify(email, sanitizedIdentifyPayload);
window.analytics.group(domain, groupTraits);

posthog?.identify(email, {
email,
name,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!licenses?.trialConvertedToSubscription,
});

posthog?.group('company', domain, {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!licenses?.trialConvertedToSubscription,
});
}
},
[hostname, isFetchingLicenses, licenses, org],
);

// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
if (
!isFetchingLicenses &&
licenses &&
!isFetchingUser &&
user &&
!!user.email
) {
const isOnBasicPlan =
licenses.licenses?.some(
(license) =>
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
) || licenses.licenses === null;
if (orgPreferences && !isLoadingOrgPreferences) {
dispatch({
type: UPDATE_IS_FETCHING_ORG_PREFERENCES,
payload: {
isFetchingOrgPreferences: false,
},
});

const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER);

if (isLoggedInState && user && user.id && user.email && !isIdentifiedUser) {
setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true');
}

let updatedRoutes = defaultRoutes;
// if the user is a cloud user
if (isCloudUserVal || isEECloudUser()) {
// if the user is on basic plan then remove billing
if (isOnBasicPlan) {
updatedRoutes = updatedRoutes.filter(
(route) => route?.path !== ROUTES.BILLING,
);
}
// always add support route for cloud users
updatedRoutes = [...updatedRoutes, SUPPORT_ROUTE];
} else {
// if not a cloud user then remove billing and add list licenses route
updatedRoutes = updatedRoutes.filter(
(route) => route?.path !== ROUTES.BILLING,
);
updatedRoutes = [...updatedRoutes, LIST_LICENSES];
}
setRoutes(updatedRoutes);
dispatch({
type: UPDATE_ORG_PREFERENCES,
payload: {
orgPreferences: orgPreferences.payload?.data || null,
},
});
}
}, [
isLoggedInState,
user,
licenses,
isCloudUserVal,
isFetchingLicenses,
isFetchingUser,
]);
}, [orgPreferences, dispatch, isLoadingOrgPreferences]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isLoggedInState && role !== USER_ROLES.ADMIN) {
|
||||
dispatch({
|
||||
type: UPDATE_IS_FETCHING_ORG_PREFERENCES,
|
||||
payload: {
|
||||
isFetchingOrgPreferences: false,
|
||||
},
|
||||
});
|
||||
}
|
||||
}, [isLoggedInState, role, dispatch]);
|
||||
|
||||
const featureResponse = useGetFeatureFlag((allFlags) => {
|
||||
dispatch({
|
||||
type: UPDATE_FEATURE_FLAG_RESPONSE,
|
||||
payload: {
|
||||
featureFlag: allFlags,
|
||||
refetch: featureResponse.refetch,
|
||||
},
|
||||
});
|
||||
|
||||
const isOnboardingEnabled =
|
||||
allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
|
||||
false;
|
||||
|
||||
if (!isOnboardingEnabled || !isCloudUserVal) {
|
||||
const newRoutes = routes.filter(
|
||||
(route) => route?.path !== ROUTES.GET_STARTED,
|
||||
);
|
||||
|
||||
setRoutes(newRoutes);
|
||||
}
|
||||
});
|
||||
|
||||
const isOnBasicPlan =
|
||||
licenseData?.payload?.licenses?.some(
|
||||
(license) =>
|
||||
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
|
||||
) || licenseData?.payload?.licenses === null;
|
||||
|
||||
const enableAnalytics = (user: User): void => {
|
||||
const orgName =
|
||||
org && Array.isArray(org) && org.length > 0 ? org[0].name : '';
|
||||
|
||||
const { name, email } = user;
|
||||
|
||||
const identifyPayload = {
|
||||
email,
|
||||
name,
|
||||
company_name: orgName,
|
||||
role,
|
||||
source: 'signoz-ui',
|
||||
};
|
||||
|
||||
const sanitizedIdentifyPayload = pickBy(identifyPayload, identity);
|
||||
const domain = extractDomain(email);
|
||||
const hostNameParts = hostname.split('.');
|
||||
|
||||
const groupTraits = {
|
||||
name: orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
};
|
||||
|
||||
window.analytics.identify(email, sanitizedIdentifyPayload);
|
||||
window.analytics.group(domain, groupTraits);
|
||||
|
||||
posthog?.identify(email, {
|
||||
email,
|
||||
name,
|
||||
orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
|
||||
});
|
||||
|
||||
posthog?.group('company', domain, {
|
||||
name: orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
|
||||
});
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER);
|
||||
|
||||
if (
|
||||
isLoggedInState &&
|
||||
user &&
|
||||
user.userId &&
|
||||
user.email &&
|
||||
!isIdentifiedUser
|
||||
) {
|
||||
setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true');
|
||||
}
|
||||
|
||||
if (
|
||||
isOnBasicPlan ||
|
||||
(isLoggedInState && role && role !== 'ADMIN') ||
|
||||
!(isCloudUserVal || isEECloudUser())
|
||||
) {
|
||||
const newRoutes = routes.filter((route) => route?.path !== ROUTES.BILLING);
|
||||
setRoutes(newRoutes);
|
||||
}
|
||||
|
||||
if (isCloudUserVal || isEECloudUser()) {
|
||||
const newRoutes = [...routes, SUPPORT_ROUTE];
|
||||
|
||||
setRoutes(newRoutes);
|
||||
} else {
|
||||
const newRoutes = [...routes, LIST_LICENSES];
|
||||
|
||||
setRoutes(newRoutes);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isLoggedInState, isOnBasicPlan, user]);
|
||||
|
||||
useEffect(() => {
|
||||
if (pathname === ROUTES.ONBOARDING) {
|
||||
@@ -177,85 +236,59 @@ function App(): JSX.Element {
|
||||
}
|
||||
|
||||
trackPageView(pathname);
|
||||
}, [pathname, trackPageView]);
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [pathname]);
|
||||
|
||||
useEffect(() => {
|
||||
// feature flag shouldn't be loading and featureFlags or fetchError any one of this should be true indicating that req is complete
|
||||
// licenses should also be present. there is no check for licenses for loading and error as that is mandatory if not present then routing
|
||||
// to something went wrong which would ideally need a reload.
|
||||
if (
|
||||
!isFetchingFeatureFlags &&
|
||||
(featureFlags || featureFlagsFetchError) &&
|
||||
licenses
|
||||
) {
|
||||
let isChatSupportEnabled = false;
|
||||
let isPremiumSupportEnabled = false;
|
||||
if (featureFlags && featureFlags.length > 0) {
|
||||
isChatSupportEnabled =
|
||||
featureFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)
|
||||
?.active || false;
|
||||
const showAddCreditCardModal =
|
||||
!isPremiumSupportEnabled &&
|
||||
!licenseData?.payload?.trialConvertedToSubscription;
|
||||
|
||||
isPremiumSupportEnabled =
|
||||
featureFlags.find((flag) => flag.name === FeatureKeys.PREMIUM_SUPPORT)
|
||||
?.active || false;
|
||||
}
|
||||
const showAddCreditCardModal =
|
||||
!isPremiumSupportEnabled && !licenses.trialConvertedToSubscription;
|
||||
|
||||
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
|
||||
window.Intercom('boot', {
|
||||
app_id: process.env.INTERCOM_APP_ID,
|
||||
email: user?.email || '',
|
||||
name: user?.name || '',
|
||||
});
|
||||
}
|
||||
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
|
||||
window.Intercom('boot', {
|
||||
app_id: process.env.INTERCOM_APP_ID,
|
||||
email: user?.email || '',
|
||||
name: user?.name || '',
|
||||
});
|
||||
}
|
||||
}, [
|
||||
isLoggedInState,
|
||||
isChatSupportEnabled,
|
||||
user,
|
||||
licenseData,
|
||||
isPremiumSupportEnabled,
|
||||
pathname,
|
||||
licenses?.trialConvertedToSubscription,
|
||||
featureFlags,
|
||||
isFetchingFeatureFlags,
|
||||
featureFlagsFetchError,
|
||||
licenses,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingUser && isCloudUserVal && user && user.email) {
|
||||
if (user && user?.email && user?.userId && user?.name) {
|
||||
try {
|
||||
const isThemeAnalyticsSent = getLocalStorageApi(
|
||||
LOCALSTORAGE.THEME_ANALYTICS_V1,
|
||||
);
|
||||
if (!isThemeAnalyticsSent) {
|
||||
logEvent('Theme Analytics', {
|
||||
theme: isDarkMode ? THEME_MODE.DARK : THEME_MODE.LIGHT,
|
||||
user: pick(user, ['email', 'userId', 'name']),
|
||||
org,
|
||||
});
|
||||
setLocalStorageApi(LOCALSTORAGE.THEME_ANALYTICS_V1, 'true');
|
||||
}
|
||||
} catch {
|
||||
console.error('Failed to parse local storage theme analytics event');
|
||||
}
|
||||
}
|
||||
|
||||
if (isCloudUserVal && user && user.email) {
|
||||
enableAnalytics(user);
|
||||
}
|
||||
}, [user, isFetchingUser, isCloudUserVal, enableAnalytics]);
|
||||
|
||||
// if the user is in logged in state
|
||||
if (isLoggedInState) {
|
||||
if (pathname === ROUTES.HOME_PAGE) {
|
||||
history.replace(ROUTES.APPLICATION);
|
||||
}
|
||||
// if the setup calls are loading then return a spinner
|
||||
if (isFetchingLicenses || isFetchingUser || isFetchingFeatureFlags) {
|
||||
return <Spinner tip="Loading..." />;
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [user]);
|
||||
|
||||
// if the required calls fails then return a something went wrong error
|
||||
// this needs to be on top of data missing error because if there is an error, data will never be loaded and it will
|
||||
// move to indefinitive loading
|
||||
if (
|
||||
(userFetchError || licensesFetchError) &&
|
||||
pathname !== ROUTES.SOMETHING_WENT_WRONG
|
||||
) {
|
||||
history.replace(ROUTES.SOMETHING_WENT_WRONG);
|
||||
}
|
||||
|
||||
// if all of the data is not set then return a spinner, this is required because there is some gap between loading states and data setting
|
||||
if (
|
||||
(!licenses || !user.email || !featureFlags) &&
|
||||
!userFetchError &&
|
||||
!licensesFetchError
|
||||
) {
|
||||
return <Spinner tip="Loading..." />;
|
||||
}
|
||||
}
|
||||
useEffect(() => {
|
||||
console.info('We are hiring! https://jobs.gem.com/signoz');
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
|
||||
@@ -145,11 +145,6 @@ export const MySettings = Loadable(
|
||||
() => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'),
|
||||
);
|
||||
|
||||
export const CustomDomainSettings = Loadable(
|
||||
() =>
|
||||
import(/* webpackChunkName: "Custom Domain Settings" */ 'pages/Settings'),
|
||||
);
|
||||
|
||||
export const Logs = Loadable(
|
||||
() => import(/* webpackChunkName: "Logs" */ 'pages/LogsModulePage'),
|
||||
);
|
||||
@@ -185,7 +180,7 @@ export const PasswordReset = Loadable(
|
||||
export const SomethingWentWrong = Loadable(
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "ErrorBoundaryFallback" */ 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'
|
||||
/* webpackChunkName: "SomethingWentWrong" */ 'pages/SomethingWentWrong'
|
||||
),
|
||||
);
|
||||
|
||||
@@ -211,13 +206,6 @@ export const WorkspaceBlocked = Loadable(
|
||||
import(/* webpackChunkName: "WorkspaceLocked" */ 'pages/WorkspaceLocked'),
|
||||
);
|
||||
|
||||
export const WorkspaceSuspended = Loadable(
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "WorkspaceSuspended" */ 'pages/WorkspaceSuspended/WorkspaceSuspended'
|
||||
),
|
||||
);
|
||||
|
||||
export const ShortcutsPage = Loadable(
|
||||
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'),
|
||||
);
|
||||
@@ -240,10 +228,3 @@ export const MQDetailPage = Loadable(
|
||||
/* webpackChunkName: "MQDetailPage" */ 'pages/MessagingQueues/MQDetailPage'
|
||||
),
|
||||
);
|
||||
|
||||
export const InfrastructureMonitoring = Loadable(
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "InfrastructureMonitoring" */ 'pages/InfrastructureMonitoring'
|
||||
),
|
||||
);
|
||||
|
||||
@@ -10,13 +10,11 @@ import {
|
||||
BillingPage,
|
||||
CreateAlertChannelAlerts,
|
||||
CreateNewAlerts,
|
||||
CustomDomainSettings,
|
||||
DashboardPage,
|
||||
DashboardWidget,
|
||||
EditAlertChannelsAlerts,
|
||||
EditRulesPage,
|
||||
ErrorDetails,
|
||||
InfrastructureMonitoring,
|
||||
IngestionSettings,
|
||||
InstalledIntegrations,
|
||||
LicensePage,
|
||||
@@ -54,7 +52,6 @@ import {
|
||||
UnAuthorized,
|
||||
UsageExplorerPage,
|
||||
WorkspaceBlocked,
|
||||
WorkspaceSuspended,
|
||||
} from './pageComponents';
|
||||
|
||||
const routes: AppRoutes[] = [
|
||||
@@ -289,13 +286,6 @@ const routes: AppRoutes[] = [
|
||||
isPrivate: true,
|
||||
key: 'MY_SETTINGS',
|
||||
},
|
||||
{
|
||||
path: ROUTES.CUSTOM_DOMAIN_SETTINGS,
|
||||
exact: true,
|
||||
component: CustomDomainSettings,
|
||||
isPrivate: true,
|
||||
key: 'CUSTOM_DOMAIN_SETTINGS',
|
||||
},
|
||||
{
|
||||
path: ROUTES.LOGS,
|
||||
exact: true,
|
||||
@@ -373,13 +363,6 @@ const routes: AppRoutes[] = [
|
||||
isPrivate: true,
|
||||
key: 'WORKSPACE_LOCKED',
|
||||
},
|
||||
{
|
||||
path: ROUTES.WORKSPACE_SUSPENDED,
|
||||
exact: true,
|
||||
component: WorkspaceSuspended,
|
||||
isPrivate: true,
|
||||
key: 'WORKSPACE_SUSPENDED',
|
||||
},
|
||||
{
|
||||
path: ROUTES.SHORTCUTS,
|
||||
exact: true,
|
||||
@@ -408,20 +391,6 @@ const routes: AppRoutes[] = [
|
||||
key: 'MESSAGING_QUEUES_DETAIL',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
|
||||
exact: true,
|
||||
component: InfrastructureMonitoring,
|
||||
key: 'INFRASTRUCTURE_MONITORING_HOSTS',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.INFRASTRUCTURE_MONITORING_KUBERNETES,
|
||||
exact: true,
|
||||
component: InfrastructureMonitoring,
|
||||
key: 'INFRASTRUCTURE_MONITORING_KUBERNETES',
|
||||
isPrivate: true,
|
||||
},
|
||||
];
|
||||
|
||||
export const SUPPORT_ROUTE: AppRoutes = {
|
||||
@@ -442,27 +411,24 @@ export const LIST_LICENSES: AppRoutes = {
|
||||
|
||||
export const oldRoutes = [
|
||||
'/pipelines',
|
||||
'/logs/old-logs-explorer',
|
||||
'/logs-explorer',
|
||||
'/logs-explorer/live',
|
||||
'/logs-save-views',
|
||||
'/traces-save-views',
|
||||
'/settings/access-tokens',
|
||||
'/settings/api-keys',
|
||||
];
|
||||
|
||||
export const oldNewRoutesMapping: Record<string, string> = {
|
||||
'/pipelines': '/logs/pipelines',
|
||||
'/logs/old-logs-explorer': '/logs/old-logs-explorer',
|
||||
'/logs-explorer': '/logs/logs-explorer',
|
||||
'/logs-explorer/live': '/logs/logs-explorer/live',
|
||||
'/logs-save-views': '/logs/saved-views',
|
||||
'/traces-save-views': '/traces/saved-views',
|
||||
'/settings/access-tokens': '/settings/api-keys',
|
||||
'/settings/api-keys': '/settings/access-tokens',
|
||||
};
|
||||
|
||||
export const ROUTES_NOT_TO_BE_OVERRIDEN: string[] = [
|
||||
ROUTES.WORKSPACE_LOCKED,
|
||||
ROUTES.WORKSPACE_SUSPENDED,
|
||||
];
|
||||
|
||||
export interface AppRoutes {
|
||||
component: RouteProps['component'];
|
||||
path: RouteProps['path'];
|
||||
|
||||
@@ -1,28 +1,92 @@
|
||||
import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
import getUserApi from 'api/user/getUser';
|
||||
import { Logout } from 'api/utils';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import store from 'store';
|
||||
import AppActions from 'types/actions';
|
||||
import {
|
||||
LOGGED_IN,
|
||||
UPDATE_USER,
|
||||
UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
|
||||
UPDATE_USER_IS_FETCH,
|
||||
} from 'types/actions/app';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { PayloadProps } from 'types/api/user/getUser';
|
||||
|
||||
const afterLogin = (
|
||||
const afterLogin = async (
|
||||
userId: string,
|
||||
authToken: string,
|
||||
refreshToken: string,
|
||||
interceptorRejected?: boolean,
|
||||
): void => {
|
||||
): Promise<SuccessResponse<PayloadProps> | undefined> => {
|
||||
setLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN, authToken);
|
||||
setLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN, refreshToken);
|
||||
setLocalStorageApi(LOCALSTORAGE.USER_ID, userId);
|
||||
setLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN, 'true');
|
||||
|
||||
if (!interceptorRejected) {
|
||||
window.dispatchEvent(
|
||||
new CustomEvent('AFTER_LOGIN', {
|
||||
detail: {
|
||||
accessJWT: authToken,
|
||||
refreshJWT: refreshToken,
|
||||
id: userId,
|
||||
},
|
||||
}),
|
||||
);
|
||||
store.dispatch<AppActions>({
|
||||
type: UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
|
||||
payload: {
|
||||
accessJwt: authToken,
|
||||
refreshJwt: refreshToken,
|
||||
},
|
||||
});
|
||||
|
||||
const [getUserResponse] = await Promise.all([
|
||||
getUserApi({
|
||||
userId,
|
||||
token: authToken,
|
||||
}),
|
||||
]);
|
||||
|
||||
if (getUserResponse.statusCode === 200 && getUserResponse.payload) {
|
||||
store.dispatch<AppActions>({
|
||||
type: LOGGED_IN,
|
||||
payload: {
|
||||
isLoggedIn: true,
|
||||
},
|
||||
});
|
||||
|
||||
const { payload } = getUserResponse;
|
||||
|
||||
store.dispatch<AppActions>({
|
||||
type: UPDATE_USER,
|
||||
payload: {
|
||||
ROLE: payload.role,
|
||||
email: payload.email,
|
||||
name: payload.name,
|
||||
orgName: payload.organization,
|
||||
profilePictureURL: payload.profilePictureURL,
|
||||
userId: payload.id,
|
||||
orgId: payload.orgId,
|
||||
userFlags: payload.flags,
|
||||
},
|
||||
});
|
||||
|
||||
const isLoggedInLocalStorage = getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN);
|
||||
|
||||
if (isLoggedInLocalStorage === null) {
|
||||
setLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN, 'true');
|
||||
}
|
||||
|
||||
store.dispatch({
|
||||
type: UPDATE_USER_IS_FETCH,
|
||||
payload: {
|
||||
isUserFetching: false,
|
||||
},
|
||||
});
|
||||
|
||||
return getUserResponse;
|
||||
}
|
||||
|
||||
store.dispatch({
|
||||
type: UPDATE_USER_IS_FETCH,
|
||||
payload: {
|
||||
isUserFetching: false,
|
||||
},
|
||||
});
|
||||
|
||||
Logout();
|
||||
|
||||
return undefined;
|
||||
};
|
||||
|
||||
export default afterLogin;
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
import { GatewayApiV2Instance as axios } from 'api';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { DeploymentsDataProps } from 'types/api/customDomain/types';
|
||||
|
||||
export const getDeploymentsData = (): Promise<
|
||||
AxiosResponse<DeploymentsDataProps>
|
||||
> => axios.get(`/deployments/me`);
|
||||
@@ -1,16 +0,0 @@
|
||||
import { GatewayApiV2Instance as axios } from 'api';
|
||||
import { AxiosError } from 'axios';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import {
|
||||
PayloadProps,
|
||||
UpdateCustomDomainProps,
|
||||
} from 'types/api/customDomain/types';
|
||||
|
||||
const updateSubDomainAPI = async (
|
||||
props: UpdateCustomDomainProps,
|
||||
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
|
||||
axios.put(`/deployments/me/host`, {
|
||||
...props.data,
|
||||
});
|
||||
|
||||
export default updateSubDomainAPI;
|
||||
@@ -7,6 +7,7 @@ import afterLogin from 'AppRoutes/utils';
|
||||
import axios, { AxiosResponse, InternalAxiosRequestConfig } from 'axios';
|
||||
import { ENVIRONMENT } from 'constants/env';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import store from 'store';
|
||||
|
||||
import apiV1, {
|
||||
apiAlertManager,
|
||||
@@ -25,7 +26,10 @@ const interceptorsResponse = (
|
||||
const interceptorsRequestResponse = (
|
||||
value: InternalAxiosRequestConfig,
|
||||
): InternalAxiosRequestConfig => {
|
||||
const token = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) || '';
|
||||
const token =
|
||||
store.getState().app.user?.accessJwt ||
|
||||
getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) ||
|
||||
'';
|
||||
|
||||
if (value && value.headers) {
|
||||
value.headers.Authorization = token ? `Bearer ${token}` : '';
|
||||
@@ -43,36 +47,41 @@ const interceptorRejected = async (
|
||||
// reject the refresh token error
|
||||
if (response.status === 401 && response.config.url !== '/login') {
|
||||
const response = await loginApi({
|
||||
refreshToken: getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN) || '',
|
||||
refreshToken: store.getState().app.user?.refreshJwt,
|
||||
});
|
||||
|
||||
if (response.statusCode === 200) {
|
||||
afterLogin(
|
||||
const user = await afterLogin(
|
||||
response.payload.userId,
|
||||
response.payload.accessJwt,
|
||||
response.payload.refreshJwt,
|
||||
true,
|
||||
);
|
||||
|
||||
const reResponse = await axios(
|
||||
`${value.config.baseURL}${value.config.url?.substring(1)}`,
|
||||
{
|
||||
method: value.config.method,
|
||||
headers: {
|
||||
...value.config.headers,
|
||||
Authorization: `Bearer ${response.payload.accessJwt}`,
|
||||
if (user) {
|
||||
const reResponse = await axios(
|
||||
`${value.config.baseURL}${value.config.url?.substring(1)}`,
|
||||
{
|
||||
method: value.config.method,
|
||||
headers: {
|
||||
...value.config.headers,
|
||||
Authorization: `Bearer ${response.payload.accessJwt}`,
|
||||
},
|
||||
data: {
|
||||
...JSON.parse(value.config.data || '{}'),
|
||||
},
|
||||
},
|
||||
data: {
|
||||
...JSON.parse(value.config.data || '{}'),
|
||||
},
|
||||
},
|
||||
);
|
||||
);
|
||||
|
||||
if (reResponse.status === 200) {
|
||||
return await Promise.resolve(reResponse);
|
||||
if (reResponse.status === 200) {
|
||||
return await Promise.resolve(reResponse);
|
||||
}
|
||||
Logout();
|
||||
|
||||
return await Promise.reject(reResponse);
|
||||
}
|
||||
Logout();
|
||||
return await Promise.reject(reResponse);
|
||||
|
||||
return await Promise.reject(value);
|
||||
}
|
||||
Logout();
|
||||
}
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError, AxiosResponse } from 'axios';
|
||||
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
|
||||
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
|
||||
import { createIdFromObjectFields } from 'lib/createIdFromObjectFields';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
IQueryAutocompleteResponse,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
export const getHostAttributeKeys = async (
|
||||
searchText = '',
|
||||
entity: K8sCategory,
|
||||
): Promise<SuccessResponse<IQueryAutocompleteResponse> | ErrorResponse> => {
|
||||
try {
|
||||
const response: AxiosResponse<{
|
||||
data: IQueryAutocompleteResponse;
|
||||
}> = await ApiBaseInstance.get(
|
||||
`/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
|
||||
{
|
||||
params: {
|
||||
limit: 500,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const payload: BaseAutocompleteData[] =
|
||||
response.data.data.attributeKeys?.map(({ id: _, ...item }) => ({
|
||||
...item,
|
||||
id: createIdFromObjectFields(item, baseAutoCompleteIdKeysOrder),
|
||||
})) || [];
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.statusText,
|
||||
payload: { attributeKeys: payload },
|
||||
};
|
||||
} catch (e) {
|
||||
return ErrorResponseHandler(e as AxiosError);
|
||||
}
|
||||
};
|
||||
@@ -1,77 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
export interface HostListPayload {
|
||||
filters: TagFilter;
|
||||
groupBy: BaseAutocompleteData[];
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
orderBy?: {
|
||||
columnName: string;
|
||||
order: 'asc' | 'desc';
|
||||
};
|
||||
}
|
||||
|
||||
export interface TimeSeriesValue {
|
||||
timestamp: number;
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface TimeSeries {
|
||||
labels: Record<string, string>;
|
||||
labelsArray: Array<Record<string, string>>;
|
||||
values: TimeSeriesValue[];
|
||||
}
|
||||
|
||||
export interface HostData {
|
||||
hostName: string;
|
||||
active: boolean;
|
||||
os: string;
|
||||
cpu: number;
|
||||
cpuTimeSeries: TimeSeries;
|
||||
memory: number;
|
||||
memoryTimeSeries: TimeSeries;
|
||||
wait: number;
|
||||
waitTimeSeries: TimeSeries;
|
||||
load15: number;
|
||||
load15TimeSeries: TimeSeries;
|
||||
}
|
||||
|
||||
export interface HostListResponse {
|
||||
status: string;
|
||||
data: {
|
||||
type: string;
|
||||
records: HostData[];
|
||||
groups: null;
|
||||
total: number;
|
||||
sentAnyHostMetricsData: boolean;
|
||||
isSendingK8SAgentMetrics: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
export const getHostLists = async (
|
||||
props: HostListPayload,
|
||||
signal?: AbortSignal,
|
||||
headers?: Record<string, string>,
|
||||
): Promise<SuccessResponse<HostListResponse> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/hosts/list', props, {
|
||||
signal,
|
||||
headers,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data,
|
||||
params: props,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
@@ -1,40 +0,0 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import createQueryParams from 'lib/createQueryParams';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import {
|
||||
IAttributeValuesResponse,
|
||||
IGetAttributeValuesPayload,
|
||||
} from 'types/api/queryBuilder/getAttributesValues';
|
||||
|
||||
export const getInfraAttributesValues = async ({
|
||||
dataSource,
|
||||
attributeKey,
|
||||
filterAttributeKeyDataType,
|
||||
tagType,
|
||||
searchText,
|
||||
aggregateAttribute,
|
||||
}: IGetAttributeValuesPayload): Promise<
|
||||
SuccessResponse<IAttributeValuesResponse> | ErrorResponse
|
||||
> => {
|
||||
try {
|
||||
const response = await ApiBaseInstance.get(
|
||||
`/hosts/attribute_values?${createQueryParams({
|
||||
dataSource,
|
||||
attributeKey,
|
||||
searchText,
|
||||
aggregateAttribute,
|
||||
})}&filterAttributeKeyDataType=${filterAttributeKeyDataType}&tagType=${tagType}`,
|
||||
);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
@@ -1,65 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
export interface K8sNodesListPayload {
|
||||
filters: TagFilter;
|
||||
groupBy?: BaseAutocompleteData[];
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
orderBy?: {
|
||||
columnName: string;
|
||||
order: 'asc' | 'desc';
|
||||
};
|
||||
}
|
||||
|
||||
export interface K8sNodesData {
|
||||
nodeUID: string;
|
||||
nodeCPUUsage: number;
|
||||
nodeCPUAllocatable: number;
|
||||
nodeMemoryUsage: number;
|
||||
nodeMemoryAllocatable: number;
|
||||
meta: {
|
||||
k8s_node_name: string;
|
||||
k8s_node_uid: string;
|
||||
k8s_cluster_name: string;
|
||||
};
|
||||
}
|
||||
|
||||
export interface K8sNodesListResponse {
|
||||
status: string;
|
||||
data: {
|
||||
type: string;
|
||||
records: K8sNodesData[];
|
||||
groups: null;
|
||||
total: number;
|
||||
sentAnyHostMetricsData: boolean;
|
||||
isSendingK8SAgentMetrics: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
export const getK8sNodesList = async (
|
||||
props: K8sNodesListPayload,
|
||||
signal?: AbortSignal,
|
||||
headers?: Record<string, string>,
|
||||
): Promise<SuccessResponse<K8sNodesListResponse> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/nodes/list', props, {
|
||||
signal,
|
||||
headers,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data,
|
||||
params: props,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
@@ -1,93 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
export interface K8sPodsListPayload {
|
||||
filters: TagFilter;
|
||||
groupBy?: BaseAutocompleteData[];
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
orderBy?: {
|
||||
columnName: string;
|
||||
order: 'asc' | 'desc';
|
||||
};
|
||||
}
|
||||
|
||||
export interface TimeSeriesValue {
|
||||
timestamp: number;
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface TimeSeries {
|
||||
labels: Record<string, string>;
|
||||
labelsArray: Array<Record<string, string>>;
|
||||
values: TimeSeriesValue[];
|
||||
}
|
||||
|
||||
export interface K8sPodsData {
|
||||
podUID: string;
|
||||
podCPU: number;
|
||||
podCPURequest: number;
|
||||
podCPULimit: number;
|
||||
podMemory: number;
|
||||
podMemoryRequest: number;
|
||||
podMemoryLimit: number;
|
||||
restartCount: number;
|
||||
meta: {
|
||||
k8s_cronjob_name: string;
|
||||
k8s_daemonset_name: string;
|
||||
k8s_deployment_name: string;
|
||||
k8s_job_name: string;
|
||||
k8s_namespace_name: string;
|
||||
k8s_node_name: string;
|
||||
k8s_pod_name: string;
|
||||
k8s_pod_uid: string;
|
||||
k8s_statefulset_name: string;
|
||||
k8s_cluster_name: string;
|
||||
};
|
||||
countByPhase: {
|
||||
pending: number;
|
||||
running: number;
|
||||
succeeded: number;
|
||||
failed: number;
|
||||
unknown: number;
|
||||
};
|
||||
}
|
||||
|
||||
export interface K8sPodsListResponse {
|
||||
status: string;
|
||||
data: {
|
||||
type: string;
|
||||
records: K8sPodsData[];
|
||||
groups: null;
|
||||
total: number;
|
||||
sentAnyHostMetricsData: boolean;
|
||||
isSendingK8SAgentMetrics: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
export const getK8sPodsList = async (
|
||||
props: K8sPodsListPayload,
|
||||
signal?: AbortSignal,
|
||||
headers?: Record<string, string>,
|
||||
): Promise<SuccessResponse<K8sPodsListResponse> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post('/pods/list', props, {
|
||||
signal,
|
||||
headers,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data,
|
||||
params: props,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
@@ -1,18 +1,24 @@
|
||||
import { ApiV2Instance as axios } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps } from 'types/api/licenses/getAll';
|
||||
|
||||
const getAll = async (): Promise<
|
||||
SuccessResponse<PayloadProps> | ErrorResponse
|
||||
> => {
|
||||
const response = await axios.get('/licenses');
|
||||
try {
|
||||
const response = await axios.get('/licenses');
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default getAll;
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
import { ApiV3Instance as axios } from 'api';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { LicenseV3EventQueueResModel } from 'types/api/licensesV3/getActive';
|
||||
|
||||
const getActive = async (): Promise<
|
||||
SuccessResponse<LicenseV3EventQueueResModel> | ErrorResponse
|
||||
> => {
|
||||
const response = await axios.get('/licenses/active');
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
};
|
||||
|
||||
export default getActive;
|
||||
@@ -5,6 +5,7 @@ import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
|
||||
import { createIdFromObjectFields } from 'lib/createIdFromObjectFields';
|
||||
import createQueryParams from 'lib/createQueryParams';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
// ** Types
|
||||
import { IGetAttributeKeysPayload } from 'types/api/queryBuilder/getAttributeKeys';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
|
||||
@@ -1,18 +1,28 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/user/getUser';
|
||||
|
||||
const getUser = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
const response = await axios.get(`/user/${props.userId}`);
|
||||
try {
|
||||
const response = await axios.get(`/user/${props.userId}`, {
|
||||
headers: {
|
||||
Authorization: `bearer ${props.token}`,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data,
|
||||
};
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default getUser;
|
||||
|
||||
@@ -2,6 +2,14 @@ import deleteLocalStorageKey from 'api/browser/localstorage/remove';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import ROUTES from 'constants/routes';
|
||||
import history from 'lib/history';
|
||||
import store from 'store';
|
||||
import {
|
||||
LOGGED_IN,
|
||||
UPDATE_ORG,
|
||||
UPDATE_USER,
|
||||
UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
|
||||
UPDATE_USER_ORG_ROLE,
|
||||
} from 'types/actions/app';
|
||||
|
||||
export const Logout = (): void => {
|
||||
deleteLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN);
|
||||
@@ -11,9 +19,50 @@ export const Logout = (): void => {
|
||||
deleteLocalStorageKey(LOCALSTORAGE.LOGGED_IN_USER_EMAIL);
|
||||
deleteLocalStorageKey(LOCALSTORAGE.LOGGED_IN_USER_NAME);
|
||||
deleteLocalStorageKey(LOCALSTORAGE.CHAT_SUPPORT);
|
||||
deleteLocalStorageKey(LOCALSTORAGE.USER_ID);
|
||||
|
||||
window.dispatchEvent(new CustomEvent('LOGOUT'));
|
||||
store.dispatch({
|
||||
type: LOGGED_IN,
|
||||
payload: {
|
||||
isLoggedIn: false,
|
||||
},
|
||||
});
|
||||
|
||||
store.dispatch({
|
||||
type: UPDATE_USER_ORG_ROLE,
|
||||
payload: {
|
||||
org: null,
|
||||
role: null,
|
||||
},
|
||||
});
|
||||
|
||||
store.dispatch({
|
||||
type: UPDATE_USER,
|
||||
payload: {
|
||||
ROLE: 'VIEWER',
|
||||
email: '',
|
||||
name: '',
|
||||
orgId: '',
|
||||
orgName: '',
|
||||
profilePictureURL: '',
|
||||
userId: '',
|
||||
userFlags: {},
|
||||
},
|
||||
});
|
||||
|
||||
store.dispatch({
|
||||
type: UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
|
||||
payload: {
|
||||
accessJwt: '',
|
||||
refreshJwt: '',
|
||||
},
|
||||
});
|
||||
|
||||
store.dispatch({
|
||||
type: UPDATE_ORG,
|
||||
payload: {
|
||||
org: [],
|
||||
},
|
||||
});
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
|
||||
@@ -2,9 +2,9 @@ import { Button, Modal, Typography } from 'antd';
|
||||
import updateCreditCardApi from 'api/billing/checkout';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import useLicense from 'hooks/useLicense';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { CreditCard, X } from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useMutation } from 'react-query';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
@@ -20,16 +20,16 @@ export default function ChatSupportGateway(): JSX.Element {
|
||||
false,
|
||||
);
|
||||
|
||||
const { licenses, isFetchingLicenses } = useAppContext();
|
||||
const { data: licenseData, isFetching } = useLicense();
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingLicenses && licenses) {
|
||||
const activeValidLicense =
|
||||
licenses.licenses?.find((license) => license.isCurrent === true) || null;
|
||||
const activeValidLicense =
|
||||
licenseData?.payload?.licenses?.find(
|
||||
(license) => license.isCurrent === true,
|
||||
) || null;
|
||||
|
||||
setActiveLicense(activeValidLicense);
|
||||
}
|
||||
}, [licenses, isFetchingLicenses]);
|
||||
setActiveLicense(activeValidLicense);
|
||||
}, [licenseData, isFetching]);
|
||||
|
||||
const handleBillingOnSuccess = (
|
||||
data: ErrorResponse | SuccessResponse<CheckoutSuccessPayloadProps, unknown>,
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
|
||||
&.custom-time {
|
||||
input:not(:focus) {
|
||||
min-width: 280px;
|
||||
min-width: 240px;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -120,7 +120,7 @@
|
||||
}
|
||||
}
|
||||
|
||||
.date-time-popover__footer {
|
||||
.date-time-popover-footer {
|
||||
border-top: 1px solid var(--bg-ink-200);
|
||||
padding: 8px 14px;
|
||||
.timezone-container {
|
||||
@@ -136,14 +136,10 @@
|
||||
color: var(--bg-vanilla-400);
|
||||
gap: 6px;
|
||||
.timezone {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
border-radius: 2px;
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
cursor: pointer;
|
||||
padding: 0px 4px;
|
||||
padding: 0;
|
||||
color: var(--bg-vanilla-100);
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
}
|
||||
}
|
||||
@@ -162,26 +158,3 @@
|
||||
letter-spacing: -0.06px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.date-time-popover__footer {
|
||||
border-color: var(--bg-vanilla-400);
|
||||
}
|
||||
.timezone-container {
|
||||
color: var(--bg-ink-400);
|
||||
&__clock-icon {
|
||||
stroke: var(--bg-ink-400);
|
||||
}
|
||||
.timezone {
|
||||
color: var(--bg-ink-100);
|
||||
background: rgb(179 179 179 / 15%);
|
||||
&__icon {
|
||||
stroke: var(--bg-ink-100);
|
||||
}
|
||||
}
|
||||
}
|
||||
.timezone-badge {
|
||||
color: var(--bg-ink-100);
|
||||
background: rgb(179 179 179 / 15%);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import './CustomTimePicker.styles.scss';
|
||||
|
||||
import { Input, Popover, Tooltip, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
|
||||
import {
|
||||
@@ -90,9 +89,9 @@ function CustomTimePicker({
|
||||
const [activeView, setActiveView] = useState<ViewType>(DEFAULT_VIEW);
|
||||
|
||||
const { timezone, browserTimezone } = useTimezone();
|
||||
const activeTimezoneOffset = timezone.offset;
|
||||
const activeTimezoneOffset = timezone?.offset;
|
||||
const isTimezoneOverridden = useMemo(
|
||||
() => timezone.offset !== browserTimezone.offset,
|
||||
() => timezone?.offset !== browserTimezone.offset,
|
||||
[timezone, browserTimezone],
|
||||
);
|
||||
|
||||
@@ -101,28 +100,16 @@ function CustomTimePicker({
|
||||
if (activeView !== newView) {
|
||||
setActiveView(newView);
|
||||
}
|
||||
setOpen(true);
|
||||
setOpen(!open);
|
||||
},
|
||||
[activeView, setOpen],
|
||||
[activeView, open, setOpen],
|
||||
);
|
||||
|
||||
const [isOpenedFromFooter, setIsOpenedFromFooter] = useState(false);
|
||||
|
||||
const getSelectedTimeRangeLabel = (
|
||||
selectedTime: string,
|
||||
selectedTimeValue: string,
|
||||
): string => {
|
||||
if (selectedTime === 'custom') {
|
||||
// Convert the date range string to 12-hour format
|
||||
const dates = selectedTimeValue.split(' - ');
|
||||
if (dates.length === 2) {
|
||||
const startDate = dayjs(dates[0], 'DD/MM/YYYY HH:mm');
|
||||
const endDate = dayjs(dates[1], 'DD/MM/YYYY HH:mm');
|
||||
|
||||
return `${startDate.format('DD/MM/YYYY hh:mm A')} - ${endDate.format(
|
||||
'DD/MM/YYYY hh:mm A',
|
||||
)}`;
|
||||
}
|
||||
return selectedTimeValue;
|
||||
}
|
||||
|
||||
@@ -157,6 +144,7 @@ function CustomTimePicker({
|
||||
|
||||
useEffect(() => {
|
||||
const value = getSelectedTimeRangeLabel(selectedTime, selectedValue);
|
||||
|
||||
setSelectedTimePlaceholderValue(value);
|
||||
}, [selectedTime, selectedValue]);
|
||||
|
||||
@@ -282,7 +270,6 @@ function CustomTimePicker({
|
||||
|
||||
const handleFocus = (): void => {
|
||||
setIsInputFocused(true);
|
||||
setActiveView('datetime');
|
||||
};
|
||||
|
||||
const handleBlur = (): void => {
|
||||
@@ -298,18 +285,6 @@ function CustomTimePicker({
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [location.pathname]);
|
||||
|
||||
const handleTimezoneHintClick = (e: React.MouseEvent): void => {
|
||||
e.stopPropagation();
|
||||
handleViewChange('timezone');
|
||||
setIsOpenedFromFooter(false);
|
||||
logEvent(
|
||||
'DateTimePicker: Timezone picker opened from time range input badge',
|
||||
{
|
||||
page: location.pathname,
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="custom-time-picker">
|
||||
<Popover
|
||||
@@ -333,8 +308,6 @@ function CustomTimePicker({
|
||||
selectedTime={selectedTime}
|
||||
activeView={activeView}
|
||||
setActiveView={setActiveView}
|
||||
setIsOpenedFromFooter={setIsOpenedFromFooter}
|
||||
isOpenedFromFooter={isOpenedFromFooter}
|
||||
/>
|
||||
) : (
|
||||
content
|
||||
@@ -373,7 +346,13 @@ function CustomTimePicker({
|
||||
suffix={
|
||||
<>
|
||||
{!!isTimezoneOverridden && activeTimezoneOffset && (
|
||||
<div className="timezone-badge" onClick={handleTimezoneHintClick}>
|
||||
<div
|
||||
className="timezone-badge"
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
handleViewChange('timezone');
|
||||
}}
|
||||
>
|
||||
<span>{activeTimezoneOffset}</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -2,7 +2,6 @@ import './CustomTimePicker.styles.scss';
|
||||
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Button } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
|
||||
@@ -11,7 +10,7 @@ import {
|
||||
Option,
|
||||
RelativeDurationSuggestionOptions,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { Clock, PenLine } from 'lucide-react';
|
||||
import { Clock } from 'lucide-react';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import { Dispatch, SetStateAction, useMemo } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
@@ -33,8 +32,6 @@ interface CustomTimePickerPopoverContentProps {
|
||||
selectedTime: string;
|
||||
activeView: 'datetime' | 'timezone';
|
||||
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
|
||||
isOpenedFromFooter: boolean;
|
||||
setIsOpenedFromFooter: Dispatch<SetStateAction<boolean>>;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
@@ -49,8 +46,6 @@ function CustomTimePickerPopoverContent({
|
||||
selectedTime,
|
||||
activeView,
|
||||
setActiveView,
|
||||
isOpenedFromFooter,
|
||||
setIsOpenedFromFooter,
|
||||
}: CustomTimePickerPopoverContentProps): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
|
||||
@@ -58,7 +53,7 @@ function CustomTimePickerPopoverContent({
|
||||
pathname,
|
||||
]);
|
||||
const { timezone } = useTimezone();
|
||||
const activeTimezoneOffset = timezone.offset;
|
||||
const activeTimezoneOffset = timezone?.offset;
|
||||
|
||||
function getTimeChips(options: Option[]): JSX.Element {
|
||||
return (
|
||||
@@ -79,31 +74,8 @@ function CustomTimePickerPopoverContent({
|
||||
);
|
||||
}
|
||||
|
||||
const handleTimezoneHintClick = (): void => {
|
||||
setActiveView('timezone');
|
||||
setIsOpenedFromFooter(true);
|
||||
logEvent(
|
||||
'DateTimePicker: Timezone picker opened from time range picker footer',
|
||||
{
|
||||
page: pathname,
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
if (activeView === 'timezone') {
|
||||
return (
|
||||
<div className="date-time-popover">
|
||||
<TimezonePicker
|
||||
setActiveView={setActiveView}
|
||||
setIsOpen={setIsOpen}
|
||||
isOpenedFromFooter={isOpenedFromFooter}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
return activeView === 'datetime' ? (
|
||||
<div>
|
||||
<div className="date-time-popover">
|
||||
<div className="date-time-options">
|
||||
{isLogsExplorerPage && (
|
||||
@@ -153,31 +125,24 @@ function CustomTimePickerPopoverContent({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="date-time-popover__footer">
|
||||
<div className="date-time-popover-footer">
|
||||
<div className="timezone-container">
|
||||
<Clock
|
||||
color={Color.BG_VANILLA_400}
|
||||
className="timezone-container__clock-icon"
|
||||
height={12}
|
||||
width={12}
|
||||
/>
|
||||
<span className="timezone__icon">Current timezone</span>
|
||||
<div>⎯</div>
|
||||
<Clock color={Color.BG_VANILLA_400} height={12} width={12} />
|
||||
<span className="timezone-text">You are at</span>
|
||||
<button
|
||||
type="button"
|
||||
className="timezone"
|
||||
onClick={handleTimezoneHintClick}
|
||||
onClick={(): void => setActiveView('timezone')}
|
||||
>
|
||||
<span>{activeTimezoneOffset}</span>
|
||||
<PenLine
|
||||
color={Color.BG_VANILLA_100}
|
||||
className="timezone__icon"
|
||||
size={10}
|
||||
/>
|
||||
{activeTimezoneOffset}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
</div>
|
||||
) : (
|
||||
<div className="date-time-popover">
|
||||
<TimezonePicker setActiveView={setActiveView} setIsOpen={setIsOpen} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
|
||||
import { LexicalContext } from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import dayjs, { Dayjs } from 'dayjs';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import { Dispatch, SetStateAction, useMemo } from 'react';
|
||||
import { Dispatch, SetStateAction } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
@@ -32,10 +32,7 @@ function RangePickerModal(props: RangePickerModalProps): JSX.Element {
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
// Using any type here because antd's DatePicker expects its own internal Dayjs type
|
||||
// which conflicts with our project's Dayjs type that has additional plugins (tz, utc etc).
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/explicit-module-boundary-types
|
||||
const disabledDate = (current: any): boolean => {
|
||||
const disabledDate = (current: Dayjs): boolean => {
|
||||
const currentDay = dayjs(current);
|
||||
return currentDay.isAfter(dayjs());
|
||||
};
|
||||
@@ -53,32 +50,21 @@ function RangePickerModal(props: RangePickerModalProps): JSX.Element {
|
||||
}
|
||||
onCustomDateHandler(date_time, LexicalContext.CUSTOM_DATE_PICKER);
|
||||
};
|
||||
|
||||
const { timezone } = useTimezone();
|
||||
|
||||
const rangeValue: [Dayjs, Dayjs] = useMemo(
|
||||
() => [
|
||||
dayjs(minTime / 1000_000).tz(timezone.value),
|
||||
dayjs(maxTime / 1000_000).tz(timezone.value),
|
||||
],
|
||||
[maxTime, minTime, timezone.value],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="custom-date-picker">
|
||||
<RangePicker
|
||||
disabledDate={disabledDate}
|
||||
allowClear
|
||||
showTime={{
|
||||
use12Hours: true,
|
||||
format: 'hh:mm A',
|
||||
}}
|
||||
format={(date: Dayjs): string =>
|
||||
date.tz(timezone.value).format('YYYY-MM-DD hh:mm A')
|
||||
}
|
||||
showTime
|
||||
onOk={onModalOkHandler}
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
{...(selectedTime === 'custom' && {
|
||||
value: rangeValue,
|
||||
defaultValue: [
|
||||
dayjs(minTime / 1000000).tz(timezone.value),
|
||||
dayjs(maxTime / 1000000).tz(timezone.value),
|
||||
],
|
||||
})}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
// Variables
|
||||
$font-family: 'Inter';
|
||||
$border-color: var(--bg-slate-400);
|
||||
$item-spacing: 8px;
|
||||
|
||||
:root {
|
||||
--border-color: var(--bg-slate-400);
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
--border-color: var(--bg-vanilla-400);
|
||||
}
|
||||
|
||||
// Mixins
|
||||
@mixin text-style-base {
|
||||
font-family: $font-family;
|
||||
@@ -31,7 +24,7 @@ $item-spacing: 8px;
|
||||
@include flex-center;
|
||||
justify-content: space-between;
|
||||
padding: 12px 14px;
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
border-bottom: 1px solid $border-color;
|
||||
}
|
||||
|
||||
&__input-container {
|
||||
@@ -51,9 +44,6 @@ $item-spacing: 8px;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
padding: 0;
|
||||
&.ant-input:focus {
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
&::placeholder {
|
||||
color: var(--bg-vanilla-400);
|
||||
@@ -104,7 +94,7 @@ $item-spacing: 8px;
|
||||
bottom: -2px;
|
||||
left: -$item-spacing;
|
||||
right: -$item-spacing;
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
border-bottom: 1px solid $border-color;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -133,34 +123,3 @@ $item-spacing: 8px;
|
||||
width: 15px;
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.timezone-picker {
|
||||
&__search {
|
||||
.search-icon {
|
||||
stroke: var(--bg-ink-400);
|
||||
}
|
||||
}
|
||||
&__input {
|
||||
color: var(--bg-ink-100);
|
||||
}
|
||||
&__esc-key {
|
||||
background-color: var(--bg-vanilla-100);
|
||||
border-color: var(--bg-vanilla-400);
|
||||
color: var(--bg-ink-400);
|
||||
}
|
||||
&__item {
|
||||
color: var(--bg-ink-400);
|
||||
}
|
||||
&__offset {
|
||||
color: var(--bg-ink-100);
|
||||
}
|
||||
}
|
||||
.timezone-name-wrapper {
|
||||
&__selected-icon {
|
||||
.check-icon {
|
||||
stroke: var(--bg-ink-100);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import './TimezonePicker.styles.scss';
|
||||
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Input } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import { TimezonePickerShortcuts } from 'constants/shortcuts/TimezonePickerShortcuts';
|
||||
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
|
||||
@@ -21,9 +19,6 @@ import { Timezone, TIMEZONE_DATA } from './timezoneUtils';
|
||||
interface SearchBarProps {
|
||||
value: string;
|
||||
onChange: (value: string) => void;
|
||||
setIsOpen: Dispatch<SetStateAction<boolean>>;
|
||||
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
|
||||
isOpenedFromFooter: boolean;
|
||||
}
|
||||
|
||||
interface TimezoneItemProps {
|
||||
@@ -34,44 +29,17 @@ interface TimezoneItemProps {
|
||||
|
||||
const ICON_SIZE = 14;
|
||||
|
||||
function SearchBar({
|
||||
value,
|
||||
onChange,
|
||||
setIsOpen,
|
||||
setActiveView,
|
||||
isOpenedFromFooter = false,
|
||||
}: SearchBarProps): JSX.Element {
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent): void => {
|
||||
if (e.key === 'Escape') {
|
||||
if (isOpenedFromFooter) {
|
||||
setActiveView('datetime');
|
||||
} else {
|
||||
setIsOpen(false);
|
||||
}
|
||||
}
|
||||
},
|
||||
[setActiveView, setIsOpen, isOpenedFromFooter],
|
||||
);
|
||||
|
||||
function SearchBar({ value, onChange }: SearchBarProps): JSX.Element {
|
||||
return (
|
||||
<div className="timezone-picker__search">
|
||||
<div className="timezone-picker__input-container">
|
||||
<Search
|
||||
color={Color.BG_VANILLA_400}
|
||||
className="search-icon"
|
||||
height={ICON_SIZE}
|
||||
width={ICON_SIZE}
|
||||
/>
|
||||
<Input
|
||||
<Search color={Color.BG_VANILLA_400} height={ICON_SIZE} width={ICON_SIZE} />
|
||||
<input
|
||||
type="text"
|
||||
className="timezone-picker__input"
|
||||
placeholder="Search timezones..."
|
||||
value={value}
|
||||
onChange={(e): void => onChange(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
tabIndex={0}
|
||||
autoFocus
|
||||
/>
|
||||
</div>
|
||||
<kbd className="timezone-picker__esc-key">esc</kbd>
|
||||
@@ -97,7 +65,6 @@ function TimezoneItem({
|
||||
<div className="timezone-name-wrapper__selected-icon">
|
||||
{isSelected && (
|
||||
<Check
|
||||
className="check-icon"
|
||||
color={Color.BG_VANILLA_100}
|
||||
height={ICON_SIZE}
|
||||
width={ICON_SIZE}
|
||||
@@ -119,18 +86,16 @@ TimezoneItem.defaultProps = {
|
||||
interface TimezonePickerProps {
|
||||
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
|
||||
setIsOpen: Dispatch<SetStateAction<boolean>>;
|
||||
isOpenedFromFooter: boolean;
|
||||
}
|
||||
|
||||
function TimezonePicker({
|
||||
setActiveView,
|
||||
setIsOpen,
|
||||
isOpenedFromFooter,
|
||||
}: TimezonePickerProps): JSX.Element {
|
||||
const [searchTerm, setSearchTerm] = useState('');
|
||||
const { timezone, updateTimezone } = useTimezone();
|
||||
const [selectedTimezone, setSelectedTimezone] = useState<string>(
|
||||
timezone.name ?? TIMEZONE_DATA[0].name,
|
||||
timezone?.name ?? TIMEZONE_DATA[0].name,
|
||||
);
|
||||
|
||||
const getFilteredTimezones = useCallback((searchTerm: string): Timezone[] => {
|
||||
@@ -144,12 +109,8 @@ function TimezonePicker({
|
||||
}, []);
|
||||
|
||||
const handleCloseTimezonePicker = useCallback(() => {
|
||||
if (isOpenedFromFooter) {
|
||||
setActiveView('datetime');
|
||||
} else {
|
||||
setIsOpen(false);
|
||||
}
|
||||
}, [isOpenedFromFooter, setActiveView, setIsOpen]);
|
||||
setActiveView('datetime');
|
||||
}, [setActiveView]);
|
||||
|
||||
const handleTimezoneSelect = useCallback(
|
||||
(timezone: Timezone) => {
|
||||
@@ -157,12 +118,6 @@ function TimezonePicker({
|
||||
updateTimezone(timezone);
|
||||
handleCloseTimezonePicker();
|
||||
setIsOpen(false);
|
||||
logEvent('DateTimePicker: New Timezone Selected', {
|
||||
timezone: {
|
||||
name: timezone.name,
|
||||
offset: timezone.offset,
|
||||
},
|
||||
});
|
||||
},
|
||||
[handleCloseTimezonePicker, setIsOpen, updateTimezone],
|
||||
);
|
||||
@@ -183,13 +138,7 @@ function TimezonePicker({
|
||||
|
||||
return (
|
||||
<div className="timezone-picker">
|
||||
<SearchBar
|
||||
value={searchTerm}
|
||||
onChange={setSearchTerm}
|
||||
setIsOpen={setIsOpen}
|
||||
setActiveView={setActiveView}
|
||||
isOpenedFromFooter={isOpenedFromFooter}
|
||||
/>
|
||||
<SearchBar value={searchTerm} onChange={setSearchTerm} />
|
||||
<div className="timezone-picker__list">
|
||||
{getFilteredTimezones(searchTerm).map((timezone) => (
|
||||
<TimezoneItem
|
||||
|
||||
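In the TimezonePicker diff above, the search bar picks up an Escape handler: when the picker was opened from the popover footer it steps back to the date-time view, otherwise it closes the popover outright. A small sketch of that branching with the same state setters; the factory function name is illustrative:

import { Dispatch, KeyboardEvent, SetStateAction } from 'react';

type PickerView = 'datetime' | 'timezone';

// Escape returns to the date-time view when opened from the footer,
// otherwise it closes the whole popover.
function makeEscapeHandler(
	isOpenedFromFooter: boolean,
	setActiveView: Dispatch<SetStateAction<PickerView>>,
	setIsOpen: Dispatch<SetStateAction<boolean>>,
): (e: KeyboardEvent<HTMLInputElement>) => void {
	return (e): void => {
		if (e.key !== 'Escape') return;
		if (isOpenedFromFooter) {
			setActiveView('datetime');
		} else {
			setIsOpen(false);
		}
	};
}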
@@ -1,3 +1,4 @@
|
||||
import { getTimeZones } from '@vvo/tzdb';
|
||||
import dayjs from 'dayjs';
|
||||
import timezone from 'dayjs/plugin/timezone';
|
||||
import utc from 'dayjs/plugin/utc';
|
||||
@@ -13,6 +14,7 @@ export interface Timezone {
|
||||
hasDivider?: boolean;
|
||||
}
|
||||
|
||||
// Constants
|
||||
const TIMEZONE_TYPES = {
|
||||
BROWSER: 'BROWSER',
|
||||
UTC: 'UTC',
|
||||
@@ -21,7 +23,7 @@ const TIMEZONE_TYPES = {
|
||||
|
||||
type TimezoneType = typeof TIMEZONE_TYPES[keyof typeof TIMEZONE_TYPES];
|
||||
|
||||
export const UTC_TIMEZONE: Timezone = {
|
||||
const UTC_TIMEZONE: Timezone = {
|
||||
name: 'Coordinated Universal Time — UTC, GMT',
|
||||
value: 'UTC',
|
||||
offset: 'UTC',
|
||||
@@ -29,12 +31,14 @@ export const UTC_TIMEZONE: Timezone = {
|
||||
hasDivider: true,
|
||||
};
|
||||
|
||||
const normalizeTimezoneName = (timezone: string): string => {
|
||||
// https://github.com/tc39/proposal-temporal/issues/1076
|
||||
if (timezone === 'Asia/Calcutta') {
|
||||
return 'Asia/Kolkata';
|
||||
// Helper functions
|
||||
const isValidTimezone = (tzName: string): boolean => {
|
||||
try {
|
||||
dayjs.tz(dayjs(), tzName);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
return timezone;
|
||||
};
|
||||
|
||||
const formatOffset = (offsetMinutes: number): string => {
|
||||
@@ -92,36 +96,26 @@ const getOffsetByTimezone = (timezone: string): number => {
|
||||
|
||||
export const getBrowserTimezone = (): Timezone => {
|
||||
const browserTz = dayjs.tz.guess();
|
||||
const normalizedTz = normalizeTimezoneName(browserTz);
|
||||
const browserOffset = getOffsetByTimezone(normalizedTz);
|
||||
return createTimezoneEntry(
|
||||
normalizedTz,
|
||||
browserOffset,
|
||||
TIMEZONE_TYPES.BROWSER,
|
||||
);
|
||||
const browserOffset = getOffsetByTimezone(browserTz);
|
||||
return createTimezoneEntry(browserTz, browserOffset, TIMEZONE_TYPES.BROWSER);
|
||||
};
|
||||
|
||||
const filterAndSortTimezones = (
|
||||
allTimezones: string[],
|
||||
allTimezones: ReturnType<typeof getTimeZones>,
|
||||
browserTzName?: string,
|
||||
includeEtcTimezones = false,
|
||||
): Timezone[] =>
|
||||
allTimezones
|
||||
.filter((tz) => {
|
||||
const isNotBrowserTz = tz !== browserTzName;
|
||||
const isNotEtcTz = includeEtcTimezones || !tz.startsWith('Etc/');
|
||||
return isNotBrowserTz && isNotEtcTz;
|
||||
})
|
||||
.sort((a, b) => a.localeCompare(b))
|
||||
.map((tz) => {
|
||||
const normalizedTz = normalizeTimezoneName(tz);
|
||||
const offset = getOffsetByTimezone(normalizedTz);
|
||||
return createTimezoneEntry(normalizedTz, offset);
|
||||
});
|
||||
.filter(
|
||||
(tz) =>
|
||||
!tz.name.startsWith('Etc/') &&
|
||||
isValidTimezone(tz.name) &&
|
||||
tz.name !== browserTzName,
|
||||
)
|
||||
.sort((a, b) => a.name.localeCompare(b.name))
|
||||
.map((tz) => createTimezoneEntry(tz.name, tz.rawOffsetInMinutes));
|
||||
|
||||
const generateTimezoneData = (includeEtcTimezones = false): Timezone[] => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const allTimezones = (Intl as any).supportedValuesOf('timeZone');
|
||||
const generateTimezoneData = (): Timezone[] => {
|
||||
const allTimezones = getTimeZones();
|
||||
const timezones: Timezone[] = [];
|
||||
|
||||
// Add browser timezone
|
||||
@@ -131,13 +125,8 @@ const generateTimezoneData = (includeEtcTimezones = false): Timezone[] => {
|
||||
// Add UTC timezone with divider
|
||||
timezones.push(UTC_TIMEZONE);
|
||||
|
||||
timezones.push(
|
||||
...filterAndSortTimezones(
|
||||
allTimezones,
|
||||
browserTzObject.value,
|
||||
includeEtcTimezones,
|
||||
),
|
||||
);
|
||||
// Add remaining timezones
|
||||
timezones.push(...filterAndSortTimezones(allTimezones, browserTzObject.value));
|
||||
|
||||
return timezones;
|
||||
};
|
||||
@@ -146,6 +135,7 @@ export const getTimezoneObjectByTimezoneString = (
|
||||
timezone: string,
|
||||
): Timezone => {
|
||||
const utcOffset = getOffsetByTimezone(timezone);
|
||||
|
||||
return createTimezoneEntry(timezone, utcOffset);
|
||||
};
|
||||
|
||||
|
||||
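The timezoneUtils diff above replaces Intl.supportedValuesOf('timeZone') with @vvo/tzdb's getTimeZones(), validates every zone against dayjs, and keeps the browser zone and UTC pinned at the top of the list. A condensed sketch of that flow, assuming the dayjs plugins are registered; the offset label formatting here is illustrative, since the repo's formatOffset body is not shown in this hunk:

import { getTimeZones } from '@vvo/tzdb';
import dayjs from 'dayjs';
import timezonePlugin from 'dayjs/plugin/timezone';
import utcPlugin from 'dayjs/plugin/utc';

dayjs.extend(utcPlugin);
dayjs.extend(timezonePlugin);

// A zone is only offered if dayjs can actually resolve it.
const isValidTimezone = (tzName: string): boolean => {
	try {
		dayjs.tz(dayjs(), tzName);
		return true;
	} catch {
		return false;
	}
};

// Illustrative label: 330 minutes -> "UTC+5:30".
const formatOffset = (offsetMinutes: number): string => {
	const sign = offsetMinutes < 0 ? '-' : '+';
	const abs = Math.abs(offsetMinutes);
	return `UTC${sign}${Math.floor(abs / 60)}:${String(abs % 60).padStart(2, '0')}`;
};

const browserTz = dayjs.tz.guess();
const zones = getTimeZones()
	.filter(
		(tz) =>
			!tz.name.startsWith('Etc/') &&
			isValidTimezone(tz.name) &&
			tz.name !== browserTz,
	)
	.sort((a, b) => a.name.localeCompare(b.name))
	.map((tz) => ({ name: tz.name, offset: formatOffset(tz.rawOffsetInMinutes) }));

console.log(zones.slice(0, 3));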
@@ -1,22 +1,15 @@
|
||||
import { render } from '@testing-library/react';
|
||||
import { Table } from 'antd';
|
||||
import { matchMedia } from 'container/PipelinePage/tests/AddNewPipeline.test';
|
||||
import { I18nextProvider } from 'react-i18next';
|
||||
import { Provider } from 'react-redux';
|
||||
import i18n from 'ReactI18';
|
||||
import store from 'store';
|
||||
|
||||
import DraggableTableRow from '..';
|
||||
|
||||
beforeAll(() => {
|
||||
Object.defineProperty(window, 'matchMedia', {
|
||||
writable: true,
|
||||
value: jest.fn().mockImplementation((query) => ({
|
||||
matches: false,
|
||||
media: query,
|
||||
onchange: null,
|
||||
addListener: jest.fn(),
|
||||
removeListener: jest.fn(),
|
||||
addEventListener: jest.fn(),
|
||||
removeEventListener: jest.fn(),
|
||||
dispatchEvent: jest.fn(),
|
||||
})),
|
||||
});
|
||||
matchMedia();
|
||||
});
|
||||
|
||||
jest.mock('uplot', () => {
|
||||
@@ -41,14 +34,18 @@ jest.mock('react-dnd', () => ({
|
||||
describe('DraggableTableRow Snapshot test', () => {
|
||||
it('should render DraggableTableRow', async () => {
|
||||
const { asFragment } = render(
|
||||
<Table
|
||||
components={{
|
||||
body: {
|
||||
row: DraggableTableRow,
|
||||
},
|
||||
}}
|
||||
pagination={false}
|
||||
/>,
|
||||
<Provider store={store}>
|
||||
<I18nextProvider i18n={i18n}>
|
||||
<Table
|
||||
components={{
|
||||
body: {
|
||||
row: DraggableTableRow,
|
||||
},
|
||||
}}
|
||||
pagination={false}
|
||||
/>
|
||||
</I18nextProvider>
|
||||
</Provider>,
|
||||
);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
@@ -99,3 +99,5 @@ exports[`DraggableTableRow Snapshot test should render DraggableTableRow 1`] = `
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`PipelinePage container test should render AddNewPipeline section 1`] = `<DocumentFragment />`;
|
||||
|
||||
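The updated snapshot test above wraps the table in the redux Provider and I18nextProvider and reuses the shared matchMedia stub instead of redefining window.matchMedia inline. Once a component (or anything it renders) reads from the store or from translations, rendering it bare throws, so a wrapper along these lines keeps such tests short (a sketch, not the project's actual test utilities):

import { render, RenderResult } from '@testing-library/react';
import { I18nextProvider } from 'react-i18next';
import { Provider } from 'react-redux';
import i18n from 'ReactI18';
import store from 'store';

// Supplies the providers that connected components expect during tests.
export function renderWithProviders(ui: JSX.Element): RenderResult {
	return render(
		<Provider store={store}>
			<I18nextProvider i18n={i18n}>{ui}</I18nextProvider>
		</Provider>,
	);
}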
@@ -132,7 +132,7 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
useEffect(() => {
_adapters._date.override({
format(time: number | Date, fmt: string) {
const dayjsTime = dayjs(time).tz(timezone.value);
const dayjsTime = dayjs(time).tz(timezone?.value);
const format = formatMap[fmt as keyof typeof formatMap];
if (!format) {
console.warn(`Missing datetime format for ${fmt}`);

@@ -99,7 +99,7 @@ export const getGraphOptions = (
callbacks: {
title(context): string | string[] {
const date = dayjs(context[0].parsed.x);
return date.tz(timezone.value).format('MMM DD, YYYY, HH:mm:ss');
return date.tz(timezone?.value).format('MMM DD, YYYY, HH:mm:ss');
},
label(context): string | string[] {
let label = context.dataset.label || '';
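Both hunks above only add optional chaining on timezone?.value, but they show the mechanism behind timezone-aware charts: the Chart.js date adapter is overridden so axis ticks and tooltip titles are formatted through dayjs in the selected zone. A stripped-down sketch of that override; the format map is a stand-in, and the dayjs plugins are assumed to be registered:

import { _adapters } from 'chart.js';
import dayjs from 'dayjs';
import timezonePlugin from 'dayjs/plugin/timezone';
import utcPlugin from 'dayjs/plugin/utc';

dayjs.extend(utcPlugin);
dayjs.extend(timezonePlugin);

// Stand-in mapping from Chart.js time units to dayjs format strings.
const formatMap: Record<string, string> = {
	datetime: 'MMM DD, YYYY, HH:mm:ss',
	minute: 'HH:mm',
	hour: 'HH:mm',
	day: 'MMM DD',
};

export function overrideChartDateAdapter(timezoneValue?: string): void {
	_adapters._date.override({
		format(time: number, fmt: string): string {
			const format = formatMap[fmt];
			if (!format) {
				console.warn(`Missing datetime format for ${fmt}`);
				return dayjs(time).tz(timezoneValue).format('MMM DD, YYYY, HH:mm:ss');
			}
			return dayjs(time).tz(timezoneValue).format(format);
		},
	});
}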
|
||||
@@ -1,66 +0,0 @@
|
||||
.host-containers {
|
||||
gap: 24px;
|
||||
height: 60vh;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 100%;
|
||||
margin: 0 auto;
|
||||
box-sizing: border-box;
|
||||
|
||||
.infra-container-card-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 24px;
|
||||
}
|
||||
|
||||
.dev-status-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.infra-container-card {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.infra-container-card-text {
|
||||
font-size: var(--font-size-sm);
|
||||
color: var(--text-vanilla-400);
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
width: 400px;
|
||||
font-family: 'Inter';
|
||||
margin-top: 12px;
|
||||
font-weight: 300;
|
||||
}
|
||||
|
||||
.infra-container-working-msg {
|
||||
display: flex;
|
||||
width: 400px;
|
||||
padding: 12px;
|
||||
align-items: flex-start;
|
||||
gap: 12px;
|
||||
border-radius: 4px;
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
|
||||
.ant-space {
|
||||
align-items: flex-start;
|
||||
}
|
||||
}
|
||||
|
||||
.infra-container-contact-support-btn {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
margin: auto;
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.infra-container-card-text {
|
||||
color: var(--text-ink-200);
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
import './Containers.styles.scss';
|
||||
|
||||
import { Space, Typography } from 'antd';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import WaitlistFragment from '../WaitlistFragment/WaitlistFragment';
|
||||
|
||||
const { Text } = Typography;
|
||||
|
||||
function Containers(): JSX.Element {
|
||||
const { t } = useTranslation(['infraMonitoring']);
|
||||
|
||||
return (
|
||||
<Space direction="vertical" className="host-containers" size={24}>
|
||||
<div className="infra-container-card-container">
|
||||
<div className="dev-status-container">
|
||||
<div className="infra-container-card">
|
||||
<img
|
||||
src="/Icons/infraContainers.svg"
|
||||
alt="infra-container"
|
||||
width={32}
|
||||
height={32}
|
||||
/>
|
||||
|
||||
<Text className="infra-container-card-text">
|
||||
{t('containers_visualization_message')}
|
||||
</Text>
|
||||
</div>
|
||||
|
||||
<div className="infra-container-working-msg">
|
||||
<Space>
|
||||
<img src="/Icons/broom.svg" alt="broom" width={24} height={24} />
|
||||
<Text className="infra-container-card-text">{t('working_message')}</Text>
|
||||
</Space>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<WaitlistFragment entityType="containers" />
|
||||
</div>
|
||||
</Space>
|
||||
);
|
||||
}
|
||||
|
||||
export default Containers;
|
||||
@@ -1,7 +0,0 @@
|
||||
import { HostData } from 'api/infraMonitoring/getHostLists';
|
||||
|
||||
export type HostDetailProps = {
|
||||
host: HostData | null;
|
||||
isModalTimeSelection: boolean;
|
||||
onClose: () => void;
|
||||
};
|
||||
@@ -1,193 +0,0 @@
|
||||
.host-metric-traces {
|
||||
margin-top: 1rem;
|
||||
|
||||
.host-metric-traces-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
margin-bottom: 1rem;
|
||||
|
||||
gap: 8px;
|
||||
padding: 12px;
|
||||
border-radius: 3px;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
|
||||
.filter-section {
|
||||
flex: 1;
|
||||
|
||||
.ant-select-selector {
|
||||
border-radius: 2px;
|
||||
border: 1px solid var(--bg-slate-400) !important;
|
||||
background-color: var(--bg-ink-300) !important;
|
||||
|
||||
input {
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.ant-tag .ant-typography {
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.host-metric-traces-table {
|
||||
.ant-table-content {
|
||||
overflow: hidden !important;
|
||||
}
|
||||
|
||||
.ant-table {
|
||||
border-radius: 3px;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
|
||||
.ant-table-thead > tr > th {
|
||||
padding: 12px;
|
||||
font-weight: 500;
|
||||
font-size: 12px;
|
||||
line-height: 18px;
|
||||
|
||||
background: rgba(171, 189, 255, 0.01);
|
||||
border-bottom: none;
|
||||
|
||||
color: var(--Vanilla-400, #c0c1c3);
|
||||
font-family: Inter;
|
||||
font-size: 11px;
|
||||
font-style: normal;
|
||||
font-weight: 600;
|
||||
line-height: 18px; /* 163.636% */
|
||||
letter-spacing: 0.44px;
|
||||
text-transform: uppercase;
|
||||
|
||||
&::before {
|
||||
background-color: transparent;
|
||||
}
|
||||
}
|
||||
|
||||
.ant-table-thead > tr > th:has(.hostname-column-header) {
|
||||
background: var(--bg-ink-400);
|
||||
}
|
||||
|
||||
.ant-table-cell {
|
||||
padding: 12px;
|
||||
font-size: 13px;
|
||||
line-height: 20px;
|
||||
color: var(--bg-vanilla-100);
|
||||
background: rgba(171, 189, 255, 0.01);
|
||||
}
|
||||
|
||||
.ant-table-cell:has(.hostname-column-value) {
|
||||
background: var(--bg-ink-400);
|
||||
}
|
||||
|
||||
.hostname-column-value {
|
||||
color: var(--bg-vanilla-100);
|
||||
font-family: 'Geist Mono';
|
||||
font-style: normal;
|
||||
font-weight: 600;
|
||||
line-height: 20px; /* 142.857% */
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
|
||||
.status-cell {
|
||||
.active-tag {
|
||||
color: var(--bg-forest-500);
|
||||
padding: 4px 8px;
|
||||
border-radius: 4px;
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
}
|
||||
}
|
||||
|
||||
.progress-container {
|
||||
.ant-progress-bg {
|
||||
height: 8px !important;
|
||||
border-radius: 4px;
|
||||
}
|
||||
}
|
||||
|
||||
.ant-table-tbody > tr:hover > td {
|
||||
background: rgba(255, 255, 255, 0.04);
|
||||
}
|
||||
|
||||
.ant-table-cell:first-child {
|
||||
text-align: justify;
|
||||
}
|
||||
|
||||
.ant-table-cell:nth-child(2) {
|
||||
padding-left: 16px;
|
||||
padding-right: 16px;
|
||||
}
|
||||
|
||||
.ant-table-cell:nth-child(n + 3) {
|
||||
padding-right: 24px;
|
||||
}
|
||||
.column-header-right {
|
||||
text-align: right;
|
||||
}
|
||||
.ant-table-tbody > tr > td {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.ant-table-thead
|
||||
> tr
|
||||
> th:not(:last-child):not(.ant-table-selection-column):not(.ant-table-row-expand-icon-cell):not([colspan])::before {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.ant-empty-normal {
|
||||
visibility: hidden;
|
||||
}
|
||||
}
|
||||
|
||||
.ant-table-container::after {
|
||||
content: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.host-metric-traces-header {
|
||||
.filter-section {
|
||||
border-top: 1px solid var(--bg-vanilla-300);
|
||||
border-bottom: 1px solid var(--bg-vanilla-300);
|
||||
|
||||
.ant-select-selector {
|
||||
border-color: var(--bg-vanilla-300) !important;
|
||||
background-color: var(--bg-vanilla-100) !important;
|
||||
color: var(--bg-ink-200);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.host-metric-traces-table {
|
||||
.ant-table {
|
||||
border-radius: 3px;
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
|
||||
.ant-table-thead > tr > th {
|
||||
background: var(--bg-vanilla-100);
|
||||
color: var(--text-ink-300);
|
||||
}
|
||||
|
||||
.ant-table-thead > tr > th:has(.hostname-column-header) {
|
||||
background: var(--bg-vanilla-100);
|
||||
}
|
||||
|
||||
.ant-table-cell {
|
||||
background: var(--bg-vanilla-100);
|
||||
color: var(--bg-ink-500);
|
||||
}
|
||||
|
||||
.ant-table-cell:has(.hostname-column-value) {
|
||||
background: var(--bg-vanilla-100);
|
||||
}
|
||||
|
||||
.hostname-column-value {
|
||||
color: var(--bg-ink-300);
|
||||
}
|
||||
|
||||
.ant-table-tbody > tr:hover > td {
|
||||
background: rgba(0, 0, 0, 0.04);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,195 +0,0 @@
|
||||
import './HostMetricTraces.styles.scss';
|
||||
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch';
|
||||
import NoLogs from 'container/NoLogs/NoLogs';
|
||||
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
|
||||
import { ErrorText } from 'container/TimeSeriesView/styles';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
import {
|
||||
CustomTimeType,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import TraceExplorerControls from 'container/TracesExplorer/Controls';
|
||||
import { PER_PAGE_OPTIONS } from 'container/TracesExplorer/ListView/configs';
|
||||
import { TracesLoading } from 'container/TracesExplorer/TraceLoading/TraceLoading';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { Pagination } from 'hooks/queryPagination';
|
||||
import useUrlQueryData from 'hooks/useUrlQueryData';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import { getHostTracesQueryPayload, selectedColumns } from './constants';
|
||||
import { getListColumns } from './utils';
|
||||
|
||||
interface Props {
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
isModalTimeSelection: boolean;
|
||||
handleTimeChange: (
|
||||
interval: Time | CustomTimeType,
|
||||
dateTimeRange?: [number, number],
|
||||
) => void;
|
||||
handleChangeTracesFilters: (value: IBuilderQuery['filters']) => void;
|
||||
tracesFilters: IBuilderQuery['filters'];
|
||||
selectedInterval: Time;
|
||||
}
|
||||
|
||||
function HostMetricTraces({
|
||||
timeRange,
|
||||
isModalTimeSelection,
|
||||
handleTimeChange,
|
||||
handleChangeTracesFilters,
|
||||
tracesFilters,
|
||||
selectedInterval,
|
||||
}: Props): JSX.Element {
|
||||
const [traces, setTraces] = useState<any[]>([]);
|
||||
const [offset] = useState<number>(0);
|
||||
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
const updatedCurrentQuery = useMemo(
|
||||
() => ({
|
||||
...currentQuery,
|
||||
builder: {
|
||||
...currentQuery.builder,
|
||||
queryData: [
|
||||
{
|
||||
...currentQuery.builder.queryData[0],
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateOperator: 'noop',
|
||||
aggregateAttribute: {
|
||||
...currentQuery.builder.queryData[0].aggregateAttribute,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
[currentQuery],
|
||||
);
|
||||
|
||||
const query = updatedCurrentQuery?.builder?.queryData[0] || null;
|
||||
|
||||
const { queryData: paginationQueryData } = useUrlQueryData<Pagination>(
|
||||
QueryParams.pagination,
|
||||
);
|
||||
|
||||
const queryPayload = useMemo(
|
||||
() =>
|
||||
getHostTracesQueryPayload(
|
||||
timeRange.startTime,
|
||||
timeRange.endTime,
|
||||
paginationQueryData?.offset || offset,
|
||||
tracesFilters,
|
||||
),
|
||||
[
|
||||
timeRange.startTime,
|
||||
timeRange.endTime,
|
||||
offset,
|
||||
tracesFilters,
|
||||
paginationQueryData,
|
||||
],
|
||||
);
|
||||
|
||||
const { data, isLoading, isFetching, isError } = useQuery({
|
||||
queryKey: [
|
||||
'hostMetricTraces',
|
||||
timeRange.startTime,
|
||||
timeRange.endTime,
|
||||
offset,
|
||||
tracesFilters,
|
||||
DEFAULT_ENTITY_VERSION,
|
||||
paginationQueryData,
|
||||
],
|
||||
queryFn: () => GetMetricQueryRange(queryPayload, DEFAULT_ENTITY_VERSION),
|
||||
enabled: !!queryPayload,
|
||||
});
|
||||
|
||||
const traceListColumns = getListColumns(selectedColumns);
|
||||
|
||||
useEffect(() => {
|
||||
if (data?.payload?.data?.newResult?.data?.result) {
|
||||
const currentData = data.payload.data.newResult.data.result;
|
||||
if (currentData.length > 0 && currentData[0].list) {
|
||||
if (offset === 0) {
|
||||
setTraces(currentData[0].list ?? []);
|
||||
} else {
|
||||
setTraces((prev) => [...prev, ...(currentData[0].list ?? [])]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}, [data, offset]);
|
||||
|
||||
const isDataEmpty =
|
||||
!isLoading && !isFetching && !isError && traces.length === 0;
|
||||
const hasAdditionalFilters = tracesFilters.items.length > 1;
|
||||
|
||||
const totalCount =
|
||||
data?.payload?.data?.newResult?.data?.result?.[0]?.list?.length || 0;
|
||||
|
||||
return (
|
||||
<div className="host-metric-traces">
|
||||
<div className="host-metric-traces-header">
|
||||
<div className="filter-section">
|
||||
{query && (
|
||||
<QueryBuilderSearch
|
||||
query={query}
|
||||
onChange={handleChangeTracesFilters}
|
||||
disableNavigationShortcuts
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<div className="datetime-section">
|
||||
<DateTimeSelectionV2
|
||||
showAutoRefresh={false}
|
||||
showRefreshText={false}
|
||||
hideShareModal
|
||||
isModalTimeSelection={isModalTimeSelection}
|
||||
onTimeChange={handleTimeChange}
|
||||
defaultRelativeTime="5m"
|
||||
modalSelectedInterval={selectedInterval}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{isError && <ErrorText>{data?.error || 'Something went wrong'}</ErrorText>}
|
||||
|
||||
{isLoading && traces.length === 0 && <TracesLoading />}
|
||||
|
||||
{isDataEmpty && !hasAdditionalFilters && (
|
||||
<NoLogs dataSource={DataSource.TRACES} />
|
||||
)}
|
||||
|
||||
{isDataEmpty && hasAdditionalFilters && (
|
||||
<EmptyLogsSearch dataSource={DataSource.TRACES} panelType="LIST" />
|
||||
)}
|
||||
|
||||
{!isError && traces.length > 0 && (
|
||||
<div className="host-metric-traces-table">
|
||||
<TraceExplorerControls
|
||||
isLoading={isFetching}
|
||||
totalCount={totalCount}
|
||||
perPageOptions={PER_PAGE_OPTIONS}
|
||||
showSizeChanger={false}
|
||||
/>
|
||||
<ResizeTable
|
||||
tableLayout="fixed"
|
||||
pagination={false}
|
||||
scroll={{ x: true }}
|
||||
loading={isFetching}
|
||||
dataSource={traces}
|
||||
columns={traceListColumns}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default HostMetricTraces;
|
||||
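The removed HostMetricTraces component above is a compact example of the data-fetching pattern used across these views: the trace list comes from react-query with the time range, offset and filters folded into the query key, so any change re-runs the request, and paginated pages are appended to the rows already loaded. A reduced sketch of that shape; the fetcher here is a stub standing in for GetMetricQueryRange:

import { useEffect, useState } from 'react';
import { useQuery } from 'react-query';

interface TraceRow {
	traceID: string;
	durationNano: number;
}

// Stub fetcher standing in for GetMetricQueryRange(queryPayload, version).
async function fetchTraces(
	_start: number,
	_end: number,
	_offset: number,
): Promise<TraceRow[]> {
	return [];
}

export function useHostTraces(
	start: number,
	end: number,
	offset: number,
): { rows: TraceRow[]; isLoading: boolean } {
	const [rows, setRows] = useState<TraceRow[]>([]);

	const { data, isLoading } = useQuery({
		// Every input that changes the result is part of the key.
		queryKey: ['hostMetricTraces', start, end, offset],
		queryFn: () => fetchTraces(start, end, offset),
		enabled: start > 0 && end > start,
	});

	useEffect(() => {
		if (!data) return;
		// The first page replaces the list, later pages are appended.
		setRows((prev) => (offset === 0 ? data : [...prev, ...data]));
	}, [data, offset]);

	return { rows, isLoading };
}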
@@ -1,200 +0,0 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { nanoToMilli } from 'utils/timeUtils';
|
||||
|
||||
export const columns = [
|
||||
{
|
||||
dataIndex: 'timestamp',
|
||||
key: 'timestamp',
|
||||
title: 'Timestamp',
|
||||
width: 200,
|
||||
render: (timestamp: string): string => new Date(timestamp).toLocaleString(),
|
||||
},
|
||||
{
|
||||
title: 'Service Name',
|
||||
dataIndex: ['data', 'serviceName'],
|
||||
key: 'serviceName-string-tag',
|
||||
width: 150,
|
||||
},
|
||||
{
|
||||
title: 'Name',
|
||||
dataIndex: ['data', 'name'],
|
||||
key: 'name-string-tag',
|
||||
width: 145,
|
||||
},
|
||||
{
|
||||
title: 'Duration',
|
||||
dataIndex: ['data', 'durationNano'],
|
||||
key: 'durationNano-float64-tag',
|
||||
width: 145,
|
||||
render: (duration: number): string => `${nanoToMilli(duration)}ms`,
|
||||
},
|
||||
{
|
||||
title: 'HTTP Method',
|
||||
dataIndex: ['data', 'httpMethod'],
|
||||
key: 'httpMethod-string-tag',
|
||||
width: 145,
|
||||
},
|
||||
{
|
||||
title: 'Status Code',
|
||||
dataIndex: ['data', 'responseStatusCode'],
|
||||
key: 'responseStatusCode-string-tag',
|
||||
width: 145,
|
||||
},
|
||||
];
|
||||
|
||||
export const selectedColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'timestamp',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
},
|
||||
{
|
||||
key: 'serviceName',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
},
|
||||
{
|
||||
key: 'name',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
},
|
||||
{
|
||||
key: 'durationNano',
|
||||
dataType: DataTypes.Float64,
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
},
|
||||
{
|
||||
key: 'httpMethod',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
},
|
||||
{
|
||||
key: 'responseStatusCode',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
export const getHostTracesQueryPayload = (
|
||||
start: number,
|
||||
end: number,
|
||||
offset = 0,
|
||||
filters: IBuilderQuery['filters'],
|
||||
): GetQueryResultsProps => ({
|
||||
query: {
|
||||
promql: [],
|
||||
clickhouse_sql: [],
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
dataSource: DataSource.TRACES,
|
||||
queryName: 'A',
|
||||
aggregateOperator: 'noop',
|
||||
aggregateAttribute: {
|
||||
id: '------false',
|
||||
dataType: DataTypes.EMPTY,
|
||||
key: '',
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
},
|
||||
timeAggregation: 'rate',
|
||||
spaceAggregation: 'sum',
|
||||
functions: [],
|
||||
filters,
|
||||
expression: 'A',
|
||||
disabled: false,
|
||||
stepInterval: 60,
|
||||
having: [],
|
||||
limit: null,
|
||||
orderBy: [
|
||||
{
|
||||
columnName: 'timestamp',
|
||||
order: 'desc',
|
||||
},
|
||||
],
|
||||
groupBy: [],
|
||||
legend: '',
|
||||
reduceTo: 'avg',
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
},
|
||||
id: '572f1d91-6ac0-46c0-b726-c21488b34434',
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
},
|
||||
graphType: PANEL_TYPES.LIST,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
start,
|
||||
end,
|
||||
params: {
|
||||
dataSource: DataSource.TRACES,
|
||||
},
|
||||
tableParams: {
|
||||
pagination: {
|
||||
limit: 10,
|
||||
offset,
|
||||
},
|
||||
selectColumns: [
|
||||
{
|
||||
key: 'serviceName',
|
||||
dataType: 'string',
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: 'serviceName--string--tag--true',
|
||||
isIndexed: false,
|
||||
},
|
||||
{
|
||||
key: 'name',
|
||||
dataType: 'string',
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: 'name--string--tag--true',
|
||||
isIndexed: false,
|
||||
},
|
||||
{
|
||||
key: 'durationNano',
|
||||
dataType: 'float64',
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: 'durationNano--float64--tag--true',
|
||||
isIndexed: false,
|
||||
},
|
||||
{
|
||||
key: 'httpMethod',
|
||||
dataType: 'string',
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: 'httpMethod--string--tag--true',
|
||||
isIndexed: false,
|
||||
},
|
||||
{
|
||||
key: 'responseStatusCode',
|
||||
dataType: 'string',
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: 'responseStatusCode--string--tag--true',
|
||||
isIndexed: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
@@ -1,84 +0,0 @@
|
||||
import { Tag, Typography } from 'antd';
|
||||
import { ColumnsType } from 'antd/es/table';
|
||||
import { getMs } from 'container/Trace/Filters/Panel/PanelBody/Duration/util';
|
||||
import {
|
||||
BlockLink,
|
||||
getTraceLink,
|
||||
} from 'container/TracesExplorer/ListView/utils';
|
||||
import dayjs from 'dayjs';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
const keyToLabelMap: Record<string, string> = {
|
||||
timestamp: 'Timestamp',
|
||||
serviceName: 'Service Name',
|
||||
name: 'Name',
|
||||
durationNano: 'Duration',
|
||||
httpMethod: 'HTTP Method',
|
||||
responseStatusCode: 'Status Code',
|
||||
};
|
||||
|
||||
export const getListColumns = (
|
||||
selectedColumns: BaseAutocompleteData[],
|
||||
): ColumnsType<RowData> => {
|
||||
const columns: ColumnsType<RowData> =
|
||||
selectedColumns.map(({ dataType, key, type }) => ({
|
||||
title: keyToLabelMap[key],
|
||||
dataIndex: key,
|
||||
key: `${key}-${dataType}-${type}`,
|
||||
width: 145,
|
||||
render: (value, item): JSX.Element => {
|
||||
const itemData = item.data as any;
|
||||
|
||||
if (key === 'timestamp') {
|
||||
const date =
|
||||
typeof value === 'string'
|
||||
? dayjs(value).format('YYYY-MM-DD HH:mm:ss.SSS')
|
||||
: dayjs(value / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
|
||||
|
||||
return (
|
||||
<BlockLink to={getTraceLink(item)} openInNewTab>
|
||||
<Typography.Text>{date}</Typography.Text>
|
||||
</BlockLink>
|
||||
);
|
||||
}
|
||||
|
||||
if (value === '') {
|
||||
return (
|
||||
<BlockLink to={getTraceLink(itemData)} openInNewTab>
|
||||
<Typography data-testid={key}>N/A</Typography>
|
||||
</BlockLink>
|
||||
);
|
||||
}
|
||||
|
||||
if (key === 'httpMethod' || key === 'responseStatusCode') {
|
||||
return (
|
||||
<BlockLink to={getTraceLink(itemData)} openInNewTab>
|
||||
<Tag data-testid={key} color="magenta">
|
||||
{itemData[key]}
|
||||
</Tag>
|
||||
</BlockLink>
|
||||
);
|
||||
}
|
||||
|
||||
if (key === 'durationNano') {
|
||||
const durationNano = itemData[key];
|
||||
|
||||
return (
|
||||
<BlockLink to={getTraceLink(item)} openInNewTab>
|
||||
<Typography data-testid={key}>{getMs(durationNano)}ms</Typography>
|
||||
</BlockLink>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<BlockLink to={getTraceLink(itemData)} openInNewTab>
|
||||
<Typography data-testid={key}>{itemData[key]}</Typography>
|
||||
</BlockLink>
|
||||
);
|
||||
},
|
||||
responsive: ['md'],
|
||||
})) || [];
|
||||
|
||||
return columns;
|
||||
};
|
||||
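The removed utils above build the antd column set dynamically: each selected attribute becomes a column keyed as key-dataType-type, with per-key render logic for timestamps, tags and durations. A trimmed-down sketch of that mapping with just the label lookup and a default renderer; the real version also wraps every cell in a link to the trace:

import { Typography } from 'antd';
import { ColumnsType } from 'antd/es/table';

interface SelectedColumn {
	key: string;
	dataType: string;
	type: string;
}
type RowData = Record<string, string | number>;

const keyToLabelMap: Record<string, string> = {
	timestamp: 'Timestamp',
	serviceName: 'Service Name',
	durationNano: 'Duration',
};

export const getListColumnsSketch = (
	selected: SelectedColumn[],
): ColumnsType<RowData> =>
	selected.map(({ key, dataType, type }) => ({
		title: keyToLabelMap[key] ?? key,
		dataIndex: key,
		key: `${key}-${dataType}-${type}`,
		width: 145,
		render: (value: string | number): JSX.Element => (
			<Typography data-testid={key}>{value === '' ? 'N/A' : value}</Typography>
		),
	}));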
@@ -1,232 +0,0 @@
|
||||
.host-detail-drawer {
|
||||
border-left: 1px solid var(--bg-slate-500);
|
||||
background: var(--bg-ink-400);
|
||||
box-shadow: -4px 10px 16px 2px rgba(0, 0, 0, 0.2);
|
||||
|
||||
.ant-drawer-header {
|
||||
padding: 8px 16px;
|
||||
border-bottom: none;
|
||||
|
||||
align-items: stretch;
|
||||
|
||||
border-bottom: 1px solid var(--bg-slate-500);
|
||||
background: var(--bg-ink-400);
|
||||
}
|
||||
|
||||
.ant-drawer-close {
|
||||
margin-inline-end: 0px;
|
||||
}
|
||||
|
||||
.ant-drawer-body {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
.title {
|
||||
color: var(--text-vanilla-400);
|
||||
font-family: 'Geist Mono';
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 20px; /* 142.857% */
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
|
||||
.radio-button {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding-top: var(--padding-1);
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: var(--bg-ink-300);
|
||||
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
.host-detail-drawer__host {
|
||||
.host-details-grid {
|
||||
.labels-row,
|
||||
.values-row {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1.5fr 1.5fr 1.5fr;
|
||||
gap: 30px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.labels-row {
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.host-details-metadata-label {
|
||||
color: var(--text-vanilla-400);
|
||||
font-family: Inter;
|
||||
font-size: 11px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 18px; /* 163.636% */
|
||||
letter-spacing: 0.44px;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.status-tag {
|
||||
margin: 0;
|
||||
|
||||
&.active {
|
||||
color: var(--success-500);
|
||||
background: var(--success-100);
|
||||
border-color: var(--success-500);
|
||||
}
|
||||
|
||||
&.inactive {
|
||||
color: var(--error-500);
|
||||
background: var(--error-100);
|
||||
border-color: var(--error-500);
|
||||
}
|
||||
}
|
||||
|
||||
.progress-container {
|
||||
width: 158px;
|
||||
.ant-progress {
|
||||
margin: 0;
|
||||
|
||||
.ant-progress-text {
|
||||
font-weight: 600;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.ant-card {
|
||||
&.ant-card-bordered {
|
||||
border: 1px solid var(--bg-slate-500) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.tabs-and-search {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin: 16px 0;
|
||||
|
||||
.action-btn {
|
||||
border-radius: 2px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: var(--bg-ink-300);
|
||||
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
}
|
||||
|
||||
.views-tabs-container {
|
||||
margin-top: 1.5rem;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
|
||||
.views-tabs {
|
||||
color: var(--text-vanilla-400);
|
||||
|
||||
.view-title {
|
||||
display: flex;
|
||||
gap: var(--margin-2);
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
font-size: var(--font-size-xs);
|
||||
font-style: normal;
|
||||
font-weight: var(--font-weight-normal);
|
||||
}
|
||||
|
||||
.tab {
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
width: 114px;
|
||||
}
|
||||
|
||||
.tab::before {
|
||||
background: var(--bg-slate-400);
|
||||
}
|
||||
|
||||
.selected_view {
|
||||
background: var(--bg-slate-300);
|
||||
color: var(--text-vanilla-100);
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
}
|
||||
|
||||
.selected_view::before {
|
||||
background: var(--bg-slate-400);
|
||||
}
|
||||
}
|
||||
|
||||
.compass-button {
|
||||
width: 30px;
|
||||
height: 30px;
|
||||
|
||||
border-radius: 2px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: var(--bg-ink-300);
|
||||
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
}
|
||||
.ant-drawer-close {
|
||||
padding: 0px;
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.ant-drawer-header {
|
||||
border-bottom: 1px solid var(--bg-vanilla-400);
|
||||
background: var(--bg-vanilla-100);
|
||||
}
|
||||
|
||||
.host-detail-drawer {
|
||||
.title {
|
||||
color: var(--text-ink-300);
|
||||
}
|
||||
|
||||
.host-detail-drawer__host {
|
||||
.ant-typography {
|
||||
color: var(--text-ink-300);
|
||||
background: transparent;
|
||||
}
|
||||
}
|
||||
|
||||
.radio-button {
|
||||
border: 1px solid var(--bg-vanilla-400);
|
||||
background: var(--bg-vanilla-100);
|
||||
color: var(--text-ink-300);
|
||||
}
|
||||
|
||||
.views-tabs {
|
||||
.tab {
|
||||
background: var(--bg-vanilla-100);
|
||||
}
|
||||
|
||||
.selected_view {
|
||||
background: var(--bg-vanilla-300);
|
||||
border: 1px solid var(--bg-slate-300);
|
||||
color: var(--text-ink-400);
|
||||
}
|
||||
|
||||
.selected_view::before {
|
||||
background: var(--bg-vanilla-300);
|
||||
border-left: 1px solid var(--bg-slate-300);
|
||||
}
|
||||
}
|
||||
|
||||
.compass-button {
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
.tabs-and-search {
|
||||
.action-btn {
|
||||
border: 1px solid var(--bg-vanilla-400);
|
||||
background: var(--bg-vanilla-100);
|
||||
color: var(--text-ink-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,517 +0,0 @@
|
||||
import './HostMetricsDetail.styles.scss';
|
||||
|
||||
import { Color, Spacing } from '@signozhq/design-tokens';
|
||||
import {
|
||||
Button,
|
||||
Divider,
|
||||
Drawer,
|
||||
Progress,
|
||||
Radio,
|
||||
Tag,
|
||||
Typography,
|
||||
} from 'antd';
|
||||
import { RadioChangeEvent } from 'antd/lib';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import {
|
||||
initialQueryBuilderFormValuesMap,
|
||||
initialQueryState,
|
||||
} from 'constants/queryBuilder';
|
||||
import ROUTES from 'constants/routes';
|
||||
import {
|
||||
CustomTimeType,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import GetMinMax from 'lib/getMinMax';
|
||||
import {
|
||||
BarChart2,
|
||||
ChevronsLeftRight,
|
||||
Compass,
|
||||
DraftingCompass,
|
||||
Package2,
|
||||
ScrollText,
|
||||
X,
|
||||
} from 'lucide-react';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
TagFilterItem,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import {
|
||||
LogsAggregatorOperator,
|
||||
TracesAggregatorOperator,
|
||||
} from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
import { VIEW_TYPES, VIEWS } from './constants';
|
||||
import Containers from './Containers/Containers';
|
||||
import { HostDetailProps } from './HostMetricDetail.interfaces';
|
||||
import HostMetricLogsDetailedView from './HostMetricsLogs/HostMetricLogsDetailedView';
|
||||
import HostMetricTraces from './HostMetricTraces/HostMetricTraces';
|
||||
import Metrics from './Metrics/Metrics';
|
||||
import Processes from './Processes/Processes';
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function HostMetricsDetails({
|
||||
host,
|
||||
onClose,
|
||||
isModalTimeSelection,
|
||||
}: HostDetailProps): JSX.Element {
|
||||
const { maxTime, minTime, selectedTime } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
|
||||
const startMs = useMemo(() => Math.floor(Number(minTime) / 1000000000), [
|
||||
minTime,
|
||||
]);
|
||||
const endMs = useMemo(() => Math.floor(Number(maxTime) / 1000000000), [
|
||||
maxTime,
|
||||
]);
|
||||
|
||||
const urlQuery = useUrlQuery();
|
||||
|
||||
const [modalTimeRange, setModalTimeRange] = useState(() => ({
|
||||
startTime: startMs,
|
||||
endTime: endMs,
|
||||
}));
|
||||
|
||||
const [selectedInterval, setSelectedInterval] = useState<Time>(
|
||||
selectedTime as Time,
|
||||
);
|
||||
|
||||
const [selectedView, setSelectedView] = useState<VIEWS>(VIEWS.METRICS);
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const initialFilters = useMemo(
|
||||
() => ({
|
||||
op: 'AND',
|
||||
items: [
|
||||
{
|
||||
id: uuidv4(),
|
||||
key: {
|
||||
key: 'host.name',
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
id: 'host.name--string--resource--false',
|
||||
},
|
||||
op: '=',
|
||||
value: host?.hostName || '',
|
||||
},
|
||||
],
|
||||
}),
|
||||
[host?.hostName],
|
||||
);
|
||||
|
||||
const [logFilters, setLogFilters] = useState<IBuilderQuery['filters']>(
|
||||
initialFilters,
|
||||
);
|
||||
|
||||
const [tracesFilters, setTracesFilters] = useState<IBuilderQuery['filters']>(
|
||||
initialFilters,
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
logEvent('Infra Monitoring: Hosts list details page visited', {
|
||||
host: host?.hostName,
|
||||
});
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
setLogFilters(initialFilters);
|
||||
setTracesFilters(initialFilters);
|
||||
}, [initialFilters]);
|
||||
|
||||
useEffect(() => {
|
||||
setSelectedInterval(selectedTime as Time);
|
||||
|
||||
if (selectedTime !== 'custom') {
|
||||
const { maxTime, minTime } = GetMinMax(selectedTime);
|
||||
|
||||
setModalTimeRange({
|
||||
startTime: Math.floor(minTime / 1000000000),
|
||||
endTime: Math.floor(maxTime / 1000000000),
|
||||
});
|
||||
}
|
||||
}, [selectedTime, minTime, maxTime]);
|
||||
|
||||
const handleTabChange = (e: RadioChangeEvent): void => {
|
||||
setSelectedView(e.target.value);
|
||||
};
|
||||
|
||||
const handleTimeChange = useCallback(
|
||||
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
|
||||
setSelectedInterval(interval as Time);
|
||||
|
||||
if (interval === 'custom' && dateTimeRange) {
|
||||
setModalTimeRange({
|
||||
startTime: Math.floor(dateTimeRange[0] / 1000),
|
||||
endTime: Math.floor(dateTimeRange[1] / 1000),
|
||||
});
|
||||
} else {
|
||||
const { maxTime, minTime } = GetMinMax(interval);
|
||||
|
||||
setModalTimeRange({
|
||||
startTime: Math.floor(minTime / 1000000000),
|
||||
endTime: Math.floor(maxTime / 1000000000),
|
||||
});
|
||||
}
|
||||
|
||||
logEvent('Infra Monitoring: Hosts list details time updated', {
|
||||
host: host?.hostName,
|
||||
interval,
|
||||
});
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[],
|
||||
);
|
||||
|
||||
const handleChangeLogFilters = useCallback(
|
||||
(value: IBuilderQuery['filters']) => {
|
||||
setLogFilters((prevFilters) => {
|
||||
const hostNameFilter = prevFilters.items.find(
|
||||
(item) => item.key?.key === 'host.name',
|
||||
);
|
||||
const paginationFilter = value.items.find((item) => item.key?.key === 'id');
|
||||
const newFilters = value.items.filter(
|
||||
(item) => item.key?.key !== 'id' && item.key?.key !== 'host.name',
|
||||
);
|
||||
|
||||
logEvent('Infra Monitoring: Hosts list details logs filters applied', {
|
||||
host: host?.hostName,
|
||||
});
|
||||
|
||||
return {
|
||||
op: 'AND',
|
||||
items: [
|
||||
hostNameFilter,
|
||||
...newFilters,
|
||||
...(paginationFilter ? [paginationFilter] : []),
|
||||
].filter((item): item is TagFilterItem => item !== undefined),
|
||||
};
|
||||
});
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[],
|
||||
);
|
||||
|
||||
const handleChangeTracesFilters = useCallback(
|
||||
(value: IBuilderQuery['filters']) => {
|
||||
setTracesFilters((prevFilters) => {
|
||||
const hostNameFilter = prevFilters.items.find(
|
||||
(item) => item.key?.key === 'host.name',
|
||||
);
|
||||
|
||||
logEvent('Infra Monitoring: Hosts list details traces filters applied', {
|
||||
host: host?.hostName,
|
||||
});
|
||||
|
||||
return {
|
||||
op: 'AND',
|
||||
items: [
|
||||
hostNameFilter,
|
||||
...value.items.filter((item) => item.key?.key !== 'host.name'),
|
||||
].filter((item): item is TagFilterItem => item !== undefined),
|
||||
};
|
||||
});
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[],
|
||||
);
|
||||
|
||||
const handleExplorePagesRedirect = (): void => {
|
||||
if (selectedInterval !== 'custom') {
|
||||
urlQuery.set(QueryParams.relativeTime, selectedInterval);
|
||||
} else {
|
||||
urlQuery.delete(QueryParams.relativeTime);
|
||||
urlQuery.set(QueryParams.startTime, modalTimeRange.startTime.toString());
|
||||
urlQuery.set(QueryParams.endTime, modalTimeRange.endTime.toString());
|
||||
}
|
||||
|
||||
logEvent('Infra Monitoring: Hosts list details explore clicked', {
|
||||
host: host?.hostName,
|
||||
view: selectedView,
|
||||
});
|
||||
|
||||
if (selectedView === VIEW_TYPES.LOGS) {
|
||||
const filtersWithoutPagination = {
|
||||
...logFilters,
|
||||
items: logFilters.items.filter((item) => item.key?.key !== 'id'),
|
||||
};
|
||||
|
||||
const compositeQuery = {
|
||||
...initialQueryState,
|
||||
queryType: 'builder',
|
||||
builder: {
|
||||
...initialQueryState.builder,
|
||||
queryData: [
|
||||
{
|
||||
...initialQueryBuilderFormValuesMap.logs,
|
||||
aggregateOperator: LogsAggregatorOperator.NOOP,
|
||||
filters: filtersWithoutPagination,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
urlQuery.set('compositeQuery', JSON.stringify(compositeQuery));
|
||||
|
||||
window.open(
|
||||
`${window.location.origin}${ROUTES.LOGS_EXPLORER}?${urlQuery.toString()}`,
|
||||
'_blank',
|
||||
);
|
||||
} else if (selectedView === VIEW_TYPES.TRACES) {
|
||||
const compositeQuery = {
|
||||
...initialQueryState,
|
||||
queryType: 'builder',
|
||||
builder: {
|
||||
...initialQueryState.builder,
|
||||
queryData: [
|
||||
{
|
||||
...initialQueryBuilderFormValuesMap.traces,
|
||||
aggregateOperator: TracesAggregatorOperator.NOOP,
|
||||
filters: tracesFilters,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
urlQuery.set('compositeQuery', JSON.stringify(compositeQuery));
|
||||
|
||||
window.open(
|
||||
`${window.location.origin}${ROUTES.TRACES_EXPLORER}?${urlQuery.toString()}`,
|
||||
'_blank',
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const handleClose = (): void => {
|
||||
setSelectedInterval(selectedTime as Time);
|
||||
|
||||
if (selectedTime !== 'custom') {
|
||||
const { maxTime, minTime } = GetMinMax(selectedTime);
|
||||
|
||||
setModalTimeRange({
|
||||
startTime: Math.floor(minTime / 1000000000),
|
||||
endTime: Math.floor(maxTime / 1000000000),
|
||||
});
|
||||
}
|
||||
setSelectedView(VIEW_TYPES.METRICS);
|
||||
onClose();
|
||||
};
|
||||
|
||||
return (
|
||||
<Drawer
|
||||
width="70%"
|
||||
title={
|
||||
<>
|
||||
<Divider type="vertical" />
|
||||
<Typography.Text className="title">{host?.hostName}</Typography.Text>
|
||||
</>
|
||||
}
|
||||
placement="right"
|
||||
onClose={handleClose}
|
||||
open={!!host}
|
||||
style={{
|
||||
overscrollBehavior: 'contain',
|
||||
background: isDarkMode ? Color.BG_INK_400 : Color.BG_VANILLA_100,
|
||||
}}
|
||||
className="host-detail-drawer"
|
||||
destroyOnClose
|
||||
closeIcon={<X size={16} style={{ marginTop: Spacing.MARGIN_1 }} />}
|
||||
>
|
||||
{host && (
|
||||
<>
|
||||
<div className="host-detail-drawer__host">
|
||||
<div className="host-details-grid">
|
||||
<div className="labels-row">
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="host-details-metadata-label"
|
||||
>
|
||||
STATUS
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="host-details-metadata-label"
|
||||
>
|
||||
OPERATING SYSTEM
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="host-details-metadata-label"
|
||||
>
|
||||
CPU USAGE
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="host-details-metadata-label"
|
||||
>
|
||||
MEMORY USAGE
|
||||
</Typography.Text>
|
||||
</div>
|
||||
|
||||
<div className="values-row">
|
||||
<Tag
|
||||
bordered
|
||||
className={`infra-monitoring-tags ${
|
||||
host.active ? 'active' : 'inactive'
|
||||
}`}
|
||||
>
|
||||
{host.active ? 'ACTIVE' : 'INACTIVE'}
|
||||
</Tag>
|
||||
<Tag className="infra-monitoring-tags" bordered>
|
||||
{host.os}
|
||||
</Tag>
|
||||
<div className="progress-container">
|
||||
<Progress
|
||||
percent={Number((host.cpu * 100).toFixed(1))}
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const cpuPercent = Number((host.cpu * 100).toFixed(1));
|
||||
if (cpuPercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (cpuPercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
</div>
|
||||
<div className="progress-container">
|
||||
<Progress
|
||||
percent={Number((host.memory * 100).toFixed(1))}
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const memoryPercent = Number((host.memory * 100).toFixed(1));
|
||||
if (memoryPercent >= 90) return Color.BG_CHERRY_500;
|
||||
if (memoryPercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="views-tabs-container">
|
||||
<Radio.Group
|
||||
className="views-tabs"
|
||||
onChange={handleTabChange}
|
||||
value={selectedView}
|
||||
>
|
||||
<Radio.Button
|
||||
className={
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
selectedView === VIEW_TYPES.METRICS ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.METRICS}
|
||||
>
|
||||
<div className="view-title">
|
||||
<BarChart2 size={14} />
|
||||
Metrics
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.LOGS ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.LOGS}
|
||||
>
|
||||
<div className="view-title">
|
||||
<ScrollText size={14} />
|
||||
Logs
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.TRACES ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.TRACES}
|
||||
>
|
||||
<div className="view-title">
|
||||
<DraftingCompass size={14} />
|
||||
Traces
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.CONTAINERS ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.CONTAINERS}
|
||||
>
|
||||
<div className="view-title">
|
||||
<Package2 size={14} />
|
||||
Containers
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.PROCESSES ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.PROCESSES}
|
||||
>
|
||||
<div className="view-title">
|
||||
<ChevronsLeftRight size={14} />
|
||||
Processes
|
||||
</div>
|
||||
</Radio.Button>
|
||||
</Radio.Group>
|
||||
|
||||
{(selectedView === VIEW_TYPES.LOGS ||
|
||||
selectedView === VIEW_TYPES.TRACES) && (
|
||||
<Button
|
||||
icon={<Compass size={18} />}
|
||||
className="compass-button"
|
||||
onClick={handleExplorePagesRedirect}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{selectedView === VIEW_TYPES.METRICS && (
|
||||
<Metrics
|
||||
selectedInterval={selectedInterval}
|
||||
hostName={host.hostName}
|
||||
timeRange={modalTimeRange}
|
||||
handleTimeChange={handleTimeChange}
|
||||
isModalTimeSelection={isModalTimeSelection}
|
||||
/>
|
||||
)}
|
||||
{selectedView === VIEW_TYPES.LOGS && (
|
||||
<HostMetricLogsDetailedView
|
||||
timeRange={modalTimeRange}
|
||||
isModalTimeSelection={isModalTimeSelection}
|
||||
handleTimeChange={handleTimeChange}
|
||||
handleChangeLogFilters={handleChangeLogFilters}
|
||||
logFilters={logFilters}
|
||||
selectedInterval={selectedInterval}
|
||||
/>
|
||||
)}
|
||||
{selectedView === VIEW_TYPES.TRACES && (
|
||||
<HostMetricTraces
|
||||
timeRange={modalTimeRange}
|
||||
isModalTimeSelection={isModalTimeSelection}
|
||||
handleTimeChange={handleTimeChange}
|
||||
handleChangeTracesFilters={handleChangeTracesFilters}
|
||||
tracesFilters={tracesFilters}
|
||||
selectedInterval={selectedInterval}
|
||||
/>
|
||||
)}
|
||||
|
||||
{selectedView === VIEW_TYPES.CONTAINERS && <Containers />}
|
||||
{selectedView === VIEW_TYPES.PROCESSES && <Processes />}
|
||||
</>
|
||||
)}
|
||||
</Drawer>
|
||||
);
|
||||
}
|
||||
|
||||
export default HostMetricsDetails;
|
||||
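One detail worth keeping from the removed HostMetricsDetails component above: when log or trace filters are edited inside the drawer, the pinned host.name resource filter from the previous state is always re-applied, and the transient pagination (id) filter is carried over separately, so the views never drift to a different host. A compact sketch of that merge, with the filter types reduced to what the logic needs:

interface FilterItem {
	key?: { key: string };
	op: string;
	value: string | string[];
}

interface FilterSet {
	op: 'AND';
	items: FilterItem[];
}

// Keep the pinned host filter, drop incoming host/id duplicates,
// then re-append the pagination filter if the caller sent one.
export function mergeHostFilters(prev: FilterSet, incoming: FilterSet): FilterSet {
	const hostFilter = prev.items.find((item) => item.key?.key === 'host.name');
	const paginationFilter = incoming.items.find((item) => item.key?.key === 'id');
	const rest = incoming.items.filter(
		(item) => item.key?.key !== 'id' && item.key?.key !== 'host.name',
	);

	return {
		op: 'AND',
		items: [
			hostFilter,
			...rest,
			...(paginationFilter ? [paginationFilter] : []),
		].filter((item): item is FilterItem => item !== undefined),
	};
}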
@@ -1,133 +0,0 @@
.host-metrics-logs-container {
	margin-top: 1rem;

	.filter-section {
		flex: 1;

		.ant-select-selector {
			border-radius: 2px;
			border: 1px solid var(--bg-slate-400) !important;
			background-color: var(--bg-ink-300) !important;

			input {
				font-size: 12px;
			}

			.ant-tag .ant-typography {
				font-size: 12px;
			}
		}
	}

	.host-metrics-logs-header {
		display: flex;
		justify-content: space-between;
		gap: 8px;

		padding: 12px;
		border-radius: 3px;
		border: 1px solid var(--bg-slate-500);
	}

	.host-metrics-logs {
		margin-top: 1rem;

		.virtuoso-list {
			overflow-y: hidden !important;

			&::-webkit-scrollbar {
				width: 0.3rem;
				height: 0.3rem;
			}

			&::-webkit-scrollbar-track {
				background: transparent;
			}

			&::-webkit-scrollbar-thumb {
				background: var(--bg-slate-300);
			}

			&::-webkit-scrollbar-thumb:hover {
				background: var(--bg-slate-200);
			}

			.ant-row {
				width: fit-content;
			}
		}

		.skeleton-container {
			height: 100%;
			padding: 16px;
		}
	}
}

.host-metrics-logs-list-container {
	flex: 1;
	height: calc(100vh - 272px) !important;
	display: flex;
	height: 100%;

	.raw-log-content {
		width: 100%;
		text-wrap: inherit;
		word-wrap: break-word;
	}
}

.host-metrics-logs-list-card {
	width: 100%;
	margin-top: 12px;

	.ant-card-body {
		padding: 0;

		height: 100%;
		width: 100%;
	}
}

.logs-loading-skeleton {
	display: flex;
	flex-direction: column;
	align-items: center;
	justify-content: center;
	gap: 8px;
	padding: 8px 0;

	.ant-skeleton-input-sm {
		height: 18px;
	}
}

.no-logs-found {
	height: 50vh;
	width: 100%;
	display: flex;
	justify-content: center;
	align-items: center;

	padding: 24px;
	box-sizing: border-box;

	.ant-typography {
		display: flex;
		align-items: center;
		gap: 16px;
	}
}

.lightMode {
	.filter-section {
		border-top: 1px solid var(--bg-vanilla-300);
		border-bottom: 1px solid var(--bg-vanilla-300);

		.ant-select-selector {
			border-color: var(--bg-vanilla-300) !important;
			background-color: var(--bg-vanilla-100) !important;
			color: var(--bg-ink-200);
		}
	}
}
@@ -1,95 +0,0 @@
import './HostMetricLogs.styles.scss';

import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import {
	CustomTimeType,
	Time,
} from 'container/TopNav/DateTimeSelectionV2/config';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useMemo } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import HostMetricsLogs from './HostMetricsLogs';

interface Props {
	timeRange: {
		startTime: number;
		endTime: number;
	};
	isModalTimeSelection: boolean;
	handleTimeChange: (
		interval: Time | CustomTimeType,
		dateTimeRange?: [number, number],
	) => void;
	handleChangeLogFilters: (value: IBuilderQuery['filters']) => void;
	logFilters: IBuilderQuery['filters'];
	selectedInterval: Time;
}

function HostMetricLogsDetailedView({
	timeRange,
	isModalTimeSelection,
	handleTimeChange,
	handleChangeLogFilters,
	logFilters,
	selectedInterval,
}: Props): JSX.Element {
	const { currentQuery } = useQueryBuilder();
	const updatedCurrentQuery = useMemo(
		() => ({
			...currentQuery,
			builder: {
				...currentQuery.builder,
				queryData: [
					{
						...currentQuery.builder.queryData[0],
						dataSource: DataSource.LOGS,
						aggregateOperator: 'noop',
						aggregateAttribute: {
							...currentQuery.builder.queryData[0].aggregateAttribute,
						},
					},
				],
			},
		}),
		[currentQuery],
	);

	const query = updatedCurrentQuery?.builder?.queryData[0] || null;

	return (
		<div className="host-metrics-logs-container">
			<div className="host-metrics-logs-header">
				<div className="filter-section">
					{query && (
						<QueryBuilderSearch
							query={query}
							onChange={handleChangeLogFilters}
							disableNavigationShortcuts
						/>
					)}
				</div>
				<div className="datetime-section">
					<DateTimeSelectionV2
						showAutoRefresh={false}
						showRefreshText={false}
						hideShareModal
						isModalTimeSelection={isModalTimeSelection}
						onTimeChange={handleTimeChange}
						defaultRelativeTime="5m"
						modalSelectedInterval={selectedInterval}
					/>
				</div>
			</div>
			<HostMetricsLogs
				timeRange={timeRange}
				handleChangeLogFilters={handleChangeLogFilters}
				filters={logFilters}
			/>
		</div>
	);
}

export default HostMetricLogsDetailedView;
@@ -1,216 +0,0 @@
/* eslint-disable no-nested-ternary */
import './HostMetricLogs.styles.scss';

import { Card } from 'antd';
import RawLogView from 'components/Logs/RawLogView';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import LogsError from 'container/LogsError/LogsError';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { isEqual } from 'lodash-es';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso } from 'react-virtuoso';
import { ILog } from 'types/api/logs/log';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
	IBuilderQuery,
	TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
import { v4 } from 'uuid';

import { getHostLogsQueryPayload } from './constants';
import NoLogsContainer from './NoLogsContainer';

interface Props {
	timeRange: {
		startTime: number;
		endTime: number;
	};
	handleChangeLogFilters: (filters: IBuilderQuery['filters']) => void;
	filters: IBuilderQuery['filters'];
}

function HostMetricsLogs({
	timeRange,
	handleChangeLogFilters,
	filters,
}: Props): JSX.Element {
	const [logs, setLogs] = useState<ILog[]>([]);
	const [hasReachedEndOfLogs, setHasReachedEndOfLogs] = useState(false);
	const [restFilters, setRestFilters] = useState<TagFilterItem[]>([]);
	const [resetLogsList, setResetLogsList] = useState<boolean>(false);

	useEffect(() => {
		const newRestFilters = filters.items.filter(
			(item) => item.key?.key !== 'id' && item.key?.key !== 'host.name',
		);

		const areFiltersSame = isEqual(restFilters, newRestFilters);

		if (!areFiltersSame) {
			setResetLogsList(true);
		}

		setRestFilters(newRestFilters);
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [filters]);

	const queryPayload = useMemo(() => {
		const basePayload = getHostLogsQueryPayload(
			timeRange.startTime,
			timeRange.endTime,
			filters,
		);

		basePayload.query.builder.queryData[0].pageSize = 100;
		basePayload.query.builder.queryData[0].orderBy = [
			{ columnName: 'timestamp', order: ORDERBY_FILTERS.DESC },
		];

		return basePayload;
	}, [timeRange.startTime, timeRange.endTime, filters]);

	const [isPaginating, setIsPaginating] = useState(false);

	const { data, isLoading, isFetching, isError } = useQuery({
		queryKey: [
			'hostMetricsLogs',
			timeRange.startTime,
			timeRange.endTime,
			filters,
		],
		queryFn: () => GetMetricQueryRange(queryPayload, DEFAULT_ENTITY_VERSION),
		enabled: !!queryPayload,
		keepPreviousData: isPaginating,
	});

	useEffect(() => {
		if (data?.payload?.data?.newResult?.data?.result) {
			const currentData = data.payload.data.newResult.data.result;

			if (resetLogsList) {
				const currentLogs: ILog[] =
					currentData[0].list?.map((item) => ({
						...item.data,
						timestamp: item.timestamp,
					})) || [];

				setLogs(currentLogs);

				setResetLogsList(false);
			}

			if (currentData.length > 0 && currentData[0].list) {
				const currentLogs: ILog[] =
					currentData[0].list.map((item) => ({
						...item.data,
						timestamp: item.timestamp,
					})) || [];

				setLogs((prev) => [...prev, ...currentLogs]);
			} else {
				setHasReachedEndOfLogs(true);
			}
		}
	}, [data, restFilters, isPaginating, resetLogsList]);

	const getItemContent = useCallback(
		(_: number, logToRender: ILog): JSX.Element => (
			<RawLogView
				isReadOnly
				isTextOverflowEllipsisDisabled
				key={logToRender.id}
				data={logToRender}
				linesPerRow={5}
				fontSize={FontSize.MEDIUM}
			/>
		),
		[],
	);

	const loadMoreLogs = useCallback(() => {
		if (!logs.length) return;

		setIsPaginating(true);
		const lastLog = logs[logs.length - 1];

		const newItems = [
			...filters.items.filter((item) => item.key?.key !== 'id'),
			{
				id: v4(),
				key: {
					key: 'id',
					type: '',
					dataType: DataTypes.String,
					isColumn: true,
				},
				op: '<',
				value: lastLog.id,
			},
		];

		const newFilters = {
			op: 'AND',
			items: newItems,
		} as IBuilderQuery['filters'];

		handleChangeLogFilters(newFilters);
	}, [logs, filters, handleChangeLogFilters]);

	useEffect(() => {
		setIsPaginating(false);
	}, [data]);

	const renderFooter = useCallback(
		(): JSX.Element | null => (
			// eslint-disable-next-line react/jsx-no-useless-fragment
			<>
				{isFetching ? (
					<div className="logs-loading-skeleton"> Loading more logs ... </div>
				) : hasReachedEndOfLogs ? (
					<div className="logs-loading-skeleton"> *** End *** </div>
				) : null}
			</>
		),
		[isFetching, hasReachedEndOfLogs],
	);

	const renderContent = useMemo(
		() => (
			<Card bordered={false} className="host-metrics-logs-list-card">
				<OverlayScrollbar isVirtuoso>
					<Virtuoso
						className="host-metrics-logs-virtuoso"
						key="host-metrics-logs-virtuoso"
						data={logs}
						endReached={loadMoreLogs}
						totalCount={logs.length}
						itemContent={getItemContent}
						overscan={200}
						components={{
							Footer: renderFooter,
						}}
					/>
				</OverlayScrollbar>
			</Card>
		),
		[logs, loadMoreLogs, getItemContent, renderFooter],
	);

	return (
		<div className="host-metrics-logs">
			{isLoading && <LogsLoading />}
			{!isLoading && !isError && logs.length === 0 && <NoLogsContainer />}
			{isError && !isLoading && <LogsError />}
			{!isLoading && !isError && logs.length > 0 && (
				<div className="host-metrics-logs-list-container">{renderContent}</div>
			)}
		</div>
	);
}

export default HostMetricsLogs;
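The `loadMoreLogs` callback above paginates by keyset rather than offset: it drops any previous `id` cursor from the filters and appends `id < <last rendered log id>`, so the next request resumes exactly after the last row Virtuoso rendered. Below is a minimal standalone sketch of that filter rewrite; the `FilterKey`/`FilterItem`/`Filters` types are simplified stand-ins for SigNoz's real `IBuilderQuery['filters']` shape, used here only for illustration.

```ts
// Simplified stand-ins (assumption) for the query-builder filter types.
interface FilterKey {
	key: string;
	type: string;
	dataType: string;
	isColumn: boolean;
}

interface FilterItem {
	id: string;
	key?: FilterKey;
	op: string;
	value: string | number;
}

interface Filters {
	op: 'AND' | 'OR';
	items: FilterItem[];
}

// Mirrors loadMoreLogs: remove the old `id` cursor, then add
// `id < lastLogId` so the next page starts where the previous one ended.
function nextPageFilters(current: Filters, lastLogId: string): Filters {
	return {
		op: 'AND',
		items: [
			...current.items.filter((item) => item.key?.key !== 'id'),
			{
				id: crypto.randomUUID(),
				key: { key: 'id', type: '', dataType: 'string', isColumn: true },
				op: '<',
				value: lastLogId,
			},
		],
	};
}
```

Because the cursor lives inside the filters themselves, changing the filters is all it takes to trigger the next fetch; `keepPreviousData: isPaginating` then keeps the current page on screen while the next one loads.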
@@ -1,16 +0,0 @@
import { Color } from '@signozhq/design-tokens';
import { Typography } from 'antd';
import { Ghost } from 'lucide-react';

const { Text } = Typography;

export default function NoLogsContainer(): React.ReactElement {
	return (
		<div className="no-logs-found">
			<Text type="secondary">
				<Ghost size={24} color={Color.BG_AMBER_500} /> No logs found for this host
				in the selected time range.
			</Text>
		</div>
	);
}
@@ -1,65 +0,0 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { v4 as uuidv4 } from 'uuid';

export const getHostLogsQueryPayload = (
	start: number,
	end: number,
	filters: IBuilderQuery['filters'],
): GetQueryResultsProps => ({
	graphType: PANEL_TYPES.LIST,
	selectedTime: 'GLOBAL_TIME',
	query: {
		clickhouse_sql: [],
		promql: [],
		builder: {
			queryData: [
				{
					dataSource: DataSource.LOGS,
					queryName: 'A',
					aggregateOperator: 'noop',
					aggregateAttribute: {
						id: '------false',
						dataType: DataTypes.String,
						key: '',
						isColumn: false,
						type: '',
						isJSON: false,
					},
					timeAggregation: 'rate',
					spaceAggregation: 'sum',
					functions: [],
					filters,
					expression: 'A',
					disabled: false,
					stepInterval: 60,
					having: [],
					limit: null,
					orderBy: [
						{
							columnName: 'timestamp',
							order: 'desc',
						},
					],
					groupBy: [],
					legend: '',
					reduceTo: 'avg',
					offset: 0,
					pageSize: 100,
				},
			],
			queryFormulas: [],
		},
		id: uuidv4(),
		queryType: EQueryType.QUERY_BUILDER,
	},
	params: {
		lastLogLineTimestamp: null,
	},
	start,
	end,
});
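`getHostLogsQueryPayload` returns a complete `GetQueryResultsProps` object with a single `noop` logs query, and callers adjust that one `queryData` entry before firing the request. A hedged usage sketch of that pattern follows, mirroring the `useMemo` block in `HostMetricsLogs` above; the import paths are assumptions based on the files shown in this diff.

```ts
import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

import { getHostLogsQueryPayload } from './constants';

// Build the base payload, then override page size and ordering for an
// infinite-scroll list, exactly as HostMetricsLogs does before querying.
function buildLogsPagePayload(
	start: number,
	end: number,
	filters: IBuilderQuery['filters'],
): GetQueryResultsProps {
	const payload = getHostLogsQueryPayload(start, end, filters);
	payload.query.builder.queryData[0].pageSize = 100;
	payload.query.builder.queryData[0].orderBy = [
		{ columnName: 'timestamp', order: ORDERBY_FILTERS.DESC },
	];
	return payload;
}
```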
@@ -1,45 +0,0 @@
.empty-container {
	display: flex;
	justify-content: center;
	align-items: center;
	height: 100%;
}

.host-metrics-container {
	margin-top: 1rem;
}

.metrics-header {
	display: flex;
	justify-content: flex-end;
	margin-top: 1rem;

	gap: 8px;
	padding: 12px;
	border-radius: 3px;
	border: 1px solid var(--bg-slate-500);
}

.host-metrics-card {
	margin: 8px 0 1rem 0;
	height: 300px;
	padding: 10px;

	border: 1px solid var(--bg-slate-500);

	.ant-card-body {
		padding: 0;
	}

	.chart-container {
		width: 100%;
		height: 100%;
	}

	.no-data-container {
		position: absolute;
		top: 50%;
		left: 50%;
		transform: translate(-50%, -50%);
	}
}
@@ -1,142 +0,0 @@
import './Metrics.styles.scss';

import { Card, Col, Row, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import Uplot from 'components/Uplot';
import { ENTITY_VERSION_V4 } from 'constants/app';
import {
	getHostQueryPayload,
	hostWidgetInfo,
} from 'container/LogDetailedView/InfraMetrics/constants';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import {
	CustomTimeType,
	Time,
} from 'container/TopNav/DateTimeSelectionV2/config';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useMemo, useRef } from 'react';
import { useQueries, UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

interface MetricsTabProps {
	timeRange: {
		startTime: number;
		endTime: number;
	};
	isModalTimeSelection: boolean;
	handleTimeChange: (
		interval: Time | CustomTimeType,
		dateTimeRange?: [number, number],
	) => void;
	selectedInterval: Time;

	hostName: string;
}

function Metrics({
	selectedInterval,
	hostName,
	timeRange,
	handleTimeChange,
	isModalTimeSelection,
}: MetricsTabProps): JSX.Element {
	const queryPayloads = useMemo(
		() => getHostQueryPayload(hostName, timeRange.startTime, timeRange.endTime),
		[hostName, timeRange.startTime, timeRange.endTime],
	);

	const queries = useQueries(
		queryPayloads.map((payload) => ({
			queryKey: ['host-metrics', payload, ENTITY_VERSION_V4, 'HOST'],
			queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
				GetMetricQueryRange(payload, ENTITY_VERSION_V4),
			enabled: !!payload,
		})),
	);

	const isDarkMode = useIsDarkMode();
	const graphRef = useRef<HTMLDivElement>(null);
	const dimensions = useResizeObserver(graphRef);

	const chartData = useMemo(
		() => queries.map(({ data }) => getUPlotChartData(data?.payload)),
		[queries],
	);

	const options = useMemo(
		() =>
			queries.map(({ data }, idx) =>
				getUPlotChartOptions({
					apiResponse: data?.payload,
					isDarkMode,
					dimensions,
					yAxisUnit: hostWidgetInfo[idx].yAxisUnit,
					softMax: null,
					softMin: null,
					minTimeScale: timeRange.startTime,
					maxTimeScale: timeRange.endTime,
				}),
			),
		[queries, isDarkMode, dimensions, timeRange.startTime, timeRange.endTime],
	);

	const renderCardContent = (
		query: UseQueryResult<SuccessResponse<MetricRangePayloadProps>, unknown>,
		idx: number,
	): JSX.Element => {
		if (query.isLoading) {
			return <Skeleton />;
		}

		if (query.error) {
			const errorMessage =
				(query.error as Error)?.message || 'Something went wrong';
			return <div>{errorMessage}</div>;
		}
		return (
			<div
				className={cx('chart-container', {
					'no-data-container':
						!query.isLoading && !query?.data?.payload?.data?.result?.length,
				})}
			>
				<Uplot options={options[idx]} data={chartData[idx]} />
			</div>
		);
	};

	return (
		<>
			<div className="metrics-header">
				<div className="metrics-datetime-section">
					<DateTimeSelectionV2
						showAutoRefresh={false}
						showRefreshText={false}
						hideShareModal
						onTimeChange={handleTimeChange}
						defaultRelativeTime="5m"
						isModalTimeSelection={isModalTimeSelection}
						modalSelectedInterval={selectedInterval}
					/>
				</div>
			</div>
			<Row gutter={24} className="host-metrics-container">
				{queries.map((query, idx) => (
					<Col span={12} key={hostWidgetInfo[idx].title}>
						<Typography.Text>{hostWidgetInfo[idx].title}</Typography.Text>
						<Card bordered className="host-metrics-card" ref={graphRef}>
							{renderCardContent(query, idx)}
						</Card>
					</Col>
				))}
			</Row>
		</>
	);
}

export default Metrics;
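`Metrics` fans out one `react-query` request per widget payload with `useQueries`, which is what lets `renderCardContent` show each chart's loading, error, and empty states independently. A minimal sketch of that fan-out in isolation follows; the generic `useWidgetQueries` helper is hypothetical and not part of this diff.

```ts
import { useQueries, UseQueryResult } from 'react-query';

// One query per payload; every entry carries its own isLoading / error state,
// so each chart card can resolve or fail on its own.
function useWidgetQueries<TPayload, TResult>(
	payloads: TPayload[],
	fetcher: (payload: TPayload) => Promise<TResult>,
): UseQueryResult<TResult, unknown>[] {
	return useQueries(
		payloads.map((payload, idx) => ({
			queryKey: ['host-metrics', idx],
			queryFn: (): Promise<TResult> => fetcher(payload),
			enabled: !!payload,
		})),
	) as UseQueryResult<TResult, unknown>[];
}
```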
@@ -1,66 +0,0 @@
.host-processes {
	gap: 24px;
	height: 60vh;
	display: flex;
	align-items: center;
	justify-content: center;
	width: 100%;
	margin: 0 auto;
	box-sizing: border-box;

	.infra-container-card-container {
		display: flex;
		flex-direction: column;
		gap: 24px;
	}

	.dev-status-container {
		display: flex;
		flex-direction: column;
		gap: 12px;
	}

	.infra-container-card {
		display: flex;
		flex-direction: column;
		justify-content: center;
	}

	.infra-container-card-text {
		font-size: var(--font-size-sm);
		color: var(--text-vanilla-400);
		line-height: 20px;
		letter-spacing: -0.07px;
		width: 400px;
		font-family: 'Inter';
		margin-top: 12px;
		font-weight: 300;
	}

	.infra-container-working-msg {
		display: flex;
		width: 400px;
		padding: 12px;
		align-items: flex-start;
		gap: 12px;
		border-radius: 4px;
		background: rgba(171, 189, 255, 0.04);

		.ant-space {
			align-items: flex-start;
		}
	}

	.infra-container-contact-support-btn {
		display: flex;
		align-items: center;
		justify-content: center;
		margin: auto;
	}
}

.lightMode {
	.infra-container-card-text {
		color: var(--text-ink-200);
	}
}
Some files were not shown because too many files have changed in this diff.