Compare commits: onboarding...add-valida (54 commits)

c0e96aa55c, 597752a4bc, 07a244f569, eb9385840f, 30b689037a, ba33c885d5,
a4ed9e4d47, df5767198c, 81c7f3221a, 2cbd8733a1, 71d1dfe9bd, 459712d25c,
61de2d414d, 0b7cd4c1a7, 62c033ccf8, e637487984, 8fc43a00f8, 031d62ca44,
8c4c357351, d8d8191a32, a876c0a744, c36f913a90, ed597f00c0, 4957d3ae93,
8835e3493d, 027a1631ef, d7a6607a25, 7a58bc58c9, 88be23c3e3, 8f095dfbc9,
72207691a3, 8998ca652e, f4ae5f19ff, d1ea608671, ac7ecac2c1, 64071165c4,
9c25a33cd9, 3100d602c4, d80908a1fc, bc17a10550, 694c185373, 02f3dfefb9,
3515686daf, c116bf05be, 4842e3b912, 9bdf194d70, 88aa29e94c, 1dfebed93a,
b36d2ec4c6, 089f128020, e30f95c340, 2c87d96d75, 1f9b13dc35, 4de5c01fd5
.github/workflows/build.yaml (128 changes)

```diff
@@ -1,48 +1,122 @@
-name: build-pipeline
+name: build

 on:
   pull_request:
+  push:
+    branches:
+      - main
+      - release/v*
+    tags:
+      - v*

 jobs:
-  build-frontend:
+  enterprise:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout code
+      - name: checkout
         uses: actions/checkout@v4
-      - name: Install dependencies
-        run: cd frontend && yarn install
-      - name: Build frontend static files
-        shell: bash
-        run: |
-          make build-frontend-static
-
-  build-signoz:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Setup golang
-        uses: actions/setup-go@v4
+      - name: setup
+        uses: actions/setup-go@v5
         with:
           go-version: "1.22"
-      - name: Build signoz image
+      - name: setup-qemu
+        uses: docker/setup-qemu-action@v3
+      - name: setup-buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          version: latest
+      - name: docker-login
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: create-env-file
+        run: |
+          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
+          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
+          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
+          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
+          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
+          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
+          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
+          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
+          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
+          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
+          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
+      - name: github-ref-info
         shell: bash
         run: |
-          make build-signoz-amd64
+          GH_REF=${{ github.ref }}
+          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+            PREFIX="refs/tags/"
+            echo "GH_IS_TAG=true" >> $GITHUB_ENV
+            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
+          else
+            PREFIX="refs/heads/"
+            echo "GH_IS_TAG=false" >> $GITHUB_ENV
+            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
+          fi
+      - name: set-version
+        run: |
+          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
+            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
+          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
+            echo "VERSION=latest" >> $GITHUB_ENV
+          else
+            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
+          fi
+      - name: cross-compilation-tools
+        run: |
+          set -ex
+          sudo apt-get update
+          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
+      - name: publish
+        run: make docker-buildx-enterprise

-  build-signoz-community:
+  community:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout code
+      - name: checkout
         uses: actions/checkout@v4
-      - name: Setup golang
-        uses: actions/setup-go@v4
+      - name: setup-go
+        uses: actions/setup-go@v5
         with:
           go-version: "1.22"
-      - name: Build signoz community image
+      - name: setup-qemu
+        uses: docker/setup-qemu-action@v3
+      - name: setup-buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          version: latest
+      - name: docker-login
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: github-ref-info
         shell: bash
         run: |
-          make build-signoz-community-amd64
+          GH_REF=${{ github.ref }}
+          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+            PREFIX="refs/tags/"
+            echo "GH_IS_TAG=true" >> $GITHUB_ENV
+            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
+          else
+            PREFIX="refs/heads/"
+            echo "GH_IS_TAG=false" >> $GITHUB_ENV
+            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
+          fi
+      - name: set-version
+        run: |
+          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
+            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
+          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
+            echo "VERSION=latest" >> $GITHUB_ENV
+          else
+            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
+          fi
+      - name: cross-compilation-tools
+        run: |
+          set -ex
+          sudo apt-get update
+          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
+      - name: publish
+        run: make docker-buildx-community
```
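The new `github-ref-info` and `set-version` steps derive the image version from `github.ref` using Bash prefix removal (`${VAR#prefix}`). A minimal sketch of the same logic that can be run locally; the `GH_REF` values below are illustrative, not taken from the workflow:

```sh
#!/usr/bin/env bash
# Sketch of the tag/branch-to-version derivation used in the workflow above.
derive_version() {
  GH_REF="$1" REF_TYPE="$2"
  if [[ "$REF_TYPE" == "tag" ]]; then
    # "${GH_REF#refs/tags/}" strips the shortest matching prefix.
    VERSION="${GH_REF#refs/tags/}"
  else
    BRANCH="${GH_REF#refs/heads/}"
    if [[ "$BRANCH" == "main" ]]; then VERSION="latest"; else VERSION="$BRANCH"; fi
  fi
  echo "$VERSION"
}

derive_version "refs/tags/v0.77.0" "tag"      # -> v0.77.0
derive_version "refs/heads/main" "branch"     # -> latest
derive_version "refs/heads/feat-x" "branch"   # -> feat-x
```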
.github/workflows/commitci.yaml (12 changes)

```diff
@@ -25,15 +25,3 @@ jobs:
         else
           echo "No references to 'ee' packages found in 'pkg' directory"
         fi
-  lint:
-    if: |
-      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
-      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
-    runs-on: ubuntu-latest
-    steps:
-      - name: checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: lint
-        uses: wagoid/commitlint-github-action@v5
```
.github/workflows/goci.yaml (27 changes)

```diff
@@ -34,3 +34,30 @@ jobs:
     secrets: inherit
     with:
       PRIMUS_REF: main
+  build:
+    if: |
+      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
+      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
+    runs-on: ubuntu-latest
+    steps:
+      - name: self-checkout
+        uses: actions/checkout@v4
+      - name: go-install
+        uses: actions/setup-go@v5
+        with:
+          go-version: "1.22"
+      - name: qemu-install
+        uses: docker/setup-qemu-action@v3
+      - name: aarch64-install
+        run: |
+          set -ex
+          sudo apt-get update
+          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
+      - name: docker-community
+        shell: bash
+        run: |
+          make docker-build-community
+      - name: docker-enterprise
+        shell: bash
+        run: |
+          make docker-build-enterprise
```
.github/workflows/gor-signoz-community.yaml (2 changes)

```diff
@@ -22,7 +22,7 @@ jobs:
         run: |
           echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
       - name: build-frontend
-        run: make build-frontend-static
+        run: make js-build
       - name: upload-frontend-artifact
         uses: actions/upload-artifact@v4
         with:
```
.github/workflows/gor-signoz.yaml (2 changes)

```diff
@@ -36,7 +36,7 @@ jobs:
           echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env
           echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env
       - name: build-frontend
-        run: make build-frontend-static
+        run: make js-build
       - name: upload-frontend-artifact
         uses: actions/upload-artifact@v4
         with:
```
.github/workflows/push.yaml (134 changes, file deleted)

```diff
@@ -1,134 +0,0 @@
-name: push
-
-on:
-  push:
-    branches:
-      - main
-    tags:
-      - v*
-
-jobs:
-  image-build-and-push-signoz:
-    runs-on: ubuntu-latest
-    steps:
-      - name: checkout
-        uses: actions/checkout@v4
-      - name: setup
-        uses: actions/setup-go@v4
-        with:
-          go-version: "1.22"
-      - name: setup-qemu
-        uses: docker/setup-qemu-action@v3
-      - name: setup-buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          version: latest
-      - name: docker-login
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: create-env-file
-        run: |
-          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
-          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
-          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
-          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
-          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
-          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
-          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
-          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
-          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
-          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
-          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
-      - uses: benjlevesque/short-sha@v2.2
-        id: short-sha
-      - name: branch-name
-        id: branch-name
-        uses: tj-actions/branch-names@v7.0.7
-      - name: docker-tag
-        run: |
-          if [ '${{ steps.branch-name.outputs.is_tag }}' == 'true' ]; then
-            echo "DOCKER_TAG=${{ steps.branch-name.outputs.tag }}" >> $GITHUB_ENV
-          elif [ '${{ steps.branch-name.outputs.current_branch }}' == 'main' ]; then
-            echo "DOCKER_TAG=latest" >> $GITHUB_ENV
-          else
-            echo "DOCKER_TAG=${{ steps.branch-name.outputs.current_branch }}" >> $GITHUB_ENV
-          fi
-      - name: cross-compilation-tools
-        run: |
-          set -ex
-          sudo apt-get update
-          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
-      - name: publish-signoz
-        run: make build-push-signoz
-      - name: qs-docker-tag
-        run: |
-          if [ '${{ steps.branch-name.outputs.is_tag }}' == 'true' ]; then
-            tag="${{ steps.branch-name.outputs.tag }}"
-            tag="${tag:1}"
-            echo "DOCKER_TAG=${tag}" >> $GITHUB_ENV
-          elif [ '${{ steps.branch-name.outputs.current_branch }}' == 'main' ]; then
-            echo "DOCKER_TAG=latest" >> $GITHUB_ENV
-          else
-            echo "DOCKER_TAG=${{ steps.branch-name.outputs.current_branch }}" >> $GITHUB_ENV
-          fi
-      - name: publish-query-service
-        run: |
-          SIGNOZ_DOCKER_IMAGE=query-service make build-push-signoz
-
-  image-build-and-push-signoz-community:
-    runs-on: ubuntu-latest
-    steps:
-      - name: checkout
-        uses: actions/checkout@v4
-      - name: setup-go
-        uses: actions/setup-go@v4
-        with:
-          go-version: "1.22"
-      - name: setup-qemu
-        uses: docker/setup-qemu-action@v3
-      - name: setup-buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          version: latest
-      - name: docker-login
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - uses: benjlevesque/short-sha@v2.2
-        id: short-sha
-      - name: branch-name
-        id: branch-name
-        uses: tj-actions/branch-names@v7.0.7
-      - name: docker-tag
-        run: |
-          if [ '${{ steps.branch-name.outputs.is_tag }}' == 'true' ]; then
-            echo "DOCKER_TAG=${{ steps.branch-name.outputs.tag }}" >> $GITHUB_ENV
-          elif [ '${{ steps.branch-name.outputs.current_branch }}' == 'main' ]; then
-            echo "DOCKER_TAG=latest" >> $GITHUB_ENV
-          else
-            echo "DOCKER_TAG=${{ steps.branch-name.outputs.current_branch }}" >> $GITHUB_ENV
-          fi
-      - name: cross-compilation-tools
-        run: |
-          set -ex
-          sudo apt-get update
-          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
-      - name: publish-signoz-community
-        run: make build-push-signoz-community
-      - name: qs-docker-tag
-        run: |
-          if [ '${{ steps.branch-name.outputs.is_tag }}' == 'true' ]; then
-            tag="${{ steps.branch-name.outputs.tag }}"
-            tag="${tag:1}"
-            echo "DOCKER_TAG=${tag}-oss" >> $GITHUB_ENV
-          elif [ '${{ steps.branch-name.outputs.current_branch }}' == 'main' ]; then
-            echo "DOCKER_TAG=latest-oss" >> $GITHUB_ENV
-          else
-            echo "DOCKER_TAG=${{ steps.branch-name.outputs.current_branch }}-oss" >> $GITHUB_ENV
-          fi
-      - name: publish-query-service-oss
-        run: |
-          SIGNOZ_COMMUNITY_DOCKER_IMAGE=query-service make build-push-signoz-community
```
.github/workflows/staging-deployment.yaml (7 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
           read -r -d '' COMMAND <<EOF || true
           echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
           echo "GITHUB_SHA: ${GITHUB_SHA}"
-          export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
+          export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
           export OTELCOL_TAG="main"
           export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
           export KAFKA_SPAN_EVAL="true"
@@ -49,7 +49,8 @@ jobs:
           git fetch origin
           git checkout ${GITHUB_BRANCH}
           git pull
-          make build-signoz-amd64
-          make run-testing
+          make docker-build-enterprise-amd64
+          export VERSION="${GITHUB_SHA:0:7}-amd64"
+          docker-compose -f deploy/docker/docker-compose.testing.yaml up --build -d
           EOF
           gcloud beta compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
```
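Both deployment workflows capture the multi-line remote script with `read -r -d '' COMMAND <<EOF || true`. The `|| true` matters: `read -d ''` reads until a NUL byte, and since a heredoc never contains one, `read` exits non-zero at end-of-input even though the variable was filled. A small standalone sketch of the idiom (the commands inside the heredoc are illustrative):

```sh
#!/usr/bin/env bash
set -euo pipefail

# Without "|| true", the non-zero exit from read would abort the
# script under "set -e" even on success.
read -r -d '' COMMAND <<EOF || true
echo "hello from the remote host"
export VERSION="abc1234"
EOF

# COMMAND now holds the whole script, newlines included.
printf '%s\n' "$COMMAND"
```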
.github/workflows/testing-deployment.yaml (7 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
           read -r -d '' COMMAND <<EOF || true
           echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
           echo "GITHUB_SHA: ${GITHUB_SHA}"
-          export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
+          export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
           export DEV_BUILD="1"
           export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
           docker system prune --force
@@ -49,7 +49,8 @@ jobs:
           # This is added to include the scenerio when new commit in PR is force-pushed
           git branch -D ${GITHUB_BRANCH}
           git checkout --track origin/${GITHUB_BRANCH}
-          make build-signoz-amd64
-          make run-testing
+          make docker-build-enterprise-amd64
+          export VERSION="${GITHUB_SHA:0:7}-amd64"
+          docker-compose -f deploy/docker/docker-compose.testing.yaml up --build -d
           EOF
           gcloud beta compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
```
.gitignore (1 change)

```diff
@@ -54,6 +54,7 @@ ee/query-service/tests/test-deploy/data/
 bin/
+.local/
 */query-service/queries.active
 ee/query-service/db

 # e2e
```
README.md (1 change)

```diff
@@ -77,3 +77,4 @@ Need assistance? Join our Slack community:
 ## Where do I go from here?

 - Set up your [development environment](docs/contributing/development.md)
+- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md)
```
Makefile (294 changes)

```diff
@@ -1,49 +1,44 @@
 #
 # Reference Guide - https://www.gnu.org/software/make/manual/make.html
 #
+##############################################################
+# variables
+##############################################################
+SHELL := /bin/bash
+SRC ?= $(shell pwd)
+NAME ?= signoz
+OS ?= $(shell uname -s | tr '[A-Z]' '[a-z]')
+ARCH ?= $(shell uname -m | sed 's/x86_64/amd64/g' | sed 's/aarch64/arm64/g')
+COMMIT_SHORT_SHA ?= $(shell git rev-parse --short HEAD)
+BRANCH_NAME ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
+VERSION ?= $(BRANCH_NAME)-$(COMMIT_SHORT_SHA)
+TIMESTAMP ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
+ARCHS = amd64 arm64
+TARGET_DIR ?= $(shell pwd)/target

-# Build variables
-BUILD_VERSION ?= $(shell git describe --always --tags)
-BUILD_HASH ?= $(shell git rev-parse --short HEAD)
-BUILD_TIME ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
-BUILD_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
-LICENSE_SIGNOZ_IO ?= https://license.signoz.io/api/v1
-DEV_LICENSE_SIGNOZ_IO ?= https://staging-license.signoz.io/api/v1
-ZEUS_URL ?= https://api.signoz.cloud
-DEV_ZEUS_URL ?= https://api.staging.signoz.cloud
-DEV_BUILD ?= "" # set to any non-empty value to enable dev build
+ZEUS_URL ?= https://api.signoz.cloud
+GO_BUILD_LDFLAG_ZEUS_URL = -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=$(ZEUS_URL)
+LICENSE_URL ?= https://license.signoz.io/api/v1
+GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=$(LICENSE_URL)

-# Internal variables or constants.
-FRONTEND_DIRECTORY ?= frontend
-QUERY_SERVICE_DIRECTORY ?= pkg/query-service
-EE_QUERY_SERVICE_DIRECTORY ?= ee/query-service
-STANDALONE_DIRECTORY ?= deploy/docker
-SWARM_DIRECTORY ?= deploy/docker-swarm
-CH_HISTOGRAM_QUANTILE_DIRECTORY ?= scripts/clickhouse/histogramquantile
-GORELEASER_BIN ?= goreleaser
+GO_BUILD_VERSION_LDFLAGS = -X github.com/SigNoz/signoz/pkg/version.version=$(VERSION) -X github.com/SigNoz/signoz/pkg/version.hash=$(COMMIT_SHORT_SHA) -X github.com/SigNoz/signoz/pkg/version.time=$(TIMESTAMP) -X github.com/SigNoz/signoz/pkg/version.branch=$(BRANCH_NAME)
+GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
+GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service
+GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community
+GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS))
+GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service
+GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO)

-GOOS ?= $(shell go env GOOS)
-GOARCH ?= $(shell go env GOARCH)
-GOPATH ?= $(shell go env GOPATH)
-
-REPONAME ?= signoz
-DOCKER_TAG ?= $(BUILD_VERSION)
-SIGNOZ_DOCKER_IMAGE ?= signoz
-SIGNOZ_COMMUNITY_DOCKER_IMAGE ?= signoz-community
-
-# Build-time Go variables
-PACKAGE?=github.com/SigNoz/signoz
-buildVersion=${PACKAGE}/pkg/query-service/version.buildVersion
-buildHash=${PACKAGE}/pkg/query-service/version.buildHash
-buildTime=${PACKAGE}/pkg/query-service/version.buildTime
-gitBranch=${PACKAGE}/pkg/query-service/version.gitBranch
-licenseSignozIo=${PACKAGE}/ee/query-service/constants.LicenseSignozIo
-zeusURL=${PACKAGE}/ee/query-service/constants.ZeusURL
-
-LD_FLAGS=-X ${buildHash}=${BUILD_HASH} -X ${buildTime}=${BUILD_TIME} -X ${buildVersion}=${BUILD_VERSION} -X ${gitBranch}=${BUILD_BRANCH}
-PROD_LD_FLAGS=-X ${zeusURL}=${ZEUS_URL} -X ${licenseSignozIo}=${LICENSE_SIGNOZ_IO}
-DEV_LD_FLAGS=-X ${zeusURL}=${DEV_ZEUS_URL} -X ${licenseSignozIo}=${DEV_LICENSE_SIGNOZ_IO}
+DOCKER_BUILD_ARCHS_COMMUNITY = $(addprefix docker-build-community-,$(ARCHS))
+DOCKERFILE_COMMUNITY = $(SRC)/pkg/query-service/Dockerfile
+DOCKER_REGISTRY_COMMUNITY ?= docker.io/signoz/signoz-community
+DOCKER_BUILD_ARCHS_ENTERPRISE = $(addprefix docker-build-enterprise-,$(ARCHS))
+DOCKERFILE_ENTERPRISE = $(SRC)/ee/query-service/Dockerfile
+DOCKER_REGISTRY_ENTERPRISE ?= docker.io/signoz/signoz
+JS_BUILD_CONTEXT = $(SRC)/frontend

+##############################################################
+# directories
+##############################################################
+$(TARGET_DIR):
+	mkdir -p $(TARGET_DIR)
+
+##############################################################
+# common commands
+##############################################################
@@ -61,10 +56,10 @@ devenv-clickhouse: ## Run clickhouse in devenv
 	docker compose -f compose.yaml up -d

 ##############################################################
-# run commands
+# go commands
 ##############################################################
-.PHONY: run-go
-run-go: ## Runs the go backend server
+.PHONY: go-run-enterprise
+go-run-enterprise: ## Runs the enterprise go backend server
 	@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
 	SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
 	SIGNOZ_WEB_ENABLED=false \
@@ -73,147 +68,102 @@ run-go: ## Runs the go backend server
 	SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
 	SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
 	go run -race \
-	./ee/query-service/main.go \
-	--config ./pkg/query-service/config/prometheus.yml \
+	$(GO_BUILD_CONTEXT_ENTERPRISE)/main.go \
+	--config ./conf/prometheus.yml \
 	--cluster cluster \
 	--use-logs-new-schema true \
 	--use-trace-new-schema true

-all: build-push-frontend build-push-signoz
+.PHONY: go-test
+go-test: ## Runs go unit tests
+	@go test -race ./...

-# Steps to build static files of frontend
-build-frontend-static:
-	@echo "------------------"
-	@echo "--> Building frontend static files"
-	@echo "------------------"
-	@cd $(FRONTEND_DIRECTORY) && \
-	rm -rf build && \
-	CI=1 yarn install && \
-	yarn build && \
-	ls -l build
+.PHONY: go-run-community
+go-run-community: ## Runs the community go backend server
+	@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
+	SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
+	SIGNOZ_WEB_ENABLED=false \
+	SIGNOZ_JWT_SECRET=secret \
+	SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
+	SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
+	SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
+	go run -race \
+	$(GO_BUILD_CONTEXT_COMMUNITY)/main.go \
+	--config ./conf/prometheus.yml \
+	--cluster cluster \
+	--use-logs-new-schema true \
+	--use-trace-new-schema true

-# Steps to build static binary of signoz
-.PHONY: build-signoz-static
-build-signoz-static:
-	@echo "------------------"
-	@echo "--> Building signoz static binary"
-	@echo "------------------"
-	@if [ $(DEV_BUILD) != "" ]; then \
-		cd $(EE_QUERY_SERVICE_DIRECTORY) && \
-		CGO_ENABLED=1 go build -tags timetzdata -a -o ./bin/signoz-${GOOS}-${GOARCH} \
-		-ldflags "-linkmode external -extldflags '-static' -s -w ${LD_FLAGS} ${DEV_LD_FLAGS}"; \
+.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
+go-build-community: ## Builds the go backend server for community
+go-build-community: $(GO_BUILD_ARCHS_COMMUNITY)
+$(GO_BUILD_ARCHS_COMMUNITY): go-build-community-%: $(TARGET_DIR)
+	@mkdir -p $(TARGET_DIR)/$(OS)-$*
+	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)-community"
+	@if [ $* = "arm64" ]; then \
+		CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
 	else \
-		cd $(EE_QUERY_SERVICE_DIRECTORY) && \
-		CGO_ENABLED=1 go build -tags timetzdata -a -o ./bin/signoz-${GOOS}-${GOARCH} \
-		-ldflags "-linkmode external -extldflags '-static' -s -w ${LD_FLAGS} ${PROD_LD_FLAGS}"; \
+		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
 	fi

-.PHONY: build-signoz-static-amd64
-build-signoz-static-amd64:
-	make GOARCH=amd64 build-signoz-static
-
-.PHONY: build-signoz-static-arm64
-build-signoz-static-arm64:
-	make CC=aarch64-linux-gnu-gcc GOARCH=arm64 build-signoz-static
-
-# Steps to build static binary of signoz for all platforms
-.PHONY: build-signoz-static-all
-build-signoz-static-all: build-signoz-static-amd64 build-signoz-static-arm64 build-frontend-static
-
-# Steps to build and push docker image of signoz
-.PHONY: build-signoz-amd64 build-push-signoz
-# Step to build docker image of signoz in amd64 (used in build pipeline)
-build-signoz-amd64: build-signoz-static-amd64 build-frontend-static
-	@echo "------------------"
-	@echo "--> Building signoz docker image for amd64"
-	@echo "------------------"
-	@docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
-	--tag $(REPONAME)/$(SIGNOZ_DOCKER_IMAGE):$(DOCKER_TAG) \
-	--build-arg TARGETPLATFORM="linux/amd64" .
-
-# Step to build and push docker image of query in amd64 and arm64 (used in push pipeline)
-build-push-signoz: build-signoz-static-all
-	@echo "------------------"
-	@echo "--> Building and pushing signoz docker image"
-	@echo "------------------"
-	@docker buildx build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plain \
-	--push --platform linux/arm64,linux/amd64 \
-	--tag $(REPONAME)/$(SIGNOZ_DOCKER_IMAGE):$(DOCKER_TAG) .
-
-# Step to build docker image of signoz community in amd64 (used in build pipeline)
-build-signoz-community-amd64:
-	@echo "------------------"
-	@echo "--> Building signoz docker image for amd64"
-	@echo "------------------"
-	make EE_QUERY_SERVICE_DIRECTORY=${QUERY_SERVICE_DIRECTORY} SIGNOZ_DOCKER_IMAGE=${SIGNOZ_COMMUNITY_DOCKER_IMAGE} build-signoz-amd64
-
-# Step to build and push docker image of signoz community in amd64 and arm64 (used in push pipeline)
-build-push-signoz-community:
-	@echo "------------------"
-	@echo "--> Building and pushing signoz community docker image"
-	@echo "------------------"
-	make EE_QUERY_SERVICE_DIRECTORY=${QUERY_SERVICE_DIRECTORY} SIGNOZ_DOCKER_IMAGE=${SIGNOZ_COMMUNITY_DOCKER_IMAGE} build-push-signoz
-
-pull-signoz:
-	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml pull
-
-run-signoz:
-	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml up --build -d
-
-run-testing:
-	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.testing.yaml up --build -d
-
-down-signoz:
-	@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml down -v
-
-check-no-ee-references:
-	@echo "Checking for 'ee' package references in 'pkg' directory..."
-	@if grep -R --include="*.go" '.*/ee/.*' pkg/; then \
-		echo "Error: Found references to 'ee' packages in 'pkg' directory"; \
-		exit 1; \
+.PHONY: go-build-enterprise $(GO_BUILD_ARCHS_ENTERPRISE)
+go-build-enterprise: ## Builds the go backend server for enterprise
+go-build-enterprise: $(GO_BUILD_ARCHS_ENTERPRISE)
+$(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
+	@mkdir -p $(TARGET_DIR)/$(OS)-$*
+	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
+	@if [ $* = "arm64" ]; then \
+		CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
 	else \
-		echo "No references to 'ee' packages found in 'pkg' directory"; \
+		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
 	fi

-test:
-	go test ./pkg/...
+##############################################################
+# js commands
+##############################################################
+.PHONY: js-build
+js-build: ## Builds the js frontend
+	@echo ">> building js frontend"
+	@cd $(JS_BUILD_CONTEXT) && CI=1 yarn install && yarn build

-########################################################
-# Goreleaser
-########################################################
-.PHONY: gor-snapshot gor-snapshot-histogram-quantile gor-snapshot-signoz gor-snapshot-signoz-community gor-split gor-split-histogram-quantile gor-split-signoz gor-split-signoz-community gor-merge
+##############################################################
+# docker commands
+##############################################################
+.PHONY: docker-build-community $(DOCKER_BUILD_ARCHS_COMMUNITY)
+docker-build-community: ## Builds the docker image for community
+docker-build-community: $(DOCKER_BUILD_ARCHS_COMMUNITY)
+$(DOCKER_BUILD_ARCHS_COMMUNITY): docker-build-community-%: go-build-community-% js-build
+	@echo ">> building docker image for $(NAME)-community"
+	@docker build -t "$(DOCKER_REGISTRY_COMMUNITY):$(VERSION)-$*" \
+	--build-arg TARGETARCH="$*" \
+	-f $(DOCKERFILE_COMMUNITY) $(SRC)

-gor-snapshot:
-	@if [[ ${GORELEASER_WORKDIR} ]]; then \
-		${GORELEASER_BIN} release --config ${GORELEASER_WORKDIR}/.goreleaser.yaml --clean --snapshot; \
-	else \
-		${GORELEASER_BIN} release --clean --snapshot; \
-	fi
+.PHONY: docker-buildx-community
+docker-buildx-community: ## Builds the docker image for community using buildx
+docker-buildx-community: go-build-community js-build
+	@echo ">> building docker image for $(NAME)-community"
+	@docker buildx build --file $(DOCKERFILE_COMMUNITY) \
+	--progress plain \
+	--platform linux/arm64,linux/amd64 \
+	--push \
+	--tag $(DOCKER_REGISTRY_COMMUNITY):$(VERSION) $(SRC)

-gor-snapshot-histogram-quantile:
-	make GORELEASER_WORKDIR=$(CH_HISTOGRAM_QUANTILE_DIRECTORY) goreleaser-snapshot
+.PHONY: docker-build-enterprise $(DOCKER_BUILD_ARCHS_ENTERPRISE)
+docker-build-enterprise: ## Builds the docker image for enterprise
+docker-build-enterprise: $(DOCKER_BUILD_ARCHS_ENTERPRISE)
+$(DOCKER_BUILD_ARCHS_ENTERPRISE): docker-build-enterprise-%: go-build-enterprise-% js-build
+	@echo ">> building docker image for $(NAME)"
+	@docker build -t "$(DOCKER_REGISTRY_ENTERPRISE):$(VERSION)-$*" \
+	--build-arg TARGETARCH="$*" \
+	-f $(DOCKERFILE_ENTERPRISE) $(SRC)

-gor-snapshot-signoz: build-frontend-static
-	make GORELEASER_WORKDIR=$(EE_QUERY_SERVICE_DIRECTORY) goreleaser-snapshot
-
-gor-snapshot-signoz-community: build-frontend-static
-	make GORELEASER_WORKDIR=$(QUERY_SERVICE_DIRECTORY) goreleaser-snapshot
-
-gor-split:
-	@if [[ ${GORELEASER_WORKDIR} ]]; then \
-		${GORELEASER_BIN} release --config ${GORELEASER_WORKDIR}/.goreleaser.yaml --clean --split; \
-	else \
-		${GORELEASER_BIN} release --clean --split; \
-	fi
-
-gor-split-histogram-quantile:
-	make GORELEASER_WORKDIR=$(CH_HISTOGRAM_QUANTILE_DIRECTORY) goreleaser-split
-
-gor-split-signoz: build-frontend-static
-	make GORELEASER_WORKDIR=$(EE_QUERY_SERVICE_DIRECTORY) goreleaser-split
-
-gor-split-signoz-community: build-frontend-static
-	make GORELEASER_WORKDIR=$(QUERY_SERVICE_DIRECTORY) goreleaser-split
-
-gor-merge:
-	${GORELEASER_BIN} continue --merge
+.PHONY: docker-buildx-enterprise
+docker-buildx-enterprise: ## Builds the docker image for enterprise using buildx
+docker-buildx-enterprise: go-build-enterprise js-build
+	@echo ">> building docker image for $(NAME)"
+	@docker buildx build --file $(DOCKERFILE_ENTERPRISE) \
+	--progress plain \
+	--platform linux/arm64,linux/amd64 \
+	--push \
+	--tag $(DOCKER_REGISTRY_ENTERPRISE):$(VERSION) $(SRC)
```
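The rewritten Makefile generates one target per architecture by combining `addprefix` with a static pattern rule (`go-build-community-%`), so each arch can be built or cross-built independently. A hedged usage sketch; the target names follow from `ARCHS = amd64 arm64` and the output paths from the variable defaults above:

```sh
# Build only the arm64 community binary via the static pattern rule:
make go-build-community-arm64        # output: target/linux-arm64/signoz-community

# Build the amd64 docker image (depends on go-build-community-amd64 and js-build):
make docker-build-community-amd64

# Override the version tag the same way CI does with its set-version step:
make VERSION=v0.80.0 docker-build-enterprise-amd64
```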
```diff
@@ -3,6 +3,12 @@
 # Do not modify this file
 #

+##################### Version #####################
+version:
+  banner:
+    # Whether to enable the version banner on startup.
+    enabled: true
+
 ##################### Instrumentation #####################
 instrumentation:
   logs:
@@ -66,7 +72,6 @@ sqlstore:
     # The path to the SQLite database file.
     path: /var/lib/signoz/signoz.db

-
 ##################### APIServer #####################
 apiserver:
   timeout:
@@ -85,20 +90,29 @@ apiserver:
       - /api/v1/version
       - /

-
 ##################### TelemetryStore #####################
 telemetrystore:
-  # Specifies the telemetrystore provider to use.
-  provider: clickhouse
   # Maximum number of idle connections in the connection pool.
   max_idle_conns: 50
   # Maximum number of open connections to the database.
   max_open_conns: 100
   # Maximum time to wait for a connection to be established.
   dial_timeout: 5s
+  # Specifies the telemetrystore provider to use.
+  provider: clickhouse
   clickhouse:
-    # The DSN to use for ClickHouse.
-    dsn: http://localhost:9000
+    # The DSN to use for clickhouse.
+    dsn: tcp://localhost:9000

+##################### Prometheus #####################
+prometheus:
+  active_query_tracker:
+    # Whether to enable the active query tracker.
+    enabled: true
+    # The path to use for the active query tracker.
+    path: ""
+    # The maximum number of concurrent queries.
+    max_concurrent: 20

 ##################### Alertmanager #####################
 alertmanager:
@@ -111,7 +125,7 @@ alertmanager:
   # The poll interval for periodically syncing the alertmanager with the config in the store.
   poll_interval: 1m
   # The URL under which Alertmanager is externally reachable (for example, if Alertmanager is served via a reverse proxy). Used for generating relative and absolute links back to Alertmanager itself.
-  external_url: http://localhost:9093
+  external_url: http://localhost:8080
   # The global configuration for the alertmanager. All the exahustive fields can be found in the upstream: https://github.com/prometheus/alertmanager/blob/efa05feffd644ba4accb526e98a8c6545d26a783/config/config.go#L833
   global:
     # ResolveTimeout is the time after which an alert is declared resolved if it has not been updated.
```
```diff
@@ -177,7 +177,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${DOCKER_TAG:-v0.76.2}
+    image: signoz/signoz:${VERSION:-v0.76.2}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
```

```diff
@@ -110,7 +110,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${DOCKER_TAG:-v0.76.2}
+    image: signoz/signoz:${VERSION:-v0.76.2}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
```

```diff
@@ -110,7 +110,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${DOCKER_TAG:-v0.76.2}
+    image: signoz/signoz:${VERSION:-v0.76.2}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
```
docs/contributing/development.md

````diff
@@ -61,7 +61,7 @@ This command:

 1. Run the backend server:
    ```bash
-   make run-go
+   make go-run-community
    ```

 2. Verify it's working:
````
BIN docs/img/otel-demo-docker-containers.png (new file, 143 KiB, binary not shown)
BIN docs/img/otel-demo-helm.png (new file, 157 KiB, binary not shown)
BIN docs/img/otel-demo-pods.png (new file, 76 KiB, binary not shown)
BIN docs/img/otel-demo-services.png (new file, 306 KiB, binary not shown)
docs/otel-demo-docs.md (new file, 246 lines added)
# Configuring OpenTelemetry Demo App with SigNoz

[The OpenTelemetry Astronomy Shop](https://github.com/open-telemetry/opentelemetry-demo) is an e-commerce web application composed of **15 core microservices** in a **distributed system** that communicate over gRPC. Designed as a **polyglot** environment, it leverages a diverse set of programming languages, including Go, Python, .NET, Java, and others, showcasing cross-language instrumentation with OpenTelemetry. The intention is to provide a quickstart application that sends data so you can experience SigNoz firsthand.

This guide provides a step-by-step walkthrough for setting up the **OpenTelemetry Demo App** with **SigNoz** as the observability backend. It outlines the steps to export telemetry data to **SigNoz self-hosted with Docker**, **SigNoz self-hosted with Kubernetes**, and **SigNoz cloud**.
<br/>

__Table of Contents__
- [Send data to SigNoz Self-hosted with Docker](#send-data-to-signoz-self-hosted-with-docker)
  - [Prerequisites](#prerequisites)
  - [Clone the OpenTelemetry Demo App Repository](#clone-the-opentelemetry-demo-app-repository)
  - [Modify OpenTelemetry Collector Config](#modify-opentelemetry-collector-config)
  - [Start the OpenTelemetry Demo App](#start-the-opentelemetry-demo-app)
  - [Monitor with SigNoz (Docker)](#monitor-with-signoz-docker)
- [Send data to SigNoz Self-hosted with Kubernetes](#send-data-to-signoz-self-hosted-with-kubernetes)
  - [Prerequisites](#prerequisites-1)
  - [Install Helm Repo and Charts](#install-helm-repo-and-charts)
  - [Start the OpenTelemetry Demo App](#start-the-opentelemetry-demo-app-1)
  - [Monitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [What's next](#whats-next)


# Send data to SigNoz Self-hosted with Docker

In this guide you will install the OTel demo application using Docker and send telemetry data to SigNoz hosted with Docker, referred to as SigNoz [Docker] from now on.

## Prerequisites
- Docker and Docker Compose installed
- 6 GB of RAM for the application [as per OpenTelemetry documentation]
- Docker Desktop is nice to have, for easy monitoring

## Clone the OpenTelemetry Demo App Repository
Clone the OTel demo app to any folder of your choice.
```sh
# Clone the OpenTelemetry Demo repository
git clone https://github.com/open-telemetry/opentelemetry-demo.git
cd opentelemetry-demo
```

## Modify OpenTelemetry Collector Config

By default, the collector in the demo application merges the configuration from two files:

1. otelcol-config.yml [we don't touch this]
2. otelcol-config-extras.yml [we modify this]

To add SigNoz [Docker] as the backend, open the file `src/otel-collector/otelcol-config-extras.yml` and add the following,
```yaml
exporters:
  otlp:
    endpoint: "http://host.docker.internal:4317"
    tls:
      insecure: true
  debug:
    verbosity: detailed

service:
  pipelines:
    metrics:
      exporters: [otlp]
    traces:
      exporters: [spanmetrics, otlp]
    logs:
      exporters: [otlp]
```

The SigNoz OTel collector [SigNoz's otel-collector service] listens on port 4317 on localhost. When the OTel demo app is running within a Docker container and needs to transmit telemetry data to SigNoz, it cannot directly reference 'localhost', as that would refer to the container's own internal network. Instead, Docker provides a special DNS name, `host.docker.internal`, which resolves to the host machine's IP address from within containers. By configuring the OpenTelemetry Demo application to send data to `host.docker.internal:4317`, we establish a network path that allows the containerized application to transmit telemetry data across the container boundary to the SigNoz OTel collector running on the host machine's port 4317.
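If data does not show up, it can help to confirm that `host.docker.internal:4317` is actually reachable from inside a container. A quick hedged check; the `curlimages/curl` image is just one convenient choice, and on Linux Docker Engine `host.docker.internal` may additionally require the `--add-host` mapping shown here:

```sh
# Open a raw TCP connection to the collector port from a throwaway container.
# A "Connected to host.docker.internal" line in the verbose output means the
# network path works; curl then times out harmlessly after 3 seconds.
docker run --rm --add-host=host.docker.internal:host-gateway \
  curlimages/curl:latest -v telnet://host.docker.internal:4317 --max-time 3
```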

> Note: When merging extra configuration values with the existing collector config (`src/otel-collector/otelcol-config.yml`), objects are merged and arrays are replaced, resulting in previous pipeline configurations getting overridden. The spanmetrics exporter must be included in the array of exporters for the traces pipeline if it is overridden. Not including this exporter will result in an error.

<br>
<u>To send data to SigNoz Cloud</u>

If you want to send data to cloud instead, open the file `src/otel-collector/otelcol-config-extras.yml` and add the following,
```yaml
exporters:
  otlp:
    endpoint: "https://ingest.{your-region}.signoz.cloud:443"
    tls:
      insecure: false
    headers:
      signoz-access-token: <SIGNOZ-KEY>
  debug:
    verbosity: detailed

service:
  pipelines:
    metrics:
      exporters: [otlp]
    traces:
      exporters: [spanmetrics, otlp]
    logs:
      exporters: [otlp]
```
Remember to replace the region and ingestion key with the proper values obtained from your account.

## Start the OpenTelemetry Demo App

Both SigNoz and the OTel demo app [the frontend-proxy service, to be accurate] default to port 8080. To prevent a port allocation conflict, set `ENVOY_PORT` to 8081 when bringing the demo up:

```sh
ENVOY_PORT=8081 docker compose up -d
```
This spins up multiple microservices with OpenTelemetry instrumentation enabled. You can verify this by,
```sh
docker compose ps -a
```
The result should look similar to this,


Navigate to `http://localhost:8081/` where you can access the OTel demo app UI. Generate some traffic to send to SigNoz [Docker].

## Monitor with SigNoz [Docker]
SigNoz exposes its UI at `http://localhost:8080/`. You should be able to see multiple services listed, as shown in the snapshot below.



This verifies that your OTel demo app is successfully sending telemetry data to SigNoz [Docker] as expected.

# Send data to SigNoz Self-hosted with Kubernetes

In this guide you will install the OTel demo application using Helm and send telemetry data to SigNoz hosted with Kubernetes, referred to as SigNoz [Kubernetes] from now on.

## Prerequisites

- Helm installed
- 6 GB of free RAM for the application [as per OpenTelemetry documentation]
- A Kubernetes cluster (EKS, GKE, Minikube)
- kubectl [CLI for Kubernetes]

> Note: We will be installing the OTel demo app using Helm charts, since that is recommended by OpenTelemetry. If you wish to install using kubectl, follow [this](https://opentelemetry.io/docs/demo/kubernetes-deployment/#install-using-kubectl).

## Install Helm Repo and Charts
You'll need to **add the Helm repository** before installing the demo app.

```sh
helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts
```
The OpenTelemetry Collector's configuration is exposed in the Helm chart. All additions made will be merged into the default configuration. We use this capability to add SigNoz as an exporter and shape the pipelines as desired.

For this we have to create a `values.yaml` which will override the existing configuration that comes with the Helm chart.

```yaml
default:
  env:
    - name: OTEL_SERVICE_NAME
      valueFrom:
        fieldRef:
          apiVersion: v1
          fieldPath: "metadata.labels['app.kubernetes.io/component']"
    - name: OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE
      value: cumulative
    - name: OTEL_RESOURCE_ATTRIBUTES
      value: 'service.name=$(OTEL_SERVICE_NAME),service.namespace=opentelemetry-demo'
    - name: OTEL_COLLECTOR_NAME
      value: signoz-otel-collector.<namespace>.svc.cluster.local
```
Replace namespace with your appropriate namespace. This file will replace the chart's existing settings with our new ones, ensuring telemetry data is sent to SigNoz [Kubernetes].
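Before installing, you can sanity-check that the `OTEL_COLLECTOR_NAME` value resolves inside the cluster. A hedged sketch; it assumes the SigNoz chart created a service named `signoz-otel-collector` in a namespace called `platform`, so adjust both names to match your install:

```sh
# Resolve the collector service DNS name from a throwaway pod.
kubectl run -it --rm dns-check --image=busybox:1.36 --restart=Never -- \
  nslookup signoz-otel-collector.platform.svc.cluster.local
```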

> Note: When merging YAML values with Helm, objects are merged and arrays are replaced. The spanmetrics exporter must be included in the array of exporters for the traces pipeline if it is overridden. Not including this exporter will result in an error.

<br>
<u>To send data to SigNoz cloud</u>

If you wish to send data to a cloud instance of SigNoz instead, create a `values.yaml` which will override the existing configuration that comes with the Helm chart.

```yaml
opentelemetry-collector:
  config:
    exporters:
      otlp:
        endpoint: "https://ingest.{your-region}.signoz.cloud:443"
        tls:
          insecure: false
        headers:
          signoz-access-token: <SIGNOZ-KEY>
      debug:
        verbosity: detailed
    service:
      pipelines:
        traces:
          exporters: [spanmetrics, otlp]
        metrics:
          exporters: [otlp]
        logs:
          exporters: [otlp]
```
Make sure to replace the region and key with the values obtained from your account.

Now **install the Helm chart** with a release name and namespace of your choice. Let's take *my-otel-demo* as the release name and *otel-demo* as the namespace for the context of the code snippet below,

```sh
# Create a new Kubernetes namespace called "otel-demo"
kubectl create namespace otel-demo
# Install the OpenTelemetry Demo Helm chart with the release name "my-otel-demo"
helm install my-otel-demo open-telemetry/opentelemetry-demo --namespace otel-demo -f values.yaml
```
You should see a similar output on your terminal,


To verify if all the pods are running,
```sh
kubectl get pods -n otel-demo
```
The output should look similar to this,



## Start the OpenTelemetry Demo App

To expose the OTel demo app UI [the frontend-proxy service] on a port that does not clash with SigNoz, forward local port 8081 to the service's port 8080 (replace my-otel-demo with your Helm chart release name):

```sh
kubectl port-forward svc/my-otel-demo-frontend-proxy 8081:8080
```
Navigate to `http://localhost:8081/` where you can access the OTel demo app UI. Generate some traffic to send to SigNoz [Kubernetes].

## Monitor with SigNoz [Kubernetes]
SigNoz exposes its UI at `http://localhost:8080/`. You should be able to see multiple services listed, as shown in the snapshot below.



This verifies that your OTel demo app is successfully sending telemetry data to SigNoz [Kubernetes] as expected.


# What's next?

Don't forget to check out our OpenTelemetry [track](https://signoz.io/resource-center/opentelemetry/), guaranteed to take you from a newbie to sensei in no time!

Also, from one OTel fan to another: we at [SigNoz](https://signoz.io/) are building an open-source, OTel-native observability platform (one of its kind). So show us some love - star us on [GitHub](https://github.com/SigNoz/signoz), nitpick our [docs](https://signoz.io/docs/introduction/), or just tell your app we're the ones who'll catch its crashes mid-flight and finally shush all the 3am panic calls!
```diff
@@ -4,84 +4,80 @@ import (
 	"net/http"
 	"time"

+	eeTypes "github.com/SigNoz/signoz/ee/types"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/uptrace/bun"
 	"go.uber.org/zap"
 )

 type Pat struct {
-	db      *bun.DB
+	store   sqlstore.SQLStore
 	uuid    *authtypes.UUID
 	headers []string
 }

-func NewPat(db *bun.DB, headers []string) *Pat {
-	return &Pat{db: db, uuid: authtypes.NewUUID(), headers: headers}
+func NewPat(store sqlstore.SQLStore, headers []string) *Pat {
+	return &Pat{store: store, uuid: authtypes.NewUUID(), headers: headers}
 }

 func (p *Pat) Wrap(next http.Handler) http.Handler {
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		var values []string
-		var patToken string
-		var pat types.StorablePersonalAccessToken
-		var updateLastUsed bool
+		var pat eeTypes.StorablePersonalAccessToken

 		for _, header := range p.headers {
 			values = append(values, r.Header.Get(header))
-
-			if header == "SIGNOZ-API-KEY" {
-				patToken = values[0]
-				err := p.db.NewSelect().Model(&pat).Where("token = ?", patToken).Scan(r.Context())
-				if err != nil {
-					next.ServeHTTP(w, r)
-					return
-				}
-
-				if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
-					next.ServeHTTP(w, r)
-					return
-				}
-
-				// get user from db
-				user := types.User{}
-				err = p.db.NewSelect().Model(&user).Where("id = ?", pat.UserID).Scan(r.Context())
-				if err != nil {
-					next.ServeHTTP(w, r)
-					return
-				}
-
-				jwt := authtypes.Claims{
-					UserID:  user.ID,
-					GroupID: user.GroupID,
-					Email:   user.Email,
-					OrgID:   user.OrgID,
-				}
-
-				ctx := authtypes.NewContextWithClaims(r.Context(), jwt)
-				r = r.WithContext(ctx)
-
-				// Mark to update last used since SIGNOZ-API-KEY is present and successful
-				updateLastUsed = true
-			}
 		}

 		ctx, err := p.uuid.ContextFromRequest(r.Context(), values...)
 		if err != nil {
 			next.ServeHTTP(w, r)
 			return
 		}
+		patToken, ok := authtypes.UUIDFromContext(ctx)
+		if !ok {
+			next.ServeHTTP(w, r)
+			return
+		}
+
+		err = p.store.BunDB().NewSelect().Model(&pat).Where("token = ?", patToken).Scan(r.Context())
+		if err != nil {
+			next.ServeHTTP(w, r)
+			return
+		}
+
+		if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
+			next.ServeHTTP(w, r)
+			return
+		}
+
+		// get user from db
+		user := types.User{}
+		err = p.store.BunDB().NewSelect().Model(&user).Where("id = ?", pat.UserID).Scan(r.Context())
+		if err != nil {
+			next.ServeHTTP(w, r)
+			return
+		}
+
+		jwt := authtypes.Claims{
+			UserID:  user.ID,
+			GroupID: user.GroupID,
+			Email:   user.Email,
+			OrgID:   user.OrgID,
+		}
+
+		ctx = authtypes.NewContextWithClaims(ctx, jwt)
+
+		r = r.WithContext(ctx)

 		next.ServeHTTP(w, r)

-		// update last used only if SIGNOZ-API-KEY was present and successful
-		if updateLastUsed {
-			pat.LastUsed = time.Now().Unix()
-			_, err = p.db.NewUpdate().Model(&pat).Column("last_used").Where("token = ?", patToken).Where("revoked = false").Exec(r.Context())
-			if err != nil {
-				zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
-			}
+		pat.LastUsed = time.Now().Unix()
+		_, err = p.store.BunDB().NewUpdate().Model(&pat).Column("last_used").Where("token = ?", patToken).Where("revoked = false").Exec(r.Context())
+		if err != nil {
+			zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
 		}
 	})
```
```diff
@@ -30,13 +30,11 @@ builds:
       - v8.0
     ldflags:
       - -s -w
-      - -X github.com/SigNoz/signoz/pkg/query-service/version.version={{ .Version }}
-      - -X main.commit={{ .Commit }} -X main.date={{ .CommitDate }}
-      - -X main.builtBy=goreleaser
-      - -X github.com/SigNoz/signoz/pkg/query-service/version.buildVersion={{ .Version }}
-      - -X github.com/SigNoz/signoz/pkg/query-service/version.buildHash={{ .ShortCommit }}
-      - -X github.com/SigNoz/signoz/pkg/query-service/version.buildTime={{ .Date }}
-      - -X github.com/SigNoz/signoz/pkg/query-service/version.gitBranch={{ .Branch }}
+      - -X github.com/SigNoz/signoz/pkg/version.version=v{{ .Version }}
+      - -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
+      - -X github.com/SigNoz/signoz/pkg/version.hash={{ .ShortCommit }}
+      - -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }}
+      - -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
       - -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
       - -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
       - >-
```
```diff
@@ -1,34 +1,21 @@
 # use a minimal alpine image
 FROM alpine:3.20.3

-# Add Maintainer Info
-LABEL maintainer="signoz"
-
-# define arguments that can be passed during build time
-ARG TARGETOS TARGETARCH
-
-# add ca-certificates in case you need them
-RUN apk update && apk add ca-certificates && rm -rf /var/cache/apk/*
-
-# set working directory
 WORKDIR /root

-# copy the signoz binary
-COPY ee/query-service/bin/signoz-${TARGETOS}-${TARGETARCH} /root/signoz
+ARG OS="linux"
+ARG TARGETARCH

-# copy prometheus YAML config
-COPY pkg/query-service/config/prometheus.yml /root/config/prometheus.yml
-COPY pkg/query-service/templates /root/templates
+RUN apk update && \
+    apk add ca-certificates && \
+    rm -rf /var/cache/apk/*

-# Make signoz executable for non-root users
-RUN chmod 755 /root /root/signoz
-
-# Copy frontend
+COPY ./target/${OS}-${TARGETARCH}/signoz /root/signoz
+COPY ./conf/prometheus.yml /root/config/prometheus.yml
+COPY ./templates/email /root/templates
 COPY frontend/build/ /etc/signoz/web/

-# run the binary
+RUN chmod 755 /root /root/signoz
+
 ENTRYPOINT ["./signoz"]

 CMD ["-config", "/root/config/prometheus.yml"]

 EXPOSE 8080
```
@@ -313,6 +313,9 @@ func (p *BaseSeasonalProvider) getScore(
|
||||
series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
|
||||
) float64 {
|
||||
expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
|
||||
if expectedValue < 0 {
|
||||
expectedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
|
||||
}
|
||||
return (value - expectedValue) / p.getStdDev(weekSeries)
|
||||
}
|
||||
|
||||
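Read as a formula, the getScore hunk above computes a seasonal z-score: score = (value − expected) / stddev(weekSeries), falling back to a moving average over the previous series when the expected value is negative. With illustrative numbers, an observed value of 130 against an expectation of 100 and a weekly standard deviation of 10 scores (130 − 100) / 10 = 3.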
@@ -19,9 +19,9 @@ import (
    baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    rules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/query-service/version"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/version"
    "github.com/gorilla/mux"
)

@@ -200,9 +200,8 @@ func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *ba
}

func (ah *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {
    version := version.GetVersion()
    versionResponse := basemodel.GetVersionResponse{
        Version:        version,
        Version:        version.Info.Version(),
        EE:             "Y",
        SetupCompleted: ah.SetupCompleted,
    }

@@ -11,7 +11,7 @@ import (
    "time"

    "github.com/SigNoz/signoz/ee/query-service/constants"
    "github.com/SigNoz/signoz/ee/query-service/model"
    eeTypes "github.com/SigNoz/signoz/ee/types"
    "github.com/SigNoz/signoz/pkg/query-service/auth"
    baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
    "github.com/SigNoz/signoz/pkg/query-service/dao"

@@ -135,19 +135,12 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
        zap.String("cloudProvider", cloudProvider),
    )

    newPAT := model.PAT{
        StorablePersonalAccessToken: types.StorablePersonalAccessToken{
            Token:     generatePATToken(),
            UserID:    integrationUser.ID,
            Name:      integrationPATName,
            Role:      baseconstants.ViewerGroup,
            ExpiresAt: 0,
            TimeAuditable: types.TimeAuditable{
                CreatedAt: time.Now(),
                UpdatedAt: time.Now(),
            },
        },
    }
    newPAT := eeTypes.NewGettablePAT(
        integrationPATName,
        baseconstants.ViewerGroup,
        integrationUser.ID,
        0,
    )
    integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
    if err != nil {
        return "", basemodel.InternalError(fmt.Errorf(

@@ -2,31 +2,21 @@ package api

import (
    "context"
    "crypto/rand"
    "encoding/base64"
    "encoding/json"
    "fmt"
    "net/http"
    "time"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/ee/types"
    eeTypes "github.com/SigNoz/signoz/ee/types"
    "github.com/SigNoz/signoz/pkg/query-service/auth"
    baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/gorilla/mux"
    "go.uber.org/zap"
)

func generatePATToken() string {
    // Generate a 32-byte random token.
    token := make([]byte, 32)
    rand.Read(token)
    // Encode the token in base64.
    encodedToken := base64.StdEncoding.EncodeToString(token)
    return encodedToken
}

func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
    ctx := context.Background()

@@ -43,31 +33,18 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
        }, nil)
        return
    }
    pat := model.PAT{
        StorablePersonalAccessToken: types.StorablePersonalAccessToken{
            Name:      req.Name,
            Role:      req.Role,
            ExpiresAt: req.ExpiresInDays,
        },
    }
    pat := eeTypes.NewGettablePAT(
        req.Name,
        req.Role,
        user.ID,
        req.ExpiresInDays,
    )
    err = validatePATRequest(pat)
    if err != nil {
        RespondError(w, model.BadRequest(err), nil)
        return
    }

    // All the PATs are associated with the user creating the PAT.
    pat.UserID = user.ID
    pat.CreatedAt = time.Now()
    pat.UpdatedAt = time.Now()
    pat.LastUsed = 0
    pat.Token = generatePATToken()

    if pat.ExpiresAt != 0 {
        // convert expiresAt to unix timestamp from days
        pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
    }

    zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
    var apierr basemodel.BaseApiError
    if pat, apierr = ah.AppDao().CreatePAT(ctx, user.OrgID, pat); apierr != nil {

@@ -78,7 +55,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
    ah.Respond(w, &pat)
}

func validatePATRequest(req model.PAT) error {
func validatePATRequest(req types.GettablePAT) error {
    if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
        return fmt.Errorf("valid role is required")
    }
@@ -94,7 +71,7 @@ func validatePATRequest(req model.PAT) error {
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
    ctx := context.Background()

    req := model.PAT{}
    req := types.GettablePAT{}
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        RespondError(w, model.BadRequest(err), nil)
        return

@@ -8,8 +8,10 @@ import (
    "github.com/jmoiron/sqlx"

    "github.com/SigNoz/signoz/pkg/cache"
    "github.com/SigNoz/signoz/pkg/prometheus"
    basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
    "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
)

type ClickhouseReader struct {
@@ -20,8 +22,8 @@ type ClickhouseReader struct {

func NewDataConnector(
    localDB *sqlx.DB,
    ch clickhouse.Conn,
    promConfigPath string,
    telemetryStore telemetrystore.TelemetryStore,
    prometheus prometheus.Prometheus,
    lm interfaces.FeatureLookup,
    cluster string,
    useLogsNewSchema bool,
@@ -29,14 +31,10 @@ func NewDataConnector(
    fluxIntervalForTraceDetail time.Duration,
    cache cache.Cache,
) *ClickhouseReader {
    chReader := basechr.NewReader(localDB, ch, promConfigPath, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
    chReader := basechr.NewReader(localDB, telemetryStore, prometheus, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
    return &ClickhouseReader{
        conn:             ch,
        conn:             telemetryStore.ClickhouseDB(),
        appdb:            localDB,
        ClickHouseReader: chReader,
    }
}

func (r *ClickhouseReader) Start(readerReady chan bool) {
    r.ClickHouseReader.Start(readerReady)
}

@@ -18,13 +18,14 @@ import (
    "github.com/SigNoz/signoz/ee/query-service/constants"
    "github.com/SigNoz/signoz/ee/query-service/dao"
    "github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
    "github.com/SigNoz/signoz/ee/query-service/interfaces"
    "github.com/SigNoz/signoz/ee/query-service/rules"
    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    "github.com/SigNoz/signoz/pkg/prometheus"
    "github.com/SigNoz/signoz/pkg/query-service/auth"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/web"
@@ -49,7 +50,6 @@ import (
    "github.com/SigNoz/signoz/pkg/query-service/healthcheck"
    baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    pqle "github.com/SigNoz/signoz/pkg/query-service/pqlEngine"
    baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/query-service/telemetry"
    "github.com/SigNoz/signoz/pkg/query-service/utils"
@@ -112,7 +112,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
        return nil, err
    }

    if err := baseexplorer.InitWithDSN(serverOptions.SigNoz.SQLStore.BunDB()); err != nil {
    if err := baseexplorer.InitWithDSN(serverOptions.SigNoz.SQLStore); err != nil {
        return nil, err
    }

@@ -120,7 +120,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
        return nil, err
    }

    if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore.BunDB()); err != nil {
    if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
        return nil, err
    }

@@ -130,25 +130,23 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
    }

    // initiate license manager
    lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore.BunDB())
    lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore)
    if err != nil {
        return nil, err
    }

    // set license manager as feature flag provider in dao
    modelDao.SetFlagProvider(lm)
    readerReady := make(chan bool)

    fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
    if err != nil {
        return nil, err
    }

    var reader interfaces.DataConnector
    qb := db.NewDataConnector(
    reader := db.NewDataConnector(
        serverOptions.SigNoz.SQLStore.SQLxDB(),
        serverOptions.SigNoz.TelemetryStore.ClickHouseDB(),
        serverOptions.PromConfigPath,
        serverOptions.SigNoz.TelemetryStore,
        serverOptions.SigNoz.Prometheus,
        lm,
        serverOptions.Cluster,
        serverOptions.UseLogsNewSchema,
@@ -156,8 +154,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
        fluxIntervalForTraceDetail,
        serverOptions.SigNoz.Cache,
    )
    go qb.Start(readerReady)
    reader = qb

    skipConfig := &basemodel.SkipConfig{}
    if serverOptions.SkipTopLvlOpsPath != "" {
@@ -176,9 +172,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
        c = cache.NewCache(cacheOpts)
    }

    <-readerReady
    rm, err := makeRulesManager(
        serverOptions.PromConfigPath,
        serverOptions.RuleRepoURL,
        serverOptions.SigNoz.SQLStore.SQLxDB(),
        reader,
@@ -189,6 +183,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
        serverOptions.UseTraceNewSchema,
        serverOptions.SigNoz.Alertmanager,
        serverOptions.SigNoz.SQLStore,
        serverOptions.SigNoz.TelemetryStore,
        serverOptions.SigNoz.Prometheus,
    )

    if err != nil {
@@ -217,7 +213,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {

    // ingestion pipelines manager
    logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
        serverOptions.SigNoz.SQLStore.SQLxDB(), integrationsController.GetPipelinesForInstalledIntegrations,
        serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
    )
    if err != nil {
        return nil, err
@@ -233,7 +229,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
    }

    // start the usagemanager
    usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickHouseDB(), serverOptions.Config.TelemetryStore.ClickHouse.DSN)
    usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.Config.TelemetryStore.Clickhouse.DSN)
    if err != nil {
        return nil, err
    }
@@ -304,7 +300,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
        &opAmpModel.AllAgents, agentConfMgr,
    )

    errorList := qb.PreloadMetricsMetadata(context.Background())
    errorList := reader.PreloadMetricsMetadata(context.Background())
    for _, er := range errorList {
        zap.L().Error("failed to preload metrics metadata", zap.Error(er))
    }
@@ -317,7 +313,7 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
    r := baseapp.NewRouter()

    r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
    r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore.BunDB(), []string{"SIGNOZ-API-KEY"}).Wrap)
    r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
    r.Use(middleware.NewTimeout(zap.L(),
        s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
        s.serverOptions.Config.APIServer.Timeout.Default,
@@ -365,7 +361,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
    am := baseapp.NewAuthMiddleware(getUserFromRequest)

    r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
    r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore.BunDB(), []string{"SIGNOZ-API-KEY"}).Wrap)
    r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
    r.Use(middleware.NewTimeout(zap.L(),
        s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
        s.serverOptions.Config.APIServer.Timeout.Default,
@@ -537,7 +533,6 @@ func (s *Server) Stop() error {
}

func makeRulesManager(
    promConfigPath,
    ruleRepoURL string,
    db *sqlx.DB,
    ch baseint.Reader,
@@ -548,16 +543,13 @@ func makeRulesManager(
    useTraceNewSchema bool,
    alertmanager alertmanager.Alertmanager,
    sqlstore sqlstore.SQLStore,
    telemetryStore telemetrystore.TelemetryStore,
    prometheus prometheus.Prometheus,
) (*baserules.Manager, error) {
    // create engine
    pqle, err := pqle.FromConfigPath(promConfigPath)
    if err != nil {
        return nil, fmt.Errorf("failed to create pql engine : %v", err)
    }

    // create manager opts
    managerOpts := &baserules.ManagerOptions{
        PqlEngine:      pqle,
        TelemetryStore: telemetryStore,
        Prometheus:     prometheus,
        RepoURL:        ruleRepoURL,
        DBConn:         db,
        Context:        context.Background(),

@@ -4,7 +4,6 @@ import (
    "context"
    "net/url"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/ee/types"
    basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
    baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -36,11 +35,11 @@ type ModelDao interface {
    DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
    GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)

    CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError)
    UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError
    GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
    GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError)
    CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
    UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id string) basemodel.BaseApiError
    GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
    GetPATByID(ctx context.Context, orgID string, id string) (*types.GettablePAT, basemodel.BaseApiError)
    GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
    ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError)
    ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
    RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError
}

@@ -6,41 +6,47 @@ import (
    "time"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/ee/types"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/types"
    ossTypes "github.com/SigNoz/signoz/pkg/types"

    "go.uber.org/zap"
)

func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError) {
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
    p.StorablePersonalAccessToken.OrgID = orgID
    _, err := m.DB().NewInsert().
        Model(&p.StorablePersonalAccessToken).
        Returning("id").
        Exec(ctx)
    if err != nil {
        zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
        return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
        return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
    }

    createdByUser, _ := m.GetUser(ctx, p.UserID)
    if createdByUser == nil {
        p.CreatedByUser = model.User{
        p.CreatedByUser = types.PatUser{
            NotFound: true,
        }
    } else {
        p.CreatedByUser = model.User{
            Id:                createdByUser.ID,
            Name:              createdByUser.Name,
            Email:             createdByUser.Email,
            CreatedAt:         createdByUser.CreatedAt.Unix(),
            ProfilePictureURL: createdByUser.ProfilePictureURL,
            NotFound:          false,
        p.CreatedByUser = types.PatUser{
            User: ossTypes.User{
                ID:    createdByUser.ID,
                Name:  createdByUser.Name,
                Email: createdByUser.Email,
                TimeAuditable: ossTypes.TimeAuditable{
                    CreatedAt: createdByUser.CreatedAt,
                    UpdatedAt: createdByUser.UpdatedAt,
                },
                ProfilePictureURL: createdByUser.ProfilePictureURL,
            },
            NotFound: false,
        }
    }
    return p, nil
}

func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError {
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id string) basemodel.BaseApiError {
    _, err := m.DB().NewUpdate().
        Model(&p.StorablePersonalAccessToken).
        Column("role", "name", "updated_at", "updated_by_user_id").
@@ -55,7 +61,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p model.PAT, id
    return nil
}

func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError) {
func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
    pats := []types.StorablePersonalAccessToken{}

    if err := m.DB().NewSelect().
@@ -68,41 +74,51 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, bas
        return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
    }

    patsWithUsers := []model.PAT{}
    patsWithUsers := []types.GettablePAT{}
    for i := range pats {
        patWithUser := model.PAT{
        patWithUser := types.GettablePAT{
            StorablePersonalAccessToken: pats[i],
        }

        createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
        if createdByUser == nil {
            patWithUser.CreatedByUser = model.User{
            patWithUser.CreatedByUser = types.PatUser{
                NotFound: true,
            }
        } else {
            patWithUser.CreatedByUser = model.User{
                Id:                createdByUser.ID,
                Name:              createdByUser.Name,
                Email:             createdByUser.Email,
                CreatedAt:         createdByUser.CreatedAt.Unix(),
                ProfilePictureURL: createdByUser.ProfilePictureURL,
                NotFound:          false,
            patWithUser.CreatedByUser = types.PatUser{
                User: ossTypes.User{
                    ID:    createdByUser.ID,
                    Name:  createdByUser.Name,
                    Email: createdByUser.Email,
                    TimeAuditable: ossTypes.TimeAuditable{
                        CreatedAt: createdByUser.CreatedAt,
                        UpdatedAt: createdByUser.UpdatedAt,
                    },
                    ProfilePictureURL: createdByUser.ProfilePictureURL,
                },
                NotFound: false,
            }
        }

        updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
        if updatedByUser == nil {
            patWithUser.UpdatedByUser = model.User{
            patWithUser.UpdatedByUser = types.PatUser{
                NotFound: true,
            }
        } else {
            patWithUser.UpdatedByUser = model.User{
                Id:                updatedByUser.ID,
                Name:              updatedByUser.Name,
                Email:             updatedByUser.Email,
                CreatedAt:         updatedByUser.CreatedAt.Unix(),
                ProfilePictureURL: updatedByUser.ProfilePictureURL,
                NotFound:          false,
            patWithUser.UpdatedByUser = types.PatUser{
                User: ossTypes.User{
                    ID:    updatedByUser.ID,
                    Name:  updatedByUser.Name,
                    Email: updatedByUser.Email,
                    TimeAuditable: ossTypes.TimeAuditable{
                        CreatedAt: updatedByUser.CreatedAt,
                        UpdatedAt: updatedByUser.UpdatedAt,
                    },
                    ProfilePictureURL: updatedByUser.ProfilePictureURL,
                },
                NotFound: false,
            }
        }

@@ -128,7 +144,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userI
    return nil
}

func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
    pats := []types.StorablePersonalAccessToken{}

    if err := m.DB().NewSelect().
@@ -146,14 +162,14 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemo
        }
    }

    patWithUser := model.PAT{
    patWithUser := types.GettablePAT{
        StorablePersonalAccessToken: pats[0],
    }

    return &patWithUser, nil
}

func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError) {
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*types.GettablePAT, basemodel.BaseApiError) {
    pats := []types.StorablePersonalAccessToken{}

    if err := m.DB().NewSelect().
@@ -172,7 +188,7 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*mo
        }
    }

    patWithUser := model.PAT{
    patWithUser := types.GettablePAT{
        StorablePersonalAccessToken: pats[0],
    }

@@ -180,8 +196,8 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*mo
}

// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*types.GettableUser, basemodel.BaseApiError) {
    users := []types.GettableUser{}
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError) {
    users := []ossTypes.GettableUser{}

    if err := m.DB().NewSelect().
        Model(&users).

@@ -7,6 +7,5 @@ import (
// Connector defines methods for interaction
// with o11y data. for example - clickhouse
type DataConnector interface {
    Start(readerReady chan bool)
    baseint.Reader
}

@@ -9,10 +9,10 @@ import (

    "github.com/jmoiron/sqlx"
    "github.com/mattn/go-sqlite3"
    "github.com/uptrace/bun"

    "github.com/SigNoz/signoz/ee/query-service/model"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "go.uber.org/zap"
)
@@ -20,14 +20,14 @@ import (
// Repo is license repo. stores license keys in a secured DB
type Repo struct {
    db    *sqlx.DB
    bundb *bun.DB
    store sqlstore.SQLStore
}

// NewLicenseRepo initiates a new license repo
func NewLicenseRepo(db *sqlx.DB, bundb *bun.DB) Repo {
func NewLicenseRepo(db *sqlx.DB, store sqlstore.SQLStore) Repo {
    return Repo{
        db:    db,
        bundb: bundb,
        store: store,
    }
}

@@ -171,7 +171,7 @@ func (r *Repo) UpdateLicenseV3(ctx context.Context, l *model.LicenseV3) error {

func (r *Repo) CreateFeature(req *types.FeatureStatus) *basemodel.ApiError {

    _, err := r.bundb.NewInsert().
    _, err := r.store.BunDB().NewInsert().
        Model(req).
        Exec(context.Background())
    if err != nil {
@@ -183,7 +183,7 @@ func (r *Repo) CreateFeature(req *types.FeatureStatus) *basemodel.ApiError {
func (r *Repo) GetFeature(featureName string) (types.FeatureStatus, error) {
    var feature types.FeatureStatus

    err := r.bundb.NewSelect().
    err := r.store.BunDB().NewSelect().
        Model(&feature).
        Where("name = ?", featureName).
        Scan(context.Background())
@@ -212,7 +212,7 @@ func (r *Repo) GetAllFeatures() ([]basemodel.Feature, error) {

func (r *Repo) UpdateFeature(req types.FeatureStatus) error {

    _, err := r.bundb.NewUpdate().
    _, err := r.store.BunDB().NewUpdate().
        Model(&req).
        Where("name = ?", req.Name).
        Exec(context.Background())

@@ -7,11 +7,11 @@ import (

    "github.com/jmoiron/sqlx"
    "github.com/pkg/errors"
    "github.com/uptrace/bun"

    "sync"

    baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"

@@ -45,12 +45,12 @@ type Manager struct {
    activeFeatures basemodel.FeatureSet
}

func StartManager(db *sqlx.DB, bundb *bun.DB, features ...basemodel.Feature) (*Manager, error) {
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, features ...basemodel.Feature) (*Manager, error) {
    if LM != nil {
        return LM, nil
    }

    repo := NewLicenseRepo(db, bundb)
    repo := NewLicenseRepo(db, store)
    m := &Manager{
        repo: &repo,
    }

@@ -3,89 +3,33 @@ package main
import (
    "context"
    "flag"
    "log"
    "os"
    "os/signal"
    "strconv"
    "time"

    "github.com/SigNoz/signoz/ee/query-service/app"
    "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
    "github.com/SigNoz/signoz/pkg/config"
    "github.com/SigNoz/signoz/pkg/config/envprovider"
    "github.com/SigNoz/signoz/pkg/config/fileprovider"
    "github.com/SigNoz/signoz/pkg/query-service/auth"
    baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
    "github.com/SigNoz/signoz/pkg/query-service/version"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "go.opentelemetry.io/otel/sdk/resource"
    semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
    "google.golang.org/grpc"
    "google.golang.org/grpc/credentials/insecure"

    prommodel "github.com/prometheus/common/model"

    zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
    zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"
    "github.com/SigNoz/signoz/pkg/version"

    "go.uber.org/zap"
    "go.uber.org/zap/zapcore"
)

func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
func initZapLog() *zap.Logger {
    config := zap.NewProductionConfig()
    ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
    defer stop()

    config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder
    config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
    config.EncoderConfig.TimeKey = "timestamp"
    config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder

    otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
    consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig)
    defaultLogLevel := zapcore.InfoLevel

    res := resource.NewWithAttributes(
        semconv.SchemaURL,
        semconv.ServiceNameKey.String("query-service"),
    )

    core := zapcore.NewTee(
        zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
    )

    if enableQueryServiceLogOTLPExport {
        ctx, cancel := context.WithTimeout(ctx, time.Second*30)
        defer cancel()
        conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials()))
        if err != nil {
            log.Fatalf("failed to establish connection: %v", err)
        } else {
            logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize)
            if err != nil {
                logExportBatchSizeInt = 512
            }
            ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{
                BatchSize:      logExportBatchSizeInt,
                ResourceSchema: semconv.SchemaURL,
                Resource:       res,
            }))
            core = zapcore.NewTee(
                zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
                zapcore.NewCore(otlpEncoder, zapcore.NewMultiWriteSyncer(ws), defaultLogLevel),
            )
        }
    }
    logger := zap.New(core, zap.AddCaller(), zap.AddStacktrace(zapcore.ErrorLevel))

    logger, _ := config.Build()
    return logger
}

func init() {
    prommodel.NameValidationScheme = prommodel.UTF8Validation
}

func main() {
    var promConfigPath, skipTopLvlOpsPath string

@@ -99,7 +43,6 @@ func main() {
    var useLogsNewSchema bool
    var useTraceNewSchema bool
    var cacheConfigPath, fluxInterval, fluxIntervalForTraceDetail string
    var enableQueryServiceLogOTLPExport bool
    var preferSpanMetrics bool

    var maxIdleConns int
@@ -121,19 +64,15 @@ func main() {
    flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
    flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
    flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)")
    flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
    flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
    flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
    flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
    flag.Parse()

    loggerMgr := initZapLog(enableQueryServiceLogOTLPExport)

    loggerMgr := initZapLog()
    zap.ReplaceGlobals(loggerMgr)
    defer loggerMgr.Sync() // flushes buffer, if any

    version.PrintVersion()

    config, err := signoz.NewConfig(context.Background(), config.ResolverConfig{
        Uris: []string{"env:"},
        ProviderFactories: []config.ProviderFactory{
@@ -144,21 +83,29 @@ func main() {
        MaxIdleConns: maxIdleConns,
        MaxOpenConns: maxOpenConns,
        DialTimeout:  dialTimeout,
        Config:       promConfigPath,
    })
    if err != nil {
        zap.L().Fatal("Failed to create config", zap.Error(err))
    }

    version.Info.PrettyPrint(config.Version)

    sqlStoreFactories := signoz.NewSQLStoreProviderFactories()
    if err := sqlStoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
        zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
    }

    signoz, err := signoz.New(
        context.Background(),
        config,
        signoz.NewCacheProviderFactories(),
        signoz.NewWebProviderFactories(),
        signoz.NewSQLStoreProviderFactories(),
        sqlStoreFactories,
        signoz.NewTelemetryStoreProviderFactories(),
    )
    if err != nil {
        zap.L().Fatal("Failed to create signoz struct", zap.Error(err))
        zap.L().Fatal("Failed to create signoz", zap.Error(err))
    }

    jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")

@@ -1,25 +1,7 @@
package model

import "github.com/SigNoz/signoz/pkg/types"

type User struct {
    Id                string `json:"id" db:"id"`
    Name              string `json:"name" db:"name"`
    Email             string `json:"email" db:"email"`
    CreatedAt         int64  `json:"createdAt" db:"created_at"`
    ProfilePictureURL string `json:"profilePictureURL" db:"profile_picture_url"`
    NotFound          bool   `json:"notFound"`
}

type CreatePATRequestBody struct {
    Name          string `json:"name"`
    Role          string `json:"role"`
    ExpiresInDays int64  `json:"expiresInDays"`
}

type PAT struct {
    CreatedByUser User `json:"createdByUser"`
    UpdatedByUser User `json:"updatedByUser"`

    types.StorablePersonalAccessToken
}

@@ -157,6 +157,13 @@ var BasicPlan = basemodel.FeatureSet{
        UsageLimit: -1,
        Route:      "",
    },
    basemodel.Feature{
        Name:       basemodel.TraceFunnels,
        Active:     false,
        Usage:      0,
        UsageLimit: -1,
        Route:      "",
    },
}

var ProPlan = basemodel.FeatureSet{
@@ -279,6 +286,13 @@ var ProPlan = basemodel.FeatureSet{
        UsageLimit: -1,
        Route:      "",
    },
    basemodel.Feature{
        Name:       basemodel.TraceFunnels,
        Active:     false,
        Usage:      0,
        UsageLimit: -1,
        Route:      "",
    },
}

var EnterprisePlan = basemodel.FeatureSet{
@@ -415,4 +429,11 @@ var EnterprisePlan = basemodel.FeatureSet{
        UsageLimit: -1,
        Route:      "",
    },
    basemodel.Feature{
        Name:       basemodel.TraceFunnels,
        Active:     false,
        Usage:      0,
        UsageLimit: -1,
        Route:      "",
    },
}

@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
        opts.Rule,
        opts.Logger,
        opts.Reader,
        opts.ManagerOpts.PqlEngine,
        opts.ManagerOpts.Prometheus,
        baserules.WithSQLStore(opts.SQLStore),
    )

@@ -145,7 +145,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
        parsedRule,
        opts.Logger,
        opts.Reader,
        opts.ManagerOpts.PqlEngine,
        opts.ManagerOpts.Prometheus,
        baserules.WithSendAlways(),
        baserules.WithSendUnmatched(),
        baserules.WithSQLStore(opts.SQLStore),

ee/sqlstore/postgressqlstore/dialect.go (new file, 220 lines)
@@ -0,0 +1,220 @@
package postgressqlstore

import (
    "context"
    "fmt"
    "reflect"

    "github.com/uptrace/bun"
)

type dialect struct {
}

func (dialect *dialect) MigrateIntToTimestamp(ctx context.Context, bun bun.IDB, table string, column string) error {
    columnType, err := dialect.GetColumnType(ctx, bun, table, column)
    if err != nil {
        return err
    }

    // bigint for postgres and INTEGER for sqlite
    if columnType != "bigint" {
        return nil
    }

    // if the columns is integer then do this
    if _, err := bun.
        ExecContext(ctx, `ALTER TABLE `+table+` RENAME COLUMN `+column+` TO `+column+`_old`); err != nil {
        return err
    }

    // add new timestamp column
    if _, err := bun.
        NewAddColumn().
        Table(table).
        ColumnExpr(column + " TIMESTAMP").
        Exec(ctx); err != nil {
        return err
    }

    if _, err := bun.
        NewUpdate().
        Table(table).
        Set(column + " = to_timestamp(cast(" + column + "_old as INTEGER))").
        Where("1=1").
        Exec(ctx); err != nil {
        return err
    }

    // drop old column
    if _, err := bun.
        NewDropColumn().
        Table(table).
        Column(column + "_old").
        Exec(ctx); err != nil {
        return err
    }

    return nil
}

func (dialect *dialect) MigrateIntToBoolean(ctx context.Context, bun bun.IDB, table string, column string) error {
    columnType, err := dialect.GetColumnType(ctx, bun, table, column)
    if err != nil {
        return err
    }

    if columnType != "bigint" {
        return nil
    }

    if _, err := bun.
        ExecContext(ctx, `ALTER TABLE `+table+` RENAME COLUMN `+column+` TO `+column+`_old`); err != nil {
        return err
    }

    // add new boolean column
    if _, err := bun.
        NewAddColumn().
        Table(table).
        ColumnExpr(column + " BOOLEAN").
        Exec(ctx); err != nil {
        return err
    }

    // copy data from old column to new column, converting from int to boolean
    if _, err := bun.NewUpdate().
        Table(table).
        Set(column + " = CASE WHEN " + column + "_old = 1 THEN true ELSE false END").
        Where("1=1").
        Exec(ctx); err != nil {
        return err
    }

    // drop old column
    if _, err := bun.NewDropColumn().Table(table).Column(column + "_old").Exec(ctx); err != nil {
        return err
    }

    return nil
}

func (dialect *dialect) GetColumnType(ctx context.Context, bun bun.IDB, table string, column string) (string, error) {
    var columnType string

    err := bun.NewSelect().
        ColumnExpr("data_type").
        TableExpr("information_schema.columns").
        Where("table_name = ?", table).
        Where("column_name = ?", column).
        Scan(ctx, &columnType)
    if err != nil {
        return "", err
    }

    return columnType, nil
}

func (dialect *dialect) ColumnExists(ctx context.Context, bun bun.IDB, table string, column string) (bool, error) {
    var count int
    err := bun.NewSelect().
        ColumnExpr("COUNT(*)").
        TableExpr("information_schema.columns").
        Where("table_name = ?", table).
        Where("column_name = ?", column).
        Scan(ctx, &count)

    if err != nil {
        return false, err
    }

    return count > 0, nil
}

func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) {
    oldColumnExists, err := dialect.ColumnExists(ctx, bun, table, oldColumnName)
    if err != nil {
        return false, err
    }

    newColumnExists, err := dialect.ColumnExists(ctx, bun, table, newColumnName)
    if err != nil {
        return false, err
    }

    if !oldColumnExists && newColumnExists {
        return true, nil
    }

    _, err = bun.
        ExecContext(ctx, "ALTER TABLE "+table+" RENAME COLUMN "+oldColumnName+" TO "+newColumnName)
    if err != nil {
        return false, err
    }
    return true, nil
}

func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table interface{}) (bool, error) {

    count := 0
    err := bun.
        NewSelect().
        ColumnExpr("count(*)").
        Table("pg_catalog.pg_tables").
        Where("tablename = ?", bun.Dialect().Tables().Get(reflect.TypeOf(table)).Name).
        Scan(ctx, &count)

    if err != nil {
        return false, err
    }

    if count == 0 {
        return false, nil
    }

    return true, nil
}

func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
    exists, err := dialect.TableExists(ctx, bun, newModel)
    if err != nil {
        return err
    }
    if exists {
        return nil
    }

    _, err = bun.
        NewCreateTable().
        IfNotExists().
        Model(newModel).
        Exec(ctx)

    if err != nil {
        return err
    }

    err = cb(ctx)
    if err != nil {
        return err
    }

    _, err = bun.
        NewDropTable().
        IfExists().
        Model(oldModel).
        Exec(ctx)
    if err != nil {
        return err
    }

    return nil
}

func (dialect *dialect) AddNotNullDefaultToColumn(ctx context.Context, bun bun.IDB, table string, column, columnType, defaultValue string) error {
    query := fmt.Sprintf("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s, ALTER COLUMN %s SET NOT NULL", table, column, defaultValue, column)
    if _, err := bun.ExecContext(ctx, query); err != nil {
        return err
    }
    return nil
}
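Taken together, these helpers implement an idempotent create-copy-drop migration pattern: `RenameTableAndModifyModel` is a no-op when the new table already exists, otherwise it creates the new table, runs the caller's copy step, and drops the old table. A minimal usage sketch within the same package; the `orgOld`/`org` models and the copy statement are hypothetical, for illustration only:

```go
package postgressqlstore

import (
    "context"

    "github.com/uptrace/bun"
)

// Hypothetical models for illustration; they are not part of this PR.
type orgOld struct {
    bun.BaseModel `bun:"table:orgs_old"`
    ID            string `bun:"id,pk"`
}

type org struct {
    bun.BaseModel `bun:"table:orgs"`
    ID            string `bun:"id,pk"`
}

// migrateOrgs creates the new table, copies the rows via the callback,
// then drops the old table; it is a no-op if orgs already exists.
func migrateOrgs(ctx context.Context, d *dialect, db bun.IDB) error {
    return d.RenameTableAndModifyModel(ctx, db, (*orgOld)(nil), (*org)(nil), func(ctx context.Context) error {
        _, err := db.ExecContext(ctx, "INSERT INTO orgs (id) SELECT id FROM orgs_old")
        return err
    })
}
```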
@@ -18,7 +18,7 @@ type provider struct {
    sqldb   *sql.DB
    bundb   *sqlstore.BunDB
    sqlxdb  *sqlx.DB
    dialect *PGDialect
    dialect *dialect
}

func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -60,7 +60,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
        sqldb:   sqldb,
        bundb:   sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
        sqlxdb:  sqlx.NewDb(sqldb, "postgres"),
        dialect: &PGDialect{},
        dialect: new(dialect),
    }, nil
}

ee/types/personal_access_token.go (new file, 73 lines)
@@ -0,0 +1,73 @@
package types

import (
    "crypto/rand"
    "encoding/base64"
    "time"

    "github.com/SigNoz/signoz/pkg/types"
    "github.com/uptrace/bun"
)

type GettablePAT struct {
    CreatedByUser PatUser `json:"createdByUser"`
    UpdatedByUser PatUser `json:"updatedByUser"`

    StorablePersonalAccessToken
}

type PatUser struct {
    types.User
    NotFound bool `json:"notFound"`
}

func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
    return GettablePAT{
        StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
    }
}

type StorablePersonalAccessToken struct {
    bun.BaseModel `bun:"table:personal_access_tokens"`

    types.TimeAuditable
    OrgID           string `json:"orgId" bun:"org_id,type:text,notnull"`
    ID              int    `json:"id" bun:"id,pk,autoincrement"`
    Role            string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
    UserID          string `json:"userId" bun:"user_id,type:text,notnull"`
    Token           string `json:"token" bun:"token,type:text,notnull,unique"`
    Name            string `json:"name" bun:"name,type:text,notnull"`
    ExpiresAt       int64  `json:"expiresAt" bun:"expires_at,notnull,default:0"`
    LastUsed        int64  `json:"lastUsed" bun:"last_used,notnull,default:0"`
    Revoked         bool   `json:"revoked" bun:"revoked,notnull,default:false"`
    UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
}

func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
    now := time.Now()
    if expiresAt != 0 {
        // convert expiresAt to unix timestamp from days
        expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
    }

    // Generate a 32-byte random token.
    token := make([]byte, 32)
    rand.Read(token)
    // Encode the token in base64.
    encodedToken := base64.StdEncoding.EncodeToString(token)

    return StorablePersonalAccessToken{
        Token:           encodedToken,
        Name:            name,
        Role:            role,
        UserID:          userID,
        ExpiresAt:       expiresAt,
        LastUsed:        0,
        Revoked:         false,
        UpdatedByUserID: "",
        TimeAuditable: types.TimeAuditable{
            CreatedAt: now,
            UpdatedAt: now,
        },
    }
}
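A quick sketch of how the new constructor is meant to be used, mirroring the createPAT handler earlier in this diff; the token name, role string, user ID, and the 30-day expiry are illustrative assumptions:

```go
package main

import (
    "fmt"

    eetypes "github.com/SigNoz/signoz/ee/types"
)

func main() {
    // A viewer PAT expiring in 30 days. NewGettablePAT generates the random
    // base64 token and converts the expiry from days to a unix timestamp
    // (now.Unix() + 30*24*60*60), matching the logic removed from createPAT.
    pat := eetypes.NewGettablePAT("ci-token", "VIEWER", "user-123", 30)
    fmt.Println(pat.Name, pat.Role, pat.ExpiresAt, pat.Token)
}
```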
||||
@@ -90,7 +90,7 @@
|
||||
"less": "^4.1.2",
|
||||
"less-loader": "^10.2.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"lucide-react": "0.379.0",
|
||||
"lucide-react": "0.427.0",
|
||||
"mini-css-extract-plugin": "2.4.5",
|
||||
"motion": "12.4.13",
|
||||
"overlayscrollbars": "^2.8.1",
|
||||
|
||||
@@ -54,15 +54,20 @@
|
||||
"SUPPORT": "SigNoz | Support",
|
||||
"LOGS_SAVE_VIEWS": "SigNoz | Logs Saved Views",
|
||||
"TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views",
|
||||
"TRACES_FUNNELS": "SigNoz | Traces Funnels",
|
||||
"DEFAULT": "Open source Observability Platform | SigNoz",
|
||||
"SHORTCUTS": "SigNoz | Shortcuts",
|
||||
"INTEGRATIONS": "SigNoz | Integrations",
|
||||
"ALERT_HISTORY": "SigNoz | Alert Rule History",
|
||||
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
|
||||
"MESSAGING_QUEUES": "SigNoz | Messaging Queues",
|
||||
"MESSAGING_QUEUES_OVERVIEW": "SigNoz | Messaging Queues",
|
||||
"MESSAGING_QUEUES_KAFKA": "SigNoz | Messaging Queues | Kafka",
|
||||
"MESSAGING_QUEUES_KAFKA_DETAIL": "SigNoz | Messaging Queues | Kafka",
|
||||
"MESSAGING_QUEUES_CELERY_TASK": "SigNoz | Messaging Queues | Celery",
|
||||
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
|
||||
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
|
||||
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer"
|
||||
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
|
||||
"API_MONITORING": "SigNoz | API Monitoring"
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { ConfigProvider } from 'antd';
|
||||
import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
@@ -15,6 +16,7 @@ import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
|
||||
import { NotificationProvider } from 'hooks/useNotifications';
|
||||
import { ResourceProvider } from 'hooks/useResourceAttribute';
|
||||
import history from 'lib/history';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import posthog from 'posthog-js';
|
||||
import AlertRuleProvider from 'providers/Alert';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
@@ -26,6 +28,7 @@ import { Route, Router, Switch } from 'react-router-dom';
|
||||
import { CompatRouter } from 'react-router-dom-v5-compat';
|
||||
import { extractDomain } from 'utils/app';
|
||||
|
||||
import { Home } from './pageComponents';
|
||||
import PrivateRoute from './Private';
|
||||
import defaultRoutes, {
|
||||
AppRoutes,
|
||||
@@ -45,7 +48,6 @@ function App(): JSX.Element {
|
||||
activeLicenseV3,
|
||||
isFetchingActiveLicenseV3,
|
||||
userFetchError,
|
||||
licensesFetchError,
|
||||
featureFlagsFetchError,
|
||||
isLoggedIn: isLoggedInState,
|
||||
featureFlags,
|
||||
@@ -55,10 +57,7 @@ function App(): JSX.Element {
|
||||
|
||||
const { hostname, pathname } = window.location;
|
||||
|
||||
const {
|
||||
isCloudUser: isCloudUserVal,
|
||||
isEECloudUser: isEECloudUserVal,
|
||||
} = useGetTenantLicense();
|
||||
const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
|
||||
|
||||
const enableAnalytics = useCallback(
|
||||
(user: IUser): void => {
|
||||
@@ -168,7 +167,7 @@ function App(): JSX.Element {
|
||||
|
||||
let updatedRoutes = defaultRoutes;
|
||||
// if the user is a cloud user
|
||||
if (isCloudUserVal || isEECloudUserVal) {
|
||||
if (isCloudUser || isEnterpriseSelfHostedUser) {
|
||||
// if the user is on basic plan then remove billing
|
||||
if (isOnBasicPlan) {
|
||||
updatedRoutes = updatedRoutes.filter(
|
||||
@@ -190,10 +189,10 @@ function App(): JSX.Element {
|
||||
isLoggedInState,
|
||||
user,
|
||||
licenses,
|
||||
isCloudUserVal,
|
||||
isCloudUser,
|
||||
isEnterpriseSelfHostedUser,
|
||||
isFetchingLicenses,
|
||||
isFetchingUser,
|
||||
isEECloudUserVal,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -208,6 +207,7 @@ function App(): JSX.Element {
|
||||
}
|
||||
}, [pathname]);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
useEffect(() => {
|
||||
// feature flag shouldn't be loading and featureFlags or fetchError any one of this should be true indicating that req is complete
|
||||
// licenses should also be present. there is no check for licenses for loading and error as that is mandatory if not present then routing
|
||||
@@ -233,7 +233,12 @@ function App(): JSX.Element {
|
||||
const showAddCreditCardModal =
|
||||
!isPremiumSupportEnabled && !trialInfo?.trialConvertedToSubscription;
|
||||
|
||||
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
|
||||
if (
|
||||
isLoggedInState &&
|
||||
isChatSupportEnabled &&
|
||||
!showAddCreditCardModal &&
|
||||
(isCloudUser || isEnterpriseSelfHostedUser)
|
||||
) {
|
||||
window.Intercom('boot', {
|
||||
app_id: process.env.INTERCOM_APP_ID,
|
||||
email: user?.email || '',
|
||||
@@ -252,13 +257,53 @@ function App(): JSX.Element {
|
||||
licenses,
|
||||
activeLicenseV3,
|
||||
trialInfo,
|
||||
isCloudUser,
|
||||
isEnterpriseSelfHostedUser,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingUser && isCloudUserVal && user && user.email) {
|
||||
if (!isFetchingUser && isCloudUser && user && user.email) {
|
||||
enableAnalytics(user);
|
||||
}
|
||||
}, [user, isFetchingUser, isCloudUserVal, enableAnalytics]);
|
||||
}, [user, isFetchingUser, isCloudUser, enableAnalytics]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isCloudUser || isEnterpriseSelfHostedUser) {
|
||||
if (process.env.POSTHOG_KEY) {
|
||||
posthog.init(process.env.POSTHOG_KEY, {
|
||||
api_host: 'https://us.i.posthog.com',
|
||||
person_profiles: 'identified_only', // or 'always' to create profiles for anonymous users as well
|
||||
});
|
||||
}
|
||||
|
||||
Sentry.init({
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
tunnel: process.env.TUNNEL_URL,
|
||||
environment: 'production',
|
||||
integrations: [
|
||||
Sentry.browserTracingIntegration(),
|
||||
Sentry.replayIntegration({
|
||||
maskAllText: false,
|
||||
blockAllMedia: false,
|
||||
}),
|
||||
],
|
||||
// Performance Monitoring
|
||||
tracesSampleRate: 1.0, // Capture 100% of the transactions
|
||||
// Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
|
||||
tracePropagationTargets: [],
|
||||
// Session Replay
|
||||
replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
|
||||
replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
|
||||
});
|
||||
} else {
|
||||
posthog.reset();
|
||||
Sentry.close();
|
||||
|
||||
if (window.cioanalytics && typeof window.cioanalytics.reset === 'function') {
|
||||
window.cioanalytics.reset();
|
||||
}
|
||||
}
|
||||
}, [isCloudUser, isEnterpriseSelfHostedUser]);
|
||||
|
||||
// if the user is in logged in state
|
||||
if (isLoggedInState) {
|
||||
@@ -270,60 +315,55 @@ function App(): JSX.Element {
|
||||
// if the required calls fails then return a something went wrong error
|
||||
// this needs to be on top of data missing error because if there is an error, data will never be loaded and it will
|
||||
// move to indefinitive loading
|
||||
if (
|
||||
(userFetchError || licensesFetchError) &&
|
||||
pathname !== ROUTES.SOMETHING_WENT_WRONG
|
||||
) {
|
||||
if (userFetchError && pathname !== ROUTES.SOMETHING_WENT_WRONG) {
|
||||
history.replace(ROUTES.SOMETHING_WENT_WRONG);
|
||||
}
|
||||
|
||||
// if all of the data is not set then return a spinner, this is required because there is some gap between loading states and data setting
|
||||
if (
|
||||
(!licenses || !user.email || !featureFlags) &&
|
||||
!userFetchError &&
|
||||
!licensesFetchError
|
||||
) {
|
||||
if ((!licenses || !user.email || !featureFlags) && !userFetchError) {
|
||||
return <Spinner tip="Loading..." />;
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
<CompatRouter>
|
||||
<NotificationProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</NotificationProvider>
|
||||
</CompatRouter>
|
||||
</Router>
|
||||
</ConfigProvider>
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
<CompatRouter>
|
||||
<NotificationProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<Route exact path="/" component={Home} />
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</NotificationProvider>
|
||||
</CompatRouter>
|
||||
</Router>
|
||||
</ConfigProvider>
|
||||
</Sentry.ErrorBoundary>
|
||||
);
|
||||
}
@@ -42,6 +42,17 @@ export const TracesSaveViews = Loadable(
	import(/* webpackChunkName: "Traces Save Views" */ 'pages/TracesModulePage'),
);

export const TracesFunnels = Loadable(
	() =>
		import(/* webpackChunkName: "Traces Funnels" */ 'pages/TracesModulePage'),
);
export const TracesFunnelDetails = Loadable(
	() =>
		import(
			/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesFunnelDetails'
		),
);

export const TraceFilter = Loadable(
	() => import(/* webpackChunkName: "Trace Filter Page" */ 'pages/Trace'),
);
@@ -284,3 +295,7 @@ export const MetricsExplorer = Loadable(
	() =>
		import(/* webpackChunkName: "MetricsExplorer" */ 'pages/MetricsExplorer'),
);

export const ApiMonitoring = Loadable(
	() => import(/* webpackChunkName: "ApiMonitoring" */ 'pages/ApiMonitoring'),
);

@@ -8,6 +8,7 @@ import {
	AllAlertChannels,
	AllErrors,
	APIKeys,
	ApiMonitoring,
	BillingPage,
	CreateAlertChannelAlerts,
	CreateNewAlerts,
@@ -52,6 +53,8 @@ import {
	TraceDetail,
	TraceFilter,
	TracesExplorer,
	TracesFunnelDetails,
	TracesFunnels,
	TracesSaveViews,
	UnAuthorized,
	UsageExplorerPage,
@@ -236,6 +239,20 @@ const routes: AppRoutes[] = [
		isPrivate: true,
		key: 'TRACES_SAVE_VIEWS',
	},
	{
		path: ROUTES.TRACES_FUNNELS,
		exact: true,
		component: TracesFunnels,
		isPrivate: true,
		key: 'TRACES_FUNNELS',
	},
	{
		path: ROUTES.TRACES_FUNNELS_DETAIL,
		exact: true,
		component: TracesFunnelDetails,
		isPrivate: true,
		key: 'TRACES_FUNNELS_DETAIL',
	},
	{
		path: ROUTES.CHANNELS_NEW,
		exact: true,
@@ -481,6 +498,13 @@ const routes: AppRoutes[] = [
		key: 'METRICS_EXPLORER_VIEWS',
		isPrivate: true,
	},
	{
		path: ROUTES.API_MONITORING,
		exact: true,
		component: ApiMonitoring,
		key: 'API_MONITORING',
		isPrivate: true,
	},
];

export const SUPPORT_ROUTE: AppRoutes = {
frontend/src/api/traceFunnels/index.ts (new file, 109 lines)
@@ -0,0 +1,109 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
	CreateFunnelPayload,
	CreateFunnelResponse,
	FunnelData,
} from 'types/api/traceFunnels';

const FUNNELS_BASE_PATH = '/trace-funnels';

export const createFunnel = async (
	payload: CreateFunnelPayload,
): Promise<SuccessResponse<CreateFunnelResponse> | ErrorResponse> => {
	const response: AxiosResponse = await axios.post(
		`${FUNNELS_BASE_PATH}/new-funnel`,
		payload,
	);

	return {
		statusCode: 200,
		error: null,
		message: 'Funnel created successfully',
		payload: response.data,
	};
};

interface GetFunnelsListParams {
	search?: string;
}

export const getFunnelsList = async ({
	search = '',
}: GetFunnelsListParams = {}): Promise<
	SuccessResponse<FunnelData[]> | ErrorResponse
> => {
	const params = new URLSearchParams();
	if (search.length) {
		params.set('search', search);
	}

	const response: AxiosResponse = await axios.get(
		`${FUNNELS_BASE_PATH}/list${
			params.toString() ? `?${params.toString()}` : ''
		}`,
	);

	return {
		statusCode: 200,
		error: null,
		message: '',
		payload: response.data,
	};
};

export const getFunnelById = async (
	funnelId: string,
): Promise<SuccessResponse<FunnelData> | ErrorResponse> => {
	const response: AxiosResponse = await axios.get(
		`${FUNNELS_BASE_PATH}/get/${funnelId}`,
	);

	return {
		statusCode: 200,
		error: null,
		message: '',
		payload: response.data,
	};
};

interface RenameFunnelPayload {
	id: string;
	funnel_name: string;
}

export const renameFunnel = async (
	payload: RenameFunnelPayload,
): Promise<SuccessResponse<FunnelData> | ErrorResponse> => {
	const response: AxiosResponse = await axios.put(
		`${FUNNELS_BASE_PATH}/${payload.id}/update`,
		{ funnel_name: payload.funnel_name },
	);

	return {
		statusCode: 200,
		error: null,
		message: 'Funnel renamed successfully',
		payload: response.data,
	};
};

interface DeleteFunnelPayload {
	id: string;
}

export const deleteFunnel = async (
	payload: DeleteFunnelPayload,
): Promise<SuccessResponse<FunnelData> | ErrorResponse> => {
	const response: AxiosResponse = await axios.delete(
		`${FUNNELS_BASE_PATH}/delete/${payload.id}`,
	);

	return {
		statusCode: 200,
		error: null,
		message: 'Funnel deleted successfully',
		payload: response.data,
	};
};
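
For context, a minimal sketch of how these new endpoints would typically be consumed with react-query, matching the `GET_FUNNELS_LIST` key added later in this changeset. `useFunnels` is a hypothetical hook, not part of the diff:

```typescript
import { useMutation, useQuery } from 'react-query';
import { getFunnelsList, renameFunnel } from 'api/traceFunnels';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';

// Hypothetical hook: fetches funnels matching a search term and exposes a
// rename mutation that refreshes the list on success.
export function useFunnels(search: string) {
	const listQuery = useQuery(
		[REACT_QUERY_KEY.GET_FUNNELS_LIST, search],
		() => getFunnelsList({ search }),
	);

	const renameMutation = useMutation(renameFunnel, {
		// refetch so the renamed funnel shows up immediately
		onSuccess: () => listQuery.refetch(),
	});

	return { listQuery, renameMutation };
}
```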
@@ -521,7 +521,7 @@ export default function CeleryOverviewTable({
	locale={{
		emptyText: isLoading ? null : <Typography.Text>No data</Typography.Text>,
	}}
	scroll={{ x: true }}
	scroll={{ x: 'max-content' }}
	showSorterTooltip
	onDragColumn={handleDragColumn}
	onRow={(record): { onClick: () => void; className: string } => ({

@@ -199,12 +199,12 @@ function ExplorerCard({
	value={viewName || undefined}
>
	{viewsData?.data.data.map((view) => (
		<Select.Option key={view.uuid} value={view.name}>
		<Select.Option key={view.id} value={view.name}>
			<MenuItemGenerator
				viewName={view.name}
				viewKey={viewKey}
				createdBy={view.createdBy}
				uuid={view.uuid}
				uuid={view.id}
				refetchAllView={refetchAllView}
				viewData={viewsData.data.data}
				sourcePage={sourcepage}

@@ -53,17 +53,12 @@ function MenuItemGenerator({
	({ key }: { key: string }): void => {
		const currentViewDetails = getViewDetailsUsingViewKey(key, viewData);
		if (!currentViewDetails) return;
		const {
			query,
			name,
			uuid,
			panelType: currentPanelType,
		} = currentViewDetails;
		const { query, name, id, panelType: currentPanelType } = currentViewDetails;

		handleExplorerTabChange(currentPanelType, {
			query,
			name,
			uuid,
			id,
		});
	},
	[viewData, handleExplorerTabChange],

@@ -4,7 +4,7 @@ import { DataSource } from 'types/common/queryBuilder';

export const viewMockData: ViewProps[] = [
	{
		uuid: 'view1',
		id: 'view1',
		name: 'View 1',
		createdBy: 'User 1',
		category: 'category 1',
@@ -17,7 +17,7 @@ export const viewMockData: ViewProps[] = [
		updatedBy: 'User 1',
	},
	{
		uuid: 'view2',
		id: 'view2',
		name: 'View 2',
		createdBy: 'User 2',
		category: 'category 2',

@@ -25,9 +25,9 @@ describe('MenuItemGenerator', () => {
	<MockQueryClientProvider>
		<MenuItemGenerator
			viewName={viewMockData[0].name}
			viewKey={viewMockData[0].uuid}
			viewKey={viewMockData[0].id}
			createdBy={viewMockData[0].createdBy}
			uuid={viewMockData[0].uuid}
			uuid={viewMockData[0].id}
			refetchAllView={jest.fn()}
			viewData={viewMockData}
			sourcePage={DataSource.TRACES}
@@ -43,9 +43,9 @@ describe('MenuItemGenerator', () => {
	<MockQueryClientProvider>
		<MenuItemGenerator
			viewName={viewMockData[0].name}
			viewKey={viewMockData[0].uuid}
			viewKey={viewMockData[0].id}
			createdBy={viewMockData[0].createdBy}
			uuid={viewMockData[0].uuid}
			uuid={viewMockData[0].id}
			refetchAllView={jest.fn()}
			viewData={viewMockData}
			sourcePage={DataSource.TRACES}

@@ -26,7 +26,7 @@ export type GetViewDetailsUsingViewKey = (
	| {
			query: Query;
			name: string;
			uuid: string;
			id: string;
			panelType: PANEL_TYPES;
			extraData?: string;
	  }

@@ -27,11 +27,11 @@ export const getViewDetailsUsingViewKey: GetViewDetailsUsingViewKey = (
	viewKey,
	data,
) => {
	const selectedView = data?.find((view) => view.uuid === viewKey);
	const selectedView = data?.find((view) => view.id === viewKey);
	if (selectedView) {
		const { compositeQuery, name, uuid, extraData } = selectedView;
		const { compositeQuery, name, id, extraData } = selectedView;
		const query = mapQueryDataFromApi(compositeQuery);
		return { query, name, uuid, panelType: compositeQuery.panelType, extraData };
		return { query, name, id, panelType: compositeQuery.panelType, extraData };
	}
	return undefined;
};
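
The hunks above migrate saved-view lookups from `uuid` to `id`. A small sketch of the updated call shape; the import paths here are hypothetical, not taken from the diff:

```typescript
import { getViewDetailsUsingViewKey } from 'utils/explorer'; // hypothetical path
import { viewMockData } from './__mock__/viewData'; // hypothetical path

// Views are now matched on `id`, and the returned record carries `id`
// instead of `uuid`.
const details = getViewDetailsUsingViewKey('view1', viewMockData);
if (details) {
	const { id, name, panelType } = details;
	console.log(id, name, panelType); // 'view1', 'View 1', <PANEL_TYPES>
}
```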
@@ -1,7 +1,9 @@
import './HostMetricTraces.styles.scss';

import logEvent from 'api/common/logEvent';
import { ResizeTable } from 'components/ResizeTable';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { InfraMonitoringEvents } from 'constants/events';
import { QueryParams } from 'constants/query';
import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch';
import NoLogs from 'container/NoLogs/NoLogs';
@@ -19,7 +21,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { Pagination } from 'hooks/queryPagination';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
@@ -137,6 +139,13 @@ function HostMetricTraces({
	const totalCount =
		data?.payload?.data?.newResult?.data?.result?.[0]?.list?.length || 0;

	const handleRowClick = useCallback(() => {
		logEvent(InfraMonitoringEvents.ItemClicked, {
			entity: InfraMonitoringEvents.HostEntity,
			view: InfraMonitoringEvents.TracesView,
		});
	}, []);

	return (
		<div className="host-metric-traces">
			<div className="host-metric-traces-header">
@@ -189,6 +198,9 @@ function HostMetricTraces({
	loading={isFetching}
	dataSource={traces}
	columns={traceListColumns}
	onRow={(): Record<string, unknown> => ({
		onClick: (): void => handleRowClick(),
	})}
/>
</div>
)}

@@ -149,6 +149,10 @@ function HostMetricsDetails({

	const handleTabChange = (e: RadioChangeEvent): void => {
		setSelectedView(e.target.value);
		logEvent(InfraMonitoringEvents.TabChanged, {
			entity: InfraMonitoringEvents.HostEntity,
			view: e.target.value,
		});
	};

	const handleTimeChange = useCallback(
frontend/src/components/LearnMore/LearnMore.styles.scss (new file, 14 lines)
@@ -0,0 +1,14 @@
.learn-more {
	display: flex;
	align-items: center;
	gap: 4px;
	padding: 0;
	font-family: Inter;
	font-size: 14px;
	font-weight: 500;
	line-height: 18px; /* 128.571% */
	letter-spacing: -0.07px;
	&,
	&:hover {
		color: var(--bg-robin-400) !important;
	}
}
frontend/src/components/LearnMore/LearnMore.tsx (new file, 34 lines)
@@ -0,0 +1,34 @@
import './LearnMore.styles.scss';

import { Color } from '@signozhq/design-tokens';
import { Button } from 'antd';
import { ArrowUpRight } from 'lucide-react';

type LearnMoreProps = {
	text?: string;
	url?: string;
	onClick?: () => void;
};

function LearnMore({ text, url, onClick }: LearnMoreProps): JSX.Element {
	const handleClick = (): void => {
		onClick?.();
		if (url) {
			window.open(url, '_blank');
		}
	};
	return (
		<Button type="link" className="learn-more" onClick={handleClick}>
			<div className="learn-more__text">{text}</div>
			<ArrowUpRight size={16} color={Color.BG_ROBIN_400} />
		</Button>
	);
}

LearnMore.defaultProps = {
	text: 'Learn more',
	url: '',
	onClick: (): void => {},
};

export default LearnMore;
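
A possible usage of the new component: the analytics callback fires first, then the URL opens in a new tab. Nothing below is taken from the diff itself; the URL and wrapper component are hypothetical:

```tsx
import LearnMore from 'components/LearnMore/LearnMore';

// Hypothetical wrapper rendering the default "Learn more" link text.
function FunnelsDocsLink(): JSX.Element {
	return (
		<LearnMore
			url="https://signoz.io/docs" // hypothetical URL
			onClick={(): void => console.log('learn-more clicked')}
		/>
	);
}

export default FunnelsDocsLink;
```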
@@ -18,6 +18,7 @@ function CopyClipboardHOC({

		notifications.success({
			message: notificationMessage,
			key: notificationMessage,
		});
	}
}, [value, notifications, entityKey]);

@@ -63,30 +63,31 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {

	return (
		<div className="quick-filters">
			{source !== QuickFiltersSource.INFRA_MONITORING && (
				<section className="header">
					<section className="left-actions">
						<FilterOutlined />
						<Typography.Text className="text">Filters for</Typography.Text>
						<Tooltip title={`Filter currently in sync with query ${lastQueryName}`}>
							<Typography.Text className="sync-tag">{lastQueryName}</Typography.Text>
						</Tooltip>
					</section>
			{source !== QuickFiltersSource.INFRA_MONITORING &&
				source !== QuickFiltersSource.API_MONITORING && (
					<section className="header">
						<section className="left-actions">
							<FilterOutlined />
							<Typography.Text className="text">Filters for</Typography.Text>
							<Tooltip title={`Filter currently in sync with query ${lastQueryName}`}>
								<Typography.Text className="sync-tag">{lastQueryName}</Typography.Text>
							</Tooltip>
						</section>

					<section className="right-actions">
						<Tooltip title="Reset All">
							<SyncOutlined className="sync-icon" onClick={handleReset} />
						</Tooltip>
						<div className="divider-filter" />
						<Tooltip title="Collapse Filters">
							<VerticalAlignTopOutlined
								rotate={270}
								onClick={handleFilterVisibilityChange}
							/>
						</Tooltip>
						<section className="right-actions">
							<Tooltip title="Reset All">
								<SyncOutlined className="sync-icon" onClick={handleReset} />
							</Tooltip>
							<div className="divider-filter" />
							<Tooltip title="Collapse Filters">
								<VerticalAlignTopOutlined
									rotate={270}
									onClick={handleFilterVisibilityChange}
								/>
							</Tooltip>
						</section>
					</section>
				</section>
			)}
				)}

			<section className="filters">
				{config.map((filter) => {

@@ -39,4 +39,5 @@ export enum QuickFiltersSource {
	LOGS_EXPLORER = 'logs-explorer',
	INFRA_MONITORING = 'infra-monitoring',
	TRACES_EXPLORER = 'traces-explorer',
	API_MONITORING = 'api-monitoring',
}

@@ -1,3 +1,5 @@
import './ResizeTable.styles.scss';

import { SyntheticEvent, useMemo } from 'react';
import { Resizable, ResizeCallbackData } from 'react-resizable';

@@ -10,8 +12,8 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
	const handle = useMemo(
		() => (
			<SpanStyle
				className="react-resizable-handle"
				onClick={(e): void => e.stopPropagation()}
				className="resize-handle"
			/>
		),
		[],
@@ -19,7 +21,7 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {

	if (!width) {
		// eslint-disable-next-line react/jsx-props-no-spreading
		return <th {...restProps} />;
		return <th {...restProps} className="resizable-header" />;
	}

	return (
@@ -29,9 +31,10 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
			handle={handle}
			onResize={onResize}
			draggableOpts={enableUserSelectHack}
			minConstraints={[150, 0]}
		>
			{/* eslint-disable-next-line react/jsx-props-no-spreading */}
			<th {...restProps} />
			<th {...restProps} className="resizable-header" />
		</Resizable>
	);
}
frontend/src/components/ResizeTable/ResizeTable.styles.scss (new file, 53 lines)
@@ -0,0 +1,53 @@
.resizable-header {
	user-select: none;
	-webkit-user-select: none;
	-moz-user-select: none;
	-ms-user-select: none;
	position: relative;

	.ant-table-column-title {
		white-space: normal;
		overflow: hidden;
		text-overflow: ellipsis;
	}
}

.resize-main-table {
	.ant-table-body {
		.ant-table-tbody {
			.ant-table-row {
				.ant-table-cell {
					.ant-typography {
						white-space: unset;
					}
				}
			}
		}
	}
}

.logs-table,
.traces-table {
	.resize-table {
		.resize-handle {
			position: absolute;
			top: 0;
			bottom: 0;
			inset-inline-end: -5px;
			width: 10px;
			cursor: col-resize;

			&::after {
				content: '';
				position: absolute;
				top: 50%;
				left: 50%;
				transform: translate(-50%, -50%);
				width: 1px;
				height: 1.6em;
				background-color: var(--bg-slate-200);
				transition: background-color 0.2s;
			}
		}
	}
}
@@ -2,35 +2,63 @@

import { Table } from 'antd';
import { ColumnsType } from 'antd/lib/table';
import cx from 'classnames';
import { dragColumnParams } from 'hooks/useDragColumns/configs';
import { set } from 'lodash-es';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { debounce, set } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import {
	SyntheticEvent,
	useCallback,
	useEffect,
	useMemo,
	useRef,
	useState,
} from 'react';
import ReactDragListView from 'react-drag-listview';
import { ResizeCallbackData } from 'react-resizable';
import { Widgets } from 'types/api/dashboard/getAll';

import ResizableHeader from './ResizableHeader';
import { DragSpanStyle } from './styles';
import { ResizeTableProps } from './types';

// eslint-disable-next-line sonarjs/cognitive-complexity
function ResizeTable({
	columns,
	onDragColumn,
	pagination,
	widgetId,
	shouldPersistColumnWidths = false,
	...restProps
}: ResizeTableProps): JSX.Element {
	const [columnsData, setColumns] = useState<ColumnsType>([]);
	const { setColumnWidths, selectedDashboard } = useDashboard();

	const columnWidths = shouldPersistColumnWidths
		? (selectedDashboard?.data?.widgets?.find(
				(widget) => widget.id === widgetId,
		  ) as Widgets)?.columnWidths
		: undefined;

	const updateAllColumnWidths = useRef(
		debounce((widthsConfig: Record<string, number>) => {
			if (!widgetId || !shouldPersistColumnWidths) return;
			setColumnWidths?.((prev) => ({
				...prev,
				[widgetId]: widthsConfig,
			}));
		}, 1000),
	).current;

	const handleResize = useCallback(
		(index: number) => (
			_e: SyntheticEvent<Element>,
			e: SyntheticEvent<Element>,
			{ size }: ResizeCallbackData,
		): void => {
			e.preventDefault();
			e.stopPropagation();

			const newColumns = [...columnsData];
			newColumns[index] = {
				...newColumns[index],
@@ -65,6 +93,7 @@ function ResizeTable({
		...restProps,
		components: { header: { cell: ResizableHeader } },
		columns: mergedColumns,
		className: cx('resize-main-table', restProps.className),
	};

	set(
@@ -78,9 +107,39 @@ function ResizeTable({

	useEffect(() => {
		if (columns) {
			setColumns(columns);
			// Apply stored column widths from widget configuration
			const columnsWithStoredWidths = columns.map((col) => {
				const dataIndex = (col as RowData).dataIndex as string;
				if (dataIndex && columnWidths && columnWidths[dataIndex]) {
					return {
						...col,
						width: columnWidths[dataIndex], // Apply stored width
					};
				}
				return col;
			});

			setColumns(columnsWithStoredWidths);
		}
	}, [columns]);
	}, [columns, columnWidths]);

	useEffect(() => {
		if (!shouldPersistColumnWidths) return;
		// Collect all column widths in a single object
		const newColumnWidths: Record<string, number> = {};

		mergedColumns.forEach((col) => {
			if (col.width && (col as RowData).dataIndex) {
				const dataIndex = (col as RowData).dataIndex as string;
				newColumnWidths[dataIndex] = col.width as number;
			}
		});

		// Only update if there are actual widths to set
		if (Object.keys(newColumnWidths).length > 0) {
			updateAllColumnWidths(newColumnWidths);
		}
	}, [mergedColumns, updateAllColumnWidths, shouldPersistColumnWidths]);

	return onDragColumn ? (
		<ReactDragListView.DragColumn {...dragColumnParams} onDragEnd={onDragColumn}>
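
A minimal sketch of the persistence pattern introduced above: resize events fire continuously, so widths are flushed to dashboard state at most once per second, and storing the debounced function in a ref keeps a single stable instance across re-renders (a fresh debounce per render would never settle). The hook name is hypothetical:

```typescript
import { debounce } from 'lodash-es';
import { useRef } from 'react';

// Hypothetical extraction of the debounced-persist pattern used by ResizeTable.
export function useDebouncedPersist(
	persist: (widths: Record<string, number>) => void,
): (widths: Record<string, number>) => void {
	// useRef(...).current keeps one debounce instance for the component's lifetime
	return useRef(debounce(persist, 1000)).current;
}
```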
@@ -8,6 +8,8 @@ export const SpanStyle = styled.span`
	width: 0.625rem;
	height: 100%;
	cursor: col-resize;
	margin-left: 4px;
	margin-right: 4px;
`;

export const DragSpanStyle = styled.span`

@@ -9,6 +9,8 @@ import { TableDataSource } from './contants';

export interface ResizeTableProps extends TableProps<any> {
	onDragColumn?: (fromIndex: number, toIndex: number) => void;
	widgetId?: string;
	shouldPersistColumnWidths?: boolean;
}
export interface DynamicColumnTableProps extends TableProps<any> {
	tablesource: typeof TableDataSource[keyof typeof TableDataSource];

@@ -67,6 +67,17 @@
		background: var(--bg-ink-300);
	}
}

.ant-modal-footer {
	.ant-btn {
		display: flex;
		align-items: center;
		gap: 4px;
		&-icon {
			margin: 0 !important;
		}
	}
}
}

.lightMode {

@@ -4,14 +4,14 @@ import { Modal, ModalProps } from 'antd';

function SignozModal({
	children,

	width = 672,
	rootClassName = '',
	...rest
}: ModalProps): JSX.Element {
	return (
		<Modal
			centered
			width={672}
			width={width}
			cancelText="Close"
			rootClassName={`signoz-modal ${rootClassName}`}
			// eslint-disable-next-line react/jsx-props-no-spreading

@@ -56,6 +56,7 @@ export const DATE_TIME_FORMATS = {

	// Formats with dash separator
	DASH_DATETIME: 'MMM D, YYYY ⎯ HH:mm:ss',
	FUNNELS_LIST_DATE: 'MMM D ⎯ HH:mm:ss',
	DASH_DATETIME_UTC: 'MMM D, YYYY ⎯ HH:mm:ss (UTC Z)',
	DASH_TIME_DATE: 'HH:mm:ss ⎯ MMM D, YYYY (UTC Z)',
} as const;

@@ -25,4 +25,6 @@ export enum FeatureKeys {
	ANOMALY_DETECTION = 'ANOMALY_DETECTION',
	AWS_INTEGRATION = 'AWS_INTEGRATION',
	ONBOARDING_V3 = 'ONBOARDING_V3',
	THIRD_PARTY_API = 'THIRD_PARTY_API',
	TRACE_FUNNELS = 'TRACE_FUNNELS',
}

@@ -51,4 +51,21 @@ export const REACT_QUERY_KEY = {
	GET_METRICS_LIST_FILTER_VALUES: 'GET_METRICS_LIST_FILTER_VALUES',
	GET_METRIC_DETAILS: 'GET_METRIC_DETAILS',
	GET_RELATED_METRICS: 'GET_RELATED_METRICS',
};

	// API Monitoring Query Keys
	GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',
	GET_ENDPOINTS_LIST_BY_DOMAIN: 'GET_ENDPOINTS_LIST_BY_DOMAIN',
	GET_NESTED_ENDPOINTS_LIST: 'GET_NESTED_ENDPOINTS_LIST',
	GET_ENDPOINT_METRICS_DATA: 'GET_ENDPOINT_METRICS_DATA',
	GET_ENDPOINT_STATUS_CODE_DATA: 'GET_ENDPOINT_STATUS_CODE_DATA',
	GET_ENDPOINT_RATE_OVER_TIME_DATA: 'GET_ENDPOINT_RATE_OVER_TIME_DATA',
	GET_ENDPOINT_LATENCY_OVER_TIME_DATA: 'GET_ENDPOINT_LATENCY_OVER_TIME_DATA',
	GET_ENDPOINT_DROPDOWN_DATA: 'GET_ENDPOINT_DROPDOWN_DATA',
	GET_ENDPOINT_DEPENDENT_SERVICES_DATA: 'GET_ENDPOINT_DEPENDENT_SERVICES_DATA',
	GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA:
		'GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA',
	GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA:
		'GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA',
	GET_FUNNELS_LIST: 'GET_FUNNELS_LIST',
	GET_FUNNEL_DETAILS: 'GET_FUNNEL_DETAILS',
} as const;
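
The switch from a plain object to `as const` narrows each key to its string-literal type, so a typo fails at compile time instead of silently creating a separate react-query cache entry. A sketch, with a hypothetical fetcher:

```typescript
import { useQuery, UseQueryResult } from 'react-query';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';

// Hypothetical fetcher standing in for the real domains-list request.
declare function fetchDomainsList(): Promise<unknown>;

export function useDomainsList(): UseQueryResult<unknown> {
	// REACT_QUERY_KEY.GET_DOMAINS_LIST has type 'GET_DOMAINS_LIST', not string
	return useQuery([REACT_QUERY_KEY.GET_DOMAINS_LIST], fetchDomainsList);
}
```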
@@ -56,6 +56,8 @@ const ROUTES = {
	SUPPORT: '/support',
	LOGS_SAVE_VIEWS: '/logs/saved-views',
	TRACES_SAVE_VIEWS: '/traces/saved-views',
	TRACES_FUNNELS: '/traces/funnels',
	TRACES_FUNNELS_DETAIL: '/traces/funnels/:funnelId',
	WORKSPACE_LOCKED: '/workspace-locked',
	WORKSPACE_SUSPENDED: '/workspace-suspended',
	SHORTCUTS: '/shortcuts',
@@ -69,6 +71,7 @@ const ROUTES = {
	METRICS_EXPLORER: '/metrics-explorer/summary',
	METRICS_EXPLORER_EXPLORER: '/metrics-explorer/explorer',
	METRICS_EXPLORER_VIEWS: '/metrics-explorer/views',
	API_MONITORING: '/api-monitoring/explorer',
	METRICS_EXPLORER_BASE: '/metrics-explorer',
	WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',
	HOME_PAGE: '/',

@@ -1,3 +1,4 @@
import ROUTES from 'constants/routes';
import AlertChannels from 'container/AllAlertChannels';
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -20,6 +21,13 @@ jest.mock('hooks/useNotifications', () => ({
	})),
}));

jest.mock('react-router-dom', () => ({
	...jest.requireActual('react-router-dom'),
	useLocation: (): { pathname: string } => ({
		pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
	}),
}));

describe('Alert Channels Settings List page', () => {
	beforeEach(() => {
		render(<AlertChannels />);

@@ -1,3 +1,4 @@
import ROUTES from 'constants/routes';
import AlertChannels from 'container/AllAlertChannels';
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -25,6 +26,13 @@ jest.mock('hooks/useComponentPermission', () => ({
	default: jest.fn().mockImplementation(() => [false]),
}));

jest.mock('react-router-dom', () => ({
	...jest.requireActual('react-router-dom'),
	useLocation: (): { pathname: string } => ({
		pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
	}),
}));

describe('Alert Channels Settings List page (Normal User)', () => {
	beforeEach(() => {
		render(<AlertChannels />);
@@ -0,0 +1,241 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Select, Spin, Table, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
	EndPointsTableRowData,
	formatEndPointsDataForTable,
	getEndPointsColumnsConfig,
	getEndPointsQueryPayload,
} from 'container/ApiMonitoring/utils';
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';

import ErrorState from './components/ErrorState';
import ExpandedRow from './components/ExpandedRow';
import { VIEW_TYPES, VIEWS } from './constants';

function AllEndPoints({
	domainName,
	setSelectedEndPointName,
	setSelectedView,
	groupBy,
	setGroupBy,
}: {
	domainName: string;
	setSelectedEndPointName: (name: string) => void;
	setSelectedView: (tab: VIEWS) => void;
	groupBy: IBuilderQuery['groupBy'];
	setGroupBy: (groupBy: IBuilderQuery['groupBy']) => void;
}): JSX.Element {
	const {
		data: groupByFiltersData,
		isLoading: isLoadingGroupByFilters,
	} = useGetAggregateKeys({
		dataSource: DataSource.TRACES,
		aggregateAttribute: '',
		aggregateOperator: 'noop',
		searchText: '',
		tagType: '',
	});

	const [groupByOptions, setGroupByOptions] = useState<
		{ value: string; label: string }[]
	>([]);

	const [expandedRowKeys, setExpandedRowKeys] = useState<React.Key[]>([]);

	const handleGroupByChange = useCallback(
		(value: IBuilderQuery['groupBy']) => {
			const groupBy = [];

			for (let index = 0; index < value.length; index++) {
				const element = (value[index] as unknown) as string;

				const key = groupByFiltersData?.payload?.attributeKeys?.find(
					(key) => key.key === element,
				);

				if (key) {
					groupBy.push(key);
				}
			}
			setGroupBy(groupBy);
		},
		[groupByFiltersData, setGroupBy],
	);

	useEffect(() => {
		if (groupByFiltersData?.payload) {
			setGroupByOptions(
				groupByFiltersData?.payload?.attributeKeys?.map((filter) => ({
					value: filter.key,
					label: filter.key,
				})) || [],
			);
		}
	}, [groupByFiltersData]);

	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const queryPayloads = useMemo(
		() =>
			getEndPointsQueryPayload(
				groupBy,
				domainName,
				Math.floor(minTime / 1e9),
				Math.floor(maxTime / 1e9),
			),
		[groupBy, domainName, minTime, maxTime],
	);

	// Since only one query here
	const endPointsDataQueries = useQueries(
		queryPayloads.map((payload) => ({
			queryKey: [
				REACT_QUERY_KEY.GET_ENDPOINTS_LIST_BY_DOMAIN,
				payload,
				ENTITY_VERSION_V4,
				groupBy,
			],
			queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
				GetMetricQueryRange(payload, ENTITY_VERSION_V4),
			enabled: !!payload,
			staleTime: 60 * 1000, // 1 minute stale time : optimize this part
		})),
	);

	const endPointsDataQuery = endPointsDataQueries[0];
	const {
		data: allEndPointsData,
		isLoading,
		isRefetching,
		isError,
		refetch,
	} = endPointsDataQuery;

	const endPointsColumnsConfig = useMemo(
		() => getEndPointsColumnsConfig(groupBy.length > 0, expandedRowKeys),
		[groupBy.length, expandedRowKeys],
	);

	const expandedRowRender = (record: EndPointsTableRowData): JSX.Element => (
		<ExpandedRow
			domainName={domainName}
			selectedRowData={record}
			setSelectedEndPointName={setSelectedEndPointName}
			setSelectedView={setSelectedView}
		/>
	);

	const handleGroupByRowClick = (record: EndPointsTableRowData): void => {
		if (expandedRowKeys.includes(record.key)) {
			setExpandedRowKeys(expandedRowKeys.filter((key) => key !== record.key));
		} else {
			setExpandedRowKeys((expandedRowKeys) => [...expandedRowKeys, record.key]);
		}
	};

	const handleRowClick = (record: EndPointsTableRowData): void => {
		if (groupBy.length === 0) {
			setSelectedEndPointName(record.endpointName); // this will open up the endpoint details tab
			setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
			logEvent('API Monitoring: Endpoint name row clicked', {});
		} else {
			handleGroupByRowClick(record); // this will prepare the nested query payload
		}
	};

	const formattedEndPointsData = useMemo(
		() =>
			formatEndPointsDataForTable(
				allEndPointsData?.payload?.data?.result[0]?.table?.rows,
				groupBy,
			),
		[groupBy, allEndPointsData],
	);

	if (isError) {
		return (
			<div className="all-endpoints-error-state-wrapper">
				<ErrorState refetch={refetch} />
			</div>
		);
	}

	return (
		<div className="all-endpoints-container">
			<div className="group-by-container">
				<div className="group-by-label"> Group by </div>
				<Select
					className="group-by-select"
					loading={isLoadingGroupByFilters}
					mode="multiple"
					value={groupBy}
					allowClear
					maxTagCount="responsive"
					placeholder="Search for attribute"
					options={groupByOptions}
					onChange={handleGroupByChange}
				/>{' '}
			</div>
			<div className="endpoints-table-container">
				<div className="endpoints-table-header">Endpoint overview</div>
				<Table
					columns={endPointsColumnsConfig}
					loading={{
						spinning: isLoading || isRefetching,
						indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
					}}
					dataSource={isLoading || isRefetching ? [] : formattedEndPointsData}
					locale={{
						emptyText:
							isLoading || isRefetching ? null : (
								<div className="no-filtered-endpoints-message-container">
									<div className="no-filtered-endpoints-message-content">
										<img
											src="/Icons/emptyState.svg"
											alt="thinking-emoji"
											className="empty-state-svg"
										/>

										<Typography.Text className="no-filtered-endpoints-message">
											This query had no results. Edit your query and try again!
										</Typography.Text>
									</div>
								</div>
							),
					}}
					scroll={{ x: true }}
					tableLayout="fixed"
					onRow={(record): { onClick: () => void; className: string } => ({
						onClick: (): void => handleRowClick(record),
						className: 'clickable-row',
					})}
					expandable={{
						expandedRowRender: groupBy.length > 0 ? expandedRowRender : undefined,
						expandedRowKeys,
						expandIconColumnIndex: -1,
					}}
					rowClassName={(_, index): string =>
						index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
					}
				/>
			</div>
		</div>
	);
}

export default AllEndPoints;
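
A sketch of the expansion toggle behind `handleGroupByRowClick`: with a group-by active, clicking a row expands or collapses its nested endpoint list instead of opening the details tab. The hook name is hypothetical:

```typescript
import { useState } from 'react';
import type { Key } from 'react';

// Hypothetical extraction of the row-expansion toggle used above.
export function useExpandedRows(): {
	expandedRowKeys: Key[];
	toggle: (key: Key) => void;
} {
	const [expandedRowKeys, setExpandedRowKeys] = useState<Key[]>([]);

	const toggle = (key: Key): void =>
		setExpandedRowKeys((prev) =>
			// collapse if already expanded, otherwise append
			prev.includes(key) ? prev.filter((k) => k !== key) : [...prev, key],
		);

	return { expandedRowKeys, toggle };
}
```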
File diff suppressed because it is too large
@@ -0,0 +1,146 @@
import './DomainDetails.styles.scss';

import { Color, Spacing } from '@signozhq/design-tokens';
import { Button, Divider, Drawer, Radio, Typography } from 'antd';
import { RadioChangeEvent } from 'antd/lib';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { ArrowDown, ArrowUp, X } from 'lucide-react';
import { useState } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

import AllEndPoints from './AllEndPoints';
import DomainMetrics from './components/DomainMetrics';
import { VIEW_TYPES, VIEWS } from './constants';
import EndPointDetailsWrapper from './EndPointDetailsWrapper';

function DomainDetails({
	domainData,
	handleClose,
	selectedDomainIndex,
	setSelectedDomainIndex,
	domainListLength,
	domainListFilters,
}: {
	domainData: any;
	handleClose: () => void;
	selectedDomainIndex: number;
	setSelectedDomainIndex: (index: number) => void;
	domainListLength: number;
	domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
	const [selectedView, setSelectedView] = useState<VIEWS>(VIEWS.ALL_ENDPOINTS);
	const [selectedEndPointName, setSelectedEndPointName] = useState<string>('');
	const [endPointsGroupBy, setEndPointsGroupBy] = useState<
		IBuilderQuery['groupBy']
	>([]);
	const isDarkMode = useIsDarkMode();

	const handleTabChange = (e: RadioChangeEvent): void => {
		setSelectedView(e.target.value);
	};

	return (
		<Drawer
			width="60%"
			title={
				<div className="domain-details-drawer-header">
					<div className="domain-details-drawer-header-title">
						<Divider type="vertical" />
						<Typography.Text className="title">
							{domainData.domainName}
						</Typography.Text>
					</div>
					<Button.Group className="domain-details-drawer-header-ctas">
						<Button
							className="domain-navigate-cta"
							onClick={(): void => {
								setSelectedDomainIndex(selectedDomainIndex - 1);
								setSelectedEndPointName('');
								setEndPointsGroupBy([]);
								setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
							}}
							icon={<ArrowUp size={16} />}
							disabled={selectedDomainIndex === 0}
							title="Previous domain"
						/>
						<Button
							className="domain-navigate-cta"
							onClick={(): void => {
								setSelectedDomainIndex(selectedDomainIndex + 1);
								setSelectedEndPointName('');
								setEndPointsGroupBy([]);
								setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
							}}
							icon={<ArrowDown size={16} />}
							disabled={selectedDomainIndex === domainListLength - 1}
							title="Next domain"
						/>
					</Button.Group>
				</div>
			}
			placement="right"
			onClose={handleClose}
			open={!!domainData}
			style={{
				overscrollBehavior: 'contain',
				background: isDarkMode ? Color.BG_INK_400 : Color.BG_VANILLA_100,
			}}
			className="domain-detail-drawer"
			destroyOnClose
			closeIcon={<X size={16} style={{ marginTop: Spacing.MARGIN_1 }} />}
		>
			{domainData && (
				<>
					<DomainMetrics domainData={domainData} />
					<div className="views-tabs-container">
						<Radio.Group
							className="views-tabs"
							onChange={handleTabChange}
							value={selectedView}
						>
							<Radio.Button
								className={
									// eslint-disable-next-line sonarjs/no-duplicate-string
									selectedView === VIEW_TYPES.ALL_ENDPOINTS ? 'selected_view tab' : 'tab'
								}
								value={VIEW_TYPES.ALL_ENDPOINTS}
							>
								<div className="view-title">All Endpoints</div>
							</Radio.Button>
							<Radio.Button
								className={
									selectedView === VIEW_TYPES.ENDPOINT_DETAILS
										? 'tab selected_view'
										: 'tab'
								}
								value={VIEW_TYPES.ENDPOINT_DETAILS}
							>
								<div className="view-title">Endpoint Details</div>
							</Radio.Button>
						</Radio.Group>
					</div>
					{selectedView === VIEW_TYPES.ALL_ENDPOINTS && (
						<AllEndPoints
							domainName={domainData.domainName}
							setSelectedEndPointName={setSelectedEndPointName}
							setSelectedView={setSelectedView}
							groupBy={endPointsGroupBy}
							setGroupBy={setEndPointsGroupBy}
						/>
					)}

					{selectedView === VIEW_TYPES.ENDPOINT_DETAILS && (
						<EndPointDetailsWrapper
							domainName={domainData.domainName}
							endPointName={selectedEndPointName}
							setSelectedEndPointName={setSelectedEndPointName}
							domainListFilters={domainListFilters}
						/>
					)}
				</>
			)}
		</Drawer>
	);
}

export default DomainDetails;
@@ -0,0 +1,199 @@
import { ENTITY_VERSION_V4 } from 'constants/app';
import { initialQueriesMap } from 'constants/queryBuilder';
import {
	END_POINT_DETAILS_QUERY_KEYS_ARRAY,
	extractPortAndEndpoint,
	getEndPointDetailsQueryPayload,
	getLatencyOverTimeWidgetData,
	getRateOverTimeWidgetData,
} from 'container/ApiMonitoring/utils';
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useMemo, useState } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';

import DependentServices from './components/DependentServices';
import EndPointMetrics from './components/EndPointMetrics';
import EndPointsDropDown from './components/EndPointsDropDown';
import MetricOverTimeGraph from './components/MetricOverTimeGraph';
import StatusCodeBarCharts from './components/StatusCodeBarCharts';
import StatusCodeTable from './components/StatusCodeTable';

function EndPointDetails({
	domainName,
	endPointName,
	setSelectedEndPointName,
	domainListFilters,
}: {
	domainName: string;
	endPointName: string;
	setSelectedEndPointName: (value: string) => void;
	domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const currentQuery = initialQueriesMap[DataSource.TRACES];

	const [filters, setFilters] = useState<IBuilderQuery['filters']>({
		op: 'AND',
		items: [],
	});

	// Manually update the query to include the filters,
	// because using the hook updates the global domain
	// query and makes the main domain list refetch with
	// the endpoint-level filters
	const updatedCurrentQuery = useMemo(
		() => ({
			...currentQuery,
			builder: {
				...currentQuery.builder,
				queryData: [
					{
						...currentQuery.builder.queryData[0],
						dataSource: DataSource.TRACES,
						filters,
					},
				],
			},
		}),
		[filters, currentQuery],
	);

	const query = updatedCurrentQuery?.builder?.queryData[0] || null;

	const isServicesFilterApplied = useMemo(
		() => filters.items.some((item) => item.key?.key === 'service.name'),
		[filters],
	);

	const endPointDetailsQueryPayload = useMemo(
		() =>
			getEndPointDetailsQueryPayload(
				domainName,
				endPointName,
				Math.floor(minTime / 1e9),
				Math.floor(maxTime / 1e9),
				filters,
			),
		[domainName, endPointName, filters, minTime, maxTime],
	);

	const endPointDetailsDataQueries = useQueries(
		endPointDetailsQueryPayload.map((payload, index) => ({
			queryKey: [
				END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
				payload,
				filters.items,
				ENTITY_VERSION_V4,
			],
			queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
				GetMetricQueryRange(payload, ENTITY_VERSION_V4),
			enabled: !!payload,
		})),
	);

	const [
		endPointMetricsDataQuery,
		endPointStatusCodeDataQuery,
		endPointDropDownDataQuery,
		endPointDependentServicesDataQuery,
		endPointStatusCodeBarChartsDataQuery,
		endPointStatusCodeLatencyBarChartsDataQuery,
	] = useMemo(
		() => [
			endPointDetailsDataQueries[0],
			endPointDetailsDataQueries[1],
			endPointDetailsDataQueries[2],
			endPointDetailsDataQueries[3],
			endPointDetailsDataQueries[4],
			endPointDetailsDataQueries[5],
		],
		[endPointDetailsDataQueries],
	);

	const { endpoint, port } = useMemo(
		() => extractPortAndEndpoint(endPointName),
		[endPointName],
	);

	const [rateOverTimeWidget, latencyOverTimeWidget] = useMemo(
		() => [
			getRateOverTimeWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
			getLatencyOverTimeWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
		],
		[domainName, endPointName, filters, domainListFilters],
	);

	return (
		<div className="endpoint-details-container">
			<div className="endpoint-details-filters-container">
				<div className="endpoint-details-filters-container-dropdown">
					<EndPointsDropDown
						selectedEndPointName={endPointName}
						setSelectedEndPointName={setSelectedEndPointName}
						endPointDropDownDataQuery={endPointDropDownDataQuery}
						parentContainerDiv=".endpoint-details-filters-container"
						dropdownStyle={{ width: 'calc(100% - 36px)' }}
					/>
				</div>
				<div className="endpoint-details-filters-container-search">
					<QueryBuilderSearchV2
						query={query}
						onChange={(searchFilters): void => {
							setFilters(searchFilters);
						}}
						placeholder="Search for filters..."
					/>
				</div>
			</div>
			<div className="endpoint-meta-data">
				<div className="endpoint-meta-data-pill">
					<div className="endpoint-meta-data-label">Endpoint</div>
					<div className="endpoint-meta-data-value">{endpoint || '-'}</div>
				</div>
				<div className="endpoint-meta-data-pill">
					<div className="endpoint-meta-data-label">Port</div>
					<div className="endpoint-meta-data-value">{port || '-'}</div>
				</div>
			</div>
			<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
			{!isServicesFilterApplied && (
				<DependentServices
					dependentServicesQuery={endPointDependentServicesDataQuery}
				/>
			)}
			<StatusCodeBarCharts
				endPointStatusCodeBarChartsDataQuery={endPointStatusCodeBarChartsDataQuery}
				endPointStatusCodeLatencyBarChartsDataQuery={
					endPointStatusCodeLatencyBarChartsDataQuery
				}
				domainName={domainName}
				endPointName={endPointName}
				domainListFilters={domainListFilters}
				filters={filters}
			/>
			<StatusCodeTable endPointStatusCodeDataQuery={endPointStatusCodeDataQuery} />
			<MetricOverTimeGraph widget={rateOverTimeWidget} />
			<MetricOverTimeGraph widget={latencyOverTimeWidget} />
		</div>
	);
}

export default EndPointDetails;
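
A sketch of the filter merge performed for the two over-time widgets above: the domain list's filters and the drawer's local filters are concatenated so both graphs reflect every active constraint. The helper name is hypothetical; the diff inlines this logic:

```typescript
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

// Hypothetical helper mirroring the inline merge in EndPointDetails.
export function mergeFilters(
	domainFilters: IBuilderQuery['filters'],
	localFilters: IBuilderQuery['filters'],
): IBuilderQuery['filters'] {
	return {
		// domain-level constraints first, then endpoint-level ones
		items: [...domainFilters.items, ...localFilters.items],
		op: localFilters.op,
	};
}
```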
@@ -0,0 +1,80 @@
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { getEndPointZeroStateQueryPayload } from 'container/ApiMonitoring/utils';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useMemo } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';

import EndPointDetailsZeroState from './components/EndPointDetailsZeroState';
import EndPointDetails from './EndPointDetails';

function EndPointDetailsWrapper({
	domainName,
	endPointName,
	setSelectedEndPointName,
	domainListFilters,
}: {
	domainName: string;
	endPointName: string;
	setSelectedEndPointName: (value: string) => void;
	domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const endPointZeroStateQueryPayload = useMemo(
		() =>
			getEndPointZeroStateQueryPayload(
				domainName,
				Math.floor(minTime / 1e9),
				Math.floor(maxTime / 1e9),
			),
		[domainName, minTime, maxTime],
	);

	const endPointZeroStateDataQueries = useQueries(
		endPointZeroStateQueryPayload.map((payload) => ({
			queryKey: [
				// Since only one query here
				REACT_QUERY_KEY.GET_ENDPOINT_DROPDOWN_DATA,
				payload,
				ENTITY_VERSION_V4,
			],
			queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
				GetMetricQueryRange(payload, ENTITY_VERSION_V4),
			enabled: !!payload,
		})),
	);

	const [endPointZeroStateDataQuery] = useMemo(
		() => [endPointZeroStateDataQueries[0]],
		[endPointZeroStateDataQueries],
	);

	if (endPointName === '') {
		return (
			<EndPointDetailsZeroState
				setSelectedEndPointName={setSelectedEndPointName}
				endPointDropDownDataQuery={endPointZeroStateDataQuery}
			/>
		);
	}

	return (
		<EndPointDetails
			domainName={domainName}
			endPointName={endPointName}
			setSelectedEndPointName={setSelectedEndPointName}
			domainListFilters={domainListFilters}
		/>
	);
}

export default EndPointDetailsWrapper;
@@ -0,0 +1,108 @@
import { Typography } from 'antd';
import Skeleton from 'antd/lib/skeleton';
import { getFormattedDependentServicesData } from 'container/ApiMonitoring/utils';
import { UnfoldVertical } from 'lucide-react';
import { useMemo, useState } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import ErrorState from './ErrorState';

interface DependentServicesProps {
	dependentServicesQuery: UseQueryResult<SuccessResponse<any>, unknown>;
}

function DependentServices({
	dependentServicesQuery,
}: DependentServicesProps): JSX.Element {
	const {
		data,
		refetch,
		isError,
		isLoading,
		isRefetching,
	} = dependentServicesQuery;

	const [currentRenderCount, setCurrentRenderCount] = useState(0);

	const dependentServicesData = useMemo(() => {
		const formattedDependentServicesData = getFormattedDependentServicesData(
			data?.payload?.data?.result[0].table.rows,
		);
		setCurrentRenderCount(Math.min(formattedDependentServicesData.length, 5));
		return formattedDependentServicesData;
	}, [data]);

	const renderItems = useMemo(
		() => dependentServicesData.slice(0, currentRenderCount),
		[currentRenderCount, dependentServicesData],
	);

	if (isLoading || isRefetching) {
		return <Skeleton />;
	}

	if (isError) {
		return <ErrorState refetch={refetch} />;
	}

	return (
		<div className="top-services-content">
			<div className="top-services-title">
				<span className="title-wrapper">Dependent Services</span>
			</div>
			<div className="dependent-services-container">
				{renderItems.length === 0 ? (
					<div className="no-dependent-services-message-container">
						<div className="no-dependent-services-message-content">
							<img
								src="/Icons/emptyState.svg"
								alt="thinking-emoji"
								className="empty-state-svg"
							/>

							<Typography.Text className="no-dependent-services-message">
								This query had no results. Edit your query and try again!
							</Typography.Text>
						</div>
					</div>
				) : (
					renderItems.map((item) => (
						<div className="top-services-item" key={item.key}>
							<div className="top-services-item-progress">
								<div className="top-services-item-key">{item.serviceName}</div>
								<div className="top-services-item-count">{item.count}</div>
								<div
									className="top-services-item-progress-bar"
									style={{ width: `${item.percentage}%` }}
								/>
							</div>
							<div className="top-services-item-percentage">
								{item.percentage.toFixed(2)}%
							</div>
						</div>
					))
				)}

				{currentRenderCount < dependentServicesData.length && (
					<div
						className="top-services-load-more"
						onClick={(): void => setCurrentRenderCount(dependentServicesData.length)}
						onKeyDown={(e): void => {
							if (e.key === 'Enter') {
								setCurrentRenderCount(dependentServicesData.length);
							}
						}}
						role="button"
						tabIndex={0}
					>
						<UnfoldVertical size={14} />
						Show more...
					</div>
				)}
			</div>
		</div>
	);
}

export default DependentServices;
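
A simplified sketch of the incremental "Show more..." pattern above: render at most five rows initially, then reveal the full list on demand. The hook is hypothetical and omits the query wiring:

```typescript
import { useMemo, useState } from 'react';

// Hypothetical hook capturing the render-count pattern used by DependentServices.
export function useShowMore<T>(items: T[], initialCount = 5): {
	visible: T[];
	hasMore: boolean;
	showAll: () => void;
} {
	const [count, setCount] = useState(Math.min(items.length, initialCount));
	const visible = useMemo(() => items.slice(0, count), [items, count]);
	return {
		visible,
		hasMore: count < items.length,
		showAll: (): void => setCount(items.length),
	};
}
```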
|
||||
@@ -0,0 +1,82 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Progress, Tooltip, Typography } from 'antd';
|
||||
import { getLastUsedRelativeTime } from 'container/ApiMonitoring/utils';
|
||||
|
||||
function DomainMetrics({ domainData }: { domainData: any }): JSX.Element {
|
||||
return (
|
||||
<div className="domain-detail-drawer__endpoint">
|
||||
<div className="domain-details-grid">
|
||||
<div className="labels-row">
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="domain-details-metadata-label"
|
||||
>
|
||||
EXTERNAL API
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="domain-details-metadata-label"
|
||||
>
|
||||
AVERAGE LATENCY
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="domain-details-metadata-label"
|
||||
>
|
||||
ERROR RATE
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="domain-details-metadata-label"
|
||||
>
|
||||
LAST USED
|
||||
</Typography.Text>
|
||||
</div>
|
||||
|
||||
<div className="values-row">
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.endpointCount}>
|
||||
<span className="round-metric-tag">{domainData.endpointCount}</span>
|
||||
</Tooltip>
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.latency}>
|
||||
<span className="round-metric-tag">
|
||||
{(domainData.latency / 1000).toFixed(3)}s
|
||||
</span>
|
||||
</Tooltip>
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value error-rate">
|
||||
<Tooltip title={domainData.errorRate}>
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number((domainData.errorRate * 100).toFixed(1))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
(domainData.errorRate * 100).toFixed(1),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
</Tooltip>
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.lastUsed}>
|
||||
{getLastUsedRelativeTime(domainData.lastUsed)}
|
||||
</Tooltip>
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default DomainMetrics;
|
||||
@@ -0,0 +1,40 @@
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import EndPointsDropDown from './EndPointsDropDown';

function EndPointDetailsZeroState({
	setSelectedEndPointName,
	endPointDropDownDataQuery,
}: {
	setSelectedEndPointName: (endPointName: string) => void;
	endPointDropDownDataQuery: UseQueryResult<SuccessResponse<any>>;
}): JSX.Element {
	return (
		<div className="end-point-details-zero-state-wrapper">
			<div className="end-point-details-zero-state-content">
				<img
					src="/Icons/no-data.svg"
					alt="no-data"
					width={32}
					height={32}
					className="end-point-details-zero-state-icon"
				/>
				<div className="end-point-details-zero-state-content-wrapper">
					<div className="end-point-details-zero-state-text-content">
						<div className="title">No endpoint selected yet</div>
						<div className="description">Select an endpoint to see the details</div>
					</div>
					<EndPointsDropDown
						setSelectedEndPointName={setSelectedEndPointName}
						endPointDropDownDataQuery={endPointDropDownDataQuery}
						parentContainerDiv=".end-point-details-zero-state-wrapper"
						dropdownStyle={{ width: '60%' }}
					/>
				</div>
			</div>
		</div>
	);
}

export default EndPointDetailsZeroState;
@@ -0,0 +1,121 @@
import { Color } from '@signozhq/design-tokens';
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import ErrorState from './ErrorState';

function EndPointMetrics({
	endPointMetricsDataQuery,
}: {
	endPointMetricsDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
}): JSX.Element {
	const {
		isLoading,
		isRefetching,
		isError,
		data,
		refetch,
	} = endPointMetricsDataQuery;

	const metricsData = useMemo(() => {
		if (isLoading || isRefetching || isError) {
			return null;
		}

		return getFormattedEndPointMetricsData(
			data?.payload?.data?.result[0].table.rows,
		);
	}, [data?.payload?.data?.result, isLoading, isRefetching, isError]);

	if (isError) {
		return <ErrorState refetch={refetch} />;
	}

	return (
		<div className="domain-detail-drawer__endpoint">
			<div className="domain-details-grid">
				<div className="labels-row">
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						Rate
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						AVERAGE LATENCY
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						ERROR RATE
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						LAST USED
					</Typography.Text>
				</div>

				<div className="values-row">
					<Typography.Text className="domain-details-metadata-value">
						{isLoading || isRefetching ? (
							<Skeleton.Button active size="small" />
						) : (
							<Tooltip title={metricsData?.rate}>
								<span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
							</Tooltip>
						)}
					</Typography.Text>
					<Typography.Text className="domain-details-metadata-value">
						{isLoading || isRefetching ? (
							<Skeleton.Button active size="small" />
						) : (
							<Tooltip title={metricsData?.latency}>
								<span className="round-metric-tag">{metricsData?.latency}ms</span>
							</Tooltip>
						)}
					</Typography.Text>
					<Typography.Text className="domain-details-metadata-value error-rate">
						{isLoading || isRefetching ? (
							<Skeleton.Button active size="small" />
						) : (
							<Tooltip title={metricsData?.errorRate}>
								<Progress
									percent={Number(((metricsData?.errorRate ?? 0) * 100).toFixed(1))}
									strokeLinecap="butt"
									size="small"
									strokeColor={((): string => {
										const errorRatePercent = Number(
											((metricsData?.errorRate ?? 0) * 100).toFixed(1),
										);
										if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
										if (errorRatePercent >= 60) return Color.BG_AMBER_500;
										return Color.BG_FOREST_500;
									})()}
									className="progress-bar"
								/>
							</Tooltip>
						)}
					</Typography.Text>
					<Typography.Text className="domain-details-metadata-value">
						{isLoading || isRefetching ? (
							<Skeleton.Button active size="small" />
						) : (
							<Tooltip title={metricsData?.lastUsed}>{metricsData?.lastUsed}</Tooltip>
						)}
					</Typography.Text>
				</div>
			</div>
		</div>
	);
}

export default EndPointMetrics;
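The parentheses in the percent calculation above matter: `??` binds more loosely than `*`, so an unparenthesized `errorRate ?? 0 * 100` parses as `errorRate ?? (0 * 100)` and renders the raw fraction instead of a percentage whenever errorRate is defined. A quick standalone check:

// Assumes an error rate of 5% stored as a fraction, as elsewhere in this diff.
const errorRate: number | undefined = 0.05;
console.log(Number((errorRate ?? 0 * 100).toFixed(1)));   // 0.1 — wrong: raw fraction, rounded
console.log(Number(((errorRate ?? 0) * 100).toFixed(1))); // 5 — intended percent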
@@ -0,0 +1,61 @@
import { Select } from 'antd';
import { getFormattedEndPointDropDownData } from 'container/ApiMonitoring/utils';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

interface EndPointsDropDownProps {
	selectedEndPointName?: string;
	setSelectedEndPointName: (value: string) => void;
	endPointDropDownDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
	parentContainerDiv?: string;
	dropdownStyle?: React.CSSProperties;
}

const defaultProps = {
	selectedEndPointName: '',
	parentContainerDiv: '',
	dropdownStyle: {},
};

function EndPointsDropDown({
	selectedEndPointName,
	setSelectedEndPointName,
	endPointDropDownDataQuery,
	parentContainerDiv,
	dropdownStyle,
}: EndPointsDropDownProps): JSX.Element {
	const { data, isLoading, isFetching } = endPointDropDownDataQuery;

	const handleChange = (value: string): void => {
		setSelectedEndPointName(value);
	};

	const formattedData = useMemo(
		() =>
			getFormattedEndPointDropDownData(data?.payload.data.result[0].table.rows),
		[data?.payload.data.result],
	);

	return (
		<Select
			value={selectedEndPointName || undefined}
			placeholder="Select endpoint"
			loading={isLoading || isFetching}
			style={{ width: '100%' }}
			onChange={handleChange}
			options={formattedData}
			getPopupContainer={
				parentContainerDiv
					? (): HTMLElement =>
							document.querySelector(parentContainerDiv) as HTMLElement
					: (triggerNode): HTMLElement => triggerNode.parentNode as HTMLElement
			}
			dropdownStyle={dropdownStyle}
		/>
	);
}

EndPointsDropDown.defaultProps = defaultProps;

export default EndPointsDropDown;
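Because the component takes the whole react-query result as a prop, the parent owns fetching and caching. A minimal usage sketch, with a hypothetical fetcher (`fetchEndpointRows`) standing in for the real query wiring; none of this is part of the diff:

import { useState } from 'react';
import { useQuery } from 'react-query';
import { SuccessResponse } from 'types/api';

import EndPointsDropDown from './EndPointsDropDown';

// `fetchEndpointRows` is assumed to resolve to the SuccessResponse shape the
// dropdown reads from (payload.data.result[0].table.rows).
function EndpointPicker({
	fetchEndpointRows,
}: {
	fetchEndpointRows: () => Promise<SuccessResponse<any>>;
}): JSX.Element {
	const [selected, setSelected] = useState('');
	const endPointDropDownDataQuery = useQuery(
		['endpoint-dropdown'],
		fetchEndpointRows,
	);

	return (
		<EndPointsDropDown
			selectedEndPointName={selected}
			setSelectedEndPointName={setSelected}
			endPointDropDownDataQuery={endPointDropDownDataQuery}
		/>
	);
}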
@@ -0,0 +1,31 @@
import { Button, Typography } from 'antd';
import { RotateCw } from 'lucide-react';

function ErrorState({ refetch }: { refetch: () => void }): JSX.Element {
	return (
		<div className="error-state-container">
			<div className="error-state-content-wrapper">
				<div className="error-state-content">
					<div className="icon">
						<img src="/Icons/awwSnap.svg" alt="awwSnap" width={32} height={32} />
					</div>
					<div className="error-state-text">
						<Typography.Text>Uh-oh :/ We ran into an error.</Typography.Text>
						<Typography.Text type="secondary">
							Please refresh this panel.
						</Typography.Text>
					</div>
				</div>
				<Button
					className="refresh-cta"
					onClick={(): void => refetch()}
					icon={<RotateCw size={16} />}
				>
					Refresh this panel
				</Button>
			</div>
		</div>
	);
}

export default ErrorState;
@@ -0,0 +1,129 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table } from 'antd';
import { ColumnType } from 'antd/lib/table';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
	createFiltersForSelectedRowData,
	EndPointsTableRowData,
	formatEndPointsDataForTable,
	getEndPointsColumnsConfig,
	getEndPointsQueryPayload,
} from 'container/ApiMonitoring/utils';
import LoadingContainer from 'container/InfraMonitoringK8s/LoadingContainer';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useMemo } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { GlobalReducer } from 'types/reducer/globalTime';

import { VIEW_TYPES, VIEWS } from '../constants';

function ExpandedRow({
	domainName,
	selectedRowData,
	setSelectedEndPointName,
	setSelectedView,
}: {
	domainName: string;
	selectedRowData: EndPointsTableRowData;
	setSelectedEndPointName: (name: string) => void;
	setSelectedView: (view: VIEWS) => void;
}): JSX.Element {
	const nestedColumns = useMemo(() => getEndPointsColumnsConfig(false, []), []);
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);
	const groupedByRowDataQueryPayload = useMemo(() => {
		if (!selectedRowData) return null;

		const filters = createFiltersForSelectedRowData(selectedRowData);

		const baseQueryPayload = getEndPointsQueryPayload(
			[],
			domainName,
			Math.floor(minTime / 1e9),
			Math.floor(maxTime / 1e9),
		);

		return baseQueryPayload.map((currentQueryPayload) => ({
			...currentQueryPayload,
			query: {
				...currentQueryPayload.query,
				builder: {
					...currentQueryPayload.query.builder,
					queryData: currentQueryPayload.query.builder.queryData.map(
						(queryData) => ({
							...queryData,
							filters: {
								items: [...(queryData.filters?.items || []), ...filters.items],
								op: 'AND',
							},
						}),
					),
				},
			},
		}));
	}, [domainName, minTime, maxTime, selectedRowData]);

	const groupedByRowQueries = useQueries(
		groupedByRowDataQueryPayload
			? groupedByRowDataQueryPayload.map((payload) => ({
					queryKey: [
						`${REACT_QUERY_KEY.GET_NESTED_ENDPOINTS_LIST}-${domainName}-${selectedRowData?.key}`,
						payload,
						ENTITY_VERSION_V4,
						selectedRowData?.key,
					],
					queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
						GetMetricQueryRange(payload, ENTITY_VERSION_V4),
					enabled: !!payload && !!selectedRowData,
			  }))
			: [],
	);

	const groupedByRowQuery = groupedByRowQueries[0];
	return (
		<div className="expanded-table-container">
			{groupedByRowQuery?.isFetching || groupedByRowQuery?.isLoading ? (
				<LoadingContainer />
			) : (
				<div className="expanded-table">
					<Table
						columns={nestedColumns as ColumnType<EndPointsTableRowData>[]}
						dataSource={
							groupedByRowQuery?.data
								? formatEndPointsDataForTable(
										groupedByRowQuery.data?.payload.data.result[0].table?.rows,
										[],
								  )
								: []
						}
						pagination={false}
						scroll={{ x: true }}
						tableLayout="fixed"
						showHeader={false}
						loading={{
							spinning: groupedByRowQuery?.isFetching || groupedByRowQuery?.isLoading,
							indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
						}}
						onRow={(record): { onClick: () => void; className: string } => ({
							onClick: (): void => {
								setSelectedEndPointName(record.endpointName);
								setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
								logEvent('API Monitoring: Endpoint name row clicked', {});
							},
							className: 'expanded-clickable-row',
						})}
					/>
				</div>
			)}
		</div>
	);
}

export default ExpandedRow;
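The payload transform above appends the selected row's filters to every builder query under an explicit AND. The merge step, isolated as a pure-function sketch (types narrowed to just what the merge touches; the real IBuilderQuery['filters'] items are richer, and this helper is not part of the diff):

// Hypothetical narrow type standing in for IBuilderQuery['filters'].
type FilterSet = { items: unknown[]; op: string };

// Combine a query's existing filters with the expanded row's filters.
function mergeRowFilters(
	base: FilterSet | undefined,
	rowFilters: FilterSet,
): FilterSet {
	return {
		items: [...(base?.items || []), ...rowFilters.items],
		op: 'AND',
	};
}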
@@ -0,0 +1,22 @@
import { Card } from 'antd';
import GridCard from 'container/GridCardLayout/GridCard';
import { Widgets } from 'types/api/dashboard/getAll';

function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
	return (
		<div>
			<Card bordered className="endpoint-details-card">
				<div className="graph-container">
					<GridCard
						widget={widget}
						isQueryEnabled
						onDragSelect={(): void => {}}
						customOnDragSelect={(): void => {}}
					/>
				</div>
			</Card>
		</div>
	);
}

export default MetricOverTimeGraph;
@@ -0,0 +1,257 @@
import { Color } from '@signozhq/design-tokens';
import { Button, Card, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import { useGetGraphCustomSeries } from 'components/CeleryTask/useGetGraphCustomSeries';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import Uplot from 'components/Uplot';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
	getCustomFiltersForBarChart,
	getFormattedEndPointStatusCodeChartData,
	getStatusCodeBarChartWidgetData,
	statusCodeWidgetInfo,
} from 'container/ApiMonitoring/utils';
import { handleGraphClick } from 'container/GridCardLayout/GridCard/utils';
import { useGraphClickToShowButton } from 'container/GridCardLayout/useGraphClickToShowButton';
import useNavigateToExplorerPages from 'container/GridCardLayout/useNavigateToExplorerPages';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useNotifications } from 'hooks/useNotifications';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useMemo, useRef, useState } from 'react';
import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';
import { Options } from 'uplot';

import ErrorState from './ErrorState';

function StatusCodeBarCharts({
	endPointStatusCodeBarChartsDataQuery,
	endPointStatusCodeLatencyBarChartsDataQuery,
	domainName,
	endPointName,
	domainListFilters,
	filters,
}: {
	endPointStatusCodeBarChartsDataQuery: UseQueryResult<
		SuccessResponse<any>,
		unknown
	>;
	endPointStatusCodeLatencyBarChartsDataQuery: UseQueryResult<
		SuccessResponse<any>,
		unknown
	>;
	domainName: string;
	endPointName: string;
	domainListFilters: IBuilderQuery['filters'];
	filters: IBuilderQuery['filters'];
}): JSX.Element {
	// 0 : Status Code Count
	// 1 : Status Code Latency
	const [currentWidgetInfoIndex, setCurrentWidgetInfoIndex] = useState(0);

	const {
		data: endPointStatusCodeBarChartsData,
	} = endPointStatusCodeBarChartsDataQuery;

	const {
		data: endPointStatusCodeLatencyBarChartsData,
	} = endPointStatusCodeLatencyBarChartsDataQuery;

	const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const graphRef = useRef<HTMLDivElement>(null);
	const dimensions = useResizeObserver(graphRef);
	const formattedEndPointStatusCodeBarChartsDataPayload = useMemo(
		() =>
			getFormattedEndPointStatusCodeChartData(
				endPointStatusCodeBarChartsData?.payload,
				'sum',
			),
		[endPointStatusCodeBarChartsData?.payload],
	);

	const formattedEndPointStatusCodeLatencyBarChartsDataPayload = useMemo(
		() =>
			getFormattedEndPointStatusCodeChartData(
				endPointStatusCodeLatencyBarChartsData?.payload,
				'average',
			),
		[endPointStatusCodeLatencyBarChartsData?.payload],
	);

	const chartData = useMemo(
		() =>
			getUPlotChartData(
				currentWidgetInfoIndex === 0
					? formattedEndPointStatusCodeBarChartsDataPayload
					: formattedEndPointStatusCodeLatencyBarChartsDataPayload,
			),
		[
			currentWidgetInfoIndex,
			formattedEndPointStatusCodeBarChartsDataPayload,
			formattedEndPointStatusCodeLatencyBarChartsDataPayload,
		],
	);

	const isDarkMode = useIsDarkMode();

	const graphClick = useGraphClickToShowButton({
		graphRef,
		isButtonEnabled: true,
		buttonClassName: 'view-onclick-show-button',
	});

	const navigateToExplorer = useNavigateToExplorer();

	const navigateToExplorerPages = useNavigateToExplorerPages();
	const { notifications } = useNotifications();

	const { getCustomSeries } = useGetGraphCustomSeries({
		isDarkMode,
		drawStyle: 'bars',
		colorMapping: {
			'200-299': Color.BG_FOREST_500,
			'300-399': Color.BG_AMBER_400,
			'400-499': Color.BG_CHERRY_500,
			'500-599': Color.BG_ROBIN_500,
			Other: Color.BG_SIENNA_500,
		},
	});

	const widget = useMemo<Widgets>(
		() =>
			getStatusCodeBarChartWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
		[domainName, endPointName, domainListFilters, filters],
	);

	const graphClickHandler = useCallback(
		(
			xValue: number,
			yValue: number,
			mouseX: number,
			mouseY: number,
			metric?: { [key: string]: string },
			queryData?: { queryName: string; inFocusOrNot: boolean },
		): void => {
			const customFilters = getCustomFiltersForBarChart(metric);
			handleGraphClick({
				xValue,
				yValue,
				mouseX,
				mouseY,
				metric,
				queryData,
				widget,
				navigateToExplorerPages,
				navigateToExplorer,
				notifications,
				graphClick,
				customFilters,
			});
		},
		[
			widget,
			navigateToExplorerPages,
			navigateToExplorer,
			notifications,
			graphClick,
		],
	);

	const options = useMemo(
		() =>
			getUPlotChartOptions({
				apiResponse:
					currentWidgetInfoIndex === 0
						? formattedEndPointStatusCodeBarChartsDataPayload
						: formattedEndPointStatusCodeLatencyBarChartsDataPayload,
				isDarkMode,
				dimensions,
				yAxisUnit: statusCodeWidgetInfo[currentWidgetInfoIndex].yAxisUnit,
				softMax: null,
				softMin: null,
				minTimeScale: Math.floor(minTime / 1e9),
				maxTimeScale: Math.floor(maxTime / 1e9),
				panelType: PANEL_TYPES.BAR,
				onClickHandler: graphClickHandler,
				customSeries: getCustomSeries,
			}),
		[
			minTime,
			maxTime,
			currentWidgetInfoIndex,
			dimensions,
			formattedEndPointStatusCodeBarChartsDataPayload,
			formattedEndPointStatusCodeLatencyBarChartsDataPayload,
			isDarkMode,
			graphClickHandler,
			getCustomSeries,
		],
	);

	const renderCardContent = useCallback(
		(query: UseQueryResult<SuccessResponse<any>, unknown>): JSX.Element => {
			if (query.isLoading) {
				return <Skeleton />;
			}

			if (query.error) {
				return <ErrorState refetch={query.refetch} />;
			}
			return (
				<div
					className={cx('chart-container', {
						'no-data-container':
							!query.isLoading && !query?.data?.payload?.data?.result?.length,
					})}
				>
					<Uplot options={options as Options} data={chartData} />
				</div>
			);
		},
		[options, chartData],
	);

	return (
		<div>
			<Card bordered className="endpoint-details-card">
				<div className="header">
					<Typography.Text>Call response status</Typography.Text>
					<Button.Group className="views-tabs">
						<Button
							value={0}
							className={currentWidgetInfoIndex === 0 ? 'selected_view tab' : 'tab'}
							disabled={false}
							onClick={(): void => setCurrentWidgetInfoIndex(0)}
						>
							Number of calls
						</Button>
						<Button
							value={1}
							className={currentWidgetInfoIndex === 1 ? 'selected_view tab' : 'tab'}
							onClick={(): void => setCurrentWidgetInfoIndex(1)}
						>
							Latency
						</Button>
					</Button.Group>
				</div>
				<div className="graph-container" ref={graphRef}>
					{renderCardContent(endPointStatusCodeBarChartsDataQuery)}
				</div>
			</Card>
		</div>
	);
}
export default StatusCodeBarCharts;
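`statusCodeWidgetInfo` is indexed by the same 0/1 toggle as the chart payloads (0 = call counts, 1 = latency). A sketch of the assumed shape, inferred only from the `yAxisUnit` access above; the real array lives in container/ApiMonitoring/utils and may carry more fields:

// Hypothetical reconstruction; only `yAxisUnit` is referenced in this file.
interface StatusCodeWidgetInfo {
	yAxisUnit: string;
}

const statusCodeWidgetInfoSketch: StatusCodeWidgetInfo[] = [
	{ yAxisUnit: 'none' }, // 0: status code count (unit assumed)
	{ yAxisUnit: 'ms' }, // 1: status code latency (unit assumed)
];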
@@ -0,0 +1,72 @@
import { Table, Typography } from 'antd';
import {
	endPointStatusCodeColumns,
	getFormattedEndPointStatusCodeData,
} from 'container/ApiMonitoring/utils';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import ErrorState from './ErrorState';

function StatusCodeTable({
	endPointStatusCodeDataQuery,
}: {
	endPointStatusCodeDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
}): JSX.Element {
	const {
		isLoading,
		isRefetching,
		isError,
		data,
		refetch,
	} = endPointStatusCodeDataQuery;

	const statusCodeData = useMemo(() => {
		if (isLoading || isRefetching || isError) {
			return [];
		}

		return getFormattedEndPointStatusCodeData(
			data?.payload?.data?.result[0].table.rows,
		);
	}, [data?.payload?.data?.result, isLoading, isRefetching, isError]);

	if (isError) {
		return <ErrorState refetch={refetch} />;
	}

	return (
		<div className="status-code-table-container">
			<Table
				loading={isLoading || isRefetching}
				dataSource={statusCodeData || []}
				columns={endPointStatusCodeColumns}
				pagination={false}
				rowClassName={(_, index): string =>
					index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
				}
				locale={{
					emptyText:
						isLoading || isRefetching ? null : (
							<div className="no-status-code-data-message-container">
								<div className="no-status-code-data-message-content">
									<img
										src="/Icons/emptyState.svg"
										alt="thinking-emoji"
										className="empty-state-svg"
									/>

									<Typography.Text className="no-status-code-data-message">
										This query had no results. Edit your query and try again!
									</Typography.Text>
								</div>
							</div>
						),
				}}
			/>
		</div>
	);
}

export default StatusCodeTable;
@@ -0,0 +1,9 @@
export enum VIEWS {
	ALL_ENDPOINTS = 'all_endpoints',
	ENDPOINT_DETAILS = 'endpoint_details',
}

export const VIEW_TYPES = {
	ALL_ENDPOINTS: VIEWS.ALL_ENDPOINTS,
	ENDPOINT_DETAILS: VIEWS.ENDPOINT_DETAILS,
};
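`VIEW_TYPES` simply re-exposes the enum members as a const object, so both spellings stay assignable to `VIEWS`. A small type-check sketch (not part of the diff):

import { VIEW_TYPES, VIEWS } from './constants';

const logView = (view: VIEWS): void => console.log(view);

logView(VIEWS.ALL_ENDPOINTS); // 'all_endpoints'
logView(VIEW_TYPES.ENDPOINT_DETAILS); // 'endpoint_details'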
@@ -0,0 +1,159 @@
import '../Explorer.styles.scss';

import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table, Typography } from 'antd';
import axios from 'api';
import logEvent from 'api/common/logEvent';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import cx from 'classnames';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { HandleChangeQueryData } from 'types/common/operations.types';
import { GlobalReducer } from 'types/reducer/globalTime';

import {
	columnsConfig,
	formatDataForTable,
	hardcodedAttributeKeys,
} from '../../utils';
import DomainDetails from './DomainDetails/DomainDetails';

function DomainList({
	query,
	showIP,
	handleChangeQueryData,
}: {
	query: IBuilderQuery;
	showIP: boolean;
	handleChangeQueryData: HandleChangeQueryData;
}): JSX.Element {
	const [selectedDomainIndex, setSelectedDomainIndex] = useState<number>(-1);
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const fetchApiOverview = async (): Promise<
		SuccessResponse<any> | ErrorResponse
	> => {
		const requestBody = {
			start: minTime,
			end: maxTime,
			show_ip: showIP,
			filters: {
				op: 'AND',
				items: query?.filters.items,
			},
		};

		try {
			const response = await axios.post(
				'/third-party-apis/overview/list',
				requestBody,
			);
			return {
				statusCode: 200,
				error: null,
				message: response.data.status,
				payload: response.data,
			};
		} catch (error) {
			return ErrorResponseHandler(error as AxiosError);
		}
	};

	const { data, isLoading, isFetching } = useQuery(
		[REACT_QUERY_KEY.GET_DOMAINS_LIST, minTime, maxTime, query, showIP],
		fetchApiOverview,
	);

	const formattedDataForTable = useMemo(
		() => formatDataForTable(data?.payload?.data?.result[0]?.table?.rows),
		[data],
	);

	return (
		<section className={cx('api-module-right-section')}>
			<div className={cx('api-monitoring-list-header')}>
				<QueryBuilderSearchV2
					query={query}
					onChange={(searchFilters): void =>
						handleChangeQueryData('filters', searchFilters)
					}
					placeholder="Search filters..."
					hardcodedAttributeKeys={hardcodedAttributeKeys}
				/>
				<DateTimeSelectionV2
					showAutoRefresh={false}
					showRefreshText={false}
					hideShareModal
				/>
			</div>
			<Table
				className={cx('api-monitoring-domain-list-table')}
				dataSource={isFetching || isLoading ? [] : formattedDataForTable}
				columns={columnsConfig}
				loading={{
					spinning: isFetching || isLoading,
					indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
				}}
				locale={{
					emptyText:
						isFetching || isLoading ? null : (
							<div className="no-filtered-domains-message-container">
								<div className="no-filtered-domains-message-content">
									<img
										src="/Icons/emptyState.svg"
										alt="thinking-emoji"
										className="empty-state-svg"
									/>

									<Typography.Text className="no-filtered-domains-message">
										This query had no results. Edit your query and try again!
									</Typography.Text>
								</div>
							</div>
						),
				}}
				scroll={{ x: true }}
				tableLayout="fixed"
				onRow={(record, index): { onClick: () => void; className: string } => ({
					onClick: (): void => {
						if (index !== undefined) {
							const dataIndex = formattedDataForTable.findIndex(
								(item) => item.key === record.key,
							);
							setSelectedDomainIndex(dataIndex);
							logEvent('API Monitoring: Domain name row clicked', {});
						}
					},
					className: 'expanded-clickable-row',
				})}
				rowClassName={(_, index): string =>
					index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
				}
			/>
			{selectedDomainIndex !== -1 && (
				<DomainDetails
					domainData={formattedDataForTable[selectedDomainIndex]}
					selectedDomainIndex={selectedDomainIndex}
					setSelectedDomainIndex={setSelectedDomainIndex}
					domainListLength={formattedDataForTable.length}
					handleClose={(): void => {
						setSelectedDomainIndex(-1);
					}}
					domainListFilters={query?.filters}
				/>
			)}
		</section>
	);
}

export default DomainList;
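fetchApiOverview above posts the global time range and the current builder filters straight through to the overview endpoint. Its request body, written out as a sketch (field types inferred from the requestBody literal; `start`/`end` are passed along in whatever unit `globalTime` carries, and this interface is not a published API contract):

// Inferred from the requestBody construction in fetchApiOverview.
interface ThirdPartyApisOverviewListRequest {
	start: number;
	end: number;
	show_ip: boolean;
	filters: {
		op: 'AND';
		items: unknown[];
	};
}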
@@ -0,0 +1,219 @@
.api-monitoring-page {
	display: flex;
	height: 100%;

	.api-quick-filter-left-section {
		width: 0%;
		flex-shrink: 0;

		.api-quick-filters-header {
			padding: 12px;
			border-bottom: 1px solid var(--bg-slate-400);

			display: flex;
			align-items: center;
			gap: 6px;

			font-size: 14px;
			line-height: 18px;
		}
	}

	.api-module-right-section {
		display: flex;
		flex-direction: column;
		width: 100%;

		.api-monitoring-list-header {
			width: 100%;
			padding: 8px;
			display: flex;

			justify-content: space-between;
			align-items: center;
			gap: 12px;
		}

		.query-builder-search-v2 {
			min-width: 80%;
			flex: 1;
		}
	}

	.api-monitoring-domain-list-table {
		.ant-table {
			.ant-table-thead > tr > th {
				padding: 12px;
				font-weight: 500;
				font-size: 12px;
				line-height: 18px;
				border-bottom: none;

				color: var(--bg-vanilla-400);
				font-family: Inter;
				font-size: 11px;
				font-style: normal;
				font-weight: 600;
				line-height: 18px;
				/* 163.636% */
				letter-spacing: 0.44px;
				text-transform: uppercase;
				background: none;

				&::before {
					background-color: transparent;
				}
			}

			.ant-table-thead > tr > th:has(.domain-list-name-col-header) {
				background: var(--bg-ink-300);
				opacity: 0.6;
			}

			.ant-table-cell {
				padding: 12px;
				font-size: 13px;
				line-height: 20px;
				color: var(--bg-vanilla-100);
				border-bottom: none;
			}

			.ant-table-cell:has(.domain-list-name-col-value) {
				background: var(--bg-ink-300);
				opacity: 0.6;
			}

			.round-metric-tag {
				display: inline-flex;
				padding: 2px 8px;
				align-items: center;
				gap: 6px;
				width: fit-content;

				border-radius: 50px;
				border: 1px solid var(--bg-slate-400);
				background: var(--bg-slate-500);
				text-transform: lowercase;
			}

			.ant-table-tbody > tr:hover > td {
				background: rgba(255, 255, 255, 0.04);
			}

			.ant-table-cell:first-child {
				text-align: justify;
			}

			.ant-table-cell:nth-child(2) {
				padding-left: 16px;
				padding-right: 16px;
			}

			.ant-table-cell:nth-child(n + 3) {
				padding-right: 24px;
			}

			.column-header-right {
				text-align: right;
			}

			.ant-table-tbody > tr > td {
				border-bottom: none;
			}

			.ant-table-thead
				> tr
				> th:not(:last-child):not(.ant-table-selection-column):not(.ant-table-row-expand-icon-cell):not([colspan])::before {
				background-color: transparent;
			}

			.ant-empty-normal {
				visibility: hidden;
			}

			.table-row-light {
				background: none;
			}

			.table-row-dark {
				background: var(--bg-ink-300);
			}

			.error-rate {
				width: 120px;
			}
		}
	}

	&.filter-visible {
		.api-quick-filter-left-section {
			width: 260px;
		}

		.api-module-right-section {
			width: calc(100% - 260px);
		}
	}
}

.no-filtered-domains-message-container {
	height: 30vh;
	display: flex;
	flex-direction: column;
	align-items: center;
	justify-content: center;

	.no-filtered-domains-message-content {
		display: flex;
		flex-direction: column;
		align-items: flex-start;
		justify-content: center;

		width: fit-content;
		padding: 24px;
	}

	.no-filtered-domains-message {
		margin-top: 8px;
	}
}

.lightMode {
	.api-monitoring-domain-list-table {
		.ant-table {
			.ant-table-thead > tr > th {
				background: var(--bg-vanilla-100);
				color: var(--text-ink-300);
			}

			.ant-table-thead > tr > th:has(.domain-list-name-col-header) {
				background: var(--bg-vanilla-100);
			}

			.ant-table-cell {
				background: var(--bg-vanilla-100);
				color: var(--bg-ink-500);
			}

			.ant-table-cell:has(.domain-list-name-col-value) {
				background: var(--bg-vanilla-100);
			}

			.ant-table-tbody > tr:hover > td {
				background: rgba(0, 0, 0, 0.04);
			}

			.table-row-light {
				background: none;
			}

			.table-row-dark {
				background: none;
			}

			.round-metric-tag {
				color: var(--bg-vanilla-100);
			}
		}
	}
}
101 frontend/src/container/ApiMonitoring/Explorer/Explorer.tsx Normal file
@@ -0,0 +1,101 @@
import './Explorer.styles.scss';

import { FilterOutlined } from '@ant-design/icons';
import * as Sentry from '@sentry/react';
import { Switch, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import QuickFilters from 'components/QuickFilters/QuickFilters';
import { QuickFiltersSource } from 'components/QuickFilters/types';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useEffect, useMemo, useState } from 'react';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import { ApiMonitoringQuickFiltersConfig } from '../utils';
import DomainList from './Domains/DomainList';

function Explorer(): JSX.Element {
	const [showIP, setShowIP] = useState<boolean>(true);

	const { currentQuery } = useQueryBuilder();

	useEffect(() => {
		logEvent('API Monitoring: Landing page visited', {});
	}, []);

	const { handleChangeQueryData } = useQueryOperations({
		index: 0,
		query: currentQuery.builder.queryData[0],
		entityVersion: '',
	});

	const updatedCurrentQuery = useMemo(
		() => ({
			...currentQuery,
			builder: {
				...currentQuery.builder,
				queryData: [
					{
						...currentQuery.builder.queryData[0],
						dataSource: DataSource.TRACES,
						aggregateOperator: 'noop',
						aggregateAttribute: {
							...currentQuery.builder.queryData[0].aggregateAttribute,
						},
					},
				],
			},
		}),
		[currentQuery],
	);
	const query = updatedCurrentQuery?.builder?.queryData[0] || null;

	return (
		<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
			<div className={cx('api-monitoring-page', 'filter-visible')}>
				<section className="api-quick-filter-left-section">
					<div className="api-quick-filters-header">
						<FilterOutlined />
						<Typography.Text>Filters</Typography.Text>
					</div>

					<div className="api-quick-filters-header">
						<Typography.Text>Show IP addresses</Typography.Text>
						<Switch
							size="small"
							style={{ marginLeft: 'auto' }}
							checked={showIP}
							onClick={(): void => {
								setShowIP((showIP): boolean => {
									logEvent('API Monitoring: Show IP addresses clicked', {
										showIP: !showIP,
									});
									return !showIP;
								});
							}}
						/>
					</div>

					<QuickFilters
						source={QuickFiltersSource.API_MONITORING}
						config={ApiMonitoringQuickFiltersConfig}
						handleFilterVisibilityChange={(): void => {}}
						onFilterChange={(query: Query): void =>
							handleChangeQueryData('filters', query.builder.queryData[0].filters)
						}
					/>
				</section>
				<DomainList
					query={query}
					showIP={showIP}
					handleChangeQueryData={handleChangeQueryData}
				/>
			</div>
		</Sentry.ErrorBoundary>
	);
}

export default Explorer;
2283 frontend/src/container/ApiMonitoring/utils.tsx Normal file
File diff suppressed because it is too large
@@ -337,6 +337,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
 		routeKey === 'LOGS_PIPELINES' ||
 		routeKey === 'LOGS_SAVE_VIEWS';
 
+	const isApiMonitoringView = (): boolean => routeKey === 'API_MONITORING';
+
 	const isTracesView = (): boolean =>
 		routeKey === 'TRACES_EXPLORER' || routeKey === 'TRACES_SAVE_VIEWS';

@@ -357,6 +359,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
 	const isInfraMonitoring = (): boolean =>
 		routeKey === 'INFRASTRUCTURE_MONITORING_HOSTS' ||
 		routeKey === 'INFRASTRUCTURE_MONITORING_KUBERNETES';
+	const isTracesFunnels = (): boolean => routeKey === 'TRACES_FUNNELS';
 	const isPathMatch = (regex: RegExp): boolean => regex.test(pathname);
 
 	const isDashboardView = (): boolean =>

@@ -657,11 +660,12 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
 						isAlertOverview() ||
 						isMessagingQueues() ||
 						isCloudIntegrationPage() ||
-						isInfraMonitoring()
+						isInfraMonitoring() ||
+						isApiMonitoringView()
 							? 0
 							: '0 1rem',
 
-					...(isTraceDetailsView() ? { margin: 0 } : {}),
+					...(isTraceDetailsView() || isTracesFunnels() ? { margin: 0 } : {}),
 				}}
 			>
 				{isToDisplayLayout && !renderFullScreen && <TopNav />}
@@ -61,7 +61,7 @@ export default function CustomDomainSettings(): JSX.Element {
 		isLoading: isLoadingDeploymentsData,
 		isFetching: isFetchingDeploymentsData,
 		refetch: refetchDeploymentsData,
-	} = useGetDeploymentsData();
+	} = useGetDeploymentsData(true);
 
 	const {
 		mutate: updateSubDomain,
@@ -223,7 +223,7 @@ function ExplorerOptions({
 	const viewName = useGetSearchQueryParam(QueryParams.viewName) || '';
 	const viewKey = useGetSearchQueryParam(QueryParams.viewKey) || '';
 
-	const extraData = viewsData?.data?.data?.find((view) => view.uuid === viewKey)
+	const extraData = viewsData?.data?.data?.find((view) => view.id === viewKey)
 		?.extraData;
 
 	const extraDataColor = extraData ? JSON.parse(extraData).color : '';

@@ -357,17 +357,12 @@ function ExplorerOptions({
 				viewsData?.data?.data,
 			);
 			if (!currentViewDetails) return;
-			const {
-				query,
-				name,
-				uuid,
-				panelType: currentPanelType,
-			} = currentViewDetails;
+			const { query, name, id, panelType: currentPanelType } = currentViewDetails;
 
 			handleExplorerTabChange(currentPanelType, {
 				query,
 				name,
-				uuid,
+				id,
 			});
 		},
 		[viewsData, handleExplorerTabChange],

@@ -694,7 +689,7 @@ function ExplorerOptions({
 					bgColor = extraData.color;
 				}
 				return (
-					<Select.Option key={view.uuid} value={view.name}>
+					<Select.Option key={view.id} value={view.name}>
 						<div className="render-options">
 							<span
 								className="dot"
@@ -49,6 +49,7 @@ function FullView({
 	isDependedDataLoaded = false,
 	onToggleModelHandler,
 	onClickHandler,
+	customOnDragSelect,
 	setCurrentGraphRef,
 }: FullViewProps): JSX.Element {
 	const { safeNavigate } = useSafeNavigate();

@@ -252,7 +253,7 @@ function FullView({
 							onToggleModelHandler={onToggleModelHandler}
 							setGraphVisibility={setGraphsVisibilityStates}
 							graphVisibility={graphsVisibilityStates}
-							onDragSelect={onDragSelect}
+							onDragSelect={customOnDragSelect ?? onDragSelect}
 							tableProcessedDataRef={tableProcessedDataRef}
 							searchTerm={searchTerm}
 							onClickHandler={onClickHandler}

@@ -50,6 +50,7 @@ export interface FullViewProps {
 	widget: Widgets;
 	fullViewOptions?: boolean;
 	onClickHandler?: OnClickPluginOpts['onClick'];
+	customOnDragSelect?: (start: number, end: number) => void;
 	name: string;
 	tableProcessedDataRef: MutableRefObject<RowData[]>;
 	version?: string;
Some files were not shown because too many files have changed in this diff.