Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 567bb8cced |  |
|  | 0f7e6da613 |  |
|  | eec45da84b |  |
@@ -1,7 +1,6 @@
 .git
 .github
 .vscode
 .devenv
 README.md
 deploy
 sample-apps
81  .github/workflows/build-community.yaml  vendored
@@ -1,81 +0,0 @@
-name: build-community
-
-on:
-  push:
-    tags:
-      - 'v[0-9]+.[0-9]+.[0-9]+'
-      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
-
-defaults:
-  run:
-    shell: bash
-
-env:
-  PRIMUS_HOME: .primus
-  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
-
-jobs:
-  prepare:
-    runs-on: ubuntu-latest
-    outputs:
-      version: ${{ steps.build-info.outputs.version }}
-      hash: ${{ steps.build-info.outputs.hash }}
-      time: ${{ steps.build-info.outputs.time }}
-      branch: ${{ steps.build-info.outputs.branch }}
-    steps:
-      - name: self-checkout
-        uses: actions/checkout@v4
-      - id: token
-        name: github-token-gen
-        uses: actions/create-github-app-token@v1
-        with:
-          app-id: ${{ secrets.PRIMUS_APP_ID }}
-          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
-          owner: ${{ github.repository_owner }}
-      - name: primus-checkout
-        uses: actions/checkout@v4
-        with:
-          repository: signoz/primus
-          ref: main
-          path: .primus
-          token: ${{ steps.token.outputs.token }}
-      - name: build-info
-        run: |
-          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
-          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
-          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
-          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
-  js-build:
-    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
-    needs: prepare
-    secrets: inherit
-    with:
-      PRIMUS_REF: main
-      JS_SRC: frontend
-      JS_OUTPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
-      JS_OUTPUT_ARTIFACT_PATH: frontend/build
-      DOCKER_BUILD: false
-      DOCKER_MANIFEST: false
-  go-build:
-    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
-    needs: [prepare, js-build]
-    secrets: inherit
-    with:
-      PRIMUS_REF: main
-      GO_NAME: signoz-community
-      GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
-      GO_INPUT_ARTIFACT_PATH: frontend/build
-      GO_BUILD_CONTEXT: ./pkg/query-service
-      GO_BUILD_FLAGS: >-
-        -tags timetzdata
-        -ldflags='-linkmode external -extldflags \"-static\" -s -w
-        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-        -X github.com/SigNoz/signoz/pkg/version.variant=community
-        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
-      GO_CGO_ENABLED: 1
-      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
-      DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
-      DOCKER_MANIFEST: true
-      DOCKER_PROVIDERS: dockerhub
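The `-X` flags in `GO_BUILD_FLAGS` only take effect if `github.com/SigNoz/signoz/pkg/version` declares matching package-level string variables. A minimal sketch of what such a package plausibly looks like — the variable names come straight from the flags above, while the defaults and the `Info` helper are assumptions:

```go
// Hypothetical sketch of pkg/version; only the variable names are taken
// from the -X flags above, everything else is illustrative.
package version

import "fmt"

var (
    version = "dev"     // -X ...pkg/version.version=<tag>
    variant = "unknown" // -X ...pkg/version.variant=community
    hash    = "unknown" // -X ...pkg/version.hash=<short sha>
    time    = "unknown" // -X ...pkg/version.time=<build timestamp>
    branch  = "unknown" // -X ...pkg/version.branch=<branch>
)

// Info renders the linker-injected build metadata in one line.
func Info() string {
    return fmt.Sprintf("%s (%s) commit=%s built=%s branch=%s",
        version, variant, hash, time, branch)
}
```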
113  .github/workflows/build-enterprise.yaml  vendored
@@ -1,113 +0,0 @@
-name: build-enterprise
-
-on:
-  push:
-    tags:
-      - v*
-
-defaults:
-  run:
-    shell: bash
-
-env:
-  PRIMUS_HOME: .primus
-  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
-
-jobs:
-  prepare:
-    runs-on: ubuntu-latest
-    outputs:
-      docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
-      version: ${{ steps.build-info.outputs.version }}
-      hash: ${{ steps.build-info.outputs.hash }}
-      time: ${{ steps.build-info.outputs.time }}
-      branch: ${{ steps.build-info.outputs.branch }}
-    steps:
-      - name: self-checkout
-        uses: actions/checkout@v4
-      - id: token
-        name: github-token-gen
-        uses: actions/create-github-app-token@v1
-        with:
-          app-id: ${{ secrets.PRIMUS_APP_ID }}
-          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
-          owner: ${{ github.repository_owner }}
-      - name: primus-checkout
-        uses: actions/checkout@v4
-        with:
-          repository: signoz/primus
-          ref: main
-          path: .primus
-          token: ${{ steps.token.outputs.token }}
-      - name: build-info
-        id: build-info
-        run: |
-          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
-          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
-          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
-          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
-      - name: set-docker-providers
-        id: set-docker-providers
-        run: |
-          if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
-            echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
-          else
-            echo "providers=gcp" >> $GITHUB_OUTPUT
-          fi
-      - name: create-dotenv
-        run: |
-          mkdir -p frontend
-          echo 'CI=1' > frontend/.env
-          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
-          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
-          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
-          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
-          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
-          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
-          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
-          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
-          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
-          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
-          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
-      - name: cache-dotenv
-        uses: actions/cache@v4
-        with:
-          path: frontend/.env
-          key: enterprise-dotenv-${{ github.sha }}
-  js-build:
-    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
-    needs: prepare
-    secrets: inherit
-    with:
-      PRIMUS_REF: main
-      JS_SRC: frontend
-      JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
-      JS_INPUT_ARTIFACT_PATH: frontend/.env
-      JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
-      JS_OUTPUT_ARTIFACT_PATH: frontend/build
-      DOCKER_BUILD: false
-      DOCKER_MANIFEST: false
-  go-build:
-    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
-    needs: [prepare, js-build]
-    secrets: inherit
-    with:
-      PRIMUS_REF: main
-      GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
-      GO_INPUT_ARTIFACT_PATH: frontend/build
-      GO_BUILD_CONTEXT: ./ee/query-service
-      GO_BUILD_FLAGS: >-
-        -tags timetzdata
-        -ldflags='-linkmode external -extldflags \"-static\" -s -w
-        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
-        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
-      GO_CGO_ENABLED: 1
-      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
-      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
-      DOCKER_MANIFEST: true
-      DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}
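The set-docker-providers step decides where images get published from the pushed ref alone. A Go restatement of the same gating (sketch; the two bash regexes are folded into one pattern, and the function name is hypothetical):

```go
package main

import (
    "fmt"
    "regexp"
)

// stableOrRC folds the two bash regexes above into one: plain semver tags
// and -rc.N tags both match.
var stableOrRC = regexp.MustCompile(`^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+(-rc\.[0-9]+)?$`)

// providersFor mirrors the step: stable and release-candidate tags ship to
// Docker Hub and GCP; anything else stays internal on GCP.
func providersFor(ref string) string {
    if stableOrRC.MatchString(ref) {
        return "dockerhub gcp"
    }
    return "gcp"
}

func main() {
    fmt.Println(providersFor("refs/tags/v1.2.3"))      // dockerhub gcp
    fmt.Println(providersFor("refs/tags/v1.2.3-rc.1")) // dockerhub gcp
    fmt.Println(providersFor("refs/tags/v1.2.3-beta")) // gcp
}
```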
122  .github/workflows/build-staging.yaml  vendored
@@ -1,122 +0,0 @@
-name: build-staging
-
-on:
-  push:
-    branches:
-      - main
-  pull_request:
-    types: [labeled]
-
-defaults:
-  run:
-    shell: bash
-
-env:
-  PRIMUS_HOME: .primus
-  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
-
-jobs:
-  prepare:
-    runs-on: ubuntu-latest
-    if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
-    outputs:
-      version: ${{ steps.build-info.outputs.version }}
-      hash: ${{ steps.build-info.outputs.hash }}
-      time: ${{ steps.build-info.outputs.time }}
-      branch: ${{ steps.build-info.outputs.branch }}
-      deployment: ${{ steps.build-info.outputs.deployment }}
-    steps:
-      - name: self-checkout
-        uses: actions/checkout@v4
-      - id: token
-        name: github-token-gen
-        uses: actions/create-github-app-token@v1
-        with:
-          app-id: ${{ secrets.PRIMUS_APP_ID }}
-          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
-          owner: ${{ github.repository_owner }}
-      - name: primus-checkout
-        uses: actions/checkout@v4
-        with:
-          repository: signoz/primus
-          ref: main
-          path: .primus
-          token: ${{ steps.token.outputs.token }}
-      - name: build-info
-        id: build-info
-        run: |
-          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
-          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
-          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
-          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
-
-          staging_label="${{ github.event.label.name }}"
-          if [[ "${staging_label}" == "staging:"* ]]; then
-            deployment=${staging_label#"staging:"}
-          elif [[ "${{ github.event.ref }}" == "refs/heads/main" ]]; then
-            deployment="staging"
-          else
-            echo "error: not able to determine deployment - please verify the PR label or the branch"
-            exit 1
-          fi
-          echo "deployment=${deployment}" >> $GITHUB_OUTPUT
-      - name: create-dotenv
-        run: |
-          mkdir -p frontend
-          echo 'CI=1' > frontend/.env
-          echo 'TUNNEL_URL=https://telemetry.staging.signoz.cloud/tunnel' >> frontend/.env
-          echo 'TUNNEL_DOMAIN=https://telemetry.staging.signoz.cloud' >> frontend/.env
-      - name: cache-dotenv
-        uses: actions/cache@v4
-        with:
-          path: frontend/.env
-          key: staging-dotenv-${{ github.sha }}
-  js-build:
-    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
-    needs: prepare
-    secrets: inherit
-    with:
-      PRIMUS_REF: main
-      JS_SRC: frontend
-      JS_INPUT_ARTIFACT_CACHE_KEY: staging-dotenv-${{ github.sha }}
-      JS_INPUT_ARTIFACT_PATH: frontend/.env
-      JS_OUTPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
-      JS_OUTPUT_ARTIFACT_PATH: frontend/build
-      DOCKER_BUILD: false
-      DOCKER_MANIFEST: false
-  go-build:
-    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
-    needs: [prepare, js-build]
-    secrets: inherit
-    with:
-      PRIMUS_REF: main
-      GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
-      GO_INPUT_ARTIFACT_PATH: frontend/build
-      GO_BUILD_CONTEXT: ./ee/query-service
-      GO_BUILD_FLAGS: >-
-        -tags timetzdata
-        -ldflags='-linkmode external -extldflags \"-static\" -s -w
-        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
-        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
-      GO_CGO_ENABLED: 1
-      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
-      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
-      DOCKER_MANIFEST: true
-      DOCKER_PROVIDERS: gcp
-  staging:
-    if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
-    uses: signoz/primus.workflows/.github/workflows/github-trigger.yaml@main
-    secrets: inherit
-    needs: [prepare, go-build]
-    with:
-      PRIMUS_REF: main
-      GITHUB_ENVIRONMENT: staging
-      GITHUB_SILENT: true
-      GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
-      GITHUB_EVENT_NAME: releaser
-      GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"
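The deployment-name logic in build-info is a small string dance on the PR label and the pushed ref. Restated as a Go sketch (the helper name is hypothetical, the behavior matches the bash above):

```go
package main

import (
    "errors"
    "fmt"
    "strings"
)

// resolveDeployment mirrors the bash in the build-info step: a
// "staging:<name>" label wins, a push to main falls back to "staging",
// anything else fails the job.
func resolveDeployment(label, ref string) (string, error) {
    if strings.HasPrefix(label, "staging:") {
        return strings.TrimPrefix(label, "staging:"), nil
    }
    if ref == "refs/heads/main" {
        return "staging", nil
    }
    return "", errors.New("not able to determine deployment - please verify the PR label or the branch")
}

func main() {
    d, _ := resolveDeployment("staging:eu-1", "")
    fmt.Println(d) // eu-1
}
```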
122  .github/workflows/build.yaml  vendored  Normal file
@@ -0,0 +1,122 @@
+name: build
+
+on:
+  push:
+    branches:
+      - main
+    tags:
+      - v*
+
+jobs:
+  enterprise:
+    runs-on: ubuntu-latest
+    steps:
+      - name: checkout
+        uses: actions/checkout@v4
+      - name: setup
+        uses: actions/setup-go@v5
+        with:
+          go-version: "1.22"
+      - name: setup-qemu
+        uses: docker/setup-qemu-action@v3
+      - name: setup-buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          version: latest
+      - name: docker-login
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: create-env-file
+        run: |
+          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
+          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
+          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
+          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
+          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
+          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
+          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
+          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
+          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
+          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
+          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
+      - name: github-ref-info
+        shell: bash
+        run: |
+          GH_REF=${{ github.ref }}
+          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+            PREFIX="refs/tags/"
+            echo "GH_IS_TAG=true" >> $GITHUB_ENV
+            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
+          else
+            PREFIX="refs/heads/"
+            echo "GH_IS_TAG=false" >> $GITHUB_ENV
+            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
+          fi
+      - name: set-version
+        run: |
+          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
+            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
+          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
+            echo "VERSION=latest" >> $GITHUB_ENV
+          else
+            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
+          fi
+      - name: cross-compilation-tools
+        run: |
+          set -ex
+          sudo apt-get update
+          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
+      - name: publish
+        run: make docker-buildx-enterprise
+
+  community:
+    runs-on: ubuntu-latest
+    steps:
+      - name: checkout
+        uses: actions/checkout@v4
+      - name: setup-go
+        uses: actions/setup-go@v5
+        with:
+          go-version: "1.22"
+      - name: setup-qemu
+        uses: docker/setup-qemu-action@v3
+      - name: setup-buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          version: latest
+      - name: docker-login
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: github-ref-info
+        shell: bash
+        run: |
+          GH_REF=${{ github.ref }}
+          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+            PREFIX="refs/tags/"
+            echo "GH_IS_TAG=true" >> $GITHUB_ENV
+            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
+          else
+            PREFIX="refs/heads/"
+            echo "GH_IS_TAG=false" >> $GITHUB_ENV
+            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
+          fi
+      - name: set-version
+        run: |
+          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
+            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
+          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
+            echo "VERSION=latest" >> $GITHUB_ENV
+          else
+            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
+          fi
+      - name: cross-compilation-tools
+        run: |
+          set -ex
+          sudo apt-get update
+          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
+      - name: publish
+        run: make docker-buildx-community
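Both jobs repeat the same github-ref-info / set-version pair, which reduces to a single mapping from the pushed ref to a Docker tag. A compact Go sketch of that mapping (the function name is hypothetical):

```go
package main

import (
    "fmt"
    "strings"
)

// dockerTag mirrors the two bash steps: tags publish under the tag name,
// main publishes under "latest", any other branch under its own name.
func dockerTag(refType, ref string) string {
    if refType == "tag" {
        return strings.TrimPrefix(ref, "refs/tags/")
    }
    if branch := strings.TrimPrefix(ref, "refs/heads/"); branch != "main" {
        return branch
    }
    return "latest"
}

func main() {
    fmt.Println(dockerTag("tag", "refs/tags/v0.76.2"))  // v0.76.2
    fmt.Println(dockerTag("branch", "refs/heads/main")) // latest
    fmt.Println(dockerTag("branch", "refs/heads/foo"))  // foo
}
```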
55  .github/workflows/integrationci.yaml  vendored
@@ -1,55 +0,0 @@
-name: integrationci
-
-on:
-  pull_request:
-    types:
-      - labeled
-  pull_request_target:
-    types:
-      - labeled
-
-jobs:
-  test:
-    strategy:
-      fail-fast: false
-      matrix:
-        src:
-          - bootstrap
-        sqlstore-provider:
-          - postgres
-          - sqlite
-        clickhouse-version:
-          - 24.1.2-alpine
-          - 24.12-alpine
-        schema-migrator-version:
-          - v0.111.38
-        postgres-version:
-          - 15
-    if: |
-      ((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
-      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
-    runs-on: ubuntu-latest
-    steps:
-      - name: checkout
-        uses: actions/checkout@v4
-      - name: python
-        uses: actions/setup-python@v5
-        with:
-          python-version: 3.13
-      - name: poetry
-        run: |
-          python -m pip install poetry==2.1.2
-          python -m poetry config virtualenvs.in-project true
-          cd tests/integration && poetry install --no-root
-      - name: run
-        run: |
-          cd tests/integration && \
-          poetry run pytest -ra \
-            --basetemp=./tmp/ \
-            -vv \
-            --capture=no \
-            src/${{matrix.src}} \
-            --sqlstore-provider ${{matrix.sqlstore-provider}} \
-            --postgres-version ${{matrix.postgres-version}} \
-            --clickhouse-version ${{matrix.clickhouse-version}} \
-            --schema-migrator-version ${{matrix.schema-migrator-version}}
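The matrix expands to four jobs per trigger (2 sqlstore providers × 2 ClickHouse versions; the other axes are single-valued), and the `if:` condition gates who may run them. The same predicate, restated in Go (sketch; the parameters are hypothetical stand-ins for the github.event fields):

```go
package main

import "fmt"

// shouldRunIntegration restates the workflow's if: expression.
func shouldRunIntegration(event string, fork, safeToTest, safeToIntegrate bool, author string) bool {
    viaPR := event == "pull_request" && !fork &&
        author != "dependabot[bot]" && !safeToTest
    viaPRTarget := event == "pull_request_target" && safeToTest
    return (viaPR || viaPRTarget) && safeToIntegrate
}

func main() {
    // A same-repo PR labeled safe-to-integrate runs via the pull_request event.
    fmt.Println(shouldRunIntegration("pull_request", false, false, true, "octocat")) // true
    // A fork only runs after a maintainer labels it safe-to-test (pull_request_target).
    fmt.Println(shouldRunIntegration("pull_request_target", true, true, true, "octocat")) // true
}
```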
4  .github/workflows/prereleaser.yaml  vendored
@@ -1,9 +1,9 @@
 name: prereleaser
 
 on:
-  # schedule every wednesday 6:30 AM UTC (12:00 PM IST)
+  # schedule every wednesday 9:30 AM UTC (3pm IST)
   schedule:
-    - cron: '30 6 * * 3'
+    - cron: '30 9 * * 3'
 
   # allow manual triggering of the workflow by a maintainer
   workflow_dispatch:
13  .github/workflows/staging-deployment.yaml  vendored
@@ -36,17 +36,12 @@ jobs:
           echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
           echo "GITHUB_SHA: ${GITHUB_SHA}"
           export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
+          export OTELCOL_TAG="main"
           export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
           export KAFKA_SPAN_EVAL="true"
-          docker system prune --force --all
-          OTELCOL_TAG=$(curl -s https://api.github.com/repos/SigNoz/signoz-otel-collector/releases/latest | jq -r '.tag_name // "not-found"')
-          if [[ "${OTELCOL_TAG}" == "not-found" ]]; then
-            echo "warning: unable to determine latest SigNoz OtelCollector release tag, skipping latest otelcol deployment"
-          else
-            export OTELCOL_TAG=${OTELCOL_TAG}
-            docker pull signoz/signoz-otel-collector:${OTELCOL_TAG}
-            docker pull signoz/signoz-schema-migrator:${OTELCOL_TAG}
-          fi
+          docker system prune --force
+          docker pull signoz/signoz-otel-collector:main
+          docker pull signoz/signoz-schema-migrator:main
           cd ~/signoz
           git status
           git add .
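The removed lines traded a dynamic lookup of the latest signoz-otel-collector release for a pinned `main` tag. For reference, the deleted `curl | jq` pipeline does roughly this (Go sketch; only `tag_name` is decoded from the GitHub REST response):

```go
package main

import (
    "encoding/json"
    "fmt"
    "net/http"
)

// latestOtelcolTag mirrors the removed curl | jq pipeline: fetch the latest
// release from the GitHub REST API and read its tag_name.
func latestOtelcolTag() (string, error) {
    resp, err := http.Get("https://api.github.com/repos/SigNoz/signoz-otel-collector/releases/latest")
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()
    var release struct {
        TagName string `json:"tag_name"` // jq's .tag_name
    }
    if err := json.NewDecoder(resp.Body).Decode(&release); err != nil {
        return "", err
    }
    if release.TagName == "" {
        return "", fmt.Errorf("unable to determine latest release tag")
    }
    return release.TagName, nil
}

func main() {
    tag, err := latestOtelcolTag()
    if err != nil {
        fmt.Println("warning:", err)
        return
    }
    fmt.Println(tag)
}
```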
2  .github/workflows/testing-deployment.yaml  vendored
@@ -38,7 +38,7 @@ jobs:
           export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
           export DEV_BUILD="1"
           export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
-          docker system prune --force --all
+          docker system prune --force
           cd ~/signoz
           git status
           git add .
148  .gitignore  vendored
@@ -54,7 +54,6 @@ ee/query-service/tests/test-deploy/data/
 bin/
 .local/
 */query-service/queries.active
 ee/query-service/db
 
 # e2e
 
@@ -80,153 +79,6 @@ deploy/common/clickhouse/user_scripts/
 
 queries.active
 
 # tmp
 **/tmp/**
 
-# .devenv tmp files
-.devenv/**/tmp/**
-.qodo
-
-### Python ###
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-share/python-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.nox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-*.py,cover
-.hypothesis/
-.pytest_cache/
-cover/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-db.sqlite3
-db.sqlite3-journal
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-.pybuilder/
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# IPython
-profile_default/
-ipython_config.py
-
-# Celery stuff
-celerybeat-schedule
-celerybeat.pid
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
-.env
-.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-.dmypy.json
-dmypy.json
-
-# Pyre type checker
-.pyre/
-
-# pytype static type analyzer
-.pytype/
-
-# Cython debug symbols
-cython_debug/
-
-# PyCharm
-# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
-# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
-# and can be added to the global gitignore or merged into this file. For a more nuclear
-# option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
-
-### Python Patch ###
-# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
-poetry.toml
-
-# ruff
-.ruff_cache/
-
-# LSP config files
-pyrightconfig.json
-
-# End of https://www.toptal.com/developers/gitignore/api/python
@@ -1,17 +0,0 @@
-#### Auto generated by make docker-version-alpine. DO NOT EDIT! ####
-amd64=029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85
-unknown=5fea95373b9ec85974843f31446fa6a9df4492dddae4e1cb056193c34a20a5be
-arm=b4aef1a899e0271f06d948c9a8fa626ecdb2202d3a178bc14775dd559e23df8e
-unknown=a4d1e27e63a9d6353046eb25a2f0ec02945012b217f4364cd83a73fe6dfb0b15
-arm=4fdafe217d0922f3c3e2b4f64cf043f8403a4636685cd9c51fea2cbd1f419740
-unknown=7f21ac2018d95b2c51a5779c1d5ca6c327504adc3b0fdc747a6725d30b3f13c2
-arm64=ea3c5a9671f7b3f7eb47eab06f73bc6591df978b0d5955689a9e6f943aa368c0
-unknown=a8ba68c1a9e6eea8041b4b8f996c235163440808b9654a865976fdcbede0f433
-386=dea9f02e103e837849f984d5679305c758aba7fea1b95b7766218597f61a05ab
-unknown=3c6629bec05c8273a927d46b77428bf4a378dad911a0ae284887becdc149b734
-ppc64le=0880443bffa028dfbbc4094a32dd6b7ac25684e4c0a3d50da9e0acae355c5eaf
-unknown=bb48308f976b266e3ab39bbf9af84521959bd9c295d3c763690cf41f8df2a626
-riscv64=d76e6fbe348ff20c2931bb7f101e49379648e026de95dd37f96e00ce1909dcf7
-unknown=dd807544365f6dc187cbe6de0806adce2ea9de3e7124717d1d8e8b7a18b77b64
-s390x=b815fadf80495594eb6296a6af0bc647ae5f193e0044e07acec7e5b378c9ce2d
-unknown=74681be74a280a88abb53ff1e048eb1fb624b30d0066730df6d8afd02ba82e01
@@ -77,4 +77,4 @@ Need assistance? Join our Slack community:
 ## Where do I go from here?
 
 - Set up your [development environment](docs/contributing/development.md)
-- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md)
+- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo/otel-demo-docs.md)
36  Makefile
@@ -10,7 +10,7 @@ COMMIT_SHORT_SHA ?= $(shell git rev-parse --short HEAD)
 BRANCH_NAME ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
 VERSION ?= $(BRANCH_NAME)-$(COMMIT_SHORT_SHA)
 TIMESTAMP ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
-ARCHS ?= amd64 arm64
+ARCHS = amd64 arm64
 TARGET_DIR ?= $(shell pwd)/target
 
 ZEUS_URL ?= https://api.signoz.cloud
@@ -23,7 +23,6 @@ GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
 GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service
 GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community
 GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS))
-GO_BUILD_ARCHS_ENTERPRISE_RACE = $(addprefix go-build-enterprise-race-,$(ARCHS))
 GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service
 GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO)
 
@@ -75,10 +74,6 @@ go-run-enterprise: ## Runs the enterprise go backend server
 		--use-logs-new-schema true \
 		--use-trace-new-schema true
 
-.PHONY: go-test
-go-test: ## Runs go unit tests
-	@go test -race ./...
-
 .PHONY: go-run-community
 go-run-community: ## Runs the community go backend server
 	@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
@@ -120,18 +115,6 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
 		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
 	fi
 
-.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
-go-build-enterprise-race: ## Builds the go backend server for enterprise with race
-go-build-enterprise-race: $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
-$(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
-	@mkdir -p $(TARGET_DIR)/$(OS)-$*
-	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
-	@if [ $* = "arm64" ]; then \
-		CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
-	else \
-		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
-	fi
-
 ##############################################################
 # js commands
 ##############################################################
@@ -180,20 +163,3 @@ docker-buildx-enterprise: go-build-enterprise js-build
 		--platform linux/arm64,linux/amd64 \
 		--push \
 		--tag $(DOCKER_REGISTRY_ENTERPRISE):$(VERSION) $(SRC)
-
-##############################################################
-# python commands
-##############################################################
-.PHONY: py-fmt
-py-fmt: ## Run black for integration tests
-	@cd tests/integration && poetry run black .
-
-.PHONY: py-lint
-py-lint: ## Run lint for integration tests
-	@cd tests/integration && poetry run isort .
-	@cd tests/integration && poetry run autoflake .
-	@cd tests/integration && poetry run pylint .
-
-.PHONY: py-test
-py-test: ## Runs integration tests
-	@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@@ -72,6 +72,7 @@ sqlstore:
   # The path to the SQLite database file.
   path: /var/lib/signoz/signoz.db
 
+
 ##################### APIServer #####################
 apiserver:
   timeout:
@@ -90,29 +91,20 @@ apiserver:
       - /api/v1/version
       - /
 
 ##################### TelemetryStore #####################
 telemetrystore:
-  # Specifies the telemetrystore provider to use.
-  provider: clickhouse
   # Maximum number of idle connections in the connection pool.
   max_idle_conns: 50
   # Maximum number of open connections to the database.
   max_open_conns: 100
   # Maximum time to wait for a connection to be established.
   dial_timeout: 5s
+  # Specifies the telemetrystore provider to use.
+  provider: clickhouse
   clickhouse:
     # The DSN to use for clickhouse.
     dsn: tcp://localhost:9000
 
 ##################### Prometheus #####################
 prometheus:
-  active_query_tracker:
-    # Whether to enable the active query tracker.
-    enabled: true
-    # The path to use for the active query tracker.
-    path: ""
-    # The maximum number of concurrent queries.
-    max_concurrent: 20
+  # The DSN to use for ClickHouse.
+  dsn: http://localhost:9000
 
 ##################### Alertmanager #####################
 alertmanager:
@@ -125,7 +117,7 @@ alertmanager:
   # The poll interval for periodically syncing the alertmanager with the config in the store.
   poll_interval: 1m
   # The URL under which Alertmanager is externally reachable (for example, if Alertmanager is served via a reverse proxy). Used for generating relative and absolute links back to Alertmanager itself.
-  external_url: http://localhost:8080
+  external_url: http://localhost:9093
   # The global configuration for the alertmanager. All the exhaustive fields can be found in the upstream: https://github.com/prometheus/alertmanager/blob/efa05feffd644ba4accb526e98a8c6545d26a783/config/config.go#L833
   global:
     # ResolveTimeout is the time after which an alert is declared resolved if it has not been updated.
@@ -174,7 +174,7 @@ services:
   #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.79.1
+    image: signoz/signoz:v0.76.2
     command:
       - --config=/root/config/prometheus.yml
       - --use-logs-new-schema=true
@@ -208,7 +208,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.39
+    image: signoz/signoz-otel-collector:v0.111.34
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -232,7 +232,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.39
+    image: signoz/signoz-schema-migrator:v0.111.34
     deploy:
       restart_policy:
         condition: on-failure
@@ -110,7 +110,7 @@ services:
   #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.79.1
+    image: signoz/signoz:v0.76.2
     command:
       - --config=/root/config/prometheus.yml
       - --use-logs-new-schema=true
@@ -143,7 +143,7 @@ services:
      retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.39
+    image: signoz/signoz-otel-collector:v0.111.34
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -167,7 +167,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.39
+    image: signoz/signoz-schema-migrator:v0.111.34
     deploy:
       restart_policy:
         condition: on-failure
@@ -177,7 +177,7 @@ services:
   #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.79.1}
+    image: signoz/signoz:${VERSION:-v0.76.2}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -212,7 +212,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -238,7 +238,7 @@ services:
       condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -249,7 +249,7 @@ services:
       condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-async
     command:
       - async
@@ -110,7 +110,7 @@ services:
   #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.79.1}
+    image: signoz/signoz:${VERSION:-v0.76.2}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -146,7 +146,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -168,7 +168,7 @@ services:
       condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -180,7 +180,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-async
     command:
       - async
@@ -110,7 +110,7 @@ services:
   #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.79.1}
+    image: signoz/signoz:${VERSION:-v0.76.2}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
       condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -178,7 +178,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-async
     command:
       - async
@@ -4,7 +4,6 @@ import (
 	"net/http"
 	"time"
 
-	eeTypes "github.com/SigNoz/signoz/ee/types"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -25,7 +24,7 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		var values []string
 		var patToken string
-		var pat eeTypes.StorablePersonalAccessToken
+		var pat types.StorablePersonalAccessToken
 
 		for _, header := range p.headers {
 			values = append(values, r.Header.Get(header))
@@ -18,4 +18,4 @@ COPY frontend/build/ /etc/signoz/web/
 RUN chmod 755 /root /root/signoz
 
 ENTRYPOINT ["./signoz"]
-CMD ["-config", "/root/config/prometheus.yml"]
+CMD ["-config", "/root/config/prometheus.yml"]
@@ -1,36 +0,0 @@
-FROM golang:1.22-bullseye
-
-ARG OS="linux"
-ARG TARGETARCH
-ARG ZEUSURL
-
-# This path is important for stacktraces
-WORKDIR $GOPATH/src/github.com/signoz/signoz
-WORKDIR /root
-
-RUN set -eux; \
-	apt-get update; \
-	apt-get install -y --no-install-recommends \
-		g++ \
-		gcc \
-		libc6-dev \
-		make \
-		pkg-config \
-	; \
-	rm -rf /var/lib/apt/lists/*
-
-COPY go.mod go.sum ./
-
-RUN go mod download
-
-COPY ./ee/ ./ee/
-COPY ./pkg/ ./pkg/
-COPY ./templates/email /root/templates
-
-COPY Makefile Makefile
-RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
-RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz
-
-RUN chmod 755 /root /root/signoz
-
-ENTRYPOINT ["/root/signoz"]
@@ -1,22 +0,0 @@
-ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"
-
-FROM alpine@sha256:${ALPINE_SHA}
-LABEL maintainer="signoz"
-WORKDIR /root
-
-ARG OS="linux"
-ARG ARCH
-
-RUN apk update && \
-	apk add ca-certificates && \
-	rm -rf /var/cache/apk/*
-
-COPY ./target/${OS}-${ARCH}/signoz /root/signoz
-COPY ./conf/prometheus.yml /root/config/prometheus.yml
-COPY ./templates/email /root/templates
-COPY frontend/build/ /etc/signoz/web/
-
-RUN chmod 755 /root /root/signoz
-
-ENTRYPOINT ["./signoz"]
-CMD ["-config", "/root/config/prometheus.yml"]
@@ -28,10 +28,11 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
 	}
 
 	dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:       dp.reader,
-		Cache:        dp.cache,
-		KeyGenerator: queryBuilder.NewKeyGenerator(),
-		FluxInterval: dp.fluxInterval,
+		Reader:        dp.reader,
+		Cache:         dp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  dp.fluxInterval,
+		FeatureLookup: dp.ff,
 	})
 
 	return dp
@@ -28,10 +28,11 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
 	}
 
 	hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:       hp.reader,
-		Cache:        hp.cache,
-		KeyGenerator: queryBuilder.NewKeyGenerator(),
-		FluxInterval: hp.fluxInterval,
+		Reader:        hp.reader,
+		Cache:         hp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  hp.fluxInterval,
+		FeatureLookup: hp.ff,
 	})
 
 	return hp
@@ -38,6 +38,12 @@ func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericPr
 	}
 }
 
+func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
+	return func(p T) {
+		p.GetBaseSeasonalProvider().ff = ff
+	}
+}
+
 func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
 	return func(p T) {
 		p.GetBaseSeasonalProvider().reader = reader
@@ -50,6 +56,7 @@ type BaseSeasonalProvider struct {
 	fluxInterval time.Duration
 	cache        cache.Cache
 	keyGenerator cache.KeyGenerator
+	ff           interfaces.FeatureLookup
 }
 
 func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
@@ -306,9 +313,6 @@ func (p *BaseSeasonalProvider) getScore(
 	series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
 ) float64 {
 	expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
-	if expectedValue < 0 {
-		expectedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
-	}
	return (value - expectedValue) / p.getStdDev(weekSeries)
 }
 
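With the removed fallback, getScore no longer substitutes a moving average when the seasonal expectation goes negative; what remains is a plain z-score against the weekly series. As a formula-sized sketch:

```go
// zScore is what getScore reduces to after this change: the distance of the
// observed value from the seasonal expectation, measured in weekly standard
// deviations. (Assumes a non-zero stddev; the real code divides unconditionally.)
func zScore(value, expected, weeklyStdDev float64) float64 {
    return (value - expected) / weeklyStdDev
}
```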
@@ -27,10 +27,11 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
 	}
 
 	wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:       wp.reader,
-		Cache:        wp.cache,
-		KeyGenerator: queryBuilder.NewKeyGenerator(),
-		FluxInterval: wp.fluxInterval,
+		Reader:        wp.reader,
+		Cache:         wp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  wp.fluxInterval,
+		FeatureLookup: wp.ff,
 	})
 
 	return wp
@@ -11,8 +11,6 @@ import (
 	"github.com/SigNoz/signoz/ee/query-service/license"
 	"github.com/SigNoz/signoz/ee/query-service/usage"
 	"github.com/SigNoz/signoz/pkg/alertmanager"
-	"github.com/SigNoz/signoz/pkg/modules/preference"
-	preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
 	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
 	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
 	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -23,7 +21,6 @@ import (
 	rules "github.com/SigNoz/signoz/pkg/query-service/rules"
 	"github.com/SigNoz/signoz/pkg/signoz"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/preferencetypes"
 	"github.com/SigNoz/signoz/pkg/version"
 	"github.com/gorilla/mux"
 )
@@ -57,7 +54,6 @@ type APIHandler struct {
 
 // NewAPIHandler returns an APIHandler
 func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
-	preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
 
 	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
 		Reader: opts.DataConnector,
@@ -75,7 +71,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
 		UseTraceNewSchema: opts.UseTraceNewSchema,
 		AlertmanagerAPI:   alertmanager.NewAPI(signoz.Alertmanager),
 		Signoz:            signoz,
-		Preference:        preference,
 	})
 
 	if err != nil {
@@ -162,6 +157,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
 	router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
 	router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
 
 	// PAT APIs
 	router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
@@ -11,7 +11,7 @@ import (
 	"time"
 
 	"github.com/SigNoz/signoz/ee/query-service/constants"
-	eeTypes "github.com/SigNoz/signoz/ee/types"
+	"github.com/SigNoz/signoz/ee/query-service/model"
 	"github.com/SigNoz/signoz/pkg/query-service/auth"
 	baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/dao"
@@ -135,12 +135,19 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
 		zap.String("cloudProvider", cloudProvider),
 	)
 
-	newPAT := eeTypes.NewGettablePAT(
-		integrationPATName,
-		baseconstants.ViewerGroup,
-		integrationUser.ID,
-		0,
-	)
+	newPAT := model.PAT{
+		StorablePersonalAccessToken: types.StorablePersonalAccessToken{
+			Token:     generatePATToken(),
+			UserID:    integrationUser.ID,
+			Name:      integrationPATName,
+			Role:      baseconstants.ViewerGroup,
+			ExpiresAt: 0,
+			TimeAuditable: types.TimeAuditable{
+				CreatedAt: time.Now(),
+				UpdatedAt: time.Now(),
+			},
+		},
+	}
 	integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
 	if err != nil {
 		return "", basemodel.InternalError(fmt.Errorf(
@@ -153,11 +160,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
 func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 	ctx context.Context, orgId string, cloudProvider string,
 ) (*types.User, *basemodel.ApiError) {
-	cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
-	email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
+	cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
 
 	// TODO(nitya): there should be orgId here
-	integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
+	integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
 	if apiErr != nil {
 		return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
 	}
@@ -172,9 +177,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 	)
 
 	newUser := &types.User{
-		ID:    uuid.New().String(),
-		Name:  cloudIntegrationUser,
-		Email: email,
+		ID:    cloudIntegrationUserId,
+		Name:  fmt.Sprintf("%s integration", cloudProvider),
+		Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
 		TimeAuditable: types.TimeAuditable{
 			CreatedAt: time.Now(),
 		},
@@ -2,26 +2,31 @@ package api
 
 import (
 	"context"
+	"crypto/rand"
+	"encoding/base64"
 	"encoding/json"
 	"fmt"
 	"net/http"
-	"slices"
 	"time"
 
 	"github.com/SigNoz/signoz/ee/query-service/model"
-	eeTypes "github.com/SigNoz/signoz/ee/types"
-	"github.com/SigNoz/signoz/pkg/errors"
-	errorsV2 "github.com/SigNoz/signoz/pkg/errors"
-	"github.com/SigNoz/signoz/pkg/http/render"
 	"github.com/SigNoz/signoz/pkg/query-service/auth"
 	baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
 	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
 	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/gorilla/mux"
 	"go.uber.org/zap"
 )
 
+func generatePATToken() string {
+	// Generate a 32-byte random token.
+	token := make([]byte, 32)
+	rand.Read(token)
+	// Encode the token in base64.
+	encodedToken := base64.StdEncoding.EncodeToString(token)
+	return encodedToken
+}
+
 func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
 	ctx := context.Background()
 
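One caveat in the restored generatePATToken: it drops the error from rand.Read. crypto/rand can fail (however rarely), so an error-checked variant looks like this (sketch; the name is hypothetical):

```go
package api

import (
    "crypto/rand"
    "encoding/base64"
)

// generatePATTokenChecked is a hypothetical error-checked variant of the
// generatePATToken restored above: 32 bytes from the OS CSPRNG,
// base64-encoded into a 44-character token.
func generatePATTokenChecked() (string, error) {
    token := make([]byte, 32)
    if _, err := rand.Read(token); err != nil {
        return "", err
    }
    return base64.StdEncoding.EncodeToString(token), nil
}
```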
@@ -38,18 +43,31 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
 		}, nil)
 		return
 	}
-	pat := eeTypes.NewGettablePAT(
-		req.Name,
-		req.Role,
-		user.ID,
-		req.ExpiresInDays,
-	)
+	pat := model.PAT{
+		StorablePersonalAccessToken: types.StorablePersonalAccessToken{
+			Name:      req.Name,
+			Role:      req.Role,
+			ExpiresAt: req.ExpiresInDays,
+		},
+	}
 	err = validatePATRequest(pat)
 	if err != nil {
 		RespondError(w, model.BadRequest(err), nil)
 		return
 	}
 
+	// All the PATs are associated with the user creating the PAT.
+	pat.UserID = user.ID
+	pat.CreatedAt = time.Now()
+	pat.UpdatedAt = time.Now()
+	pat.LastUsed = 0
+	pat.Token = generatePATToken()
+
+	if pat.ExpiresAt != 0 {
+		// convert expiresAt to unix timestamp from days
+		pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
+	}
+
 	zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
 	var apierr basemodel.BaseApiError
 	if pat, apierr = ah.AppDao().CreatePAT(ctx, user.OrgID, pat); apierr != nil {
@@ -60,7 +78,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
 	ah.Respond(w, &pat)
 }
 
-func validatePATRequest(req eeTypes.GettablePAT) error {
+func validatePATRequest(req model.PAT) error {
 	if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
 		return fmt.Errorf("valid role is required")
 	}
@@ -76,19 +94,12 @@ func validatePATRequest(req eeTypes.GettablePAT) error {
 func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
 	ctx := context.Background()
 
-	req := eeTypes.GettablePAT{}
+	req := model.PAT{}
 	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
 		RespondError(w, model.BadRequest(err), nil)
 		return
 	}
 
-	idStr := mux.Vars(r)["id"]
-	id, err := valuer.NewUUID(idStr)
-	if err != nil {
-		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
-		return
-	}
-
 	user, err := auth.GetUserFromReqContext(r.Context())
 	if err != nil {
 		RespondError(w, &model.ApiError{
@@ -98,25 +109,6 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	//get the pat
-	existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
-	if paterr != nil {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
-		return
-	}
-
-	// get the user
-	createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
-	if usererr != nil {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
-		return
-	}
-
-	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
-		return
-	}
-
 	err = validatePATRequest(req)
 	if err != nil {
 		RespondError(w, model.BadRequest(err), nil)
@@ -124,6 +116,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
 	}
 
 	req.UpdatedByUserID = user.ID
+	id := mux.Vars(r)["id"]
 	req.UpdatedAt = time.Now()
 	zap.L().Info("Got Update PAT request", zap.Any("pat", req))
 	var apierr basemodel.BaseApiError
@@ -156,12 +149,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
 
 func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
 	ctx := context.Background()
-	idStr := mux.Vars(r)["id"]
-	id, err := valuer.NewUUID(idStr)
-	if err != nil {
-		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
-		return
-	}
+	id := mux.Vars(r)["id"]
 	user, err := auth.GetUserFromReqContext(r.Context())
 	if err != nil {
 		RespondError(w, &model.ApiError{
@@ -171,26 +159,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	//get the pat
-	existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
-	if paterr != nil {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
-		return
-	}
-
-	// get the user
-	createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
-	if usererr != nil {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
-		return
-	}
-
-	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
-		return
-	}
-
-	zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
+	zap.L().Info("Revoke PAT with id", zap.String("id", id))
 	if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
 		RespondError(w, apierr, nil)
 		return
@@ -88,24 +88,28 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
)
case anomaly.SeasonalityDaily:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
)
case anomaly.SeasonalityHourly:
provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
)
default:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
)
}
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
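Note: the providers above are configured through generic functional options (anomaly.WithCache[*anomaly.DailyProvider](...)). A minimal, self-contained sketch of that pattern — names here are illustrative, not SigNoz APIs:

package main

import "fmt"

type DailyProvider struct {
	cache string
}

func (p *DailyProvider) setCache(c string) { p.cache = c }

// GenericProviderOption mirrors the shape of the With* helpers above:
// an option parameterized over the concrete provider pointer type.
type GenericProviderOption[T any] func(T)

func WithCache[T interface{ setCache(string) }](c string) GenericProviderOption[T] {
	return func(p T) { p.setCache(c) }
}

func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvider {
	p := &DailyProvider{}
	for _, opt := range opts {
		opt(p) // each option mutates the provider being built
	}
	return p
}

func main() {
	p := NewDailyProvider(WithCache[*DailyProvider]("in-memory"))
	fmt.Println(p.cache) // in-memory
}

The explicit type argument on WithCache is what lets one option family serve the hourly, daily, and weekly providers without duplication.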
33 ee/query-service/app/api/traces.go Normal file
@@ -0,0 +1,33 @@
package api

import (
"net/http"

"github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/model"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"go.uber.org/zap"
)

func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {

if !ah.CheckFeature(basemodel.SmartTraceDetail) {
zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
ah.APIHandler.SearchTraces(w, r)
return
}
searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}

result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
if ah.HandleError(w, err, http.StatusBadRequest) {
return
}

ah.WriteJSON(w, r, result)

}
@@ -5,33 +5,38 @@ import (

"github.com/ClickHouse/clickhouse-go/v2"

"github.com/jmoiron/sqlx"

"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/prometheus"
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
)

type ClickhouseReader struct {
conn clickhouse.Conn
appdb sqlstore.SQLStore
appdb *sqlx.DB
*basechr.ClickHouseReader
}

func NewDataConnector(
sqlDB sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
localDB *sqlx.DB,
ch clickhouse.Conn,
promConfigPath string,
lm interfaces.FeatureLookup,
cluster string,
useLogsNewSchema bool,
useTraceNewSchema bool,
fluxIntervalForTraceDetail time.Duration,
cache cache.Cache,
) *ClickhouseReader {
chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
chReader := basechr.NewReader(localDB, ch, promConfigPath, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
return &ClickhouseReader{
conn: telemetryStore.ClickhouseDB(),
appdb: sqlDB,
conn: ch,
appdb: localDB,
ClickHouseReader: chReader,
}
}

func (r *ClickhouseReader) Start(readerReady chan bool) {
r.ClickHouseReader.Start(readerReady)
}
@@ -1,16 +1,17 @@
package smart
package db

import (
"errors"
"strconv"

"github.com/SigNoz/signoz/ee/query-service/model"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"go.uber.org/zap"
)

// SmartTraceAlgorithm is an algorithm to find the target span and build a tree of spans around it with the given levelUp and levelDown parameters and the given spanLimit
func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) {
var spans []*SpanForTraceDetails
var spans []*model.SpanForTraceDetails

// if targetSpanId is null or not present then randomly select a span as targetSpanId
if (targetSpanId == "" || targetSpanId == "null") && len(payload) > 0 {
@@ -23,7 +24,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
parentID = spanItem.References[0].SpanId
}
span := &SpanForTraceDetails{
span := &model.SpanForTraceDetails{
TimeUnixNano: spanItem.TimeUnixNano,
SpanID: spanItem.SpanID,
TraceID: spanItem.TraceID,
@@ -44,7 +45,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
if err != nil {
return nil, err
}
targetSpan := &SpanForTraceDetails{}
targetSpan := &model.SpanForTraceDetails{}

// Find the target span in the span trees
for _, root := range roots {
@@ -64,7 +65,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
}

// Build the final result
parents := []*SpanForTraceDetails{}
parents := []*model.SpanForTraceDetails{}

// Get the parent spans of the target span up to the given levelUp parameter and spanLimit
preParent := targetSpan
@@ -89,11 +90,11 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
}

// Get the child spans of the target span until the given levelDown and spanLimit
preParents := []*SpanForTraceDetails{targetSpan}
children := []*SpanForTraceDetails{}
preParents := []*model.SpanForTraceDetails{targetSpan}
children := []*model.SpanForTraceDetails{}

for i := 0; i < levelDown && len(preParents) != 0 && spanLimit > 0; i++ {
parents := []*SpanForTraceDetails{}
parents := []*model.SpanForTraceDetails{}
for _, parent := range preParents {
if spanLimit-len(parent.Children) <= 0 {
children = append(children, parent.Children[:spanLimit]...)
@@ -107,7 +108,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
}

// Store the final list of spans in the resultSpanSet map to avoid duplicates
resultSpansSet := make(map[*SpanForTraceDetails]struct{})
resultSpansSet := make(map[*model.SpanForTraceDetails]struct{})
resultSpansSet[targetSpan] = struct{}{}
for _, parent := range parents {
resultSpansSet[parent] = struct{}{}
@@ -168,12 +169,12 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
}

// buildSpanTrees builds trees of spans from a list of spans.
func buildSpanTrees(spansPtr *[]*SpanForTraceDetails) ([]*SpanForTraceDetails, error) {
func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTraceDetails, error) {

// Build a map of spanID to span for fast lookup
var roots []*SpanForTraceDetails
var roots []*model.SpanForTraceDetails
spans := *spansPtr
mapOfSpans := make(map[string]*SpanForTraceDetails, len(spans))
mapOfSpans := make(map[string]*model.SpanForTraceDetails, len(spans))

for _, span := range spans {
if span.ParentID == "" {
@@ -205,8 +206,8 @@ func buildSpanTrees(spansPtr *[]*SpanForTraceDetails) ([]*SpanForTraceDetails, e
}

// breadthFirstSearch performs a breadth-first search on the span tree to find the target span.
func breadthFirstSearch(spansPtr *SpanForTraceDetails, targetId string) (*SpanForTraceDetails, error) {
queue := []*SpanForTraceDetails{spansPtr}
func breadthFirstSearch(spansPtr *model.SpanForTraceDetails, targetId string) (*model.SpanForTraceDetails, error) {
queue := []*model.SpanForTraceDetails{spansPtr}
visited := make(map[string]bool)

for len(queue) > 0 {
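Note: the breadthFirstSearch shown above is a standard level-order walk over the span tree. A runnable, stripped-down sketch of the same idea (Span is a stand-in for model.SpanForTraceDetails, not the real type):

package main

import (
	"errors"
	"fmt"
)

// Span is a stripped-down stand-in for model.SpanForTraceDetails.
type Span struct {
	SpanID   string
	Children []*Span
}

// bfs walks the tree level by level, tracking visited IDs,
// until the target span is found.
func bfs(root *Span, targetID string) (*Span, error) {
	queue := []*Span{root}
	visited := make(map[string]bool)
	for len(queue) > 0 {
		current := queue[0]
		queue = queue[1:]
		if visited[current.SpanID] {
			continue // guards against malformed payloads with cycles
		}
		visited[current.SpanID] = true
		if current.SpanID == targetID {
			return current, nil
		}
		queue = append(queue, current.Children...)
	}
	return nil, errors.New("span not found")
}

func main() {
	root := &Span{SpanID: "a", Children: []*Span{{SpanID: "b"}, {SpanID: "c"}}}
	span, err := bfs(root, "c")
	fmt.Println(span.SpanID, err) // c <nil>
}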
@@ -18,14 +18,13 @@ import (
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/web"
@@ -44,11 +43,13 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
pqle "github.com/SigNoz/signoz/pkg/query-service/pqlEngine"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/query-service/utils"
@@ -115,6 +116,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
return nil, err
}

if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
return nil, err
}

if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
@@ -132,22 +137,27 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {

// set license manager as feature flag provider in dao
modelDao.SetFlagProvider(lm)
readerReady := make(chan bool)

fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
if err != nil {
return nil, err
}

reader := db.NewDataConnector(
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
var reader interfaces.DataConnector
qb := db.NewDataConnector(
serverOptions.SigNoz.SQLStore.SQLxDB(),
serverOptions.SigNoz.TelemetryStore.ClickHouseDB(),
serverOptions.PromConfigPath,
lm,
serverOptions.Cluster,
serverOptions.UseLogsNewSchema,
serverOptions.UseTraceNewSchema,
fluxIntervalForTraceDetail,
serverOptions.SigNoz.Cache,
)
go qb.Start(readerReady)
reader = qb

skipConfig := &basemodel.SkipConfig{}
if serverOptions.SkipTopLvlOpsPath != "" {
@@ -166,18 +176,19 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
c = cache.NewCache(cacheOpts)
}

<-readerReady
rm, err := makeRulesManager(
serverOptions.PromConfigPath,
serverOptions.RuleRepoURL,
serverOptions.SigNoz.SQLStore.SQLxDB(),
reader,
c,
serverOptions.DisableRules,
lm,
serverOptions.UseLogsNewSchema,
serverOptions.UseTraceNewSchema,
serverOptions.SigNoz.Alertmanager,
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
)

if err != nil {
@@ -222,7 +233,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}

// start the usagemanager
usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.Config.TelemetryStore.Clickhouse.DSN)
usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickHouseDB(), serverOptions.Config.TelemetryStore.ClickHouse.DSN)
if err != nil {
return nil, err
}
@@ -293,7 +304,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
&opAmpModel.AllAgents, agentConfMgr,
)

errorList := reader.PreloadMetricsMetadata(context.Background())
errorList := qb.PreloadMetricsMetadata(context.Background())
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
@@ -374,6 +385,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am)
apiHandler.MetricExplorerRoutes(r, am)
apiHandler.RegisterTraceFunnelsRoutes(r, am)

c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
@@ -526,27 +538,33 @@ func (s *Server) Stop() error {
}

func makeRulesManager(
promConfigPath,
ruleRepoURL string,
db *sqlx.DB,
ch baseint.Reader,
cache cache.Cache,
disableRules bool,
fm baseint.FeatureLookup,
useLogsNewSchema bool,
useTraceNewSchema bool,
alertmanager alertmanager.Alertmanager,
sqlstore sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
) (*baserules.Manager, error) {
// create engine
pqle, err := pqle.FromConfigPath(promConfigPath)
if err != nil {
return nil, fmt.Errorf("failed to create pql engine : %v", err)
}

// create manager opts
managerOpts := &baserules.ManagerOptions{
TelemetryStore: telemetryStore,
Prometheus: prometheus,
PqlEngine: pqle,
RepoURL: ruleRepoURL,
DBConn: db,
Context: context.Background(),
Logger: zap.L(),
DisableRules: disableRules,
FeatureFlags: fm,
Reader: ch,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),
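Note: NewServer above starts the data connector asynchronously (go qb.Start(readerReady)) and blocks on <-readerReady before building the rules manager. A minimal, runnable sketch of that readiness handshake, with a stand-in connector:

package main

import (
	"fmt"
	"time"
)

type connector struct{}

// Start signals on the channel once the connector is usable; the sleep
// stands in for migrations or connection warm-up.
func (c *connector) Start(ready chan bool) {
	time.Sleep(10 * time.Millisecond)
	ready <- true
}

func main() {
	readerReady := make(chan bool)
	qb := &connector{}
	go qb.Start(readerReady)

	<-readerReady // block until the reader reports ready
	fmt.Println("reader ready; safe to build the rules manager")
}

The handshake keeps startup ordering explicit: nothing that queries the reader is constructed until the reader has said it is ready.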
@@ -4,12 +4,13 @@ import (
"context"
"net/url"

"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/uptrace/bun"
)
@@ -35,10 +36,11 @@ type ModelDao interface {
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)

CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError
}
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"net/url"
"strings"
"time"

"github.com/SigNoz/signoz/ee/query-service/constants"
@@ -43,7 +44,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
}

user := &types.User{
ID: uuid.New().String(),
ID: uuid.NewString(),
Name: "",
Email: email,
Password: hash,
@@ -161,7 +162,12 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
// find domain from email
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
var emailDomain string
emailComponents := strings.Split(email, "@")
if len(emailComponents) > 0 {
emailDomain = emailComponents[1]
}
zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
return resp, apierr
}

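Note: the hunk above is a log-redaction change — only the domain part of the email is logged on failure. A small sketch of the extraction step; note the diff guards with len > 0 before indexing [1], while a stricter len == 2 check also covers strings without an "@":

package main

import (
	"fmt"
	"strings"
)

// emailDomain returns the part after "@", or "" when the input does
// not look like an address. This variant cannot index out of range.
func emailDomain(email string) string {
	parts := strings.Split(email, "@")
	if len(parts) == 2 {
		return parts[1]
	}
	return ""
}

func main() {
	fmt.Println(emailDomain("user@example.com")) // example.com
	fmt.Println(emailDomain("not-an-email"))     // (empty)
}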
@@ -6,53 +6,45 @@ import (
"time"

"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"

"github.com/SigNoz/signoz/pkg/types"
"go.uber.org/zap"
)

func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError) {
p.StorablePersonalAccessToken.OrgID = orgID
p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
_, err := m.DB().NewInsert().
Model(&p.StorablePersonalAccessToken).
Returning("id").
Exec(ctx)
if err != nil {
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}

createdByUser, _ := m.GetUser(ctx, p.UserID)
if createdByUser == nil {
p.CreatedByUser = types.PatUser{
p.CreatedByUser = model.User{
NotFound: true,
}
} else {
p.CreatedByUser = types.PatUser{
User: ossTypes.User{
ID: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: createdByUser.CreatedAt,
UpdatedAt: createdByUser.UpdatedAt,
},
ProfilePictureURL: createdByUser.ProfilePictureURL,
},
NotFound: false,
p.CreatedByUser = model.User{
Id: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
CreatedAt: createdByUser.CreatedAt.Unix(),
ProfilePictureURL: createdByUser.ProfilePictureURL,
NotFound: false,
}
}
return p, nil
}

func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError {
_, err := m.DB().NewUpdate().
Model(&p.StorablePersonalAccessToken).
Column("role", "name", "updated_at", "updated_by_user_id").
Where("id = ?", id.StringValue()).
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("revoked = false").
Exec(ctx)
@@ -63,7 +55,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.Gettable
return nil
}

func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}

if err := m.DB().NewSelect().
@@ -76,51 +68,41 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}

patsWithUsers := []types.GettablePAT{}
patsWithUsers := []model.PAT{}
for i := range pats {
patWithUser := types.GettablePAT{
patWithUser := model.PAT{
StorablePersonalAccessToken: pats[i],
}

createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
if createdByUser == nil {
patWithUser.CreatedByUser = types.PatUser{
patWithUser.CreatedByUser = model.User{
NotFound: true,
}
} else {
patWithUser.CreatedByUser = types.PatUser{
User: ossTypes.User{
ID: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: createdByUser.CreatedAt,
UpdatedAt: createdByUser.UpdatedAt,
},
ProfilePictureURL: createdByUser.ProfilePictureURL,
},
NotFound: false,
patWithUser.CreatedByUser = model.User{
Id: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
CreatedAt: createdByUser.CreatedAt.Unix(),
ProfilePictureURL: createdByUser.ProfilePictureURL,
NotFound: false,
}
}

updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
if updatedByUser == nil {
patWithUser.UpdatedByUser = types.PatUser{
patWithUser.UpdatedByUser = model.User{
NotFound: true,
}
} else {
patWithUser.UpdatedByUser = types.PatUser{
User: ossTypes.User{
ID: updatedByUser.ID,
Name: updatedByUser.Name,
Email: updatedByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: updatedByUser.CreatedAt,
UpdatedAt: updatedByUser.UpdatedAt,
},
ProfilePictureURL: updatedByUser.ProfilePictureURL,
},
NotFound: false,
patWithUser.UpdatedByUser = model.User{
Id: updatedByUser.ID,
Name: updatedByUser.Name,
Email: updatedByUser.Email,
CreatedAt: updatedByUser.CreatedAt.Unix(),
ProfilePictureURL: updatedByUser.ProfilePictureURL,
NotFound: false,
}
}

@@ -129,14 +111,14 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
return patsWithUsers, nil
}

func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().NewUpdate().
Model(&types.StorablePersonalAccessToken{}).
Set("revoked = ?", true).
Set("updated_by_user_id = ?", userID).
Set("updated_at = ?", updatedAt).
Where("id = ?", id.StringValue()).
Where("id = ?", id).
Where("org_id = ?", orgID).
Exec(ctx)
if err != nil {
@@ -146,7 +128,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID,
return nil
}

func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}

if err := m.DB().NewSelect().
@@ -164,19 +146,19 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT
}
}

patWithUser := types.GettablePAT{
patWithUser := model.PAT{
StorablePersonalAccessToken: pats[0],
}

return &patWithUser, nil
}

func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}

if err := m.DB().NewSelect().
Model(&pats).
Where("id = ?", id.StringValue()).
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("revoked = false").
Scan(ctx); err != nil {
@@ -190,9 +172,33 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID)
}
}

patWithUser := types.GettablePAT{
patWithUser := model.PAT{
StorablePersonalAccessToken: pats[0],
}

return &patWithUser, nil
}

// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*types.GettableUser, basemodel.BaseApiError) {
users := []types.GettableUser{}

if err := m.DB().NewSelect().
Model(&users).
Column("u.id", "u.name", "u.email", "u.password", "u.created_at", "u.profile_picture_url", "u.org_id", "u.group_id").
Join("JOIN personal_access_tokens p ON u.id = p.user_id").
Where("p.token = ?", token).
Where("p.expires_at >= strftime('%s', 'now')").
Where("p.org_id = ?", orgID).
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
}

if len(users) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple users with same PAT token"),
}
}
return &users[0], nil
}
@@ -7,5 +7,6 @@ import (
// Connector defines methods for interaction
// with o11y data. for example - clickhouse
type DataConnector interface {
Start(readerReady chan bool)
baseint.Reader
}
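Note: DataConnector embeds baseint.Reader, so any DataConnector value also satisfies Reader. A runnable sketch of that interface-embedding pattern, with stand-in types (not the SigNoz interfaces themselves):

package main

import "fmt"

type Reader interface {
	Read() string
}

// DataConnector embeds Reader: its method set is Start plus Read.
type DataConnector interface {
	Start(readerReady chan bool)
	Reader
}

type clickhouseReader struct{}

func (c *clickhouseReader) Read() string          { return "rows" }
func (c *clickhouseReader) Start(ready chan bool) { ready <- true }

// useReader accepts the narrower interface.
func useReader(r Reader) { fmt.Println(r.Read()) }

func main() {
	var dc DataConnector = &clickhouseReader{}
	useReader(dc) // a DataConnector is usable wherever a Reader is expected
}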
@@ -7,29 +7,34 @@ import (
"time"

"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"

prommodel "github.com/prometheus/common/model"

"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)

func initZapLog() *zap.Logger {
config := zap.NewProductionConfig()
config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
config.EncoderConfig.TimeKey = "timestamp"
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
logger, _ := config.Build()
return logger
}

func init() {
prommodel.NameValidationScheme = prommodel.UTF8Validation
}

func main() {
var promConfigPath, skipTopLvlOpsPath string

@@ -83,7 +88,6 @@ func main() {
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
})
if err != nil {
zap.L().Fatal("Failed to create config", zap.Error(err))
@@ -91,21 +95,16 @@ func main() {

version.Info.PrettyPrint(config.Version)

sqlStoreFactories := signoz.NewSQLStoreProviderFactories()
if err := sqlStoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
}

signoz, err := signoz.New(
context.Background(),
config,
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
sqlStoreFactories,
signoz.NewSQLStoreProviderFactories(),
signoz.NewTelemetryStoreProviderFactories(),
)
if err != nil {
zap.L().Fatal("Failed to create signoz", zap.Error(err))
zap.L().Fatal("Failed to create signoz struct", zap.Error(err))
}

jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
@@ -157,6 +157,8 @@ func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
}

switch planName {
case PlanNameTeams:
features = append(features, ProPlan...)
case PlanNameEnterprise:
features = append(features, EnterprisePlan...)
case PlanNameBasic:
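Note: the switch above is a plain plan-name to feature-set dispatch — TEAMS licenses now receive the Pro set. A minimal stand-in (feature names fabricated for illustration, not the real constants):

package main

import "fmt"

func featuresFor(planName string) []string {
	base := []string{"QUERY_BUILDER_PANELS"}
	switch planName {
	case "TEAMS":
		return append(base, "SSO", "GATEWAY") // ProPlan...
	case "ENTERPRISE":
		return append(base, "SSO", "GATEWAY", "ANOMALY_DETECTION") // EnterprisePlan...
	default: // BASIC and unknown plans fall back to the base set
		return base
	}
}

func main() {
	fmt.Println(featuresFor("TEAMS"))
}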
@@ -74,21 +74,21 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "Parse the entire license properly",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
"name": "TEAMS",
},
"category": "FREE",
"status": "ACTIVE",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameEnterprise,
PlanName: PlanNameTeams,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "ACTIVE",
@@ -98,14 +98,14 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "Fallback to basic plan if license status is invalid",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
"name": "TEAMS",
},
"category": "FREE",
"status": "INVALID",
@@ -122,21 +122,21 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "fallback states for validFrom and validUntil",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
"name": "TEAMS",
},
"valid_from": 1234.456,
"valid_until": 5678.567,
"category": "FREE",
"status": "ACTIVE",
},
PlanName: PlanNameEnterprise,
PlanName: PlanNameTeams,
ValidFrom: 1234,
ValidUntil: 5678,
Status: "ACTIVE",
@@ -1,7 +1,25 @@
package model

import "github.com/SigNoz/signoz/pkg/types"

type User struct {
Id                string `json:"id" db:"id"`
Name              string `json:"name" db:"name"`
Email             string `json:"email" db:"email"`
CreatedAt         int64  `json:"createdAt" db:"created_at"`
ProfilePictureURL string `json:"profilePictureURL" db:"profile_picture_url"`
NotFound          bool   `json:"notFound"`
}

type CreatePATRequestBody struct {
Name          string `json:"name"`
Role          string `json:"role"`
ExpiresInDays int64  `json:"expiresInDays"`
}

type PAT struct {
CreatedByUser User `json:"createdByUser"`
UpdatedByUser User `json:"updatedByUser"`

types.StorablePersonalAccessToken
}
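Note: PAT embeds types.StorablePersonalAccessToken anonymously, so the embedded struct's json-tagged fields are promoted and serialize at the top level next to createdByUser/updatedByUser. A runnable sketch with stand-in types:

package main

import (
	"encoding/json"
	"fmt"
)

// StorableToken stands in for the embedded StorablePersonalAccessToken.
type StorableToken struct {
	Name  string `json:"name"`
	Token string `json:"token"`
}

type User struct {
	Id string `json:"id"`
}

type PAT struct {
	CreatedByUser User `json:"createdByUser"`
	StorableToken       // embedded: fields flatten into the PAT object
}

func main() {
	b, _ := json.Marshal(PAT{
		CreatedByUser: User{Id: "u1"},
		StorableToken: StorableToken{Name: "ci-token", Token: "abc"},
	})
	fmt.Println(string(b))
	// {"createdByUser":{"id":"u1"},"name":"ci-token","token":"abc"}
}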
@@ -1,26 +1,30 @@
package model

import (
"github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)

const SSO = "SSO"
const Basic = "BASIC_PLAN"
const Pro = "PRO_PLAN"
const Enterprise = "ENTERPRISE_PLAN"

var (
PlanNameEnterprise = "ENTERPRISE"
PlanNameTeams = "TEAMS"
PlanNameBasic = "BASIC"
)

var (
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameTeams: Pro, PlanNameEnterprise: Enterprise}
)

var (
LicenseStatusInvalid = "INVALID"
)

const DisableUpsell = "DISABLE_UPSELL"
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
@@ -34,6 +38,90 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: DisableUpsell,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
@@ -63,12 +151,134 @@ var BasicPlan = basemodel.FeatureSet{
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
}

var ProPlan = basemodel.FeatureSet{
basemodel.Feature{
Name: SSO,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var EnterprisePlan = basemodel.FeatureSet{
@@ -79,6 +289,83 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
@@ -122,8 +409,8 @@ var EnterprisePlan = basemodel.FeatureSet{
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Active: false,
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
@@ -1,4 +1,4 @@
package smart
package model

type SpanForTraceDetails struct {
TimeUnixNano uint64 `json:"timestamp"`
@@ -15,3 +15,8 @@ type SpanForTraceDetails struct {
HasError bool `json:"hasError"`
Children []*SpanForTraceDetails `json:"children"`
}

type GetSpansSubQueryDBResponse struct {
SpanID  string `ch:"spanID"`
TraceID string `ch:"traceID"`
}
@@ -53,6 +53,7 @@ type AnomalyRule struct {
func NewAnomalyRule(
id string,
p *baserules.PostableRule,
featureFlags interfaces.FeatureLookup,
reader interfaces.Reader,
cache cache.Cache,
opts ...baserules.RuleOption,
@@ -88,9 +89,10 @@ func NewAnomalyRule(
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))

querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
Reader:        reader,
Cache:         cache,
KeyGenerator:  queryBuilder.NewKeyGenerator(),
FeatureLookup: featureFlags,
}

t.querierV2 = querierV2.NewQuerier(querierOptsV2)
@@ -100,18 +102,21 @@ func NewAnomalyRule(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
)
}
return &t, nil

@@ -23,6 +23,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
tr, err := baserules.NewThresholdRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
opts.UseTraceNewSchema,
@@ -47,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
opts.Rule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.Prometheus,
opts.ManagerOpts.PqlEngine,
baserules.WithSQLStore(opts.SQLStore),
)

@@ -65,6 +66,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
ar, err := NewAnomalyRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
@@ -121,6 +123,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
rule, err = baserules.NewThresholdRule(
alertname,
parsedRule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
opts.UseTraceNewSchema,
@@ -142,7 +145,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
parsedRule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.Prometheus,
opts.ManagerOpts.PqlEngine,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
baserules.WithSQLStore(opts.SQLStore),
@@ -157,6 +160,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
rule, err = NewAnomalyRule(
alertname,
parsedRule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithSendAlways(),
@@ -1,370 +0,0 @@
|
||||
package postgressqlstore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"slices"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
var (
|
||||
Identity = "id"
|
||||
Integer = "bigint"
|
||||
Text = "text"
|
||||
)
|
||||
|
||||
var (
|
||||
Org = "org"
|
||||
User = "user"
|
||||
CloudIntegration = "cloud_integration"
|
||||
)
|
||||
|
||||
var (
|
||||
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
||||
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
|
||||
)
|
||||
|
||||
type dialect struct {
|
||||
}
|
||||
|
||||
func (dialect *dialect) MigrateIntToTimestamp(ctx context.Context, bun bun.IDB, table string, column string) error {
|
||||
columnType, err := dialect.GetColumnType(ctx, bun, table, column)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// bigint for postgres and INTEGER for sqlite
|
||||
if columnType != "bigint" {
|
||||
return nil
|
||||
}
|
||||
|
||||
// if the columns is integer then do this
|
||||
if _, err := bun.
|
||||
ExecContext(ctx, `ALTER TABLE `+table+` RENAME COLUMN `+column+` TO `+column+`_old`); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// add new timestamp column
|
||||
if _, err := bun.
|
||||
NewAddColumn().
|
||||
Table(table).
|
||||
ColumnExpr(column + " TIMESTAMP").
|
||||
Exec(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err := bun.
|
||||
NewUpdate().
|
||||
Table(table).
|
||||
Set(column + " = to_timestamp(cast(" + column + "_old as INTEGER))").
|
||||
Where("1=1").
|
||||
Exec(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// drop old column
|
||||
if _, err := bun.
|
||||
NewDropColumn().
|
||||
Table(table).
|
||||
Column(column + "_old").
|
||||
Exec(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) MigrateIntToBoolean(ctx context.Context, bun bun.IDB, table string, column string) error {
|
||||
columnType, err := dialect.GetColumnType(ctx, bun, table, column)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if columnType != "bigint" {
|
||||
return nil
|
||||
}
|
||||
|
||||
if _, err := bun.
|
||||
ExecContext(ctx, `ALTER TABLE `+table+` RENAME COLUMN `+column+` TO `+column+`_old`); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// add new boolean column
|
||||
if _, err := bun.
|
||||
NewAddColumn().
|
||||
Table(table).
|
||||
ColumnExpr(column + " BOOLEAN").
|
||||
Exec(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// copy data from old column to new column, converting from int to boolean
|
||||
if _, err := bun.NewUpdate().
|
||||
Table(table).
|
||||
Set(column + " = CASE WHEN " + column + "_old = 1 THEN true ELSE false END").
|
||||
Where("1=1").
|
||||
Exec(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// drop old column
|
||||
if _, err := bun.NewDropColumn().Table(table).Column(column + "_old").Exec(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) GetColumnType(ctx context.Context, bun bun.IDB, table string, column string) (string, error) {
|
||||
var columnType string
|
||||
|
||||
err := bun.NewSelect().
|
||||
ColumnExpr("data_type").
|
||||
TableExpr("information_schema.columns").
|
||||
Where("table_name = ?", table).
|
||||
Where("column_name = ?", column).
|
||||
Scan(ctx, &columnType)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return columnType, nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) ColumnExists(ctx context.Context, bun bun.IDB, table string, column string) (bool, error) {
|
||||
var count int
|
||||
err := bun.NewSelect().
|
||||
ColumnExpr("COUNT(*)").
|
||||
TableExpr("information_schema.columns").
|
||||
Where("table_name = ?", table).
|
||||
Where("column_name = ?", column).
|
||||
Scan(ctx, &count)
|
||||
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return count > 0, nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) {
|
||||
oldColumnExists, err := dialect.ColumnExists(ctx, bun, table, oldColumnName)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
newColumnExists, err := dialect.ColumnExists(ctx, bun, table, newColumnName)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if !oldColumnExists && newColumnExists {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
ExecContext(ctx, "ALTER TABLE "+table+" RENAME COLUMN "+oldColumnName+" TO "+newColumnName)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table interface{}) (bool, error) {
|
||||
|
||||
count := 0
|
||||
err := bun.
|
||||
NewSelect().
|
||||
ColumnExpr("count(*)").
|
||||
Table("pg_catalog.pg_tables").
|
||||
Where("tablename = ?", bun.Dialect().Tables().Get(reflect.TypeOf(table)).Name).
|
||||
Scan(ctx, &count)
|
||||
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if count == 0 {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, references []string, cb func(context.Context) error) error {
|
||||
if len(references) == 0 {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
|
||||
}
|
||||
exists, err := dialect.TableExists(ctx, bun, newModel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if exists {
|
||||
return nil
|
||||
}
|
||||
|
||||
var fkReferences []string
|
||||
for _, reference := range references {
|
||||
if reference == Org && !slices.Contains(fkReferences, OrgReference) {
|
||||
fkReferences = append(fkReferences, OrgReference)
|
||||
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
|
||||
fkReferences = append(fkReferences, UserReference)
|
||||
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
|
||||
fkReferences = append(fkReferences, CloudIntegrationReference)
|
||||
}
|
||||
}
|
||||
|
||||
createTable := bun.
|
||||
NewCreateTable().
|
||||
IfNotExists().
|
||||
Model(newModel)
|
||||
|
||||
for _, fk := range fkReferences {
|
||||
createTable = createTable.ForeignKey(fk)
|
||||
}
|
||||
|
||||
_, err = createTable.Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = cb(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
NewDropTable().
|
||||
IfExists().
|
||||
Model(oldModel).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) AddNotNullDefaultToColumn(ctx context.Context, bun bun.IDB, table string, column, columnType, defaultValue string) error {
|
||||
query := fmt.Sprintf("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s, ALTER COLUMN %s SET NOT NULL", table, column, defaultValue, column)
|
||||
if _, err := bun.ExecContext(ctx, query); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}

func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
	if reference == "" {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
	}
	oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
	newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name

	columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
	if err != nil {
		return err
	}
	if columnType == Text {
		return nil
	}

	fkReference := ""
	if reference == Org {
		fkReference = OrgReference
	} else if reference == User {
		fkReference = UserReference
	}

	_, err = bun.
		NewCreateTable().
		IfNotExists().
		Model(newModel).
		ForeignKey(fkReference).
		Exec(ctx)

	if err != nil {
		return err
	}

	err = cb(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		NewDropTable().
		IfExists().
		Model(oldModel).
		Exec(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
	if err != nil {
		return err
	}

	return nil
}

func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
	if reference == "" {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
	}
	oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
	newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name

	identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
	if err != nil {
		return err
	}
	if identityExists {
		return nil
	}

	fkReference := ""
	if reference == Org {
		fkReference = OrgReference
	} else if reference == User {
		fkReference = UserReference
	}

	_, err = bun.
		NewCreateTable().
		IfNotExists().
		Model(newModel).
		ForeignKey(fkReference).
		Exec(ctx)

	if err != nil {
		return err
	}

	err = cb(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		NewDropTable().
		IfExists().
		Model(oldModel).
		Exec(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
	if err != nil {
		return err
	}

	return nil
}
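
// Both UpdatePrimaryKey and AddPrimaryKey above use the same
// create-copy-drop-rename pattern, a common approach when a primary key
// cannot be altered in place: create the new table (with its foreign key),
// let the callback copy rows across, drop the old table, and rename the new
// table to the old name. They differ only in the fast path: UpdatePrimaryKey
// returns early when the identity column is already TEXT, AddPrimaryKey when
// the identity column already exists. A hedged caller sketch, with
// hypothetical old/new models:
//
//	err := dialect.UpdatePrimaryKey(ctx, db, new(existingOrgUser), new(newOrgUser), Org, func(ctx context.Context) error {
//		// copy rows from the old table into the new one here
//		return nil
//	})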

@@ -1,76 +0,0 @@
package types

import (
	"crypto/rand"
	"encoding/base64"
	"time"

	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/uptrace/bun"
)

type GettablePAT struct {
	CreatedByUser PatUser `json:"createdByUser"`
	UpdatedByUser PatUser `json:"updatedByUser"`

	StorablePersonalAccessToken
}

type PatUser struct {
	types.User
	NotFound bool `json:"notFound"`
}

func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
	return GettablePAT{
		StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
	}
}

type StorablePersonalAccessToken struct {
	bun.BaseModel `bun:"table:personal_access_token"`
	types.Identifiable
	types.TimeAuditable
	OrgID           string `json:"orgId" bun:"org_id,type:text,notnull"`
	Role            string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
	UserID          string `json:"userId" bun:"user_id,type:text,notnull"`
	Token           string `json:"token" bun:"token,type:text,notnull,unique"`
	Name            string `json:"name" bun:"name,type:text,notnull"`
	ExpiresAt       int64  `json:"expiresAt" bun:"expires_at,notnull,default:0"`
	LastUsed        int64  `json:"lastUsed" bun:"last_used,notnull,default:0"`
	Revoked         bool   `json:"revoked" bun:"revoked,notnull,default:false"`
	UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
}

func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
	now := time.Now()
	if expiresAt != 0 {
		// convert expiresAt from a number of days to an absolute unix timestamp
		expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
	}

	// Generate a 32-byte random token.
	token := make([]byte, 32)
	rand.Read(token)
	// Encode the token in base64.
	encodedToken := base64.StdEncoding.EncodeToString(token)

	return StorablePersonalAccessToken{
		Token:           encodedToken,
		Name:            name,
		Role:            role,
		UserID:          userID,
		ExpiresAt:       expiresAt,
		LastUsed:        0,
		Revoked:         false,
		UpdatedByUserID: "",
		TimeAuditable: types.TimeAuditable{
			CreatedAt: now,
			UpdatedAt: now,
		},
		Identifiable: types.Identifiable{
			ID: valuer.GenerateUUID(),
		},
	}
}
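
// A hedged usage sketch (not part of the original diff). expiresAt is passed
// as a number of days and converted to an absolute unix timestamp, e.g. with
// now.Unix() == 1_700_000_000 and expiresAt == 30, the stored value is
// 1_700_000_000 + 30*24*60*60 = 1_702_592_000. The token name and user ID
// below are hypothetical examples; the "ADMIN" role matches the column default.
//
//	pat := NewGettablePAT("ci-token", "ADMIN", "user-123", 30)
//	fmt.Println(pat.Token, time.Unix(pat.ExpiresAt, 0))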

@@ -90,7 +90,7 @@
    "less": "^4.1.2",
    "less-loader": "^10.2.0",
    "lodash-es": "^4.17.21",
    "lucide-react": "0.427.0",
    "lucide-react": "0.379.0",
    "mini-css-extract-plugin": "2.4.5",
    "motion": "12.4.13",
    "overlayscrollbars": "^2.8.1",

@@ -18,13 +18,6 @@
  "field_send_resolved": "Send resolved alerts",
  "field_channel_type": "Type",
  "field_webhook_url": "Webhook URL",
  "tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
  "tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
  "tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
  "tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
  "tooltip_email_to": "Enter email addresses separated by commas.",
  "tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",

  "field_slack_recipient": "Recipient",
  "field_slack_title": "Title",
  "field_slack_description": "Description",

@@ -18,12 +18,6 @@
  "field_send_resolved": "Send resolved alerts",
  "field_channel_type": "Type",
  "field_webhook_url": "Webhook URL",
  "tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
  "tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
  "tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
  "tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
  "tooltip_email_to": "Enter email addresses separated by commas.",
  "tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
  "field_slack_recipient": "Recipient",
  "field_slack_title": "Title",
  "field_slack_description": "Description",

@@ -60,14 +60,10 @@
  "INTEGRATIONS": "SigNoz | Integrations",
  "ALERT_HISTORY": "SigNoz | Alert Rule History",
  "ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
  "MESSAGING_QUEUES_OVERVIEW": "SigNoz | Messaging Queues",
  "MESSAGING_QUEUES_KAFKA": "SigNoz | Messaging Queues | Kafka",
  "MESSAGING_QUEUES_KAFKA_DETAIL": "SigNoz | Messaging Queues | Kafka",
  "MESSAGING_QUEUES_CELERY_TASK": "SigNoz | Messaging Queues | Celery",
  "MESSAGING_QUEUES": "SigNoz | Messaging Queues",
  "INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
  "INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
  "METRICS_EXPLORER": "SigNoz | Metrics Explorer",
  "METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
  "METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
  "API_MONITORING": "SigNoz | API Monitoring"
  "METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer"
}

@@ -1,4 +1,3 @@
import * as Sentry from '@sentry/react';
import { ConfigProvider } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
@@ -16,7 +15,6 @@ import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
@@ -28,7 +26,6 @@ import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { extractDomain } from 'utils/app';

import { Home } from './pageComponents';
import PrivateRoute from './Private';
import defaultRoutes, {
  AppRoutes,
@@ -48,6 +45,7 @@ function App(): JSX.Element {
    activeLicenseV3,
    isFetchingActiveLicenseV3,
    userFetchError,
    licensesFetchError,
    featureFlagsFetchError,
    isLoggedIn: isLoggedInState,
    featureFlags,
@@ -57,7 +55,10 @@ function App(): JSX.Element {

  const { hostname, pathname } = window.location;

  const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
  const {
    isCloudUser: isCloudUserVal,
    isEECloudUser: isEECloudUserVal,
  } = useGetTenantLicense();

  const enableAnalytics = useCallback(
    (user: IUser): void => {
@@ -167,7 +168,7 @@ function App(): JSX.Element {

    let updatedRoutes = defaultRoutes;
    // if the user is a cloud user
    if (isCloudUser || isEnterpriseSelfHostedUser) {
    if (isCloudUserVal || isEECloudUserVal) {
      // if the user is on basic plan then remove billing
      if (isOnBasicPlan) {
        updatedRoutes = updatedRoutes.filter(
@@ -189,10 +190,10 @@ function App(): JSX.Element {
    isLoggedInState,
    user,
    licenses,
    isCloudUser,
    isEnterpriseSelfHostedUser,
    isCloudUserVal,
    isFetchingLicenses,
    isFetchingUser,
    isEECloudUserVal,
  ]);

  useEffect(() => {
@@ -207,7 +208,6 @@ function App(): JSX.Element {
    }
  }, [pathname]);

  // eslint-disable-next-line sonarjs/cognitive-complexity
  useEffect(() => {
    // feature flags shouldn't still be loading, and either featureFlags or fetchError should be set, indicating that the request is complete
    // licenses should also be present. there is no loading or error check for licenses, as they are mandatory; if not present then routing
@@ -233,12 +233,7 @@ function App(): JSX.Element {
    const showAddCreditCardModal =
      !isPremiumSupportEnabled && !trialInfo?.trialConvertedToSubscription;

    if (
      isLoggedInState &&
      isChatSupportEnabled &&
      !showAddCreditCardModal &&
      (isCloudUser || isEnterpriseSelfHostedUser)
    ) {
    if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
      window.Intercom('boot', {
        app_id: process.env.INTERCOM_APP_ID,
        email: user?.email || '',
@@ -257,53 +252,13 @@ function App(): JSX.Element {
    licenses,
    activeLicenseV3,
    trialInfo,
    isCloudUser,
    isEnterpriseSelfHostedUser,
  ]);

  useEffect(() => {
    if (!isFetchingUser && isCloudUser && user && user.email) {
    if (!isFetchingUser && isCloudUserVal && user && user.email) {
      enableAnalytics(user);
    }
  }, [user, isFetchingUser, isCloudUser, enableAnalytics]);

  useEffect(() => {
    if (isCloudUser || isEnterpriseSelfHostedUser) {
      if (process.env.POSTHOG_KEY) {
        posthog.init(process.env.POSTHOG_KEY, {
          api_host: 'https://us.i.posthog.com',
          person_profiles: 'identified_only', // or 'always' to create profiles for anonymous users as well
        });
      }

      Sentry.init({
        dsn: process.env.SENTRY_DSN,
        tunnel: process.env.TUNNEL_URL,
        environment: 'production',
        integrations: [
          Sentry.browserTracingIntegration(),
          Sentry.replayIntegration({
            maskAllText: false,
            blockAllMedia: false,
          }),
        ],
        // Performance Monitoring
        tracesSampleRate: 1.0, // Capture 100% of the transactions
        // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
        tracePropagationTargets: [],
        // Session Replay
        replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
        replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
      });
    } else {
      posthog.reset();
      Sentry.close();

      if (window.cioanalytics && typeof window.cioanalytics.reset === 'function') {
        window.cioanalytics.reset();
      }
    }
  }, [isCloudUser, isEnterpriseSelfHostedUser]);
  }, [user, isFetchingUser, isCloudUserVal, enableAnalytics]);

  // if the user is in logged in state
  if (isLoggedInState) {
@@ -315,55 +270,60 @@ function App(): JSX.Element {
    // if the required calls fail then show a something went wrong error
    // this needs to be above the data-missing check because if there is an error, data will never be loaded and it would
    // get stuck in indefinite loading
    if (userFetchError && pathname !== ROUTES.SOMETHING_WENT_WRONG) {
    if (
      (userFetchError || licensesFetchError) &&
      pathname !== ROUTES.SOMETHING_WENT_WRONG
    ) {
      history.replace(ROUTES.SOMETHING_WENT_WRONG);
    }

    // if all of the data is not yet set then return a spinner; this is required because there is a gap between the loading states and the data being set
    if ((!licenses || !user.email || !featureFlags) && !userFetchError) {
    if (
      (!licenses || !user.email || !featureFlags) &&
      !userFetchError &&
      !licensesFetchError
    ) {
      return <Spinner tip="Loading..." />;
    }
  }

  return (
    <Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
      <ConfigProvider theme={themeConfig}>
        <Router history={history}>
          <CompatRouter>
            <NotificationProvider>
              <PrivateRoute>
                <ResourceProvider>
                  <QueryBuilderProvider>
                    <DashboardProvider>
                      <KeyboardHotkeysProvider>
                        <AlertRuleProvider>
                          <AppLayout>
                            <Suspense fallback={<Spinner size="large" tip="Loading..." />}>
                              <Switch>
                                {routes.map(({ path, component, exact }) => (
                                  <Route
                                    key={`${path}`}
                                    exact={exact}
                                    path={path}
                                    component={component}
                                  />
                                ))}
                                <Route exact path="/" component={Home} />
                                <Route path="*" component={NotFound} />
                              </Switch>
                            </Suspense>
                          </AppLayout>
                        </AlertRuleProvider>
                      </KeyboardHotkeysProvider>
                    </DashboardProvider>
                  </QueryBuilderProvider>
                </ResourceProvider>
              </PrivateRoute>
            </NotificationProvider>
          </CompatRouter>
        </Router>
      </ConfigProvider>
    </Sentry.ErrorBoundary>
    <ConfigProvider theme={themeConfig}>
      <Router history={history}>
        <CompatRouter>
          <NotificationProvider>
            <PrivateRoute>
              <ResourceProvider>
                <QueryBuilderProvider>
                  <DashboardProvider>
                    <KeyboardHotkeysProvider>
                      <AlertRuleProvider>
                        <AppLayout>
                          <Suspense fallback={<Spinner size="large" tip="Loading..." />}>
                            <Switch>
                              {routes.map(({ path, component, exact }) => (
                                <Route
                                  key={`${path}`}
                                  exact={exact}
                                  path={path}
                                  component={component}
                                />
                              ))}

                              <Route path="*" component={NotFound} />
                            </Switch>
                          </Suspense>
                        </AppLayout>
                      </AlertRuleProvider>
                    </KeyboardHotkeysProvider>
                  </DashboardProvider>
                </QueryBuilderProvider>
              </ResourceProvider>
            </PrivateRoute>
          </NotificationProvider>
        </CompatRouter>
      </Router>
    </ConfigProvider>
  );
}

@@ -295,7 +295,3 @@ export const MetricsExplorer = Loadable(
  () =>
    import(/* webpackChunkName: "MetricsExplorer" */ 'pages/MetricsExplorer'),
);

export const ApiMonitoring = Loadable(
  () => import(/* webpackChunkName: "ApiMonitoring" */ 'pages/ApiMonitoring'),
);

@@ -8,7 +8,6 @@ import {
  AllAlertChannels,
  AllErrors,
  APIKeys,
  ApiMonitoring,
  BillingPage,
  CreateAlertChannelAlerts,
  CreateNewAlerts,
@@ -498,13 +497,6 @@ const routes: AppRoutes[] = [
    key: 'METRICS_EXPLORER_VIEWS',
    isPrivate: true,
  },
  {
    path: ROUTES.API_MONITORING,
    exact: true,
    component: ApiMonitoring,
    key: 'API_MONITORING',
    isPrivate: true,
  },
];

export const SUPPORT_ROUTE: AppRoutes = {

@@ -11,12 +11,9 @@ const logEvent = async (
  rateLimited?: boolean,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
  try {
    // add tenant_url to attributes
    const { hostname } = window.location;
    const updatedAttributes = { ...attributes, tenant_url: hostname };
    const response = await axios.post('/event', {
      eventName,
      attributes: updatedAttributes,
      attributes,
      eventType: eventType || 'track',
      rateLimited: rateLimited || false, // TODO: Update this once we have a proper way to handle rate limiting
    });

@@ -521,7 +521,7 @@ export default function CeleryOverviewTable({
      locale={{
        emptyText: isLoading ? null : <Typography.Text>No data</Typography.Text>,
      }}
      scroll={{ x: 'max-content' }}
      scroll={{ x: true }}
      showSorterTooltip
      onDragColumn={handleDragColumn}
      onRow={(record): { onClick: () => void; className: string } => ({

@@ -199,12 +199,12 @@ function ExplorerCard({
          value={viewName || undefined}
        >
          {viewsData?.data.data.map((view) => (
            <Select.Option key={view.id} value={view.name}>
            <Select.Option key={view.uuid} value={view.name}>
              <MenuItemGenerator
                viewName={view.name}
                viewKey={viewKey}
                createdBy={view.createdBy}
                uuid={view.id}
                uuid={view.uuid}
                refetchAllView={refetchAllView}
                viewData={viewsData.data.data}
                sourcePage={sourcepage}

@@ -53,12 +53,17 @@ function MenuItemGenerator({
    ({ key }: { key: string }): void => {
      const currentViewDetails = getViewDetailsUsingViewKey(key, viewData);
      if (!currentViewDetails) return;
      const { query, name, id, panelType: currentPanelType } = currentViewDetails;
      const {
        query,
        name,
        uuid,
        panelType: currentPanelType,
      } = currentViewDetails;

      handleExplorerTabChange(currentPanelType, {
        query,
        name,
        id,
        uuid,
      });
    },
    [viewData, handleExplorerTabChange],

@@ -4,7 +4,7 @@ import { DataSource } from 'types/common/queryBuilder';

export const viewMockData: ViewProps[] = [
  {
    id: 'view1',
    uuid: 'view1',
    name: 'View 1',
    createdBy: 'User 1',
    category: 'category 1',
@@ -17,7 +17,7 @@ export const viewMockData: ViewProps[] = [
    updatedBy: 'User 1',
  },
  {
    id: 'view2',
    uuid: 'view2',
    name: 'View 2',
    createdBy: 'User 2',
    category: 'category 2',

@@ -25,9 +25,9 @@ describe('MenuItemGenerator', () => {
      <MockQueryClientProvider>
        <MenuItemGenerator
          viewName={viewMockData[0].name}
          viewKey={viewMockData[0].id}
          viewKey={viewMockData[0].uuid}
          createdBy={viewMockData[0].createdBy}
          uuid={viewMockData[0].id}
          uuid={viewMockData[0].uuid}
          refetchAllView={jest.fn()}
          viewData={viewMockData}
          sourcePage={DataSource.TRACES}
@@ -43,9 +43,9 @@ describe('MenuItemGenerator', () => {
      <MockQueryClientProvider>
        <MenuItemGenerator
          viewName={viewMockData[0].name}
          viewKey={viewMockData[0].id}
          viewKey={viewMockData[0].uuid}
          createdBy={viewMockData[0].createdBy}
          uuid={viewMockData[0].id}
          uuid={viewMockData[0].uuid}
          refetchAllView={jest.fn()}
          viewData={viewMockData}
          sourcePage={DataSource.TRACES}

@@ -26,7 +26,7 @@ export type GetViewDetailsUsingViewKey = (
  | {
      query: Query;
      name: string;
      id: string;
      uuid: string;
      panelType: PANEL_TYPES;
      extraData?: string;
    }

@@ -27,11 +27,11 @@ export const getViewDetailsUsingViewKey: GetViewDetailsUsingViewKey = (
  viewKey,
  data,
) => {
  const selectedView = data?.find((view) => view.id === viewKey);
  const selectedView = data?.find((view) => view.uuid === viewKey);
  if (selectedView) {
    const { compositeQuery, name, id, extraData } = selectedView;
    const { compositeQuery, name, uuid, extraData } = selectedView;
    const query = mapQueryDataFromApi(compositeQuery);
    return { query, name, id, panelType: compositeQuery.panelType, extraData };
    return { query, name, uuid, panelType: compositeQuery.panelType, extraData };
  }
  return undefined;
};

@@ -18,7 +18,6 @@ function CopyClipboardHOC({

    notifications.success({
      message: notificationMessage,
      key: notificationMessage,
    });
  }
}, [value, notifications, entityKey]);

@@ -63,31 +63,30 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {

  return (
    <div className="quick-filters">
      {source !== QuickFiltersSource.INFRA_MONITORING &&
        source !== QuickFiltersSource.API_MONITORING && (
        <section className="header">
          <section className="left-actions">
            <FilterOutlined />
            <Typography.Text className="text">Filters for</Typography.Text>
            <Tooltip title={`Filter currently in sync with query ${lastQueryName}`}>
              <Typography.Text className="sync-tag">{lastQueryName}</Typography.Text>
            </Tooltip>
          </section>

          <section className="right-actions">
            <Tooltip title="Reset All">
              <SyncOutlined className="sync-icon" onClick={handleReset} />
            </Tooltip>
            <div className="divider-filter" />
            <Tooltip title="Collapse Filters">
              <VerticalAlignTopOutlined
                rotate={270}
                onClick={handleFilterVisibilityChange}
              />
            </Tooltip>
          </section>
      {source !== QuickFiltersSource.INFRA_MONITORING && (
        <section className="header">
          <section className="left-actions">
            <FilterOutlined />
            <Typography.Text className="text">Filters for</Typography.Text>
            <Tooltip title={`Filter currently in sync with query ${lastQueryName}`}>
              <Typography.Text className="sync-tag">{lastQueryName}</Typography.Text>
            </Tooltip>
          </section>
        )}

          <section className="right-actions">
            <Tooltip title="Reset All">
              <SyncOutlined className="sync-icon" onClick={handleReset} />
            </Tooltip>
            <div className="divider-filter" />
            <Tooltip title="Collapse Filters">
              <VerticalAlignTopOutlined
                rotate={270}
                onClick={handleFilterVisibilityChange}
              />
            </Tooltip>
          </section>
        </section>
      )}

      <section className="filters">
        {config.map((filter) => {

@@ -39,5 +39,4 @@ export enum QuickFiltersSource {
  LOGS_EXPLORER = 'logs-explorer',
  INFRA_MONITORING = 'infra-monitoring',
  TRACES_EXPLORER = 'traces-explorer',
  API_MONITORING = 'api-monitoring',
}

@@ -1,5 +1,3 @@
import './ResizeTable.styles.scss';

import { SyntheticEvent, useMemo } from 'react';
import { Resizable, ResizeCallbackData } from 'react-resizable';

@@ -12,8 +10,8 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
  const handle = useMemo(
    () => (
      <SpanStyle
        className="react-resizable-handle"
        onClick={(e): void => e.stopPropagation()}
        className="resize-handle"
      />
    ),
    [],
@@ -21,7 +19,7 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {

  if (!width) {
    // eslint-disable-next-line react/jsx-props-no-spreading
    return <th {...restProps} className="resizable-header" />;
    return <th {...restProps} />;
  }

  return (
@@ -31,10 +29,9 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
      handle={handle}
      onResize={onResize}
      draggableOpts={enableUserSelectHack}
      minConstraints={[150, 0]}
    >
      {/* eslint-disable-next-line react/jsx-props-no-spreading */}
      <th {...restProps} className="resizable-header" />
      <th {...restProps} />
    </Resizable>
  );
}

@@ -1,53 +0,0 @@
.resizable-header {
  user-select: none;
  -webkit-user-select: none;
  -moz-user-select: none;
  -ms-user-select: none;
  position: relative;

  .ant-table-column-title {
    white-space: normal;
    overflow: hidden;
    text-overflow: ellipsis;
  }
}

.resize-main-table {
  .ant-table-body {
    .ant-table-tbody {
      .ant-table-row {
        .ant-table-cell {
          .ant-typography {
            white-space: unset;
          }
        }
      }
    }
  }
}

.logs-table,
.traces-table {
  .resize-table {
    .resize-handle {
      position: absolute;
      top: 0;
      bottom: 0;
      inset-inline-end: -5px;
      width: 10px;
      cursor: col-resize;

      &::after {
        content: '';
        position: absolute;
        top: 50%;
        left: 50%;
        transform: translate(-50%, -50%);
        width: 1px;
        height: 1.6em;
        background-color: var(--bg-slate-200);
        transition: background-color 0.2s;
      }
    }
  }
}
@@ -2,63 +2,35 @@

import { Table } from 'antd';
import { ColumnsType } from 'antd/lib/table';
import cx from 'classnames';
import { dragColumnParams } from 'hooks/useDragColumns/configs';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { debounce, set } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { set } from 'lodash-es';
import {
  SyntheticEvent,
  useCallback,
  useEffect,
  useMemo,
  useRef,
  useState,
} from 'react';
import ReactDragListView from 'react-drag-listview';
import { ResizeCallbackData } from 'react-resizable';
import { Widgets } from 'types/api/dashboard/getAll';

import ResizableHeader from './ResizableHeader';
import { DragSpanStyle } from './styles';
import { ResizeTableProps } from './types';

// eslint-disable-next-line sonarjs/cognitive-complexity
function ResizeTable({
  columns,
  onDragColumn,
  pagination,
  widgetId,
  shouldPersistColumnWidths = false,
  ...restProps
}: ResizeTableProps): JSX.Element {
  const [columnsData, setColumns] = useState<ColumnsType>([]);
  const { setColumnWidths, selectedDashboard } = useDashboard();

  const columnWidths = shouldPersistColumnWidths
    ? (selectedDashboard?.data?.widgets?.find(
        (widget) => widget.id === widgetId,
      ) as Widgets)?.columnWidths
    : undefined;

  const updateAllColumnWidths = useRef(
    debounce((widthsConfig: Record<string, number>) => {
      if (!widgetId || !shouldPersistColumnWidths) return;
      setColumnWidths?.((prev) => ({
        ...prev,
        [widgetId]: widthsConfig,
      }));
    }, 1000),
  ).current;

  const handleResize = useCallback(
    (index: number) => (
      e: SyntheticEvent<Element>,
      _e: SyntheticEvent<Element>,
      { size }: ResizeCallbackData,
    ): void => {
      e.preventDefault();
      e.stopPropagation();

      const newColumns = [...columnsData];
      newColumns[index] = {
        ...newColumns[index],
@@ -93,7 +65,6 @@ function ResizeTable({
    ...restProps,
    components: { header: { cell: ResizableHeader } },
    columns: mergedColumns,
    className: cx('resize-main-table', restProps.className),
  };

  set(
@@ -107,39 +78,9 @@ function ResizeTable({

  useEffect(() => {
    if (columns) {
      // Apply stored column widths from widget configuration
      const columnsWithStoredWidths = columns.map((col) => {
        const dataIndex = (col as RowData).dataIndex as string;
        if (dataIndex && columnWidths && columnWidths[dataIndex]) {
          return {
            ...col,
            width: columnWidths[dataIndex], // Apply stored width
          };
        }
        return col;
      });

      setColumns(columnsWithStoredWidths);
      setColumns(columns);
    }
  }, [columns, columnWidths]);

  useEffect(() => {
    if (!shouldPersistColumnWidths) return;
    // Collect all column widths in a single object
    const newColumnWidths: Record<string, number> = {};

    mergedColumns.forEach((col) => {
      if (col.width && (col as RowData).dataIndex) {
        const dataIndex = (col as RowData).dataIndex as string;
        newColumnWidths[dataIndex] = col.width as number;
      }
    });

    // Only update if there are actual widths to set
    if (Object.keys(newColumnWidths).length > 0) {
      updateAllColumnWidths(newColumnWidths);
    }
  }, [mergedColumns, updateAllColumnWidths, shouldPersistColumnWidths]);
  }, [columns]);

  return onDragColumn ? (
    <ReactDragListView.DragColumn {...dragColumnParams} onDragEnd={onDragColumn}>

@@ -8,8 +8,6 @@ export const SpanStyle = styled.span`
  width: 0.625rem;
  height: 100%;
  cursor: col-resize;
  margin-left: 4px;
  margin-right: 4px;
`;

export const DragSpanStyle = styled.span`

@@ -9,8 +9,6 @@ import { TableDataSource } from './contants';

export interface ResizeTableProps extends TableProps<any> {
  onDragColumn?: (fromIndex: number, toIndex: number) => void;
  widgetId?: string;
  shouldPersistColumnWidths?: boolean;
}
export interface DynamicColumnTableProps extends TableProps<any> {
  tablesource: typeof TableDataSource[keyof typeof TableDataSource];

@@ -1,12 +1,28 @@
// keep this consistent with backend constants.go
export enum FeatureKeys {
  SSO = 'SSO',
  ENTERPRISE_PLAN = 'ENTERPRISE_PLAN',
  BASIC_PLAN = 'BASIC_PLAN',
  ALERT_CHANNEL_SLACK = 'ALERT_CHANNEL_SLACK',
  ALERT_CHANNEL_WEBHOOK = 'ALERT_CHANNEL_WEBHOOK',
  ALERT_CHANNEL_PAGERDUTY = 'ALERT_CHANNEL_PAGERDUTY',
  ALERT_CHANNEL_OPSGENIE = 'ALERT_CHANNEL_OPSGENIE',
  ALERT_CHANNEL_MSTEAMS = 'ALERT_CHANNEL_MSTEAMS',
  DurationSort = 'DurationSort',
  TimestampSort = 'TimestampSort',
  SMART_TRACE_DETAIL = 'SMART_TRACE_DETAIL',
  CUSTOM_METRICS_FUNCTION = 'CUSTOM_METRICS_FUNCTION',
  QUERY_BUILDER_PANELS = 'QUERY_BUILDER_PANELS',
  QUERY_BUILDER_ALERTS = 'QUERY_BUILDER_ALERTS',
  DISABLE_UPSELL = 'DISABLE_UPSELL',
  USE_SPAN_METRICS = 'USE_SPAN_METRICS',
  OSS = 'OSS',
  ONBOARDING = 'ONBOARDING',
  CHAT_SUPPORT = 'CHAT_SUPPORT',
  GATEWAY = 'GATEWAY',
  PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
  QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
  ANOMALY_DETECTION = 'ANOMALY_DETECTION',
  AWS_INTEGRATION = 'AWS_INTEGRATION',
  ONBOARDING_V3 = 'ONBOARDING_V3',
  TRACE_FUNNELS = 'TRACE_FUNNELS',
}

@@ -51,21 +51,6 @@ export const REACT_QUERY_KEY = {
  GET_METRICS_LIST_FILTER_VALUES: 'GET_METRICS_LIST_FILTER_VALUES',
  GET_METRIC_DETAILS: 'GET_METRIC_DETAILS',
  GET_RELATED_METRICS: 'GET_RELATED_METRICS',

  // API Monitoring Query Keys
  GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',
  GET_ENDPOINTS_LIST_BY_DOMAIN: 'GET_ENDPOINTS_LIST_BY_DOMAIN',
  GET_NESTED_ENDPOINTS_LIST: 'GET_NESTED_ENDPOINTS_LIST',
  GET_ENDPOINT_METRICS_DATA: 'GET_ENDPOINT_METRICS_DATA',
  GET_ENDPOINT_STATUS_CODE_DATA: 'GET_ENDPOINT_STATUS_CODE_DATA',
  GET_ENDPOINT_RATE_OVER_TIME_DATA: 'GET_ENDPOINT_RATE_OVER_TIME_DATA',
  GET_ENDPOINT_LATENCY_OVER_TIME_DATA: 'GET_ENDPOINT_LATENCY_OVER_TIME_DATA',
  GET_ENDPOINT_DROPDOWN_DATA: 'GET_ENDPOINT_DROPDOWN_DATA',
  GET_ENDPOINT_DEPENDENT_SERVICES_DATA: 'GET_ENDPOINT_DEPENDENT_SERVICES_DATA',
  GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA:
    'GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA',
  GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA:
    'GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA',
  GET_FUNNELS_LIST: 'GET_FUNNELS_LIST',
  GET_FUNNEL_DETAILS: 'GET_FUNNEL_DETAILS',
} as const;

@@ -71,7 +71,6 @@ const ROUTES = {
  METRICS_EXPLORER: '/metrics-explorer/summary',
  METRICS_EXPLORER_EXPLORER: '/metrics-explorer/explorer',
  METRICS_EXPLORER_VIEWS: '/metrics-explorer/views',
  API_MONITORING: '/api-monitoring/explorer',
  METRICS_EXPLORER_BASE: '/metrics-explorer',
  WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',
  HOME_PAGE: '/',

@@ -1,4 +1,3 @@
import ROUTES from 'constants/routes';
import AlertChannels from 'container/AllAlertChannels';
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -21,13 +20,6 @@ jest.mock('hooks/useNotifications', () => ({
  })),
}));

jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useLocation: (): { pathname: string } => ({
    pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
  }),
}));

describe('Alert Channels Settings List page', () => {
  beforeEach(() => {
    render(<AlertChannels />);

@@ -1,4 +1,3 @@
import ROUTES from 'constants/routes';
import AlertChannels from 'container/AllAlertChannels';
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -26,13 +25,6 @@ jest.mock('hooks/useComponentPermission', () => ({
  default: jest.fn().mockImplementation(() => [false]),
}));

jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useLocation: (): { pathname: string } => ({
    pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
  }),
}));

describe('Alert Channels Settings List page (Normal User)', () => {
  beforeEach(() => {
    render(<AlertChannels />);

@@ -31,10 +31,6 @@ jest.mock('hooks/useNotifications', () => ({
  })),
}));

jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
  MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));

describe('Create Alert Channel', () => {
  afterEach(() => {
    jest.clearAllMocks();

@@ -18,10 +18,6 @@ import { render, screen } from 'tests/test-utils';

import { testLabelInputAndHelpValue } from './testUtils';

jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
  MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));

describe('Create Alert Channel (Normal User)', () => {
  afterEach(() => {
    jest.clearAllMocks();

@@ -20,10 +20,6 @@ jest.mock('hooks/useNotifications', () => ({
  })),
}));

jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
  MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));

describe('Should check if the edit alert channel is properly displayed ', () => {
  beforeEach(() => {
    render(<EditAlertChannels initialValue={editAlertChannelInitialValue} />);

@@ -1,241 +0,0 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Select, Spin, Table, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
  EndPointsTableRowData,
  formatEndPointsDataForTable,
  getEndPointsColumnsConfig,
  getEndPointsQueryPayload,
} from 'container/ApiMonitoring/utils';
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';

import ErrorState from './components/ErrorState';
import ExpandedRow from './components/ExpandedRow';
import { VIEW_TYPES, VIEWS } from './constants';

function AllEndPoints({
  domainName,
  setSelectedEndPointName,
  setSelectedView,
  groupBy,
  setGroupBy,
}: {
  domainName: string;
  setSelectedEndPointName: (name: string) => void;
  setSelectedView: (tab: VIEWS) => void;
  groupBy: IBuilderQuery['groupBy'];
  setGroupBy: (groupBy: IBuilderQuery['groupBy']) => void;
}): JSX.Element {
  const {
    data: groupByFiltersData,
    isLoading: isLoadingGroupByFilters,
  } = useGetAggregateKeys({
    dataSource: DataSource.TRACES,
    aggregateAttribute: '',
    aggregateOperator: 'noop',
    searchText: '',
    tagType: '',
  });

  const [groupByOptions, setGroupByOptions] = useState<
    { value: string; label: string }[]
  >([]);

  const [expandedRowKeys, setExpandedRowKeys] = useState<React.Key[]>([]);

  const handleGroupByChange = useCallback(
    (value: IBuilderQuery['groupBy']) => {
      const groupBy = [];

      for (let index = 0; index < value.length; index++) {
        const element = (value[index] as unknown) as string;

        const key = groupByFiltersData?.payload?.attributeKeys?.find(
          (key) => key.key === element,
        );

        if (key) {
          groupBy.push(key);
        }
      }
      setGroupBy(groupBy);
    },
    [groupByFiltersData, setGroupBy],
  );

  useEffect(() => {
    if (groupByFiltersData?.payload) {
      setGroupByOptions(
        groupByFiltersData?.payload?.attributeKeys?.map((filter) => ({
          value: filter.key,
          label: filter.key,
        })) || [],
      );
    }
  }, [groupByFiltersData]);

  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );

  const queryPayloads = useMemo(
    () =>
      getEndPointsQueryPayload(
        groupBy,
        domainName,
        Math.floor(minTime / 1e9),
        Math.floor(maxTime / 1e9),
      ),
    [groupBy, domainName, minTime, maxTime],
  );

  // Only one query is issued here
  const endPointsDataQueries = useQueries(
    queryPayloads.map((payload) => ({
      queryKey: [
        REACT_QUERY_KEY.GET_ENDPOINTS_LIST_BY_DOMAIN,
        payload,
        ENTITY_VERSION_V4,
        groupBy,
      ],
      queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
        GetMetricQueryRange(payload, ENTITY_VERSION_V4),
      enabled: !!payload,
      staleTime: 60 * 1000, // 1 minute stale time: optimize this part
    })),
  );

  const endPointsDataQuery = endPointsDataQueries[0];
  const {
    data: allEndPointsData,
    isLoading,
    isRefetching,
    isError,
    refetch,
  } = endPointsDataQuery;

  const endPointsColumnsConfig = useMemo(
    () => getEndPointsColumnsConfig(groupBy.length > 0, expandedRowKeys),
    [groupBy.length, expandedRowKeys],
  );

  const expandedRowRender = (record: EndPointsTableRowData): JSX.Element => (
    <ExpandedRow
      domainName={domainName}
      selectedRowData={record}
      setSelectedEndPointName={setSelectedEndPointName}
      setSelectedView={setSelectedView}
    />
  );

  const handleGroupByRowClick = (record: EndPointsTableRowData): void => {
    if (expandedRowKeys.includes(record.key)) {
      setExpandedRowKeys(expandedRowKeys.filter((key) => key !== record.key));
    } else {
      setExpandedRowKeys((expandedRowKeys) => [...expandedRowKeys, record.key]);
    }
  };

  const handleRowClick = (record: EndPointsTableRowData): void => {
    if (groupBy.length === 0) {
      setSelectedEndPointName(record.endpointName); // this will open up the endpoint details tab
      setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
      logEvent('API Monitoring: Endpoint name row clicked', {});
    } else {
      handleGroupByRowClick(record); // this will prepare the nested query payload
    }
  };

  const formattedEndPointsData = useMemo(
    () =>
      formatEndPointsDataForTable(
        allEndPointsData?.payload?.data?.result[0]?.table?.rows,
        groupBy,
      ),
    [groupBy, allEndPointsData],
  );

  if (isError) {
    return (
      <div className="all-endpoints-error-state-wrapper">
        <ErrorState refetch={refetch} />
      </div>
    );
  }

  return (
    <div className="all-endpoints-container">
      <div className="group-by-container">
        <div className="group-by-label"> Group by </div>
        <Select
          className="group-by-select"
          loading={isLoadingGroupByFilters}
          mode="multiple"
          value={groupBy}
          allowClear
          maxTagCount="responsive"
          placeholder="Search for attribute"
          options={groupByOptions}
          onChange={handleGroupByChange}
        />{' '}
      </div>
      <div className="endpoints-table-container">
        <div className="endpoints-table-header">Endpoint overview</div>
        <Table
          columns={endPointsColumnsConfig}
          loading={{
            spinning: isLoading || isRefetching,
            indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
          }}
          dataSource={isLoading || isRefetching ? [] : formattedEndPointsData}
          locale={{
            emptyText:
              isLoading || isRefetching ? null : (
                <div className="no-filtered-endpoints-message-container">
                  <div className="no-filtered-endpoints-message-content">
                    <img
                      src="/Icons/emptyState.svg"
                      alt="thinking-emoji"
                      className="empty-state-svg"
                    />

                    <Typography.Text className="no-filtered-endpoints-message">
                      This query had no results. Edit your query and try again!
                    </Typography.Text>
                  </div>
                </div>
              ),
          }}
          scroll={{ x: true }}
          tableLayout="fixed"
          onRow={(record): { onClick: () => void; className: string } => ({
            onClick: (): void => handleRowClick(record),
            className: 'clickable-row',
          })}
          expandable={{
            expandedRowRender: groupBy.length > 0 ? expandedRowRender : undefined,
            expandedRowKeys,
            expandIconColumnIndex: -1,
          }}
          rowClassName={(_, index): string =>
            index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
          }
        />
      </div>
    </div>
  );
}

export default AllEndPoints;

File diff suppressed because it is too large
@@ -1,146 +0,0 @@
import './DomainDetails.styles.scss';

import { Color, Spacing } from '@signozhq/design-tokens';
import { Button, Divider, Drawer, Radio, Typography } from 'antd';
import { RadioChangeEvent } from 'antd/lib';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { ArrowDown, ArrowUp, X } from 'lucide-react';
import { useState } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

import AllEndPoints from './AllEndPoints';
import DomainMetrics from './components/DomainMetrics';
import { VIEW_TYPES, VIEWS } from './constants';
import EndPointDetailsWrapper from './EndPointDetailsWrapper';

function DomainDetails({
  domainData,
  handleClose,
  selectedDomainIndex,
  setSelectedDomainIndex,
  domainListLength,
  domainListFilters,
}: {
  domainData: any;
  handleClose: () => void;
  selectedDomainIndex: number;
  setSelectedDomainIndex: (index: number) => void;
  domainListLength: number;
  domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
  const [selectedView, setSelectedView] = useState<VIEWS>(VIEWS.ALL_ENDPOINTS);
  const [selectedEndPointName, setSelectedEndPointName] = useState<string>('');
  const [endPointsGroupBy, setEndPointsGroupBy] = useState<
    IBuilderQuery['groupBy']
  >([]);
  const isDarkMode = useIsDarkMode();

  const handleTabChange = (e: RadioChangeEvent): void => {
    setSelectedView(e.target.value);
  };

  return (
    <Drawer
      width="60%"
      title={
        <div className="domain-details-drawer-header">
          <div className="domain-details-drawer-header-title">
            <Divider type="vertical" />
            <Typography.Text className="title">
              {domainData.domainName}
            </Typography.Text>
          </div>
          <Button.Group className="domain-details-drawer-header-ctas">
            <Button
              className="domain-navigate-cta"
              onClick={(): void => {
                setSelectedDomainIndex(selectedDomainIndex - 1);
                setSelectedEndPointName('');
                setEndPointsGroupBy([]);
                setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
              }}
              icon={<ArrowUp size={16} />}
              disabled={selectedDomainIndex === 0}
              title="Previous domain"
            />
            <Button
              className="domain-navigate-cta"
              onClick={(): void => {
                setSelectedDomainIndex(selectedDomainIndex + 1);
                setSelectedEndPointName('');
                setEndPointsGroupBy([]);
                setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
              }}
              icon={<ArrowDown size={16} />}
              disabled={selectedDomainIndex === domainListLength - 1}
              title="Next domain"
            />
          </Button.Group>
        </div>
      }
      placement="right"
      onClose={handleClose}
      open={!!domainData}
      style={{
        overscrollBehavior: 'contain',
        background: isDarkMode ? Color.BG_INK_400 : Color.BG_VANILLA_100,
      }}
      className="domain-detail-drawer"
      destroyOnClose
      closeIcon={<X size={16} style={{ marginTop: Spacing.MARGIN_1 }} />}
    >
      {domainData && (
        <>
          <DomainMetrics domainData={domainData} />
          <div className="views-tabs-container">
            <Radio.Group
              className="views-tabs"
              onChange={handleTabChange}
              value={selectedView}
            >
              <Radio.Button
                className={
                  // eslint-disable-next-line sonarjs/no-duplicate-string
                  selectedView === VIEW_TYPES.ALL_ENDPOINTS ? 'selected_view tab' : 'tab'
                }
                value={VIEW_TYPES.ALL_ENDPOINTS}
              >
                <div className="view-title">All Endpoints</div>
              </Radio.Button>
              <Radio.Button
                className={
                  selectedView === VIEW_TYPES.ENDPOINT_DETAILS
                    ? 'tab selected_view'
                    : 'tab'
                }
                value={VIEW_TYPES.ENDPOINT_DETAILS}
              >
                <div className="view-title">Endpoint Details</div>
              </Radio.Button>
            </Radio.Group>
          </div>
          {selectedView === VIEW_TYPES.ALL_ENDPOINTS && (
            <AllEndPoints
              domainName={domainData.domainName}
              setSelectedEndPointName={setSelectedEndPointName}
              setSelectedView={setSelectedView}
              groupBy={endPointsGroupBy}
              setGroupBy={setEndPointsGroupBy}
            />
          )}

          {selectedView === VIEW_TYPES.ENDPOINT_DETAILS && (
            <EndPointDetailsWrapper
              domainName={domainData.domainName}
              endPointName={selectedEndPointName}
              setSelectedEndPointName={setSelectedEndPointName}
              domainListFilters={domainListFilters}
            />
          )}
        </>
      )}
    </Drawer>
  );
}

export default DomainDetails;
@@ -1,199 +0,0 @@
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
extractPortAndEndpoint,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getLatencyOverTimeWidgetData,
|
||||
getRateOverTimeWidgetData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import DependentServices from './components/DependentServices';
|
||||
import EndPointMetrics from './components/EndPointMetrics';
|
||||
import EndPointsDropDown from './components/EndPointsDropDown';
|
||||
import MetricOverTimeGraph from './components/MetricOverTimeGraph';
|
||||
import StatusCodeBarCharts from './components/StatusCodeBarCharts';
|
||||
import StatusCodeTable from './components/StatusCodeTable';
|
||||
|
||||
function EndPointDetails({
|
||||
domainName,
|
||||
endPointName,
|
||||
setSelectedEndPointName,
|
||||
domainListFilters,
|
||||
}: {
|
||||
domainName: string;
|
	endPointName: string;
	setSelectedEndPointName: (value: string) => void;
	domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const currentQuery = initialQueriesMap[DataSource.TRACES];

	const [filters, setFilters] = useState<IBuilderQuery['filters']>({
		op: 'AND',
		items: [],
	});

	// Update the query manually to include the filters: using the
	// query-builder hook here would update the global domain query and
	// cause the main domain list to refetch with the endpoint filters.
	const updatedCurrentQuery = useMemo(
		() => ({
			...currentQuery,
			builder: {
				...currentQuery.builder,
				queryData: [
					{
						...currentQuery.builder.queryData[0],
						dataSource: DataSource.TRACES,
						filters,
					},
				],
			},
		}),
		[filters, currentQuery],
	);

	const query = updatedCurrentQuery?.builder?.queryData[0] || null;

	const isServicesFilterApplied = useMemo(
		() => filters.items.some((item) => item.key?.key === 'service.name'),
		[filters],
	);

	const endPointDetailsQueryPayload = useMemo(
		() =>
			getEndPointDetailsQueryPayload(
				domainName,
				endPointName,
				Math.floor(minTime / 1e9),
				Math.floor(maxTime / 1e9),
				filters,
			),
		[domainName, endPointName, filters, minTime, maxTime],
	);

	const endPointDetailsDataQueries = useQueries(
		endPointDetailsQueryPayload.map((payload, index) => ({
			queryKey: [
				END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
				payload,
				filters.items,
				ENTITY_VERSION_V4,
			],
			queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
				GetMetricQueryRange(payload, ENTITY_VERSION_V4),
			enabled: !!payload,
		})),
	);

	const [
		endPointMetricsDataQuery,
		endPointStatusCodeDataQuery,
		endPointDropDownDataQuery,
		endPointDependentServicesDataQuery,
		endPointStatusCodeBarChartsDataQuery,
		endPointStatusCodeLatencyBarChartsDataQuery,
	] = useMemo(
		() => [
			endPointDetailsDataQueries[0],
			endPointDetailsDataQueries[1],
			endPointDetailsDataQueries[2],
			endPointDetailsDataQueries[3],
			endPointDetailsDataQueries[4],
			endPointDetailsDataQueries[5],
		],
		[endPointDetailsDataQueries],
	);

	const { endpoint, port } = useMemo(
		() => extractPortAndEndpoint(endPointName),
		[endPointName],
	);

	const [rateOverTimeWidget, latencyOverTimeWidget] = useMemo(
		() => [
			getRateOverTimeWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
			getLatencyOverTimeWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
		],
		[domainName, endPointName, filters, domainListFilters],
	);

	return (
		<div className="endpoint-details-container">
			<div className="endpoint-details-filters-container">
				<div className="endpoint-details-filters-container-dropdown">
					<EndPointsDropDown
						selectedEndPointName={endPointName}
						setSelectedEndPointName={setSelectedEndPointName}
						endPointDropDownDataQuery={endPointDropDownDataQuery}
						parentContainerDiv=".endpoint-details-filters-container"
						dropdownStyle={{ width: 'calc(100% - 36px)' }}
					/>
				</div>
				<div className="endpoint-details-filters-container-search">
					<QueryBuilderSearchV2
						query={query}
						onChange={(searchFilters): void => {
							setFilters(searchFilters);
						}}
						placeholder="Search for filters..."
					/>
				</div>
			</div>
			<div className="endpoint-meta-data">
				<div className="endpoint-meta-data-pill">
					<div className="endpoint-meta-data-label">Endpoint</div>
					<div className="endpoint-meta-data-value">{endpoint || '-'}</div>
				</div>
				<div className="endpoint-meta-data-pill">
					<div className="endpoint-meta-data-label">Port</div>
					<div className="endpoint-meta-data-value">{port || '-'}</div>
				</div>
			</div>
			<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
			{!isServicesFilterApplied && (
				<DependentServices
					dependentServicesQuery={endPointDependentServicesDataQuery}
				/>
			)}
			<StatusCodeBarCharts
				endPointStatusCodeBarChartsDataQuery={endPointStatusCodeBarChartsDataQuery}
				endPointStatusCodeLatencyBarChartsDataQuery={
					endPointStatusCodeLatencyBarChartsDataQuery
				}
				domainName={domainName}
				endPointName={endPointName}
				domainListFilters={domainListFilters}
				filters={filters}
			/>
			<StatusCodeTable endPointStatusCodeDataQuery={endPointStatusCodeDataQuery} />
			<MetricOverTimeGraph widget={rateOverTimeWidget} />
			<MetricOverTimeGraph widget={latencyOverTimeWidget} />
		</div>
	);
}

export default EndPointDetails;
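The component above sidesteps the shared query-builder state on purpose: it clones the default traces query with useMemo and keeps the endpoint filters purely local, so the global domain query never changes and the domain list never refetches. A minimal sketch of that pattern in isolation; the hook name and the LocalFilters shape are illustrative stand-ins, not part of the SigNoz codebase:

import { useMemo, useState } from 'react';

type LocalFilters = { op: 'AND' | 'OR'; items: unknown[] };

// Derive a filtered copy of a base query without mutating shared state.
function useLocalFilteredQuery<T extends object>(
	baseQuery: T,
): { localQuery: T & { filters: LocalFilters }; setFilters: (f: LocalFilters) => void } {
	const [filters, setFilters] = useState<LocalFilters>({ op: 'AND', items: [] });
	// Recomputed only when the inputs change; baseQuery itself is untouched,
	// so consumers of the shared query state see no update and do not refetch.
	const localQuery = useMemo(() => ({ ...baseQuery, filters }), [baseQuery, filters]);
	return { localQuery, setFilters };
}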
@@ -1,80 +0,0 @@
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { getEndPointZeroStateQueryPayload } from 'container/ApiMonitoring/utils';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useMemo } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';

import EndPointDetailsZeroState from './components/EndPointDetailsZeroState';
import EndPointDetails from './EndPointDetails';

function EndPointDetailsWrapper({
	domainName,
	endPointName,
	setSelectedEndPointName,
	domainListFilters,
}: {
	domainName: string;
	endPointName: string;
	setSelectedEndPointName: (value: string) => void;
	domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const endPointZeroStateQueryPayload = useMemo(
		() =>
			getEndPointZeroStateQueryPayload(
				domainName,
				Math.floor(minTime / 1e9),
				Math.floor(maxTime / 1e9),
			),
		[domainName, minTime, maxTime],
	);

	const endPointZeroStateDataQueries = useQueries(
		endPointZeroStateQueryPayload.map((payload) => ({
			queryKey: [
				// Only one query is fired here, so the dropdown key is reused
				REACT_QUERY_KEY.GET_ENDPOINT_DROPDOWN_DATA,
				payload,
				ENTITY_VERSION_V4,
			],
			queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
				GetMetricQueryRange(payload, ENTITY_VERSION_V4),
			enabled: !!payload,
		})),
	);

	const [endPointZeroStateDataQuery] = useMemo(
		() => [endPointZeroStateDataQueries[0]],
		[endPointZeroStateDataQueries],
	);

	if (endPointName === '') {
		return (
			<EndPointDetailsZeroState
				setSelectedEndPointName={setSelectedEndPointName}
				endPointDropDownDataQuery={endPointZeroStateDataQuery}
			/>
		);
	}

	return (
		<EndPointDetails
			domainName={domainName}
			endPointName={endPointName}
			setSelectedEndPointName={setSelectedEndPointName}
			domainListFilters={domainListFilters}
		/>
	);
}

export default EndPointDetailsWrapper;
@@ -1,108 +0,0 @@
import { Typography } from 'antd';
import Skeleton from 'antd/lib/skeleton';
import { getFormattedDependentServicesData } from 'container/ApiMonitoring/utils';
import { UnfoldVertical } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import ErrorState from './ErrorState';

interface DependentServicesProps {
	dependentServicesQuery: UseQueryResult<SuccessResponse<any>, unknown>;
}

function DependentServices({
	dependentServicesQuery,
}: DependentServicesProps): JSX.Element {
	const {
		data,
		refetch,
		isError,
		isLoading,
		isRefetching,
	} = dependentServicesQuery;

	const [currentRenderCount, setCurrentRenderCount] = useState(0);

	const dependentServicesData = useMemo(
		() =>
			getFormattedDependentServicesData(
				data?.payload?.data?.result?.[0]?.table?.rows,
			),
		[data],
	);

	// Reset the visible count when fresh data arrives; done in an effect
	// so the render stays side-effect free (calling setState inside the
	// useMemo above would trigger updates during render).
	useEffect(() => {
		setCurrentRenderCount(Math.min(dependentServicesData.length, 5));
	}, [dependentServicesData]);

	const renderItems = useMemo(
		() => dependentServicesData.slice(0, currentRenderCount),
		[currentRenderCount, dependentServicesData],
	);

	if (isLoading || isRefetching) {
		return <Skeleton />;
	}

	if (isError) {
		return <ErrorState refetch={refetch} />;
	}

	return (
		<div className="top-services-content">
			<div className="top-services-title">
				<span className="title-wrapper">Dependent Services</span>
			</div>
			<div className="dependent-services-container">
				{renderItems.length === 0 ? (
					<div className="no-dependent-services-message-container">
						<div className="no-dependent-services-message-content">
							<img
								src="/Icons/emptyState.svg"
								alt="thinking-emoji"
								className="empty-state-svg"
							/>

							<Typography.Text className="no-dependent-services-message">
								This query had no results. Edit your query and try again!
							</Typography.Text>
						</div>
					</div>
				) : (
					renderItems.map((item) => (
						<div className="top-services-item" key={item.key}>
							<div className="top-services-item-progress">
								<div className="top-services-item-key">{item.serviceName}</div>
								<div className="top-services-item-count">{item.count}</div>
								<div
									className="top-services-item-progress-bar"
									style={{ width: `${item.percentage}%` }}
								/>
							</div>
							<div className="top-services-item-percentage">
								{item.percentage.toFixed(2)}%
							</div>
						</div>
					))
				)}

				{currentRenderCount < dependentServicesData.length && (
					<div
						className="top-services-load-more"
						onClick={(): void => setCurrentRenderCount(dependentServicesData.length)}
						onKeyDown={(e): void => {
							if (e.key === 'Enter') {
								setCurrentRenderCount(dependentServicesData.length);
							}
						}}
						role="button"
						tabIndex={0}
					>
						<UnfoldVertical size={14} />
						Show more...
					</div>
				)}
			</div>
		</div>
	);
}

export default DependentServices;
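The "Show more" interaction above reduces to one piece of state (how many rows are visible) plus a slice over the derived list. A generic sketch of the same idea, with illustrative names:

import { useMemo, useState } from 'react';

function useShowMore<T>(
	items: T[],
	initialCount = 5,
): { visible: T[]; hasMore: boolean; showAll: () => void } {
	const [visibleCount, setVisibleCount] = useState(initialCount);
	// Only the first visibleCount items are rendered until the user expands.
	const visible = useMemo(() => items.slice(0, visibleCount), [items, visibleCount]);
	return {
		visible,
		hasMore: visibleCount < items.length,
		showAll: (): void => setVisibleCount(items.length),
	};
}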
@@ -1,82 +0,0 @@
import { Color } from '@signozhq/design-tokens';
import { Progress, Tooltip, Typography } from 'antd';
import { getLastUsedRelativeTime } from 'container/ApiMonitoring/utils';

function DomainMetrics({ domainData }: { domainData: any }): JSX.Element {
	return (
		<div className="domain-detail-drawer__endpoint">
			<div className="domain-details-grid">
				<div className="labels-row">
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						EXTERNAL API
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						AVERAGE LATENCY
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						ERROR RATE
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						LAST USED
					</Typography.Text>
				</div>

				<div className="values-row">
					<Typography.Text className="domain-details-metadata-value">
						<Tooltip title={domainData.endpointCount}>
							<span className="round-metric-tag">{domainData.endpointCount}</span>
						</Tooltip>
					</Typography.Text>
					{/* TODO: update the tooltip as well */}
					<Typography.Text className="domain-details-metadata-value">
						<Tooltip title={domainData.latency}>
							<span className="round-metric-tag">
								{(domainData.latency / 1000).toFixed(3)}s
							</span>
						</Tooltip>
					</Typography.Text>
					{/* TODO: update the tooltip as well */}
					<Typography.Text className="domain-details-metadata-value error-rate">
						<Tooltip title={domainData.errorRate}>
							<Progress
								status="active"
								percent={Number((domainData.errorRate * 100).toFixed(1))}
								strokeLinecap="butt"
								size="small"
								strokeColor={((): string => {
									const errorRatePercent = Number(
										(domainData.errorRate * 100).toFixed(1),
									);
									if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
									if (errorRatePercent >= 60) return Color.BG_AMBER_500;
									return Color.BG_FOREST_500;
								})()}
								className="progress-bar"
							/>
						</Tooltip>
					</Typography.Text>
					{/* TODO: update the tooltip as well */}
					<Typography.Text className="domain-details-metadata-value">
						<Tooltip title={domainData.lastUsed}>
							{getLastUsedRelativeTime(domainData.lastUsed)}
						</Tooltip>
					</Typography.Text>
				</div>
			</div>
		</div>
	);
}

export default DomainMetrics;
@@ -1,40 +0,0 @@
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import EndPointsDropDown from './EndPointsDropDown';

function EndPointDetailsZeroState({
	setSelectedEndPointName,
	endPointDropDownDataQuery,
}: {
	setSelectedEndPointName: (endPointName: string) => void;
	endPointDropDownDataQuery: UseQueryResult<SuccessResponse<any>>;
}): JSX.Element {
	return (
		<div className="end-point-details-zero-state-wrapper">
			<div className="end-point-details-zero-state-content">
				<img
					src="/Icons/no-data.svg"
					alt="no-data"
					width={32}
					height={32}
					className="end-point-details-zero-state-icon"
				/>
				<div className="end-point-details-zero-state-content-wrapper">
					<div className="end-point-details-zero-state-text-content">
						<div className="title">No endpoint selected yet</div>
						<div className="description">Select an endpoint to see the details</div>
					</div>
					<EndPointsDropDown
						setSelectedEndPointName={setSelectedEndPointName}
						endPointDropDownDataQuery={endPointDropDownDataQuery}
						parentContainerDiv=".end-point-details-zero-state-wrapper"
						dropdownStyle={{ width: '60%' }}
					/>
				</div>
			</div>
		</div>
	);
}

export default EndPointDetailsZeroState;
@@ -1,121 +0,0 @@
import { Color } from '@signozhq/design-tokens';
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import ErrorState from './ErrorState';

function EndPointMetrics({
	endPointMetricsDataQuery,
}: {
	endPointMetricsDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
}): JSX.Element {
	const {
		isLoading,
		isRefetching,
		isError,
		data,
		refetch,
	} = endPointMetricsDataQuery;

	const metricsData = useMemo(() => {
		if (isLoading || isRefetching || isError) {
			return null;
		}

		return getFormattedEndPointMetricsData(
			data?.payload?.data?.result?.[0]?.table?.rows,
		);
	}, [data?.payload?.data?.result, isLoading, isRefetching, isError]);

	if (isError) {
		return <ErrorState refetch={refetch} />;
	}

	return (
		<div className="domain-detail-drawer__endpoint">
			<div className="domain-details-grid">
				<div className="labels-row">
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						RATE
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						AVERAGE LATENCY
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						ERROR RATE
					</Typography.Text>
					<Typography.Text
						type="secondary"
						className="domain-details-metadata-label"
					>
						LAST USED
					</Typography.Text>
				</div>

				<div className="values-row">
					<Typography.Text className="domain-details-metadata-value">
						{isLoading || isRefetching ? (
							<Skeleton.Button active size="small" />
						) : (
							<Tooltip title={metricsData?.rate}>
								<span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
							</Tooltip>
						)}
					</Typography.Text>
					<Typography.Text className="domain-details-metadata-value">
						{isLoading || isRefetching ? (
							<Skeleton.Button active size="small" />
						) : (
							<Tooltip title={metricsData?.latency}>
								<span className="round-metric-tag">{metricsData?.latency}ms</span>
							</Tooltip>
						)}
					</Typography.Text>
					<Typography.Text className="domain-details-metadata-value error-rate">
						{isLoading || isRefetching ? (
							<Skeleton.Button active size="small" />
						) : (
							<Tooltip title={metricsData?.errorRate}>
								{/* The coalesce must be grouped before multiplying:
								    `*` binds tighter than `??` */}
								<Progress
									percent={Number(((metricsData?.errorRate ?? 0) * 100).toFixed(1))}
									strokeLinecap="butt"
									size="small"
									strokeColor={((): string => {
										const errorRatePercent = Number(
											((metricsData?.errorRate ?? 0) * 100).toFixed(1),
										);
										if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
										if (errorRatePercent >= 60) return Color.BG_AMBER_500;
										return Color.BG_FOREST_500;
									})()}
									className="progress-bar"
								/>
							</Tooltip>
						)}
					</Typography.Text>
					<Typography.Text className="domain-details-metadata-value">
						{isLoading || isRefetching ? (
							<Skeleton.Button active size="small" />
						) : (
							<Tooltip title={metricsData?.lastUsed}>{metricsData?.lastUsed}</Tooltip>
						)}
					</Typography.Text>
				</div>
			</div>
		</div>
	);
}

export default EndPointMetrics;
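One expression in the file above deserves a note: the error-rate percentage must be written as `(metricsData?.errorRate ?? 0) * 100`. Because `*` binds tighter than `??`, the ungrouped form `errorRate ?? 0 * 100` parses as `errorRate ?? (0 * 100)`, so the multiplication by 100 is silently skipped whenever errorRate is defined. A standalone illustration:

// `*` binds tighter than `??`, so `errorRate ?? 0 * 100` is really
// `errorRate ?? (0 * 100)` and only kicks in when errorRate is nullish.
function toPercent(errorRate?: number): number {
	// Wrong: Number((errorRate ?? 0 * 100).toFixed(1)) yields 0.4 for 0.42
	return Number(((errorRate ?? 0) * 100).toFixed(1)); // 42 for 0.42
}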
@@ -1,61 +0,0 @@
import { Select } from 'antd';
import { getFormattedEndPointDropDownData } from 'container/ApiMonitoring/utils';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

interface EndPointsDropDownProps {
	selectedEndPointName?: string;
	setSelectedEndPointName: (value: string) => void;
	endPointDropDownDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
	parentContainerDiv?: string;
	dropdownStyle?: React.CSSProperties;
}

const defaultProps = {
	selectedEndPointName: '',
	parentContainerDiv: '',
	dropdownStyle: {},
};

function EndPointsDropDown({
	selectedEndPointName,
	setSelectedEndPointName,
	endPointDropDownDataQuery,
	parentContainerDiv,
	dropdownStyle,
}: EndPointsDropDownProps): JSX.Element {
	const { data, isLoading, isFetching } = endPointDropDownDataQuery;

	const handleChange = (value: string): void => {
		setSelectedEndPointName(value);
	};

	const formattedData = useMemo(
		() =>
			getFormattedEndPointDropDownData(
				data?.payload?.data?.result?.[0]?.table?.rows,
			),
		[data?.payload?.data?.result],
	);

	return (
		<Select
			value={selectedEndPointName || undefined}
			placeholder="Select endpoint"
			loading={isLoading || isFetching}
			style={{ width: '100%' }}
			onChange={handleChange}
			options={formattedData}
			getPopupContainer={
				parentContainerDiv
					? (): HTMLElement =>
							document.querySelector(parentContainerDiv) as HTMLElement
					: (triggerNode): HTMLElement => triggerNode.parentNode as HTMLElement
			}
			dropdownStyle={dropdownStyle}
		/>
	);
}

EndPointsDropDown.defaultProps = defaultProps;

export default EndPointsDropDown;
@@ -1,31 +0,0 @@
import { Button, Typography } from 'antd';
import { RotateCw } from 'lucide-react';

function ErrorState({ refetch }: { refetch: () => void }): JSX.Element {
	return (
		<div className="error-state-container">
			<div className="error-state-content-wrapper">
				<div className="error-state-content">
					<div className="icon">
						<img src="/Icons/awwSnap.svg" alt="awwSnap" width={32} height={32} />
					</div>
					<div className="error-state-text">
						<Typography.Text>Uh-oh :/ We ran into an error.</Typography.Text>
						<Typography.Text type="secondary">
							Please refresh this panel.
						</Typography.Text>
					</div>
				</div>
				<Button
					className="refresh-cta"
					onClick={(): void => refetch()}
					icon={<RotateCw size={16} />}
				>
					Refresh this panel
				</Button>
			</div>
		</div>
	);
}

export default ErrorState;
@@ -1,129 +0,0 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table } from 'antd';
import { ColumnType } from 'antd/lib/table';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
	createFiltersForSelectedRowData,
	EndPointsTableRowData,
	formatEndPointsDataForTable,
	getEndPointsColumnsConfig,
	getEndPointsQueryPayload,
} from 'container/ApiMonitoring/utils';
import LoadingContainer from 'container/InfraMonitoringK8s/LoadingContainer';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useMemo } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { GlobalReducer } from 'types/reducer/globalTime';

import { VIEW_TYPES, VIEWS } from '../constants';

function ExpandedRow({
	domainName,
	selectedRowData,
	setSelectedEndPointName,
	setSelectedView,
}: {
	domainName: string;
	selectedRowData: EndPointsTableRowData;
	setSelectedEndPointName: (name: string) => void;
	setSelectedView: (view: VIEWS) => void;
}): JSX.Element {
	const nestedColumns = useMemo(() => getEndPointsColumnsConfig(false, []), []);
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);
	const groupedByRowDataQueryPayload = useMemo(() => {
		if (!selectedRowData) return null;

		const filters = createFiltersForSelectedRowData(selectedRowData);

		const baseQueryPayload = getEndPointsQueryPayload(
			[],
			domainName,
			Math.floor(minTime / 1e9),
			Math.floor(maxTime / 1e9),
		);

		return baseQueryPayload.map((currentQueryPayload) => ({
			...currentQueryPayload,
			query: {
				...currentQueryPayload.query,
				builder: {
					...currentQueryPayload.query.builder,
					queryData: currentQueryPayload.query.builder.queryData.map(
						(queryData) => ({
							...queryData,
							filters: {
								items: [...(queryData.filters?.items || []), ...filters.items],
								op: 'AND',
							},
						}),
					),
				},
			},
		}));
	}, [domainName, minTime, maxTime, selectedRowData]);

	const groupedByRowQueries = useQueries(
		groupedByRowDataQueryPayload
			? groupedByRowDataQueryPayload.map((payload) => ({
					queryKey: [
						`${REACT_QUERY_KEY.GET_NESTED_ENDPOINTS_LIST}-${domainName}-${selectedRowData?.key}`,
						payload,
						ENTITY_VERSION_V4,
						selectedRowData?.key,
					],
					queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
						GetMetricQueryRange(payload, ENTITY_VERSION_V4),
					enabled: !!payload && !!selectedRowData,
			  }))
			: [],
	);

	const groupedByRowQuery = groupedByRowQueries[0];
	return (
		<div className="expanded-table-container">
			{groupedByRowQuery?.isFetching || groupedByRowQuery?.isLoading ? (
				<LoadingContainer />
			) : (
				<div className="expanded-table">
					<Table
						columns={nestedColumns as ColumnType<EndPointsTableRowData>[]}
						dataSource={
							groupedByRowQuery?.data
								? formatEndPointsDataForTable(
										groupedByRowQuery.data?.payload?.data?.result?.[0]?.table?.rows,
										[],
								  )
								: []
						}
						pagination={false}
						scroll={{ x: true }}
						tableLayout="fixed"
						showHeader={false}
						loading={{
							spinning: groupedByRowQuery?.isFetching || groupedByRowQuery?.isLoading,
							indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
						}}
						onRow={(record): { onClick: () => void; className: string } => ({
							onClick: (): void => {
								setSelectedEndPointName(record.endpointName);
								setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
								logEvent('API Monitoring: Endpoint name row clicked', {});
							},
							className: 'expanded-clickable-row',
						})}
					/>
				</div>
			)}
		</div>
	);
}

export default ExpandedRow;
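The payload rewrite in ExpandedRow follows one rule: keep whatever filters the base query already carries and append the row-specific ones under a single AND. Extracted as a standalone helper, with the types reduced for the sketch:

type FilterItem = { id?: string; key?: { key: string }; op: string; value: unknown };
type Filters = { op: string; items: FilterItem[] };

// Append row-level filters to the base query's filters; AND semantics overall.
function mergeFilters(base: Filters | undefined, extra: Filters): Filters {
	return {
		op: 'AND',
		items: [...(base?.items ?? []), ...extra.items],
	};
}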
@@ -1,22 +0,0 @@
import { Card } from 'antd';
import GridCard from 'container/GridCardLayout/GridCard';
import { Widgets } from 'types/api/dashboard/getAll';

function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
	return (
		<div>
			<Card bordered className="endpoint-details-card">
				<div className="graph-container">
					<GridCard
						widget={widget}
						isQueryEnabled
						onDragSelect={(): void => {}}
						customOnDragSelect={(): void => {}}
					/>
				</div>
			</Card>
		</div>
	);
}

export default MetricOverTimeGraph;
@@ -1,257 +0,0 @@
import { Color } from '@signozhq/design-tokens';
import { Button, Card, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import { useGetGraphCustomSeries } from 'components/CeleryTask/useGetGraphCustomSeries';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import Uplot from 'components/Uplot';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
	getCustomFiltersForBarChart,
	getFormattedEndPointStatusCodeChartData,
	getStatusCodeBarChartWidgetData,
	statusCodeWidgetInfo,
} from 'container/ApiMonitoring/utils';
import { handleGraphClick } from 'container/GridCardLayout/GridCard/utils';
import { useGraphClickToShowButton } from 'container/GridCardLayout/useGraphClickToShowButton';
import useNavigateToExplorerPages from 'container/GridCardLayout/useNavigateToExplorerPages';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useNotifications } from 'hooks/useNotifications';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useMemo, useRef, useState } from 'react';
import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';
import { Options } from 'uplot';

import ErrorState from './ErrorState';

function StatusCodeBarCharts({
	endPointStatusCodeBarChartsDataQuery,
	endPointStatusCodeLatencyBarChartsDataQuery,
	domainName,
	endPointName,
	domainListFilters,
	filters,
}: {
	endPointStatusCodeBarChartsDataQuery: UseQueryResult<
		SuccessResponse<any>,
		unknown
	>;
	endPointStatusCodeLatencyBarChartsDataQuery: UseQueryResult<
		SuccessResponse<any>,
		unknown
	>;
	domainName: string;
	endPointName: string;
	domainListFilters: IBuilderQuery['filters'];
	filters: IBuilderQuery['filters'];
}): JSX.Element {
	// 0: Status Code Count
	// 1: Status Code Latency
	const [currentWidgetInfoIndex, setCurrentWidgetInfoIndex] = useState(0);

	const {
		data: endPointStatusCodeBarChartsData,
	} = endPointStatusCodeBarChartsDataQuery;

	const {
		data: endPointStatusCodeLatencyBarChartsData,
	} = endPointStatusCodeLatencyBarChartsDataQuery;

	const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const graphRef = useRef<HTMLDivElement>(null);
	const dimensions = useResizeObserver(graphRef);
	const formattedEndPointStatusCodeBarChartsDataPayload = useMemo(
		() =>
			getFormattedEndPointStatusCodeChartData(
				endPointStatusCodeBarChartsData?.payload,
				'sum',
			),
		[endPointStatusCodeBarChartsData?.payload],
	);

	const formattedEndPointStatusCodeLatencyBarChartsDataPayload = useMemo(
		() =>
			getFormattedEndPointStatusCodeChartData(
				endPointStatusCodeLatencyBarChartsData?.payload,
				'average',
			),
		[endPointStatusCodeLatencyBarChartsData?.payload],
	);

	const chartData = useMemo(
		() =>
			getUPlotChartData(
				currentWidgetInfoIndex === 0
					? formattedEndPointStatusCodeBarChartsDataPayload
					: formattedEndPointStatusCodeLatencyBarChartsDataPayload,
			),
		[
			currentWidgetInfoIndex,
			formattedEndPointStatusCodeBarChartsDataPayload,
			formattedEndPointStatusCodeLatencyBarChartsDataPayload,
		],
	);

	const isDarkMode = useIsDarkMode();

	const graphClick = useGraphClickToShowButton({
		graphRef,
		isButtonEnabled: true,
		buttonClassName: 'view-onclick-show-button',
	});

	const navigateToExplorer = useNavigateToExplorer();

	const navigateToExplorerPages = useNavigateToExplorerPages();
	const { notifications } = useNotifications();

	const { getCustomSeries } = useGetGraphCustomSeries({
		isDarkMode,
		drawStyle: 'bars',
		colorMapping: {
			'200-299': Color.BG_FOREST_500,
			'300-399': Color.BG_AMBER_400,
			'400-499': Color.BG_CHERRY_500,
			'500-599': Color.BG_ROBIN_500,
			Other: Color.BG_SIENNA_500,
		},
	});

	const widget = useMemo<Widgets>(
		() =>
			getStatusCodeBarChartWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
		[domainName, endPointName, domainListFilters, filters],
	);

	const graphClickHandler = useCallback(
		(
			xValue: number,
			yValue: number,
			mouseX: number,
			mouseY: number,
			metric?: { [key: string]: string },
			queryData?: { queryName: string; inFocusOrNot: boolean },
		): void => {
			const customFilters = getCustomFiltersForBarChart(metric);
			handleGraphClick({
				xValue,
				yValue,
				mouseX,
				mouseY,
				metric,
				queryData,
				widget,
				navigateToExplorerPages,
				navigateToExplorer,
				notifications,
				graphClick,
				customFilters,
			});
		},
		[
			widget,
			navigateToExplorerPages,
			navigateToExplorer,
			notifications,
			graphClick,
		],
	);

	const options = useMemo(
		() =>
			getUPlotChartOptions({
				apiResponse:
					currentWidgetInfoIndex === 0
						? formattedEndPointStatusCodeBarChartsDataPayload
						: formattedEndPointStatusCodeLatencyBarChartsDataPayload,
				isDarkMode,
				dimensions,
				yAxisUnit: statusCodeWidgetInfo[currentWidgetInfoIndex].yAxisUnit,
				softMax: null,
				softMin: null,
				minTimeScale: Math.floor(minTime / 1e9),
				maxTimeScale: Math.floor(maxTime / 1e9),
				panelType: PANEL_TYPES.BAR,
				onClickHandler: graphClickHandler,
				customSeries: getCustomSeries,
			}),
		[
			minTime,
			maxTime,
			currentWidgetInfoIndex,
			dimensions,
			formattedEndPointStatusCodeBarChartsDataPayload,
			formattedEndPointStatusCodeLatencyBarChartsDataPayload,
			isDarkMode,
			graphClickHandler,
			getCustomSeries,
		],
	);

	const renderCardContent = useCallback(
		(query: UseQueryResult<SuccessResponse<any>, unknown>): JSX.Element => {
			if (query.isLoading) {
				return <Skeleton />;
			}

			if (query.error) {
				return <ErrorState refetch={query.refetch} />;
			}
			return (
				<div
					className={cx('chart-container', {
						'no-data-container':
							!query.isLoading && !query?.data?.payload?.data?.result?.length,
					})}
				>
					<Uplot options={options as Options} data={chartData} />
				</div>
			);
		},
		[options, chartData],
	);

	return (
		<div>
			<Card bordered className="endpoint-details-card">
				<div className="header">
					<Typography.Text>Call response status</Typography.Text>
					<Button.Group className="views-tabs">
						<Button
							value={0}
							className={currentWidgetInfoIndex === 0 ? 'selected_view tab' : 'tab'}
							disabled={false}
							onClick={(): void => setCurrentWidgetInfoIndex(0)}
						>
							Number of calls
						</Button>
						<Button
							value={1}
							className={currentWidgetInfoIndex === 1 ? 'selected_view tab' : 'tab'}
							onClick={(): void => setCurrentWidgetInfoIndex(1)}
						>
							Latency
						</Button>
					</Button.Group>
				</div>
				<div className="graph-container" ref={graphRef}>
					{renderCardContent(endPointStatusCodeBarChartsDataQuery)}
				</div>
			</Card>
		</div>
	);
}

export default StatusCodeBarCharts;
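The colorMapping keys above imply a bucketing of HTTP status codes into classes. The actual grouping lives inside getFormattedEndPointStatusCodeChartData, which is not shown in this diff, but a plausible sketch of the rule the keys suggest:

// Bucket an HTTP status code the way the chart's colorMapping keys expect.
// Assumption: anything outside 200-599 falls into 'Other'.
function statusCodeBucket(
	code: number,
): '200-299' | '300-399' | '400-499' | '500-599' | 'Other' {
	if (code >= 200 && code <= 299) return '200-299';
	if (code >= 300 && code <= 399) return '300-399';
	if (code >= 400 && code <= 499) return '400-499';
	if (code >= 500 && code <= 599) return '500-599';
	return 'Other';
}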
@@ -1,72 +0,0 @@
import { Table, Typography } from 'antd';
import {
	endPointStatusCodeColumns,
	getFormattedEndPointStatusCodeData,
} from 'container/ApiMonitoring/utils';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import ErrorState from './ErrorState';

function StatusCodeTable({
	endPointStatusCodeDataQuery,
}: {
	endPointStatusCodeDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
}): JSX.Element {
	const {
		isLoading,
		isRefetching,
		isError,
		data,
		refetch,
	} = endPointStatusCodeDataQuery;

	const statusCodeData = useMemo(() => {
		if (isLoading || isRefetching || isError) {
			return [];
		}

		return getFormattedEndPointStatusCodeData(
			data?.payload?.data?.result?.[0]?.table?.rows,
		);
	}, [data?.payload?.data?.result, isLoading, isRefetching, isError]);

	if (isError) {
		return <ErrorState refetch={refetch} />;
	}

	return (
		<div className="status-code-table-container">
			<Table
				loading={isLoading || isRefetching}
				dataSource={statusCodeData || []}
				columns={endPointStatusCodeColumns}
				pagination={false}
				rowClassName={(_, index): string =>
					index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
				}
				locale={{
					emptyText:
						isLoading || isRefetching ? null : (
							<div className="no-status-code-data-message-container">
								<div className="no-status-code-data-message-content">
									<img
										src="/Icons/emptyState.svg"
										alt="thinking-emoji"
										className="empty-state-svg"
									/>

									<Typography.Text className="no-status-code-data-message">
										This query had no results. Edit your query and try again!
									</Typography.Text>
								</div>
							</div>
						),
				}}
			/>
		</div>
	);
}

export default StatusCodeTable;
@@ -1,9 +0,0 @@
export enum VIEWS {
	ALL_ENDPOINTS = 'all_endpoints',
	ENDPOINT_DETAILS = 'endpoint_details',
}

export const VIEW_TYPES = {
	ALL_ENDPOINTS: VIEWS.ALL_ENDPOINTS,
	ENDPOINT_DETAILS: VIEWS.ENDPOINT_DETAILS,
};
@@ -1,159 +0,0 @@
import '../Explorer.styles.scss';

import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table, Typography } from 'antd';
import axios from 'api';
import logEvent from 'api/common/logEvent';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import cx from 'classnames';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { HandleChangeQueryData } from 'types/common/operations.types';
import { GlobalReducer } from 'types/reducer/globalTime';

import {
	columnsConfig,
	formatDataForTable,
	hardcodedAttributeKeys,
} from '../../utils';
import DomainDetails from './DomainDetails/DomainDetails';

function DomainList({
	query,
	showIP,
	handleChangeQueryData,
}: {
	query: IBuilderQuery;
	showIP: boolean;
	handleChangeQueryData: HandleChangeQueryData;
}): JSX.Element {
	const [selectedDomainIndex, setSelectedDomainIndex] = useState<number>(-1);
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const fetchApiOverview = async (): Promise<
		SuccessResponse<any> | ErrorResponse
	> => {
		const requestBody = {
			start: minTime,
			end: maxTime,
			show_ip: showIP,
			filters: {
				op: 'AND',
				items: query?.filters?.items,
			},
		};

		try {
			const response = await axios.post(
				'/third-party-apis/overview/list',
				requestBody,
			);
			return {
				statusCode: 200,
				error: null,
				message: response.data.status,
				payload: response.data,
			};
		} catch (error) {
			return ErrorResponseHandler(error as AxiosError);
		}
	};

	const { data, isLoading, isFetching } = useQuery(
		[REACT_QUERY_KEY.GET_DOMAINS_LIST, minTime, maxTime, query, showIP],
		fetchApiOverview,
	);

	const formattedDataForTable = useMemo(
		() => formatDataForTable(data?.payload?.data?.result?.[0]?.table?.rows),
		[data],
	);

	return (
		<section className={cx('api-module-right-section')}>
			<div className={cx('api-monitoring-list-header')}>
				<QueryBuilderSearchV2
					query={query}
					onChange={(searchFilters): void =>
						handleChangeQueryData('filters', searchFilters)
					}
					placeholder="Search filters..."
					hardcodedAttributeKeys={hardcodedAttributeKeys}
				/>
				<DateTimeSelectionV2
					showAutoRefresh={false}
					showRefreshText={false}
					hideShareModal
				/>
			</div>
			<Table
				className={cx('api-monitoring-domain-list-table')}
				dataSource={isFetching || isLoading ? [] : formattedDataForTable}
				columns={columnsConfig}
				loading={{
					spinning: isFetching || isLoading,
					indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
				}}
				locale={{
					emptyText:
						isFetching || isLoading ? null : (
							<div className="no-filtered-domains-message-container">
								<div className="no-filtered-domains-message-content">
									<img
										src="/Icons/emptyState.svg"
										alt="thinking-emoji"
										className="empty-state-svg"
									/>

									<Typography.Text className="no-filtered-domains-message">
										This query had no results. Edit your query and try again!
									</Typography.Text>
								</div>
							</div>
						),
				}}
				scroll={{ x: true }}
				tableLayout="fixed"
				onRow={(record, index): { onClick: () => void; className: string } => ({
					onClick: (): void => {
						if (index !== undefined) {
							const dataIndex = formattedDataForTable.findIndex(
								(item) => item.key === record.key,
							);
							setSelectedDomainIndex(dataIndex);
							logEvent('API Monitoring: Domain name row clicked', {});
						}
					},
					className: 'expanded-clickable-row',
				})}
				rowClassName={(_, index): string =>
					index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
				}
			/>
			{selectedDomainIndex !== -1 && (
				<DomainDetails
					domainData={formattedDataForTable[selectedDomainIndex]}
					selectedDomainIndex={selectedDomainIndex}
					setSelectedDomainIndex={setSelectedDomainIndex}
					domainListLength={formattedDataForTable.length}
					handleClose={(): void => {
						setSelectedDomainIndex(-1);
					}}
					domainListFilters={query?.filters}
				/>
			)}
		</section>
	);
}

export default DomainList;
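fetchApiOverview above follows the codebase's convention of folding every axios call into a SuccessResponse/ErrorResponse union, so useQuery consumers never handle a raw AxiosError. A reduced, self-contained sketch of that convention; the field names mirror the code above, while the generic wrapper itself is illustrative:

import axios, { AxiosError } from 'axios';

type Normalized<T> =
	| { statusCode: number; error: null; payload: T }
	| { statusCode: number; error: string; payload: null };

// POST and fold both outcomes into one discriminated union.
async function postNormalized<T>(url: string, body: unknown): Promise<Normalized<T>> {
	try {
		const response = await axios.post<T>(url, body);
		return { statusCode: response.status, error: null, payload: response.data };
	} catch (err) {
		const axiosError = err as AxiosError;
		return {
			statusCode: axiosError.response?.status ?? 500,
			error: axiosError.message,
			payload: null,
		};
	}
}

The benefit of this shape is that the query function never rejects, so react-query callers branch on the returned union instead of wiring up separate error handlers.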
@@ -1,219 +0,0 @@
.api-monitoring-page {
	display: flex;
	height: 100%;

	.api-quick-filter-left-section {
		width: 0%;
		flex-shrink: 0;

		.api-quick-filters-header {
			padding: 12px;
			border-bottom: 1px solid var(--bg-slate-400);

			display: flex;
			align-items: center;
			gap: 6px;

			font-size: 14px;
			line-height: 18px;
		}
	}

	.api-module-right-section {
		display: flex;
		flex-direction: column;
		width: 100%;

		.api-monitoring-list-header {
			width: 100%;
			padding: 8px;
			display: flex;

			justify-content: space-between;
			align-items: center;
			gap: 12px;
		}

		.query-builder-search-v2 {
			min-width: 80%;
			flex: 1;
		}
	}

	.api-monitoring-domain-list-table {
		.ant-table {
			.ant-table-thead > tr > th {
				padding: 12px;
				border-bottom: none;

				color: var(--bg-vanilla-400);
				font-family: Inter;
				font-size: 11px;
				font-style: normal;
				font-weight: 600;
				line-height: 18px; /* 163.636% */
				letter-spacing: 0.44px;
				text-transform: uppercase;
				background: none;

				&::before {
					background-color: transparent;
				}
			}

			.ant-table-thead > tr > th:has(.domain-list-name-col-header) {
				background: var(--bg-ink-300);
				opacity: 0.6;
			}

			.ant-table-cell {
				padding: 12px;
				font-size: 13px;
				line-height: 20px;
				color: var(--bg-vanilla-100);
				border-bottom: none;
			}

			.ant-table-cell:has(.domain-list-name-col-value) {
				background: var(--bg-ink-300);
				opacity: 0.6;
			}

			.round-metric-tag {
				display: inline-flex;
				padding: 2px 8px;
				align-items: center;
				gap: 6px;
				width: fit-content;

				border-radius: 50px;
				border: 1px solid var(--bg-slate-400);
				background: var(--bg-slate-500);
				text-transform: lowercase;
			}

			.ant-table-tbody > tr:hover > td {
				background: rgba(255, 255, 255, 0.04);
			}

			.ant-table-cell:first-child {
				text-align: justify;
			}

			.ant-table-cell:nth-child(2) {
				padding-left: 16px;
				padding-right: 16px;
			}

			.ant-table-cell:nth-child(n + 3) {
				padding-right: 24px;
			}

			.column-header-right {
				text-align: right;
			}

			.ant-table-tbody > tr > td {
				border-bottom: none;
			}

			.ant-table-thead
				> tr
				> th:not(:last-child):not(.ant-table-selection-column):not(.ant-table-row-expand-icon-cell):not([colspan])::before {
				background-color: transparent;
			}

			.ant-empty-normal {
				visibility: hidden;
			}

			.table-row-light {
				background: none;
			}

			.table-row-dark {
				background: var(--bg-ink-300);
			}

			.error-rate {
				width: 120px;
			}
		}
	}

	&.filter-visible {
		.api-quick-filter-left-section {
			width: 260px;
		}

		.api-module-right-section {
			width: calc(100% - 260px);
		}
	}
}

.no-filtered-domains-message-container {
	height: 30vh;
	display: flex;
	flex-direction: column;
	align-items: center;
	justify-content: center;

	.no-filtered-domains-message-content {
		display: flex;
		flex-direction: column;
		align-items: flex-start;
		justify-content: center;

		width: fit-content;
		padding: 24px;
	}

	.no-filtered-domains-message {
		margin-top: 8px;
	}
}

.lightMode {
	.api-monitoring-domain-list-table {
		.ant-table {
			.ant-table-thead > tr > th {
				background: var(--bg-vanilla-100);
				color: var(--text-ink-300);
			}

			.ant-table-thead > tr > th:has(.domain-list-name-col-header) {
				background: var(--bg-vanilla-100);
			}

			.ant-table-cell {
				background: var(--bg-vanilla-100);
				color: var(--bg-ink-500);
			}

			.ant-table-cell:has(.domain-list-name-col-value) {
				background: var(--bg-vanilla-100);
			}

			.ant-table-tbody > tr:hover > td {
				background: rgba(0, 0, 0, 0.04);
			}

			.table-row-light {
				background: none;
			}

			.table-row-dark {
				background: none;
			}

			.round-metric-tag {
				color: var(--bg-vanilla-100);
			}
		}
	}
}
@@ -1,101 +0,0 @@
import './Explorer.styles.scss';

import { FilterOutlined } from '@ant-design/icons';
import * as Sentry from '@sentry/react';
import { Switch, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import QuickFilters from 'components/QuickFilters/QuickFilters';
import { QuickFiltersSource } from 'components/QuickFilters/types';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useEffect, useMemo, useState } from 'react';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import { ApiMonitoringQuickFiltersConfig } from '../utils';
import DomainList from './Domains/DomainList';

function Explorer(): JSX.Element {
	const [showIP, setShowIP] = useState<boolean>(true);

	const { currentQuery } = useQueryBuilder();

	useEffect(() => {
		logEvent('API Monitoring: Landing page visited', {});
	}, []);

	const { handleChangeQueryData } = useQueryOperations({
		index: 0,
		query: currentQuery.builder.queryData[0],
		entityVersion: '',
	});

	const updatedCurrentQuery = useMemo(
		() => ({
			...currentQuery,
			builder: {
				...currentQuery.builder,
				queryData: [
					{
						...currentQuery.builder.queryData[0],
						dataSource: DataSource.TRACES,
						aggregateOperator: 'noop',
						aggregateAttribute: {
							...currentQuery.builder.queryData[0].aggregateAttribute,
						},
					},
				],
			},
		}),
		[currentQuery],
	);
	const query = updatedCurrentQuery?.builder?.queryData[0] || null;

	return (
		<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
			<div className={cx('api-monitoring-page', 'filter-visible')}>
				<section className="api-quick-filter-left-section">
					<div className="api-quick-filters-header">
						<FilterOutlined />
						<Typography.Text>Filters</Typography.Text>
					</div>

					<div className="api-quick-filters-header">
						<Typography.Text>Show IP addresses</Typography.Text>
						<Switch
							size="small"
							style={{ marginLeft: 'auto' }}
							checked={showIP}
							onClick={(): void => {
								setShowIP((prevShowIP): boolean => {
									logEvent('API Monitoring: Show IP addresses clicked', {
										showIP: !prevShowIP,
									});
									return !prevShowIP;
								});
							}}
						/>
					</div>

					<QuickFilters
						source={QuickFiltersSource.API_MONITORING}
						config={ApiMonitoringQuickFiltersConfig}
						handleFilterVisibilityChange={(): void => {}}
						onFilterChange={(query: Query): void =>
							handleChangeQueryData('filters', query.builder.queryData[0].filters)
						}
					/>
				</section>
				<DomainList
					query={query}
					showIP={showIP}
					handleChangeQueryData={handleChangeQueryData}
				/>
			</div>
		</Sentry.ErrorBoundary>
	);
}

export default Explorer;
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff