Compare commits

2 Commits: react-rout... → signoz-tai...

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 5b65be0567 |  |
|  | d5aae260df |  |
@@ -1,7 +1,6 @@
 .git
 .github
 .vscode
-.devenv
 README.md
 deploy
 sample-apps
.github/workflows/build-community.yaml (vendored, deleted, 81 lines)
@@ -1,81 +0,0 @@
name: build-community

on:
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'

defaults:
  run:
    shell: bash

env:
  PRIMUS_HOME: .primus
  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk

jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.build-info.outputs.version }}
      hash: ${{ steps.build-info.outputs.hash }}
      time: ${{ steps.build-info.outputs.time }}
      branch: ${{ steps.build-info.outputs.branch }}
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - id: token
        name: github-token-gen
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PRIMUS_APP_ID }}
          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - name: primus-checkout
        uses: actions/checkout@v4
        with:
          repository: signoz/primus
          ref: main
          path: .primus
          token: ${{ steps.token.outputs.token }}
      - name: build-info
        run: |
          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
  js-build:
    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
    needs: prepare
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
      JS_OUTPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
    needs: [prepare, js-build]
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_NAME: signoz-community
      GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./pkg/query-service
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=community
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
      DOCKER_PROVIDERS: dockerhub
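Aside: the two tag filters in the deleted workflow are GitHub Actions glob patterns. As a rough illustration (not part of the diff), here is a Go sketch using equivalent regular expressions to check which pushed tags would have triggered it; the tag values are made up:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Regex equivalents of the workflow's tag filters
	// 'v[0-9]+.[0-9]+.[0-9]+' and 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'.
	stable := regexp.MustCompile(`^v[0-9]+\.[0-9]+\.[0-9]+$`)
	rc := regexp.MustCompile(`^v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$`)

	// Hypothetical tags, for illustration only.
	for _, tag := range []string{"v0.79.1", "v0.80.0-rc.1", "main", "v0.80.0-beta.1"} {
		fmt.Printf("%-16s stable=%v rc=%v\n", tag, stable.MatchString(tag), rc.MatchString(tag))
	}
}
```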
.github/workflows/build-enterprise.yaml (vendored, deleted, 113 lines)
@@ -1,113 +0,0 @@
name: build-enterprise

on:
  push:
    tags:
      - v*

defaults:
  run:
    shell: bash

env:
  PRIMUS_HOME: .primus
  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk

jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
      version: ${{ steps.build-info.outputs.version }}
      hash: ${{ steps.build-info.outputs.hash }}
      time: ${{ steps.build-info.outputs.time }}
      branch: ${{ steps.build-info.outputs.branch }}
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - id: token
        name: github-token-gen
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PRIMUS_APP_ID }}
          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - name: primus-checkout
        uses: actions/checkout@v4
        with:
          repository: signoz/primus
          ref: main
          path: .primus
          token: ${{ steps.token.outputs.token }}
      - name: build-info
        id: build-info
        run: |
          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
      - name: set-docker-providers
        id: set-docker-providers
        run: |
          if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
            echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
          else
            echo "providers=gcp" >> $GITHUB_OUTPUT
          fi
      - name: create-dotenv
        run: |
          mkdir -p frontend
          echo 'CI=1' > frontend/.env
          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
          path: frontend/.env
          key: enterprise-dotenv-${{ github.sha }}
  js-build:
    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
    needs: prepare
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
      JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
      JS_INPUT_ARTIFACT_PATH: frontend/.env
      JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
    needs: [prepare, js-build]
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./ee/query-service
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
      DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}
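Aside: the set-docker-providers step above publishes to both Docker Hub and GCP for stable or rc tags, and only to GCP otherwise. A minimal Go sketch of the same decision (the ref values are illustrative, not from the diff):

```go
package main

import (
	"fmt"
	"regexp"
)

// dockerProviders mirrors the shell logic in set-docker-providers:
// stable and rc tags go to both registries, everything else only to GCP.
func dockerProviders(ref string) string {
	release := regexp.MustCompile(`^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+(-rc\.[0-9]+)?$`)
	if release.MatchString(ref) {
		return "dockerhub gcp"
	}
	return "gcp"
}

func main() {
	for _, ref := range []string{"refs/tags/v0.80.0", "refs/tags/v0.80.0-rc.2", "refs/heads/main"} {
		fmt.Println(ref, "->", dockerProviders(ref))
	}
}
```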
.github/workflows/build-staging.yaml (vendored, deleted, 122 lines)
@@ -1,122 +0,0 @@
name: build-staging

on:
  push:
    branches:
      - main
  pull_request:
    types: [labeled]

defaults:
  run:
    shell: bash

env:
  PRIMUS_HOME: .primus
  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk

jobs:
  prepare:
    runs-on: ubuntu-latest
    if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
    outputs:
      version: ${{ steps.build-info.outputs.version }}
      hash: ${{ steps.build-info.outputs.hash }}
      time: ${{ steps.build-info.outputs.time }}
      branch: ${{ steps.build-info.outputs.branch }}
      deployment: ${{ steps.build-info.outputs.deployment }}
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - id: token
        name: github-token-gen
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PRIMUS_APP_ID }}
          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - name: primus-checkout
        uses: actions/checkout@v4
        with:
          repository: signoz/primus
          ref: main
          path: .primus
          token: ${{ steps.token.outputs.token }}
      - name: build-info
        id: build-info
        run: |
          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT

          staging_label="${{ github.event.label.name }}"
          if [[ "${staging_label}" == "staging:"* ]]; then
            deployment=${staging_label#"staging:"}
          elif [[ "${{ github.event.ref }}" == "refs/heads/main" ]]; then
            deployment="staging"
          else
            echo "error: not able to determine deployment - please verify the PR label or the branch"
            exit 1
          fi
          echo "deployment=${deployment}" >> $GITHUB_OUTPUT
      - name: create-dotenv
        run: |
          mkdir -p frontend
          echo 'CI=1' > frontend/.env
          echo 'TUNNEL_URL=https://telemetry.staging.signoz.cloud/tunnel' >> frontend/.env
          echo 'TUNNEL_DOMAIN=https://telemetry.staging.signoz.cloud' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
          path: frontend/.env
          key: staging-dotenv-${{ github.sha }}
  js-build:
    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
    needs: prepare
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
      JS_INPUT_ARTIFACT_CACHE_KEY: staging-dotenv-${{ github.sha }}
      JS_INPUT_ARTIFACT_PATH: frontend/.env
      JS_OUTPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
    needs: [prepare, js-build]
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./ee/query-service
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
      DOCKER_PROVIDERS: gcp
  staging:
    if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
    uses: signoz/primus.workflows/.github/workflows/github-trigger.yaml@main
    secrets: inherit
    needs: [prepare, go-build]
    with:
      PRIMUS_REF: main
      GITHUB_ENVIRONMENT: staging
      GITHUB_SILENT: true
      GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
      GITHUB_EVENT_NAME: releaser
      GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"
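Aside: the build-info step above derives the deployment name from a staging:<name> PR label, falling back to "staging" for pushes to main. A small Go sketch of that logic, shown for illustration only:

```go
package main

import (
	"errors"
	"fmt"
	"strings"
)

// deploymentName mirrors the shell in the build-info step: a "staging:<name>"
// PR label selects a named deployment, a push to main falls back to "staging".
func deploymentName(label, ref string) (string, error) {
	if strings.HasPrefix(label, "staging:") {
		return strings.TrimPrefix(label, "staging:"), nil
	}
	if ref == "refs/heads/main" {
		return "staging", nil
	}
	return "", errors.New("not able to determine deployment")
}

func main() {
	d, _ := deploymentName("staging:eu-test", "") // hypothetical label
	fmt.Println(d)                                // eu-test
	d, _ = deploymentName("", "refs/heads/main")
	fmt.Println(d) // staging
}
```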
.github/workflows/build.yaml (vendored, new file, 122 lines)
@@ -0,0 +1,122 @@
name: build

on:
  push:
    branches:
      - main
    tags:
      - v*

jobs:
  enterprise:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: setup
        uses: actions/setup-go@v5
        with:
          go-version: "1.22"
      - name: setup-qemu
        uses: docker/setup-qemu-action@v3
      - name: setup-buildx
        uses: docker/setup-buildx-action@v3
        with:
          version: latest
      - name: docker-login
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: create-env-file
        run: |
          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
      - name: github-ref-info
        shell: bash
        run: |
          GH_REF=${{ github.ref }}
          if [[ "${{ github.ref_type }}" == "tag" ]]; then
            PREFIX="refs/tags/"
            echo "GH_IS_TAG=true" >> $GITHUB_ENV
            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          else
            PREFIX="refs/heads/"
            echo "GH_IS_TAG=false" >> $GITHUB_ENV
            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          fi
      - name: set-version
        run: |
          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
            echo "VERSION=latest" >> $GITHUB_ENV
          else
            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
          fi
      - name: cross-compilation-tools
        run: |
          set -ex
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
      - name: publish
        run: make docker-buildx-enterprise

  community:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: setup-go
        uses: actions/setup-go@v5
        with:
          go-version: "1.22"
      - name: setup-qemu
        uses: docker/setup-qemu-action@v3
      - name: setup-buildx
        uses: docker/setup-buildx-action@v3
        with:
          version: latest
      - name: docker-login
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: github-ref-info
        shell: bash
        run: |
          GH_REF=${{ github.ref }}
          if [[ "${{ github.ref_type }}" == "tag" ]]; then
            PREFIX="refs/tags/"
            echo "GH_IS_TAG=true" >> $GITHUB_ENV
            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          else
            PREFIX="refs/heads/"
            echo "GH_IS_TAG=false" >> $GITHUB_ENV
            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          fi
      - name: set-version
        run: |
          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
            echo "VERSION=latest" >> $GITHUB_ENV
          else
            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
          fi
      - name: cross-compilation-tools
        run: |
          set -ex
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
      - name: publish
        run: make docker-buildx-community
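Aside: both jobs in the new build.yaml compute the Docker tag the same way: a pushed tag is used as-is, main maps to latest, and any other branch uses its own name. A hedged Go equivalent of the github-ref-info plus set-version steps, for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

// dockerTag mirrors the github-ref-info + set-version shell steps: a tag push
// is tagged as-is, main becomes "latest", any other branch uses its own name.
func dockerTag(ref, refType string) string {
	if refType == "tag" {
		return strings.TrimPrefix(ref, "refs/tags/")
	}
	branch := strings.TrimPrefix(ref, "refs/heads/")
	if branch == "main" {
		return "latest"
	}
	return branch
}

func main() {
	fmt.Println(dockerTag("refs/tags/v0.76.2", "tag"))     // v0.76.2
	fmt.Println(dockerTag("refs/heads/main", "branch"))    // latest
	fmt.Println(dockerTag("refs/heads/fix-bug", "branch")) // fix-bug
}
```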
.github/workflows/integrationci.yaml (vendored, deleted, 55 lines)
@@ -1,55 +0,0 @@
name: integrationci

on:
  pull_request:
    types:
      - labeled
  pull_request_target:
    types:
      - labeled

jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        src:
          - bootstrap
        sqlstore-provider:
          - postgres
          - sqlite
        clickhouse-version:
          - 24.1.2-alpine
          - 24.12-alpine
        schema-migrator-version:
          - v0.111.38
        postgres-version:
          - 15
    if: |
      ((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: python
        uses: actions/setup-python@v5
        with:
          python-version: 3.13
      - name: poetry
        run: |
          python -m pip install poetry==2.1.2
          python -m poetry config virtualenvs.in-project true
          cd tests/integration && poetry install --no-root
      - name: run
        run: |
          cd tests/integration && \
          poetry run pytest -ra \
            --basetemp=./tmp/ \
            -vv \
            --capture=no \
            src/${{matrix.src}} \
            --sqlstore-provider ${{matrix.sqlstore-provider}} \
            --postgres-version ${{matrix.postgres-version}} \
            --clickhouse-version ${{matrix.clickhouse-version}} \
            --schema-migrator-version ${{matrix.schema-migrator-version}}
.github/workflows/prereleaser.yaml (vendored, 4 changes)
@@ -1,9 +1,9 @@
 name: prereleaser
 
 on:
-  # schedule every wednesday 6:30 AM UTC (12:00 PM IST)
+  # schedule every wednesday 9:30 AM UTC (3pm IST)
   schedule:
-    - cron: '30 6 * * 3'
+    - cron: '30 9 * * 3'
 
   # allow manual triggering of the workflow by a maintainer
   workflow_dispatch:
.github/workflows/staging-deployment.yaml (vendored, new file, 56 lines)
@@ -0,0 +1,56 @@
name: staging-deployment
# Trigger deployment only on push to main branch
on:
  push:
    branches:
      - main
jobs:
  deploy:
    name: Deploy latest main branch to staging
    runs-on: ubuntu-latest
    environment: staging
    permissions:
      contents: 'read'
      id-token: 'write'
    steps:
      - id: 'auth'
        uses: 'google-github-actions/auth@v2'
        with:
          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}

      - name: 'sdk'
        uses: 'google-github-actions/setup-gcloud@v2'

      - name: 'ssh'
        shell: bash
        env:
          GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
          GITHUB_SHA: ${{ github.sha }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
          GCP_ZONE: ${{ secrets.GCP_ZONE }}
          GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
          CLOUDSDK_CORE_DISABLE_PROMPTS: 1
        run: |
          read -r -d '' COMMAND <<EOF || true
          echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
          echo "GITHUB_SHA: ${GITHUB_SHA}"
          export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
          export OTELCOL_TAG="main"
          export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
          export KAFKA_SPAN_EVAL="true"
          docker system prune --force
          docker pull signoz/signoz-otel-collector:main
          docker pull signoz/signoz-schema-migrator:main
          cd ~/signoz
          git status
          git add .
          git stash push -m "stashed on $(date --iso-8601=seconds)"
          git fetch origin
          git checkout ${GITHUB_BRANCH}
          git pull
          make docker-build-enterprise-amd64
          export VERSION="${GITHUB_SHA:0:7}-amd64"
          docker-compose -f deploy/docker/docker-compose.testing.yaml up --build -d
          EOF
          gcloud beta compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
.github/workflows/testing-deployment.yaml (vendored, new file, 56 lines)
@@ -0,0 +1,56 @@
name: testing-deployment
# Trigger deployment only on testing-deploy label on pull request
on:
  pull_request:
    types: [labeled]
jobs:
  deploy:
    name: Deploy PR branch to testing
    runs-on: ubuntu-latest
    environment: testing
    if: ${{ github.event.label.name == 'testing-deploy' }}
    permissions:
      contents: 'read'
      id-token: 'write'
    steps:
      - id: 'auth'
        uses: 'google-github-actions/auth@v2'
        with:
          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}

      - name: 'sdk'
        uses: 'google-github-actions/setup-gcloud@v2'

      - name: 'ssh'
        shell: bash
        env:
          GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
          GITHUB_SHA: ${{ github.sha }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
          GCP_ZONE: ${{ secrets.GCP_ZONE }}
          GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
          CLOUDSDK_CORE_DISABLE_PROMPTS: 1
        run: |
          read -r -d '' COMMAND <<EOF || true
          echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
          echo "GITHUB_SHA: ${GITHUB_SHA}"
          export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
          export DEV_BUILD="1"
          export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
          docker system prune --force
          cd ~/signoz
          git status
          git add .
          git stash push -m "stashed on $(date --iso-8601=seconds)"
          git fetch origin
          git checkout main
          git pull
          # This is added to handle the scenario when a new commit in the PR is force-pushed
          git branch -D ${GITHUB_BRANCH}
          git checkout --track origin/${GITHUB_BRANCH}
          make docker-build-enterprise-amd64
          export VERSION="${GITHUB_SHA:0:7}-amd64"
          docker-compose -f deploy/docker/docker-compose.testing.yaml up --build -d
          EOF
          gcloud beta compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
.gitignore (vendored, 147 changes)
@@ -80,153 +80,6 @@ deploy/common/clickhouse/user_scripts/
 
 queries.active
 
 # tmp
 **/tmp/**
 
-# .devenv tmp files
-.devenv/**/tmp/**
-.qodo
-
-### Python ###
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-share/python-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.nox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-*.py,cover
-.hypothesis/
-.pytest_cache/
-cover/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-db.sqlite3
-db.sqlite3-journal
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-.pybuilder/
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# IPython
-profile_default/
-ipython_config.py
-
-# Celery stuff
-celerybeat-schedule
-celerybeat.pid
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
-.env
-.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-.dmypy.json
-dmypy.json
-
-# Pyre type checker
-.pyre/
-
-# pytype static type analyzer
-.pytype/
-
-# Cython debug symbols
-cython_debug/
-
-# PyCharm
-# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
-# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
-# and can be added to the global gitignore or merged into this file. For a more nuclear
-# option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
-
-### Python Patch ###
-# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
-poetry.toml
-
-# ruff
-.ruff_cache/
-
-# LSP config files
-pyrightconfig.json
-
-# End of https://www.toptal.com/developers/gitignore/api/python
@@ -1,17 +0,0 @@ (deleted file)
#### Auto generated by make docker-version-alpine. DO NOT EDIT! ####
amd64=029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85
unknown=5fea95373b9ec85974843f31446fa6a9df4492dddae4e1cb056193c34a20a5be
arm=b4aef1a899e0271f06d948c9a8fa626ecdb2202d3a178bc14775dd559e23df8e
unknown=a4d1e27e63a9d6353046eb25a2f0ec02945012b217f4364cd83a73fe6dfb0b15
arm=4fdafe217d0922f3c3e2b4f64cf043f8403a4636685cd9c51fea2cbd1f419740
unknown=7f21ac2018d95b2c51a5779c1d5ca6c327504adc3b0fdc747a6725d30b3f13c2
arm64=ea3c5a9671f7b3f7eb47eab06f73bc6591df978b0d5955689a9e6f943aa368c0
unknown=a8ba68c1a9e6eea8041b4b8f996c235163440808b9654a865976fdcbede0f433
386=dea9f02e103e837849f984d5679305c758aba7fea1b95b7766218597f61a05ab
unknown=3c6629bec05c8273a927d46b77428bf4a378dad911a0ae284887becdc149b734
ppc64le=0880443bffa028dfbbc4094a32dd6b7ac25684e4c0a3d50da9e0acae355c5eaf
unknown=bb48308f976b266e3ab39bbf9af84521959bd9c295d3c763690cf41f8df2a626
riscv64=d76e6fbe348ff20c2931bb7f101e49379648e026de95dd37f96e00ce1909dcf7
unknown=dd807544365f6dc187cbe6de0806adce2ea9de3e7124717d1d8e8b7a18b77b64
s390x=b815fadf80495594eb6296a6af0bc647ae5f193e0044e07acec7e5b378c9ce2d
unknown=74681be74a280a88abb53ff1e048eb1fb624b30d0066730df6d8afd02ba82e01
Makefile (36 changes)
@@ -10,7 +10,7 @@ COMMIT_SHORT_SHA ?= $(shell git rev-parse --short HEAD)
 BRANCH_NAME ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
 VERSION ?= $(BRANCH_NAME)-$(COMMIT_SHORT_SHA)
 TIMESTAMP ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
-ARCHS ?= amd64 arm64
+ARCHS = amd64 arm64
 TARGET_DIR ?= $(shell pwd)/target
 
 ZEUS_URL ?= https://api.signoz.cloud
@@ -23,7 +23,6 @@ GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
 GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service
 GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community
 GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS))
-GO_BUILD_ARCHS_ENTERPRISE_RACE = $(addprefix go-build-enterprise-race-,$(ARCHS))
 GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service
 GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO)
 
@@ -75,10 +74,6 @@ go-run-enterprise: ## Runs the enterprise go backend server
 		--use-logs-new-schema true \
 		--use-trace-new-schema true
 
-.PHONY: go-test
-go-test: ## Runs go unit tests
-	@go test -race ./...
-
 .PHONY: go-run-community
 go-run-community: ## Runs the community go backend server
 	@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
@@ -120,18 +115,6 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
 		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
 	fi
 
-.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
-go-build-enterprise-race: ## Builds the go backend server for enterprise with race
-go-build-enterprise-race: $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
-$(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
-	@mkdir -p $(TARGET_DIR)/$(OS)-$*
-	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
-	@if [ $* = "arm64" ]; then \
-		CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
-	else \
-		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
-	fi
-
 ##############################################################
 # js commands
 ##############################################################
@@ -180,20 +163,3 @@ docker-buildx-enterprise: go-build-enterprise js-build
 		--platform linux/arm64,linux/amd64 \
 		--push \
 		--tag $(DOCKER_REGISTRY_ENTERPRISE):$(VERSION) $(SRC)
-
-##############################################################
-# python commands
-##############################################################
-.PHONY: py-fmt
-py-fmt: ## Run black for integration tests
-	@cd tests/integration && poetry run black .
-
-.PHONY: py-lint
-py-lint: ## Run lint for integration tests
-	@cd tests/integration && poetry run isort .
-	@cd tests/integration && poetry run autoflake .
-	@cd tests/integration && poetry run pylint .
-
-.PHONY: py-test
-py-test: ## Runs integration tests
-	@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@@ -72,6 +72,7 @@ sqlstore:
   # The path to the SQLite database file.
   path: /var/lib/signoz/signoz.db
 
+
 ##################### APIServer #####################
 apiserver:
   timeout:
@@ -90,29 +91,20 @@ apiserver:
     - /api/v1/version
     - /
 
 
 ##################### TelemetryStore #####################
 telemetrystore:
-  # Specifies the telemetrystore provider to use.
-  provider: clickhouse
   # Maximum number of idle connections in the connection pool.
   max_idle_conns: 50
   # Maximum number of open connections to the database.
   max_open_conns: 100
   # Maximum time to wait for a connection to be established.
   dial_timeout: 5s
+  # Specifies the telemetrystore provider to use.
+  provider: clickhouse
   clickhouse:
     # The DSN to use for clickhouse.
     dsn: tcp://localhost:9000
 
 ##################### Prometheus #####################
 prometheus:
-  active_query_tracker:
-    # Whether to enable the active query tracker.
-    enabled: true
-    # The path to use for the active query tracker.
-    path: ""
-    # The maximum number of concurrent queries.
-    max_concurrent: 20
+  # The DSN to use for ClickHouse.
+  dsn: http://localhost:9000
 
 ##################### Alertmanager #####################
 alertmanager:
@@ -125,7 +117,7 @@ alertmanager:
   # The poll interval for periodically syncing the alertmanager with the config in the store.
   poll_interval: 1m
   # The URL under which Alertmanager is externally reachable (for example, if Alertmanager is served via a reverse proxy). Used for generating relative and absolute links back to Alertmanager itself.
-  external_url: http://localhost:8080
+  external_url: http://localhost:9093
   # The global configuration for the alertmanager. All the exhaustive fields can be found in the upstream: https://github.com/prometheus/alertmanager/blob/efa05feffd644ba4accb526e98a8c6545d26a783/config/config.go#L833
   global:
     # ResolveTimeout is the time after which an alert is declared resolved if it has not been updated.
@@ -174,7 +174,7 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.79.1
+    image: signoz/signoz:v0.76.2
     command:
       - --config=/root/config/prometheus.yml
       - --use-logs-new-schema=true
@@ -208,7 +208,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.39
+    image: signoz/signoz-otel-collector:v0.111.34
    command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -232,7 +232,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.39
+    image: signoz/signoz-schema-migrator:v0.111.34
     deploy:
       restart_policy:
         condition: on-failure

@@ -110,7 +110,7 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.79.1
+    image: signoz/signoz:v0.76.2
     command:
       - --config=/root/config/prometheus.yml
       - --use-logs-new-schema=true
@@ -143,7 +143,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.39
+    image: signoz/signoz-otel-collector:v0.111.34
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -167,7 +167,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.39
+    image: signoz/signoz-schema-migrator:v0.111.34
     deploy:
       restart_policy:
         condition: on-failure

@@ -177,7 +177,7 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.79.1}
+    image: signoz/signoz:${VERSION:-v0.76.2}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -212,7 +212,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -238,7 +238,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -249,7 +249,7 @@ services:
        condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-async
     command:
       - async
deploy/docker/docker-compose.testing.yaml (new file, 199 lines)
@@ -0,0 +1,199 @@
version: "3"
x-common: &common
  networks:
    - signoz-net
  restart: unless-stopped
  logging:
    options:
      max-size: 50m
      max-file: "3"
x-clickhouse-defaults: &clickhouse-defaults
  !!merge <<: *common
  # adding non-LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
  image: clickhouse/clickhouse-server:24.1.2-alpine
  tty: true
  labels:
    signoz.io/scrape: "true"
    signoz.io/port: "9363"
    signoz.io/path: "/metrics"
  depends_on:
    init-clickhouse:
      condition: service_completed_successfully
    zookeeper-1:
      condition: service_healthy
  healthcheck:
    test:
      - CMD
      - wget
      - --spider
      - -q
      - 0.0.0.0:8123/ping
    interval: 30s
    timeout: 5s
    retries: 3
  ulimits:
    nproc: 65535
    nofile:
      soft: 262144
      hard: 262144
x-zookeeper-defaults: &zookeeper-defaults
  !!merge <<: *common
  image: bitnami/zookeeper:3.7.1
  user: root
  labels:
    signoz.io/scrape: "true"
    signoz.io/port: "9141"
    signoz.io/path: "/metrics"
  healthcheck:
    test:
      - CMD-SHELL
      - curl -s -m 2 http://localhost:8080/commands/ruok | grep error | grep null
    interval: 30s
    timeout: 5s
    retries: 3
x-db-depend: &db-depend
  !!merge <<: *common
  depends_on:
    clickhouse:
      condition: service_healthy
    schema-migrator-sync:
      condition: service_completed_successfully
services:
  init-clickhouse:
    !!merge <<: *common
    image: clickhouse/clickhouse-server:24.1.2-alpine
    container_name: signoz-init-clickhouse
    command:
      - bash
      - -c
      - |
        version="v0.0.1"
        node_os=$$(uname -s | tr '[:upper:]' '[:lower:]')
        node_arch=$$(uname -m | sed s/aarch64/arm64/ | sed s/x86_64/amd64/)
        echo "Fetching histogram-binary for $${node_os}/$${node_arch}"
        cd /tmp
        wget -O histogram-quantile.tar.gz "https://github.com/SigNoz/signoz/releases/download/histogram-quantile%2F$${version}/histogram-quantile_$${node_os}_$${node_arch}.tar.gz"
        tar -xvzf histogram-quantile.tar.gz
        mv histogram-quantile /var/lib/clickhouse/user_scripts/histogramQuantile
    restart: on-failure
    volumes:
      - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
  zookeeper-1:
    !!merge <<: *zookeeper-defaults
    container_name: signoz-zookeeper-1
    ports:
      - "2181:2181"
      - "2888:2888"
      - "3888:3888"
    volumes:
      - zookeeper-1:/bitnami/zookeeper
    environment:
      - ZOO_SERVER_ID=1
      - ALLOW_ANONYMOUS_LOGIN=yes
      - ZOO_AUTOPURGE_INTERVAL=1
      - ZOO_ENABLE_PROMETHEUS_METRICS=yes
      - ZOO_PROMETHEUS_METRICS_PORT_NUMBER=9141
  clickhouse:
    !!merge <<: *clickhouse-defaults
    container_name: signoz-clickhouse
    ports:
      - "9000:9000"
      - "8123:8123"
      - "9181:9181"
    volumes:
      - ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
      - ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
      - ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
      - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
      - ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
      - clickhouse:/var/lib/clickhouse/
      # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
    image: signoz/signoz:${VERSION:-v0.76.2}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml
      - --gateway-url=https://api.staging.signoz.cloud
      - --use-logs-new-schema=true
      - --use-trace-new-schema=true
    ports:
      - "8080:8080" # signoz port
      # - "6060:6060" # pprof port
    volumes:
      - ../common/signoz/prometheus.yml:/root/config/prometheus.yml
      - ../common/dashboards:/root/config/dashboards
      - sqlite:/var/lib/signoz/
    environment:
      - SIGNOZ_ALERTMANAGER_PROVIDER=signoz
      - SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
      - SIGNOZ_SQLSTORE_SQLITE_PATH=/var/lib/signoz/signoz.db
      - DASHBOARDS_PATH=/root/config/dashboards
      - STORAGE=clickhouse
      - GODEBUG=netdns=go
      - TELEMETRY_ENABLED=true
      - DEPLOYMENT_TYPE=docker-standalone-amd
      - KAFKA_SPAN_EVAL=${KAFKA_SPAN_EVAL:-false}
    healthcheck:
      test:
        - CMD
        - wget
        - --spider
        - -q
        - localhost:8080/api/v1/health
      interval: 30s
      timeout: 5s
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
    container_name: signoz-otel-collector
    command:
      - --config=/etc/otel-collector-config.yaml
      - --manager-config=/etc/manager-config.yaml
      - --copy-path=/var/tmp/collector-config.yaml
      - --feature-gates=-pkg.translator.prometheus.NormalizeName
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
      - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
    environment:
      - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
      - LOW_CARDINAL_EXCEPTION_GROUPING=false
    ports:
      # - "1777:1777" # pprof extension
      - "4317:4317" # OTLP gRPC receiver
      - "4318:4318" # OTLP HTTP receiver
    depends_on:
      signoz:
        condition: service_healthy
  schema-migrator-sync:
    !!merge <<: *common
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-sync
    command:
      - sync
      - --dsn=tcp://clickhouse:9000
      - --up=
    depends_on:
      clickhouse:
        condition: service_healthy
    restart: on-failure
  schema-migrator-async:
    !!merge <<: *db-depend
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-async
    command:
      - async
      - --dsn=tcp://clickhouse:9000
      - --up=
    restart: on-failure
networks:
  signoz-net:
    name: signoz-net
volumes:
  clickhouse:
    name: signoz-clickhouse
  sqlite:
    name: signoz-sqlite
  zookeeper-1:
    name: signoz-zookeeper-1
@@ -110,7 +110,7 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.79.1}
+    image: signoz/signoz:${VERSION:-v0.76.2}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -178,7 +178,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
     container_name: schema-migrator-async
     command:
       - async
@@ -4,7 +4,6 @@ import (
 	"net/http"
 	"time"
 
-	eeTypes "github.com/SigNoz/signoz/ee/types"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -25,7 +24,7 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		var values []string
 		var patToken string
-		var pat eeTypes.StorablePersonalAccessToken
+		var pat types.StorablePersonalAccessToken
 
 		for _, header := range p.headers {
 			values = append(values, r.Header.Get(header))
@@ -18,4 +18,4 @@ COPY frontend/build/ /etc/signoz/web/
 RUN chmod 755 /root /root/signoz
 
 ENTRYPOINT ["./signoz"]
-CMD ["-config", "/root/config/prometheus.yml"]
+CMD ["-config", "/root/config/prometheus.yml"]
@@ -1,36 +0,0 @@ (deleted file)
FROM golang:1.22-bullseye

ARG OS="linux"
ARG TARGETARCH
ARG ZEUSURL

# This path is important for stacktraces
WORKDIR $GOPATH/src/github.com/signoz/signoz
WORKDIR /root

RUN set -eux; \
	apt-get update; \
	apt-get install -y --no-install-recommends \
		g++ \
		gcc \
		libc6-dev \
		make \
		pkg-config \
	; \
	rm -rf /var/lib/apt/lists/*

COPY go.mod go.sum ./

RUN go mod download

COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates

COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["/root/signoz"]
@@ -1,22 +0,0 @@ (deleted file)
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"

FROM alpine@sha256:${ALPINE_SHA}
LABEL maintainer="signoz"
WORKDIR /root

ARG OS="linux"
ARG ARCH

RUN apk update && \
    apk add ca-certificates && \
    rm -rf /var/cache/apk/*

COPY ./target/${OS}-${ARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]
@@ -28,10 +28,11 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvider {
 	}
 
 	dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:       dp.reader,
-		Cache:        dp.cache,
-		KeyGenerator: queryBuilder.NewKeyGenerator(),
-		FluxInterval: dp.fluxInterval,
+		Reader:        dp.reader,
+		Cache:         dp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  dp.fluxInterval,
+		FeatureLookup: dp.ff,
 	})
 
 	return dp
@@ -28,10 +28,11 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyProvider {
 	}
 
 	hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:       hp.reader,
-		Cache:        hp.cache,
-		KeyGenerator: queryBuilder.NewKeyGenerator(),
-		FluxInterval: hp.fluxInterval,
+		Reader:        hp.reader,
+		Cache:         hp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  hp.fluxInterval,
+		FeatureLookup: hp.ff,
 	})
 
 	return hp
@@ -38,6 +38,12 @@ func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericProviderOption[T] {
 	}
 }
 
+func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
+	return func(p T) {
+		p.GetBaseSeasonalProvider().ff = ff
+	}
+}
+
 func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
 	return func(p T) {
 		p.GetBaseSeasonalProvider().reader = reader
@@ -50,6 +56,7 @@ type BaseSeasonalProvider struct {
 	fluxInterval time.Duration
 	cache        cache.Cache
 	keyGenerator cache.KeyGenerator
+	ff           interfaces.FeatureLookup
 }
 
 func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
@@ -27,10 +27,11 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyProvider {
 	}
 
 	wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:       wp.reader,
-		Cache:        wp.cache,
-		KeyGenerator: queryBuilder.NewKeyGenerator(),
-		FluxInterval: wp.fluxInterval,
+		Reader:        wp.reader,
+		Cache:         wp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  wp.fluxInterval,
+		FeatureLookup: wp.ff,
 	})
 
 	return wp
@@ -11,9 +11,6 @@ import (
 	"github.com/SigNoz/signoz/ee/query-service/license"
 	"github.com/SigNoz/signoz/ee/query-service/usage"
 	"github.com/SigNoz/signoz/pkg/alertmanager"
-	"github.com/SigNoz/signoz/pkg/apis/fields"
-	"github.com/SigNoz/signoz/pkg/modules/preference"
-	preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
 	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
 	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
 	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -24,7 +21,6 @@ import (
 	rules "github.com/SigNoz/signoz/pkg/query-service/rules"
 	"github.com/SigNoz/signoz/pkg/signoz"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/preferencetypes"
 	"github.com/SigNoz/signoz/pkg/version"
 	"github.com/gorilla/mux"
 )
@@ -58,7 +54,6 @@ type APIHandler struct {
 
 // NewAPIHandler returns an APIHandler
 func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
-	preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
 
 	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
 		Reader: opts.DataConnector,
@@ -75,9 +70,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
 		UseLogsNewSchema:  opts.UseLogsNewSchema,
 		UseTraceNewSchema: opts.UseTraceNewSchema,
 		AlertmanagerAPI:   alertmanager.NewAPI(signoz.Alertmanager),
-		FieldsAPI:         fields.NewAPI(signoz.TelemetryStore),
 		Signoz:            signoz,
-		Preference:        preference,
 	})
 
 	if err != nil {
@@ -164,6 +157,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddleware) {
 	router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
 	router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
 
 	// PAT APIs
 	router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
@@ -11,7 +11,7 @@ import (
 	"time"
 
 	"github.com/SigNoz/signoz/ee/query-service/constants"
-	eeTypes "github.com/SigNoz/signoz/ee/types"
+	"github.com/SigNoz/signoz/ee/query-service/model"
 	"github.com/SigNoz/signoz/pkg/query-service/auth"
 	baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/dao"
@@ -135,12 +135,19 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
 		zap.String("cloudProvider", cloudProvider),
 	)
 
-	newPAT := eeTypes.NewGettablePAT(
-		integrationPATName,
-		baseconstants.ViewerGroup,
-		integrationUser.ID,
-		0,
-	)
+	newPAT := model.PAT{
+		StorablePersonalAccessToken: types.StorablePersonalAccessToken{
+			Token:     generatePATToken(),
+			UserID:    integrationUser.ID,
+			Name:      integrationPATName,
+			Role:      baseconstants.ViewerGroup,
+			ExpiresAt: 0,
+			TimeAuditable: types.TimeAuditable{
+				CreatedAt: time.Now(),
+				UpdatedAt: time.Now(),
+			},
+		},
+	}
 	integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
 	if err != nil {
 		return "", basemodel.InternalError(fmt.Errorf(
@@ -153,11 +160,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
 func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 	ctx context.Context, orgId string, cloudProvider string,
 ) (*types.User, *basemodel.ApiError) {
-	cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
-	email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
+	cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
 
-	// TODO(nitya): there should be orgId here
-	integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
+	integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
 	if apiErr != nil {
 		return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
 	}
@@ -172,9 +177,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 	)
 
 	newUser := &types.User{
-		ID:    uuid.New().String(),
-		Name:  cloudIntegrationUser,
-		Email: email,
+		ID:    cloudIntegrationUserId,
+		Name:  fmt.Sprintf("%s integration", cloudProvider),
+		Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
 		TimeAuditable: types.TimeAuditable{
 			CreatedAt: time.Now(),
 		},
@@ -2,26 +2,31 @@ package api

import (
	"context"
	"crypto/rand"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"net/http"
	"slices"
	"time"

	"github.com/SigNoz/signoz/ee/query-service/model"
	eeTypes "github.com/SigNoz/signoz/ee/types"
	"github.com/SigNoz/signoz/pkg/errors"
	errorsV2 "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/query-service/auth"
	baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/gorilla/mux"
	"go.uber.org/zap"
)

func generatePATToken() string {
	// Generate a 32-byte random token.
	token := make([]byte, 32)
	rand.Read(token)
	// Encode the token in base64.
	encodedToken := base64.StdEncoding.EncodeToString(token)
	return encodedToken
}

func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
	ctx := context.Background()
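Note on the generatePATToken helper above: it discards the error returned by rand.Read. On recent Go releases crypto/rand.Read is documented never to fail, but the conventional defensive form checks the error anyway. A minimal, hedged sketch of that variant — illustrative only, not part of this diff:

	package main

	import (
		"crypto/rand"
		"encoding/base64"
		"fmt"
	)

	// generateToken returns a base64-encoded 32-byte random token,
	// propagating any error from the entropy source instead of
	// silently ignoring it.
	func generateToken() (string, error) {
		buf := make([]byte, 32)
		if _, err := rand.Read(buf); err != nil {
			return "", fmt.Errorf("reading random bytes: %w", err)
		}
		return base64.StdEncoding.EncodeToString(buf), nil
	}

	func main() {
		token, err := generateToken()
		if err != nil {
			panic(err)
		}
		fmt.Println(token)
	}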
@@ -38,18 +43,31 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
		}, nil)
		return
	}
	pat := eeTypes.NewGettablePAT(
		req.Name,
		req.Role,
		user.ID,
		req.ExpiresInDays,
	)
	pat := model.PAT{
		StorablePersonalAccessToken: types.StorablePersonalAccessToken{
			Name: req.Name,
			Role: req.Role,
			ExpiresAt: req.ExpiresInDays,
		},
	}
	err = validatePATRequest(pat)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	// All the PATs are associated with the user creating the PAT.
	pat.UserID = user.ID
	pat.CreatedAt = time.Now()
	pat.UpdatedAt = time.Now()
	pat.LastUsed = 0
	pat.Token = generatePATToken()

	if pat.ExpiresAt != 0 {
		// convert expiresAt to unix timestamp from days
		pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
	}

	zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
	var apierr basemodel.BaseApiError
	if pat, apierr = ah.AppDao().CreatePAT(ctx, user.OrgID, pat); apierr != nil {
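Note that ExpiresAt is overloaded in the hunk above: it arrives carrying req.ExpiresInDays (a day count) and is then rewritten in place as an absolute unix timestamp. The same arithmetic in isolation, as a hedged sketch (the helper name is illustrative, not from this diff):

	package main

	import (
		"fmt"
		"time"
	)

	// expiryUnix converts a lifetime in days into an absolute unix
	// timestamp, mirroring the in-place conversion in createPAT.
	// A zero day count means "never expires" and stays zero.
	func expiryUnix(expiresInDays int64) int64 {
		if expiresInDays == 0 {
			return 0
		}
		return time.Now().Unix() + expiresInDays*24*60*60
	}

	func main() {
		// e.g. 30 days => now + 2,592,000 seconds
		fmt.Println(expiryUnix(30))
	}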
@@ -60,7 +78,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
	ah.Respond(w, &pat)
}

func validatePATRequest(req eeTypes.GettablePAT) error {
func validatePATRequest(req model.PAT) error {
	if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
		return fmt.Errorf("valid role is required")
	}
@@ -76,19 +94,12 @@ func validatePATRequest(req eeTypes.GettablePAT) error {
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
	ctx := context.Background()

	req := eeTypes.GettablePAT{}
	req := model.PAT{}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	idStr := mux.Vars(r)["id"]
	id, err := valuer.NewUUID(idStr)
	if err != nil {
		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
		return
	}

	user, err := auth.GetUserFromReqContext(r.Context())
	if err != nil {
		RespondError(w, &model.ApiError{
@@ -98,25 +109,6 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
		return
	}

	//get the pat
	existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
	if paterr != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
		return
	}

	// get the user
	createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
	if usererr != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
		return
	}

	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
		return
	}

	err = validatePATRequest(req)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
@@ -124,6 +116,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
	}

	req.UpdatedByUserID = user.ID
	id := mux.Vars(r)["id"]
	req.UpdatedAt = time.Now()
	zap.L().Info("Got Update PAT request", zap.Any("pat", req))
	var apierr basemodel.BaseApiError
@@ -156,12 +149,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {

func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
	ctx := context.Background()
	idStr := mux.Vars(r)["id"]
	id, err := valuer.NewUUID(idStr)
	if err != nil {
		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
		return
	}
	id := mux.Vars(r)["id"]
	user, err := auth.GetUserFromReqContext(r.Context())
	if err != nil {
		RespondError(w, &model.ApiError{
@@ -171,26 +159,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
		return
	}

	//get the pat
	existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
	if paterr != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
		return
	}

	// get the user
	createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
	if usererr != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
		return
	}

	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
		return
	}

	zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
	zap.L().Info("Revoke PAT with id", zap.String("id", id))
	if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
		RespondError(w, apierr, nil)
		return
@@ -88,24 +88,28 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
			anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
			anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
			anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
			anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
		)
	case anomaly.SeasonalityDaily:
		provider = anomaly.NewDailyProvider(
			anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
			anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
			anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
			anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
		)
	case anomaly.SeasonalityHourly:
		provider = anomaly.NewHourlyProvider(
			anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
			anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
			anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
			anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
		)
	default:
		provider = anomaly.NewDailyProvider(
			anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
			anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
			anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
			anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
		)
	}
	anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
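The anomaly.WithCache[*anomaly.DailyProvider](...) calls above (and unknown seasonalities falling through to the daily provider) use Go generics for type-parameterized functional options, so one option constructor serves every provider type. A self-contained sketch of that pattern under illustrative names — these are not SigNoz APIs:

	package main

	import "fmt"

	// Option is a functional option generic over the provider type P.
	type Option[P any] func(P)

	// cacheSetter is the capability an option needs from its provider.
	type cacheSetter interface{ setCacheSize(int) }

	type DailyProvider struct{ cacheSize int }

	func (d *DailyProvider) setCacheSize(n int) { d.cacheSize = n }

	// WithCacheSize is instantiated per provider type at the call site,
	// e.g. WithCacheSize[*DailyProvider](128), mirroring the diff's
	// anomaly.WithCache[*anomaly.DailyProvider](cache) shape.
	func WithCacheSize[P cacheSetter](n int) Option[P] {
		return func(p P) { p.setCacheSize(n) }
	}

	func NewDailyProvider(opts ...Option[*DailyProvider]) *DailyProvider {
		p := &DailyProvider{}
		for _, opt := range opts {
			opt(p)
		}
		return p
	}

	func main() {
		p := NewDailyProvider(WithCacheSize[*DailyProvider](128))
		fmt.Println(p.cacheSize) // 128
	}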
33 ee/query-service/app/api/traces.go Normal file
@@ -0,0 +1,33 @@
package api

import (
	"net/http"

	"github.com/SigNoz/signoz/ee/query-service/app/db"
	"github.com/SigNoz/signoz/ee/query-service/model"
	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	"go.uber.org/zap"
)

func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {

	if !ah.CheckFeature(basemodel.SmartTraceDetail) {
		zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
		ah.APIHandler.SearchTraces(w, r)
		return
	}
	searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
		return
	}

	result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
	if ah.HandleError(w, err, http.StatusBadRequest) {
		return
	}

	ah.WriteJSON(w, r, result)

}
@@ -5,33 +5,38 @@ import (

	"github.com/ClickHouse/clickhouse-go/v2"

	"github.com/jmoiron/sqlx"

	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/prometheus"
	basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
)

type ClickhouseReader struct {
	conn clickhouse.Conn
	appdb sqlstore.SQLStore
	appdb *sqlx.DB
	*basechr.ClickHouseReader
}

func NewDataConnector(
	sqlDB sqlstore.SQLStore,
	telemetryStore telemetrystore.TelemetryStore,
	prometheus prometheus.Prometheus,
	localDB *sqlx.DB,
	ch clickhouse.Conn,
	promConfigPath string,
	lm interfaces.FeatureLookup,
	cluster string,
	useLogsNewSchema bool,
	useTraceNewSchema bool,
	fluxIntervalForTraceDetail time.Duration,
	cache cache.Cache,
) *ClickhouseReader {
	chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
	chReader := basechr.NewReader(localDB, ch, promConfigPath, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
	return &ClickhouseReader{
		conn: telemetryStore.ClickhouseDB(),
		appdb: sqlDB,
		conn: ch,
		appdb: localDB,
		ClickHouseReader: chReader,
	}
}

func (r *ClickhouseReader) Start(readerReady chan bool) {
	r.ClickHouseReader.Start(readerReady)
}
@@ -1,16 +1,17 @@
package smart
package db

import (
	"errors"
	"strconv"

	"github.com/SigNoz/signoz/ee/query-service/model"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	"go.uber.org/zap"
)

// SmartTraceAlgorithm is an algorithm to find the target span and build a tree of spans around it with the given levelUp and levelDown parameters and the given spanLimit
func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) {
	var spans []*SpanForTraceDetails
	var spans []*model.SpanForTraceDetails

	// if targetSpanId is null or not present then randomly select a span as targetSpanId
	if (targetSpanId == "" || targetSpanId == "null") && len(payload) > 0 {
@@ -23,7 +24,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
		if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
			parentID = spanItem.References[0].SpanId
		}
		span := &SpanForTraceDetails{
		span := &model.SpanForTraceDetails{
			TimeUnixNano: spanItem.TimeUnixNano,
			SpanID: spanItem.SpanID,
			TraceID: spanItem.TraceID,
@@ -44,7 +45,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
	if err != nil {
		return nil, err
	}
	targetSpan := &SpanForTraceDetails{}
	targetSpan := &model.SpanForTraceDetails{}

	// Find the target span in the span trees
	for _, root := range roots {
@@ -64,7 +65,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
	}

	// Build the final result
	parents := []*SpanForTraceDetails{}
	parents := []*model.SpanForTraceDetails{}

	// Get the parent spans of the target span up to the given levelUp parameter and spanLimit
	preParent := targetSpan
@@ -89,11 +90,11 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
	}

	// Get the child spans of the target span until the given levelDown and spanLimit
	preParents := []*SpanForTraceDetails{targetSpan}
	children := []*SpanForTraceDetails{}
	preParents := []*model.SpanForTraceDetails{targetSpan}
	children := []*model.SpanForTraceDetails{}

	for i := 0; i < levelDown && len(preParents) != 0 && spanLimit > 0; i++ {
		parents := []*SpanForTraceDetails{}
		parents := []*model.SpanForTraceDetails{}
		for _, parent := range preParents {
			if spanLimit-len(parent.Children) <= 0 {
				children = append(children, parent.Children[:spanLimit]...)
@@ -107,7 +108,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
	}

	// Store the final list of spans in the resultSpanSet map to avoid duplicates
	resultSpansSet := make(map[*SpanForTraceDetails]struct{})
	resultSpansSet := make(map[*model.SpanForTraceDetails]struct{})
	resultSpansSet[targetSpan] = struct{}{}
	for _, parent := range parents {
		resultSpansSet[parent] = struct{}{}
@@ -168,12 +169,12 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
}

// buildSpanTrees builds trees of spans from a list of spans.
func buildSpanTrees(spansPtr *[]*SpanForTraceDetails) ([]*SpanForTraceDetails, error) {
func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTraceDetails, error) {

	// Build a map of spanID to span for fast lookup
	var roots []*SpanForTraceDetails
	var roots []*model.SpanForTraceDetails
	spans := *spansPtr
	mapOfSpans := make(map[string]*SpanForTraceDetails, len(spans))
	mapOfSpans := make(map[string]*model.SpanForTraceDetails, len(spans))

	for _, span := range spans {
		if span.ParentID == "" {
@@ -205,8 +206,8 @@ func buildSpanTrees(spansPtr *[]*SpanForTraceDetails) ([]*SpanForTraceDetails, e
}

// breadthFirstSearch performs a breadth-first search on the span tree to find the target span.
func breadthFirstSearch(spansPtr *SpanForTraceDetails, targetId string) (*SpanForTraceDetails, error) {
	queue := []*SpanForTraceDetails{spansPtr}
func breadthFirstSearch(spansPtr *model.SpanForTraceDetails, targetId string) (*model.SpanForTraceDetails, error) {
	queue := []*model.SpanForTraceDetails{spansPtr}
	visited := make(map[string]bool)

	for len(queue) > 0 {
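breadthFirstSearch in the hunk above walks the span tree level by level with an explicit queue and a visited set keyed by span ID. A minimal, self-contained sketch of the same traversal shape, with simplified stand-in types rather than the repo's:

	package main

	import (
		"errors"
		"fmt"
	)

	type span struct {
		ID       string
		Children []*span
	}

	// bfs returns the span with the target ID, walking the tree level
	// by level; the visited set guards against cycles in malformed trees.
	func bfs(root *span, targetID string) (*span, error) {
		queue := []*span{root}
		visited := make(map[string]bool)
		for len(queue) > 0 {
			current := queue[0]
			queue = queue[1:]
			if visited[current.ID] {
				continue
			}
			visited[current.ID] = true
			if current.ID == targetID {
				return current, nil
			}
			queue = append(queue, current.Children...)
		}
		return nil, errors.New("span not found")
	}

	func main() {
		root := &span{ID: "a", Children: []*span{{ID: "b"}, {ID: "c"}}}
		s, err := bfs(root, "c")
		fmt.Println(s.ID, err) // c <nil>
	}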
@@ -18,14 +18,13 @@ import (
	"github.com/SigNoz/signoz/ee/query-service/constants"
	"github.com/SigNoz/signoz/ee/query-service/dao"
	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/ee/query-service/interfaces"
	"github.com/SigNoz/signoz/ee/query-service/rules"
	"github.com/SigNoz/signoz/pkg/alertmanager"
	"github.com/SigNoz/signoz/pkg/http/middleware"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/query-service/auth"
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/web"
@@ -44,11 +43,13 @@ import (
	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
	"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
	opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
	"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
	"github.com/SigNoz/signoz/pkg/query-service/cache"
	baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
	"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
	baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	pqle "github.com/SigNoz/signoz/pkg/query-service/pqlEngine"
	baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/query-service/telemetry"
	"github.com/SigNoz/signoz/pkg/query-service/utils"
@@ -115,6 +116,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
		return nil, err
	}

	if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
		return nil, err
	}

	if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
		return nil, err
	}
@@ -132,22 +137,27 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {

	// set license manager as feature flag provider in dao
	modelDao.SetFlagProvider(lm)
	readerReady := make(chan bool)

	fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
	if err != nil {
		return nil, err
	}

	reader := db.NewDataConnector(
		serverOptions.SigNoz.SQLStore,
		serverOptions.SigNoz.TelemetryStore,
		serverOptions.SigNoz.Prometheus,
	var reader interfaces.DataConnector
	qb := db.NewDataConnector(
		serverOptions.SigNoz.SQLStore.SQLxDB(),
		serverOptions.SigNoz.TelemetryStore.ClickHouseDB(),
		serverOptions.PromConfigPath,
		lm,
		serverOptions.Cluster,
		serverOptions.UseLogsNewSchema,
		serverOptions.UseTraceNewSchema,
		fluxIntervalForTraceDetail,
		serverOptions.SigNoz.Cache,
	)
	go qb.Start(readerReady)
	reader = qb

	skipConfig := &basemodel.SkipConfig{}
	if serverOptions.SkipTopLvlOpsPath != "" {
@@ -166,18 +176,19 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
		c = cache.NewCache(cacheOpts)
	}

	<-readerReady
	rm, err := makeRulesManager(
		serverOptions.PromConfigPath,
		serverOptions.RuleRepoURL,
		serverOptions.SigNoz.SQLStore.SQLxDB(),
		reader,
		c,
		serverOptions.DisableRules,
		lm,
		serverOptions.UseLogsNewSchema,
		serverOptions.UseTraceNewSchema,
		serverOptions.SigNoz.Alertmanager,
		serverOptions.SigNoz.SQLStore,
		serverOptions.SigNoz.TelemetryStore,
		serverOptions.SigNoz.Prometheus,
	)

	if err != nil {
@@ -222,7 +233,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
	}

	// start the usagemanager
	usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.Config.TelemetryStore.Clickhouse.DSN)
	usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickHouseDB(), serverOptions.Config.TelemetryStore.ClickHouse.DSN)
	if err != nil {
		return nil, err
	}
@@ -293,7 +304,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
		&opAmpModel.AllAgents, agentConfMgr,
	)

	errorList := reader.PreloadMetricsMetadata(context.Background())
	errorList := qb.PreloadMetricsMetadata(context.Background())
	for _, er := range errorList {
		zap.L().Error("failed to preload metrics metadata", zap.Error(er))
	}
@@ -367,7 +378,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
	apiHandler.RegisterLogsRoutes(r, am)
	apiHandler.RegisterIntegrationRoutes(r, am)
	apiHandler.RegisterCloudIntegrationsRoutes(r, am)
	apiHandler.RegisterFieldsRoutes(r, am)
	apiHandler.RegisterQueryRangeV3Routes(r, am)
	apiHandler.RegisterInfraMetricsRoutes(r, am)
	apiHandler.RegisterQueryRangeV4Routes(r, am)
@@ -429,11 +439,11 @@ func (s *Server) initListeners() error {
}

// Start listening on http and private http port concurrently
func (s *Server) Start(ctx context.Context) error {
func (s *Server) Start() error {

	// initiate rule manager first
	if !s.serverOptions.DisableRules {
		s.ruleManager.Start(ctx)
		s.ruleManager.Start()
	} else {
		zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
	}
@@ -517,7 +527,7 @@ func (s *Server) Stop() error {
	s.opampServer.Stop()

	if s.ruleManager != nil {
		s.ruleManager.Stop(context.Background())
		s.ruleManager.Stop()
	}

	// stop usage manager
@@ -527,27 +537,33 @@ func (s *Server) Stop() error {
}

func makeRulesManager(
	promConfigPath,
	ruleRepoURL string,
	db *sqlx.DB,
	ch baseint.Reader,
	cache cache.Cache,
	disableRules bool,
	fm baseint.FeatureLookup,
	useLogsNewSchema bool,
	useTraceNewSchema bool,
	alertmanager alertmanager.Alertmanager,
	sqlstore sqlstore.SQLStore,
	telemetryStore telemetrystore.TelemetryStore,
	prometheus prometheus.Prometheus,
) (*baserules.Manager, error) {
	// create engine
	pqle, err := pqle.FromConfigPath(promConfigPath)
	if err != nil {
		return nil, fmt.Errorf("failed to create pql engine : %v", err)
	}

	// create manager opts
	managerOpts := &baserules.ManagerOptions{
		TelemetryStore: telemetryStore,
		Prometheus: prometheus,
		PqlEngine: pqle,
		RepoURL: ruleRepoURL,
		DBConn: db,
		Context: context.Background(),
		Logger: zap.L(),
		DisableRules: disableRules,
		FeatureFlags: fm,
		Reader: ch,
		Cache: cache,
		EvalDelay: baseconst.GetEvalDelay(),
@@ -4,12 +4,13 @@ import (
	"context"
	"net/url"

	"github.com/SigNoz/signoz/ee/query-service/model"
	"github.com/SigNoz/signoz/ee/types"
	basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
	baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	ossTypes "github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/google/uuid"
	"github.com/uptrace/bun"
)
@@ -35,10 +36,11 @@ type ModelDao interface {
	DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
	GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)

	CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
	UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
	GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
	GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
	ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
	RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
	CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError)
	UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError
	GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
	GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError)
	GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
	ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError)
	RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError
}
@@ -4,6 +4,7 @@ import (
	"context"
	"fmt"
	"net/url"
	"strings"
	"time"

	"github.com/SigNoz/signoz/ee/query-service/constants"
@@ -43,7 +44,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
	}

	user := &types.User{
		ID: uuid.New().String(),
		ID: uuid.NewString(),
		Name: "",
		Email: email,
		Password: hash,
@@ -161,7 +162,12 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
	// find domain from email
	orgDomain, apierr := m.GetDomainByEmail(ctx, email)
	if apierr != nil {
		zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
		var emailDomain string
		emailComponents := strings.Split(email, "@")
		if len(emailComponents) > 0 {
			emailDomain = emailComponents[1]
		}
		zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
		return resp, apierr
	}
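A side note on the logging change above: strings.Split always returns at least one element, so the len(emailComponents) > 0 guard does not actually prevent an out-of-range panic on emailComponents[1] when the input contains no "@". A hedged sketch of a stricter extraction — an illustrative helper, not something from this diff:

	package main

	import (
		"fmt"
		"strings"
	)

	// domainOf returns the part after the last "@", or "" and false
	// when the input does not look like an email address.
	func domainOf(email string) (string, bool) {
		at := strings.LastIndex(email, "@")
		if at < 0 || at == len(email)-1 {
			return "", false
		}
		return email[at+1:], true
	}

	func main() {
		fmt.Println(domainOf("user@signoz.io")) // signoz.io true
		fmt.Println(domainOf("not-an-email"))   // "" false
	}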
@@ -6,53 +6,45 @@ import (
	"time"

	"github.com/SigNoz/signoz/ee/query-service/model"
	"github.com/SigNoz/signoz/ee/types"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	ossTypes "github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"

	"github.com/SigNoz/signoz/pkg/types"
	"go.uber.org/zap"
)

func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError) {
	p.StorablePersonalAccessToken.OrgID = orgID
	p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
	_, err := m.DB().NewInsert().
		Model(&p.StorablePersonalAccessToken).
		Returning("id").
		Exec(ctx)
	if err != nil {
		zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
		return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
		return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
	}

	createdByUser, _ := m.GetUser(ctx, p.UserID)
	if createdByUser == nil {
		p.CreatedByUser = types.PatUser{
		p.CreatedByUser = model.User{
			NotFound: true,
		}
	} else {
		p.CreatedByUser = types.PatUser{
			User: ossTypes.User{
				ID: createdByUser.ID,
				Name: createdByUser.Name,
				Email: createdByUser.Email,
				TimeAuditable: ossTypes.TimeAuditable{
					CreatedAt: createdByUser.CreatedAt,
					UpdatedAt: createdByUser.UpdatedAt,
				},
				ProfilePictureURL: createdByUser.ProfilePictureURL,
			},
			NotFound: false,
		p.CreatedByUser = model.User{
			Id: createdByUser.ID,
			Name: createdByUser.Name,
			Email: createdByUser.Email,
			CreatedAt: createdByUser.CreatedAt.Unix(),
			ProfilePictureURL: createdByUser.ProfilePictureURL,
			NotFound: false,
		}
	}
	return p, nil
}

func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError {
	_, err := m.DB().NewUpdate().
		Model(&p.StorablePersonalAccessToken).
		Column("role", "name", "updated_at", "updated_by_user_id").
		Where("id = ?", id.StringValue()).
		Where("id = ?", id).
		Where("org_id = ?", orgID).
		Where("revoked = false").
		Exec(ctx)
@@ -63,7 +55,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.Gettable
	return nil
}

func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError) {
	pats := []types.StorablePersonalAccessToken{}

	if err := m.DB().NewSelect().
@@ -76,51 +68,41 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
		return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
	}

	patsWithUsers := []types.GettablePAT{}
	patsWithUsers := []model.PAT{}
	for i := range pats {
		patWithUser := types.GettablePAT{
		patWithUser := model.PAT{
			StorablePersonalAccessToken: pats[i],
		}

		createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
		if createdByUser == nil {
			patWithUser.CreatedByUser = types.PatUser{
			patWithUser.CreatedByUser = model.User{
				NotFound: true,
			}
		} else {
			patWithUser.CreatedByUser = types.PatUser{
				User: ossTypes.User{
					ID: createdByUser.ID,
					Name: createdByUser.Name,
					Email: createdByUser.Email,
					TimeAuditable: ossTypes.TimeAuditable{
						CreatedAt: createdByUser.CreatedAt,
						UpdatedAt: createdByUser.UpdatedAt,
					},
					ProfilePictureURL: createdByUser.ProfilePictureURL,
				},
				NotFound: false,
			patWithUser.CreatedByUser = model.User{
				Id: createdByUser.ID,
				Name: createdByUser.Name,
				Email: createdByUser.Email,
				CreatedAt: createdByUser.CreatedAt.Unix(),
				ProfilePictureURL: createdByUser.ProfilePictureURL,
				NotFound: false,
			}
		}

		updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
		if updatedByUser == nil {
			patWithUser.UpdatedByUser = types.PatUser{
			patWithUser.UpdatedByUser = model.User{
				NotFound: true,
			}
		} else {
			patWithUser.UpdatedByUser = types.PatUser{
				User: ossTypes.User{
					ID: updatedByUser.ID,
					Name: updatedByUser.Name,
					Email: updatedByUser.Email,
					TimeAuditable: ossTypes.TimeAuditable{
						CreatedAt: updatedByUser.CreatedAt,
						UpdatedAt: updatedByUser.UpdatedAt,
					},
					ProfilePictureURL: updatedByUser.ProfilePictureURL,
				},
				NotFound: false,
			patWithUser.UpdatedByUser = model.User{
				Id: updatedByUser.ID,
				Name: updatedByUser.Name,
				Email: updatedByUser.Email,
				CreatedAt: updatedByUser.CreatedAt.Unix(),
				ProfilePictureURL: updatedByUser.ProfilePictureURL,
				NotFound: false,
			}
		}

@@ -129,14 +111,14 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
	return patsWithUsers, nil
}

func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError {
	updatedAt := time.Now().Unix()
	_, err := m.DB().NewUpdate().
		Model(&types.StorablePersonalAccessToken{}).
		Set("revoked = ?", true).
		Set("updated_by_user_id = ?", userID).
		Set("updated_at = ?", updatedAt).
		Where("id = ?", id.StringValue()).
		Where("id = ?", id).
		Where("org_id = ?", orgID).
		Exec(ctx)
	if err != nil {
@@ -146,7 +128,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID,
	return nil
}

func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
	pats := []types.StorablePersonalAccessToken{}

	if err := m.DB().NewSelect().
@@ -164,19 +146,19 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT
		}
	}

	patWithUser := types.GettablePAT{
	patWithUser := model.PAT{
		StorablePersonalAccessToken: pats[0],
	}

	return &patWithUser, nil
}

func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError) {
	pats := []types.StorablePersonalAccessToken{}

	if err := m.DB().NewSelect().
		Model(&pats).
		Where("id = ?", id.StringValue()).
		Where("id = ?", id).
		Where("org_id = ?", orgID).
		Where("revoked = false").
		Scan(ctx); err != nil {
@@ -190,9 +172,33 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID)
	}
	}

	patWithUser := types.GettablePAT{
	patWithUser := model.PAT{
		StorablePersonalAccessToken: pats[0],
	}

	return &patWithUser, nil
}

// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*types.GettableUser, basemodel.BaseApiError) {
	users := []types.GettableUser{}

	if err := m.DB().NewSelect().
		Model(&users).
		Column("u.id", "u.name", "u.email", "u.password", "u.created_at", "u.profile_picture_url", "u.org_id", "u.group_id").
		Join("JOIN personal_access_tokens p ON u.id = p.user_id").
		Where("p.token = ?", token).
		Where("p.expires_at >= strftime('%s', 'now')").
		Where("p.org_id = ?", orgID).
		Scan(ctx); err != nil {
		return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
	}

	if len(users) != 1 {
		return nil, &model.ApiError{
			Typ: model.ErrorInternal,
			Err: fmt.Errorf("found zero or multiple users with same PAT token"),
		}
	}
	return &users[0], nil
}
@@ -7,5 +7,6 @@ import (
// Connector defines methods for interaction
// with o11y data. for example - clickhouse
type DataConnector interface {
	Start(readerReady chan bool)
	baseint.Reader
}
@@ -18,6 +18,8 @@ import (
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/version"

	prommodel "github.com/prometheus/common/model"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)
@@ -30,6 +32,10 @@ func initZapLog() *zap.Logger {
	return logger
}

func init() {
	prommodel.NameValidationScheme = prommodel.UTF8Validation
}

func main() {
	var promConfigPath, skipTopLvlOpsPath string

@@ -83,7 +89,6 @@ func main() {
		MaxIdleConns: maxIdleConns,
		MaxOpenConns: maxOpenConns,
		DialTimeout: dialTimeout,
		Config: promConfigPath,
	})
	if err != nil {
		zap.L().Fatal("Failed to create config", zap.Error(err))
@@ -105,7 +110,7 @@ func main() {
		signoz.NewTelemetryStoreProviderFactories(),
	)
	if err != nil {
		zap.L().Fatal("Failed to create signoz", zap.Error(err))
		zap.L().Fatal("Failed to create signoz struct", zap.Error(err))
	}

	jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
@@ -143,7 +148,7 @@ func main() {
		zap.L().Fatal("Failed to create server", zap.Error(err))
	}

	if err := server.Start(context.Background()); err != nil {
	if err := server.Start(); err != nil {
		zap.L().Fatal("Could not start server", zap.Error(err))
	}
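The new init in the hunk above switches prometheus/common's metric-name validation to UTF-8 mode. Assuming a prometheus/common version that exposes the NameValidationScheme variable and the UTF8Validation/LegacyValidation values (present in recent releases), the effect can be observed with IsValidMetricName:

	package main

	import (
		"fmt"

		prommodel "github.com/prometheus/common/model"
	)

	func main() {
		// Legacy scheme: names must match [a-zA-Z_:][a-zA-Z0-9_:]*,
		// so dotted OTel-style names are rejected.
		prommodel.NameValidationScheme = prommodel.LegacyValidation
		fmt.Println(prommodel.IsValidMetricName("http.server.duration")) // false

		// UTF-8 scheme: dotted (and other UTF-8) names become valid.
		prommodel.NameValidationScheme = prommodel.UTF8Validation
		fmt.Println(prommodel.IsValidMetricName("http.server.duration")) // true
	}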
@@ -157,6 +157,8 @@ func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
	}

	switch planName {
	case PlanNameTeams:
		features = append(features, ProPlan...)
	case PlanNameEnterprise:
		features = append(features, EnterprisePlan...)
	case PlanNameBasic:
@@ -74,21 +74,21 @@ func TestNewLicenseV3(t *testing.T) {
		},
		{
			name: "Parse the entire license properly",
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
			pass: true,
			expected: &LicenseV3{
				ID: "does-not-matter",
				Key: "does-not-matter-key",
				Data: map[string]interface{}{
					"plan": map[string]interface{}{
						"name": "ENTERPRISE",
						"name": "TEAMS",
					},
					"category": "FREE",
					"status": "ACTIVE",
					"valid_from": float64(1730899309),
					"valid_until": float64(-1),
				},
				PlanName: PlanNameEnterprise,
				PlanName: PlanNameTeams,
				ValidFrom: 1730899309,
				ValidUntil: -1,
				Status: "ACTIVE",
@@ -98,14 +98,14 @@ func TestNewLicenseV3(t *testing.T) {
		},
		{
			name: "Fallback to basic plan if license status is invalid",
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
			pass: true,
			expected: &LicenseV3{
				ID: "does-not-matter",
				Key: "does-not-matter-key",
				Data: map[string]interface{}{
					"plan": map[string]interface{}{
						"name": "ENTERPRISE",
						"name": "TEAMS",
					},
					"category": "FREE",
					"status": "INVALID",
@@ -122,21 +122,21 @@ func TestNewLicenseV3(t *testing.T) {
		},
		{
			name: "fallback states for validFrom and validUntil",
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
			pass: true,
			expected: &LicenseV3{
				ID: "does-not-matter",
				Key: "does-not-matter-key",
				Data: map[string]interface{}{
					"plan": map[string]interface{}{
						"name": "ENTERPRISE",
						"name": "TEAMS",
					},
					"valid_from": 1234.456,
					"valid_until": 5678.567,
					"category": "FREE",
					"status": "ACTIVE",
				},
				PlanName: PlanNameEnterprise,
				PlanName: PlanNameTeams,
				ValidFrom: 1234,
				ValidUntil: 5678,
				Status: "ACTIVE",
@@ -1,7 +1,25 @@
package model

import "github.com/SigNoz/signoz/pkg/types"

type User struct {
	Id string `json:"id" db:"id"`
	Name string `json:"name" db:"name"`
	Email string `json:"email" db:"email"`
	CreatedAt int64 `json:"createdAt" db:"created_at"`
	ProfilePictureURL string `json:"profilePictureURL" db:"profile_picture_url"`
	NotFound bool `json:"notFound"`
}

type CreatePATRequestBody struct {
	Name string `json:"name"`
	Role string `json:"role"`
	ExpiresInDays int64 `json:"expiresInDays"`
}

type PAT struct {
	CreatedByUser User `json:"createdByUser"`
	UpdatedByUser User `json:"updatedByUser"`

	types.StorablePersonalAccessToken
}
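Because PAT embeds types.StorablePersonalAccessToken, the embedded struct's exported fields are promoted and marshal at the top level of the JSON object, next to createdByUser and updatedByUser. A self-contained sketch of that flattening with stand-in types (not the repo's):

	package main

	import (
		"encoding/json"
		"fmt"
	)

	type storableToken struct {
		Token string `json:"token"`
		Name  string `json:"name"`
	}

	type user struct {
		Id string `json:"id"`
	}

	type pat struct {
		CreatedByUser user `json:"createdByUser"`

		storableToken // embedded: exported fields are promoted to the top level
	}

	func main() {
		out, _ := json.Marshal(pat{
			CreatedByUser: user{Id: "u1"},
			storableToken: storableToken{Token: "abc", Name: "ci"},
		})
		// {"createdByUser":{"id":"u1"},"token":"abc","name":"ci"}
		fmt.Println(string(out))
	}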
@@ -1,26 +1,30 @@
package model

import (
	"github.com/SigNoz/signoz/pkg/query-service/constants"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)

const SSO = "SSO"
const Basic = "BASIC_PLAN"
const Pro = "PRO_PLAN"
const Enterprise = "ENTERPRISE_PLAN"

var (
	PlanNameEnterprise = "ENTERPRISE"
	PlanNameTeams = "TEAMS"
	PlanNameBasic = "BASIC"
)

var (
	MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
	MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameTeams: Pro, PlanNameEnterprise: Enterprise}
)

var (
	LicenseStatusInvalid = "INVALID"
)

const DisableUpsell = "DISABLE_UPSELL"
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
@@ -34,6 +38,90 @@ var BasicPlan = basemodel.FeatureSet{
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.OSS,
		Active: false,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: DisableUpsell,
		Active: false,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.SmartTraceDetail,
		Active: false,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.CustomMetricsFunction,
		Active: false,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.QueryBuilderPanels,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.QueryBuilderAlerts,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelSlack,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelWebhook,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelPagerduty,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelOpsgenie,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelEmail,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelMsTeams,
		Active: false,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.UseSpanMetrics,
		Active: false,
@@ -63,12 +151,134 @@ var BasicPlan = basemodel.FeatureSet{
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.TraceFunnels,
		Name: basemodel.HostsInfraMonitoring,
		Active: constants.EnableHostsInfraMonitoring(),
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
}

var ProPlan = basemodel.FeatureSet{
	basemodel.Feature{
		Name: SSO,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.OSS,
		Active: false,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.SmartTraceDetail,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.CustomMetricsFunction,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.QueryBuilderPanels,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.QueryBuilderAlerts,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelSlack,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelWebhook,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelPagerduty,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelOpsgenie,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelEmail,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelMsTeams,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.UseSpanMetrics,
		Active: false,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: Gateway,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: PremiumSupport,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AnomalyDetection,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.HostsInfraMonitoring,
		Active: constants.EnableHostsInfraMonitoring(),
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
}

var EnterprisePlan = basemodel.FeatureSet{
@@ -79,6 +289,83 @@ var EnterprisePlan = basemodel.FeatureSet{
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.OSS,
		Active: false,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.SmartTraceDetail,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.CustomMetricsFunction,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.QueryBuilderPanels,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.QueryBuilderAlerts,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelSlack,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelWebhook,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelPagerduty,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelOpsgenie,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelEmail,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.AlertChannelMsTeams,
		Active: true,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.UseSpanMetrics,
		Active: false,
@@ -122,8 +409,8 @@ var EnterprisePlan = basemodel.FeatureSet{
		Route: "",
	},
	basemodel.Feature{
		Name: basemodel.TraceFunnels,
		Active: false,
		Name: basemodel.HostsInfraMonitoring,
		Active: constants.EnableHostsInfraMonitoring(),
		Usage: 0,
		UsageLimit: -1,
		Route: "",
@@ -1,4 +1,4 @@
package smart
package model

type SpanForTraceDetails struct {
	TimeUnixNano uint64 `json:"timestamp"`
@@ -15,3 +15,8 @@ type SpanForTraceDetails struct {
	HasError bool `json:"hasError"`
	Children []*SpanForTraceDetails `json:"children"`
}

type GetSpansSubQueryDBResponse struct {
	SpanID string `ch:"spanID"`
	TraceID string `ch:"traceID"`
}
@@ -15,7 +15,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -53,7 +52,8 @@ type AnomalyRule struct {
|
||||
|
||||
func NewAnomalyRule(
|
||||
id string,
|
||||
p *ruletypes.PostableRule,
|
||||
p *baserules.PostableRule,
|
||||
featureFlags interfaces.FeatureLookup,
|
||||
reader interfaces.Reader,
|
||||
cache cache.Cache,
|
||||
opts ...baserules.RuleOption,
|
||||
@@ -61,7 +61,7 @@ func NewAnomalyRule(
|
||||
|
||||
zap.L().Info("creating new AnomalyRule", zap.String("id", id), zap.Any("opts", opts))
|
||||
|
||||
if p.RuleCondition.CompareOp == ruletypes.ValueIsBelow {
|
||||
if p.RuleCondition.CompareOp == baserules.ValueIsBelow {
|
||||
target := -1 * *p.RuleCondition.Target
|
||||
p.RuleCondition.Target = &target
|
||||
}
|
||||
@@ -89,9 +89,10 @@ func NewAnomalyRule(
|
||||
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))
|
||||
|
||||
querierOptsV2 := querierV2.QuerierOptions{
|
||||
Reader: reader,
|
||||
Cache: cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
Reader: reader,
|
||||
Cache: cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FeatureLookup: featureFlags,
|
||||
}
|
||||
|
||||
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
|
||||
@@ -101,24 +102,27 @@ func NewAnomalyRule(
|
||||
anomaly.WithCache[*anomaly.HourlyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.HourlyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
|
||||
)
|
||||
} else if t.seasonality == anomaly.SeasonalityDaily {
|
||||
t.provider = anomaly.NewDailyProvider(
|
||||
anomaly.WithCache[*anomaly.DailyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.DailyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
|
||||
)
|
||||
} else if t.seasonality == anomaly.SeasonalityWeekly {
|
||||
t.provider = anomaly.NewWeeklyProvider(
|
||||
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
|
||||
)
|
||||
}
|
||||
return &t, nil
|
 }
 
-func (r *AnomalyRule) Type() ruletypes.RuleType {
+func (r *AnomalyRule) Type() baserules.RuleType {
 	return RuleTypeAnomaly
 }
 
@@ -158,7 +162,7 @@ func (r *AnomalyRule) GetSelectedQuery() string {
 	return r.Condition().GetSelectedQueryName()
 }
 
-func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
+func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (baserules.Vector, error) {
 
 	params, err := r.prepareQueryRange(ts)
 	if err != nil {
@@ -185,7 +189,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (rulet
 		}
 	}
 
-	var resultVector ruletypes.Vector
+	var resultVector baserules.Vector
 
 	scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
 	zap.L().Info("anomaly scores", zap.String("scores", string(scoresJSON)))
@@ -214,7 +218,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 	defer r.mtx.Unlock()
 
 	resultFPs := map[uint64]struct{}{}
-	var alerts = make(map[uint64]*ruletypes.Alert, len(res))
+	var alerts = make(map[uint64]*baserules.Alert, len(res))
 
 	for _, smpl := range res {
 		l := make(map[string]string, len(smpl.Metric))
@@ -226,7 +230,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 		threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
 		zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))
 
-		tmplData := ruletypes.AlertTemplateData(l, value, threshold)
+		tmplData := baserules.AlertTemplateData(l, value, threshold)
 		// Inject some convenience variables that are easier to remember for users
 		// who are not used to Go's templating system.
 		defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
@@ -234,7 +238,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 		// utility function to apply go template on labels and annotations
 		expand := func(text string) string {
 
-			tmpl := ruletypes.NewTemplateExpander(
+			tmpl := baserules.NewTemplateExpander(
 				ctx,
 				defs+text,
 				"__alert_"+r.Name(),
@@ -279,7 +283,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 			return nil, err
 		}
 
-		alerts[h] = &ruletypes.Alert{
+		alerts[h] = &baserules.Alert{
 			Labels:            lbs,
 			QueryResultLables: resultLabels,
 			Annotations:       annotations,
@@ -320,7 +324,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 		if _, ok := resultFPs[fp]; !ok {
 			// If the alert was previously firing, keep it around for a given
 			// retention time so it is reported as resolved to the AlertManager.
-			if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
+			if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > baserules.ResolvedRetention) {
 				delete(r.Active, fp)
 			}
 			if a.State != model.StateInactive {
@@ -376,10 +380,10 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 
 func (r *AnomalyRule) String() string {
 
-	ar := ruletypes.PostableRule{
+	ar := baserules.PostableRule{
 		AlertName:         r.Name(),
 		RuleCondition:     r.Condition(),
-		EvalWindow:        ruletypes.Duration(r.EvalWindow()),
+		EvalWindow:        baserules.Duration(r.EvalWindow()),
 		Labels:            r.Labels().Map(),
 		Annotations:       r.Annotations().Map(),
 		PreferredChannels: r.PreferredChannels(),
@@ -8,7 +8,6 @@ import (
 	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
 	baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
 	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
-	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
 	"github.com/google/uuid"
 	"go.uber.org/zap"
 )
@@ -19,11 +18,12 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 	var task baserules.Task
 
 	ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
-	if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
+	if opts.Rule.RuleType == baserules.RuleTypeThreshold {
 		// create a threshold rule
 		tr, err := baserules.NewThresholdRule(
 			ruleId,
 			opts.Rule,
+			opts.FF,
 			opts.Reader,
 			opts.UseLogsNewSchema,
 			opts.UseTraceNewSchema,
@@ -38,9 +38,9 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 		rules = append(rules, tr)
 
 		// create ch rule task for evalution
-		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
+		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
 
-	} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
+	} else if opts.Rule.RuleType == baserules.RuleTypeProm {
 
 		// create promql rule
 		pr, err := baserules.NewPromRule(
@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 			opts.Rule,
 			opts.Logger,
 			opts.Reader,
 			opts.ManagerOpts.Prometheus,
 			opts.ManagerOpts.PqlEngine,
 			baserules.WithSQLStore(opts.SQLStore),
 		)
@@ -59,13 +59,14 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 		rules = append(rules, pr)
 
 		// create promql rule task for evalution
-		task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
+		task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
 
-	} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
+	} else if opts.Rule.RuleType == baserules.RuleTypeAnomaly {
 		// create anomaly rule
 		ar, err := NewAnomalyRule(
 			ruleId,
 			opts.Rule,
+			opts.FF,
 			opts.Reader,
 			opts.Cache,
 			baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
@@ -78,10 +79,10 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 		rules = append(rules, ar)
 
 		// create anomaly rule task for evalution
-		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
+		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
 
 	} else {
-		return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
+		return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, baserules.RuleTypeProm, baserules.RuleTypeThreshold)
 	}
 
 	return task, nil
@@ -106,12 +107,12 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 	}
 
 	// append name to indicate this is test alert
-	parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, ruletypes.TestAlertPostFix)
+	parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, baserules.TestAlertPostFix)
 
 	var rule baserules.Rule
 	var err error
 
-	if parsedRule.RuleType == ruletypes.RuleTypeThreshold {
+	if parsedRule.RuleType == baserules.RuleTypeThreshold {
 
 		// add special labels for test alerts
 		parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
@@ -122,6 +123,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 		rule, err = baserules.NewThresholdRule(
 			alertname,
 			parsedRule,
+			opts.FF,
 			opts.Reader,
 			opts.UseLogsNewSchema,
 			opts.UseTraceNewSchema,
@@ -135,7 +137,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 			return 0, basemodel.BadRequest(err)
 		}
 
-	} else if parsedRule.RuleType == ruletypes.RuleTypeProm {
+	} else if parsedRule.RuleType == baserules.RuleTypeProm {
 
 		// create promql rule
 		rule, err = baserules.NewPromRule(
@@ -143,7 +145,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 			parsedRule,
 			opts.Logger,
 			opts.Reader,
 			opts.ManagerOpts.Prometheus,
 			opts.ManagerOpts.PqlEngine,
 			baserules.WithSendAlways(),
 			baserules.WithSendUnmatched(),
 			baserules.WithSQLStore(opts.SQLStore),
@@ -153,11 +155,12 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 			zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
 			return 0, basemodel.BadRequest(err)
 		}
-	} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
+	} else if parsedRule.RuleType == baserules.RuleTypeAnomaly {
 		// create anomaly rule
 		rule, err = NewAnomalyRule(
 			alertname,
 			parsedRule,
+			opts.FF,
 			opts.Reader,
 			opts.Cache,
 			baserules.WithSendAlways(),
@@ -191,9 +194,9 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 
 // newTask returns an appropriate group for
 // rule type
-func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID string) baserules.Task {
+func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, ruleDB baserules.RuleDB) baserules.Task {
 	if taskType == baserules.TaskTypeCh {
-		return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
+		return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
 	}
-	return baserules.NewPromRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
+	return baserules.NewPromRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
 }
@@ -2,32 +2,11 @@ package postgressqlstore
 
 import (
 	"context"
 	"fmt"
-	"reflect"
-	"slices"
 
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/uptrace/bun"
 )
 
-var (
-	Identity = "id"
-	Integer  = "bigint"
-	Text     = "text"
-)
-
-var (
-	Org              = "org"
-	User             = "user"
-	CloudIntegration = "cloud_integration"
-)
-
-var (
-	OrgReference              = `("org_id") REFERENCES "organizations" ("id")`
-	UserReference             = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
-	CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
-)
-
 type dialect struct {
 }
 
@@ -195,10 +174,7 @@ func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table inte
 	return true, nil
 }
 
-func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, references []string, cb func(context.Context) error) error {
-	if len(references) == 0 {
-		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
-	}
+func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
 	exists, err := dialect.TableExists(ctx, bun, newModel)
 	if err != nil {
 		return err
@@ -207,83 +183,10 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
 		return nil
 	}
 
-	var fkReferences []string
-	for _, reference := range references {
-		if reference == Org && !slices.Contains(fkReferences, OrgReference) {
-			fkReferences = append(fkReferences, OrgReference)
-		} else if reference == User && !slices.Contains(fkReferences, UserReference) {
-			fkReferences = append(fkReferences, UserReference)
-		} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
-			fkReferences = append(fkReferences, CloudIntegrationReference)
-		}
-	}
-
-	createTable := bun.
-		NewCreateTable().
-		IfNotExists().
-		Model(newModel)
-
-	for _, fk := range fkReferences {
-		createTable = createTable.ForeignKey(fk)
-	}
-
-	_, err = createTable.Exec(ctx)
-	if err != nil {
-		return err
-	}
-
 	err = cb(ctx)
 	if err != nil {
 		return err
 	}
 
 	_, err = bun.
 		NewDropTable().
 		IfExists().
 		Model(oldModel).
 		Exec(ctx)
 	if err != nil {
 		return err
 	}
 
 	return nil
 }
 
 func (dialect *dialect) AddNotNullDefaultToColumn(ctx context.Context, bun bun.IDB, table string, column, columnType, defaultValue string) error {
 	query := fmt.Sprintf("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s, ALTER COLUMN %s SET NOT NULL", table, column, defaultValue, column)
 	if _, err := bun.ExecContext(ctx, query); err != nil {
 		return err
 	}
 	return nil
 }
 
-func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
-	if reference == "" {
-		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
-	}
-	oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
-	newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
-
-	columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
-	if err != nil {
-		return err
-	}
-	if columnType == Text {
-		return nil
-	}
-
-	fkReference := ""
-	if reference == Org {
-		fkReference = OrgReference
-	} else if reference == User {
-		fkReference = UserReference
-	}
-
-	_, err = bun.
-		NewCreateTable().
-		IfNotExists().
-		Model(newModel).
-		ForeignKey(fkReference).
-		Exec(ctx)
-
-	if err != nil {
@@ -304,67 +207,5 @@ func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldMo
-		return err
-	}
-
-	_, err = bun.
-		ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
-	if reference == "" {
-		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
-	}
-	oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
-	newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
-
-	identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
-	if err != nil {
-		return err
-	}
-	if identityExists {
-		return nil
-	}
-
-	fkReference := ""
-	if reference == Org {
-		fkReference = OrgReference
-	} else if reference == User {
-		fkReference = UserReference
-	}
-
-	_, err = bun.
-		NewCreateTable().
-		IfNotExists().
-		Model(newModel).
-		ForeignKey(fkReference).
-		Exec(ctx)
-
-	if err != nil {
-		return err
-	}
-
-	err = cb(ctx)
-	if err != nil {
-		return err
-	}
-
-	_, err = bun.
-		NewDropTable().
-		IfExists().
-		Model(oldModel).
-		Exec(ctx)
-	if err != nil {
-		return err
-	}
-
-	_, err = bun.
-		ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
@@ -1,76 +0,0 @@
-package types
-
-import (
-	"crypto/rand"
-	"encoding/base64"
-	"time"
-
-	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/SigNoz/signoz/pkg/valuer"
-	"github.com/uptrace/bun"
-)
-
-type GettablePAT struct {
-	CreatedByUser PatUser `json:"createdByUser"`
-	UpdatedByUser PatUser `json:"updatedByUser"`
-
-	StorablePersonalAccessToken
-}
-
-type PatUser struct {
-	types.User
-	NotFound bool `json:"notFound"`
-}
-
-func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
-	return GettablePAT{
-		StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
-	}
-}
-
-type StorablePersonalAccessToken struct {
-	bun.BaseModel `bun:"table:personal_access_token"`
-	types.Identifiable
-	types.TimeAuditable
-	OrgID           string `json:"orgId" bun:"org_id,type:text,notnull"`
-	Role            string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
-	UserID          string `json:"userId" bun:"user_id,type:text,notnull"`
-	Token           string `json:"token" bun:"token,type:text,notnull,unique"`
-	Name            string `json:"name" bun:"name,type:text,notnull"`
-	ExpiresAt       int64  `json:"expiresAt" bun:"expires_at,notnull,default:0"`
-	LastUsed        int64  `json:"lastUsed" bun:"last_used,notnull,default:0"`
-	Revoked         bool   `json:"revoked" bun:"revoked,notnull,default:false"`
-	UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
-}
-
-func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
-	now := time.Now()
-	if expiresAt != 0 {
-		// convert expiresAt to unix timestamp from days
-		expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
-	}
-
-	// Generate a 32-byte random token.
-	token := make([]byte, 32)
-	rand.Read(token)
-	// Encode the token in base64.
-	encodedToken := base64.StdEncoding.EncodeToString(token)
-
-	return StorablePersonalAccessToken{
-		Token:           encodedToken,
-		Name:            name,
-		Role:            role,
-		UserID:          userID,
-		ExpiresAt:       expiresAt,
-		LastUsed:        0,
-		Revoked:         false,
-		UpdatedByUserID: "",
-		TimeAuditable: types.TimeAuditable{
-			CreatedAt: now,
-			UpdatedAt: now,
-		},
-		Identifiable: types.Identifiable{
-			ID: valuer.GenerateUUID(),
-		},
-	}
-}
@@ -17,6 +17,7 @@ module.exports = {
 		'plugin:import/errors',
 		'plugin:import/warnings',
 		'plugin:react/jsx-runtime',
+		// 'plugin:tailwindcss/recommended',
 	],
 	parser: '@typescript-eslint/parser',
 	parserOptions: {
@@ -79,6 +79,7 @@
 		"eventemitter3": "5.0.1",
 		"file-loader": "6.1.1",
 		"fontfaceobserver": "2.3.0",
+		"history": "4.10.1",
 		"html-webpack-plugin": "5.5.0",
 		"http-proxy-middleware": "3.0.3",
 		"i18next": "^21.6.12",
@@ -114,7 +115,8 @@
 		"react-markdown": "8.0.7",
 		"react-query": "3.39.3",
 		"react-redux": "^7.2.2",
-		"react-router": "7.5.1",
+		"react-router-dom": "^5.2.0",
+		"react-router-dom-v5-compat": "6.27.0",
 		"react-syntax-highlighter": "15.5.0",
 		"react-use": "^17.3.2",
 		"react-virtuoso": "4.0.3",
@@ -184,7 +186,7 @@
 		"@types/react-lottie": "1.2.10",
 		"@types/react-redux": "^7.1.11",
 		"@types/react-resizable": "3.0.3",
-		"@types/react-router": "^5.1.20",
+		"@types/react-router-dom": "^5.1.6",
 		"@types/react-syntax-highlighter": "15.5.7",
 		"@types/redux-mock-store": "1.0.4",
 		"@types/styled-components": "^5.1.4",
@@ -222,6 +224,7 @@
 		"npm-run-all": "latest",
 		"portfinder-sync": "^0.0.2",
 		"postcss": "8.4.38",
 		"postcss-loader": "8.1.1",
 		"prettier": "2.2.1",
 		"prop-types": "15.8.1",
 		"raw-loader": "4.0.2",
@@ -231,6 +234,7 @@
 		"redux-mock-store": "1.5.4",
 		"sass": "1.66.1",
 		"sass-loader": "13.3.2",
+		"tailwindcss": "3.4.17",
 		"ts-jest": "^27.1.5",
 		"ts-node": "^10.2.1",
 		"typescript-plugin-css-modules": "5.0.1",
 6	frontend/postcss.config.js	Normal file
@@ -0,0 +1,6 @@
+module.exports = {
+	plugins: {
+		tailwindcss: {},
+		autoprefixer: {},
+	},
+};
@@ -18,13 +18,6 @@
 	"field_send_resolved": "Send resolved alerts",
 	"field_channel_type": "Type",
 	"field_webhook_url": "Webhook URL",
-	"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
-	"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
-	"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
-	"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
-	"tooltip_email_to": "Enter email addresses separated by commas.",
-	"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
-
 	"field_slack_recipient": "Recipient",
 	"field_slack_title": "Title",
 	"field_slack_description": "Description",
 
@@ -18,12 +18,6 @@
 	"field_send_resolved": "Send resolved alerts",
 	"field_channel_type": "Type",
 	"field_webhook_url": "Webhook URL",
-	"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
-	"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
-	"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
-	"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
-	"tooltip_email_to": "Enter email addresses separated by commas.",
-	"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
 	"field_slack_recipient": "Recipient",
 	"field_slack_title": "Title",
 	"field_slack_description": "Description",
@@ -68,6 +68,5 @@
 	"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
 	"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
 	"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
-	"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
-	"API_MONITORING": "SigNoz | API Monitoring"
+	"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer"
 }
@@ -5,17 +5,15 @@ import { FeatureKeys } from 'constants/features';
 import { LOCALSTORAGE } from 'constants/localStorage';
 import ROUTES from 'constants/routes';
 import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
-import { useGlobalEventListener } from 'hooks/useGlobalEventListener';
-import { useSafeNavigate } from 'hooks/useSafeNavigate';
+import history from 'lib/history';
 import { isEmpty } from 'lodash-es';
 import { useAppContext } from 'providers/App/App';
 import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
 import { useQuery } from 'react-query';
-import { matchPath, useLocation } from 'react-router';
+import { matchPath, useLocation } from 'react-router-dom';
 import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
 import { Organization } from 'types/api/user/getOrganization';
 import { USER_ROLES } from 'types/roles';
-import { safeNavigateNoSameURLMemo } from 'utils/navigate';
 import { routePermission } from 'utils/permission';
 
 import routes, {
@@ -28,10 +26,6 @@ import routes, {
 
 function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 	const location = useLocation();
-	const { safeNavigate } = useSafeNavigate();
-	const { safeNavigate: unsafeNavigate } = useSafeNavigate({
-		preventSameUrlNavigation: false,
-	});
 	const { pathname } = location;
 	const {
 		org,
@@ -50,13 +44,9 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 		() =>
 			new Map(
 				[...routes, LIST_LICENSES, SUPPORT_ROUTE].map((e) => {
-					const currentPath = matchPath(
-						{
-							// Temp: Hard type cast
-							path: e.path as string,
-						},
-						pathname,
-					);
+					const currentPath = matchPath(pathname, {
+						path: e.path,
+					});
 					return [currentPath === null ? null : 'current', e];
 				}),
 			),
@@ -111,7 +101,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 			// if the current route is allowed to be overriden by org onboarding then only do the same
 			!ROUTES_NOT_TO_BE_OVERRIDEN.includes(pathname)
 		) {
-			safeNavigateNoSameURLMemo(ROUTES.ONBOARDING);
+			history.push(ROUTES.ONBOARDING);
 		}
 	}
 }, [
@@ -124,37 +114,31 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 	pathname,
 ]);
 
-	const safeNavigateToWorkSpaceBlocked = useCallback(
-		(route: any): void => {
-			const { path } = route;
+	const navigateToWorkSpaceBlocked = (route: any): void => {
+		const { path } = route;
 
-			const isRouteEnabledForWorkspaceBlockedState =
-				isAdmin &&
-				(path === ROUTES.ORG_SETTINGS ||
-					path === ROUTES.BILLING ||
-					path === ROUTES.MY_SETTINGS);
+		const isRouteEnabledForWorkspaceBlockedState =
+			isAdmin &&
+			(path === ROUTES.ORG_SETTINGS ||
+				path === ROUTES.BILLING ||
+				path === ROUTES.MY_SETTINGS);
 
-			if (
-				path &&
-				path !== ROUTES.WORKSPACE_LOCKED &&
-				!isRouteEnabledForWorkspaceBlockedState
-			) {
-				safeNavigateNoSameURLMemo(ROUTES.WORKSPACE_LOCKED);
-			}
-		},
-		[isAdmin],
-	);
+		if (
+			path &&
+			path !== ROUTES.WORKSPACE_LOCKED &&
+			!isRouteEnabledForWorkspaceBlockedState
+		) {
+			history.push(ROUTES.WORKSPACE_LOCKED);
+		}
+	};
 
-	const safeNavigateToWorkSpaceAccessRestricted = useCallback(
-		(route: any): void => {
-			const { path } = route;
+	const navigateToWorkSpaceAccessRestricted = (route: any): void => {
+		const { path } = route;
 
-			if (path && path !== ROUTES.WORKSPACE_ACCESS_RESTRICTED) {
-				safeNavigateNoSameURLMemo(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
-			}
-		},
-		[],
-	);
+		if (path && path !== ROUTES.WORKSPACE_ACCESS_RESTRICTED) {
+			history.push(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
+		}
+	};
 
 	useEffect(() => {
 		if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
@@ -173,16 +157,10 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 				platform === LicensePlatform.CLOUD &&
 				currentRoute
 			) {
-				safeNavigateToWorkSpaceAccessRestricted(currentRoute);
+				navigateToWorkSpaceAccessRestricted(currentRoute);
 			}
 		}
-	}, [
-		isFetchingActiveLicenseV3,
-		activeLicenseV3,
-		mapRoutes,
-		pathname,
-		safeNavigateToWorkSpaceAccessRestricted,
-	]);
+	}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
 
 	useEffect(() => {
 		if (!isFetchingActiveLicenseV3) {
@@ -194,7 +172,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 				currentRoute &&
 				activeLicenseV3?.platform === LicensePlatform.CLOUD
 			) {
-				safeNavigateToWorkSpaceBlocked(currentRoute);
+				navigateToWorkSpaceBlocked(currentRoute);
 			}
 		}
 		// eslint-disable-next-line react-hooks/exhaustive-deps
@@ -204,16 +182,15 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 		activeLicenseV3?.platform,
 		mapRoutes,
 		pathname,
-		safeNavigateToWorkSpaceBlocked,
 	]);
 
-	const safeNavigateToWorkSpaceSuspended = useCallback((route: any): void => {
+	const navigateToWorkSpaceSuspended = (route: any): void => {
 		const { path } = route;
 
 		if (path && path !== ROUTES.WORKSPACE_SUSPENDED) {
-			safeNavigateNoSameURLMemo(ROUTES.WORKSPACE_SUSPENDED);
+			history.push(ROUTES.WORKSPACE_SUSPENDED);
 		}
-	}, []);
+	};
 
 	useEffect(() => {
 		if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
@@ -226,16 +203,10 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 				currentRoute &&
 				activeLicenseV3.platform === LicensePlatform.CLOUD
 			) {
-				safeNavigateToWorkSpaceSuspended(currentRoute);
+				navigateToWorkSpaceSuspended(currentRoute);
 			}
 		}
-	}, [
-		isFetchingActiveLicenseV3,
-		activeLicenseV3,
-		mapRoutes,
-		pathname,
-		safeNavigateToWorkSpaceSuspended,
-	]);
+	}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
 
 	useEffect(() => {
 		if (org && org.length > 0 && org[0].id !== undefined) {
@@ -249,13 +220,13 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 			currentRoute?.path === ROUTES.GET_STARTED &&
 			featureFlags?.find((e) => e.name === FeatureKeys.ONBOARDING_V3)?.active
 		) {
-			safeNavigateNoSameURLMemo(ROUTES.GET_STARTED_WITH_CLOUD);
+			history.push(ROUTES.GET_STARTED_WITH_CLOUD);
 		}
 	}, [currentRoute, featureFlags]);
 
 	// eslint-disable-next-line sonarjs/cognitive-complexity
 	useEffect(() => {
-		// if it is an old route safeNavigate to the new route
+		// if it is an old route navigate to the new route
 		if (isOldRoute) {
 			const redirectUrl = oldNewRoutesMapping[pathname];
 
@@ -263,8 +234,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 				...location,
 				pathname: redirectUrl,
 			};
 
-			safeNavigateNoSameURLMemo(newLocation, { replace: true });
+			history.replace(newLocation);
 			return;
 		}
 		// if the current route
@@ -274,21 +244,21 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 			if (isLoggedInState) {
 				const route = routePermission[key];
 				if (route && route.find((e) => e === user.role) === undefined) {
-					safeNavigateNoSameURLMemo(ROUTES.UN_AUTHORIZED);
+					history.push(ROUTES.UN_AUTHORIZED);
 				}
 			} else {
 				setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
-				safeNavigateNoSameURLMemo(ROUTES.LOGIN);
+				history.push(ROUTES.LOGIN);
 			}
 		} else if (isLoggedInState) {
 			const fromPathname = getLocalStorageApi(
 				LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
 			);
 			if (fromPathname) {
-				safeNavigateNoSameURLMemo(fromPathname);
+				history.push(fromPathname);
 				setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
 			} else if (pathname !== ROUTES.SOMETHING_WENT_WRONG) {
-				safeNavigateNoSameURLMemo(ROUTES.HOME);
+				history.push(ROUTES.HOME);
 			}
 		} else {
 			// do nothing as the unauthenticated routes are LOGIN and SIGNUP and the LOGIN container takes care of routing to signup if
@@ -299,45 +269,17 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 				LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
 			);
 			if (fromPathname) {
-				safeNavigateNoSameURLMemo(fromPathname);
+				history.push(fromPathname);
 				setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
 			} else {
-				safeNavigateNoSameURLMemo(ROUTES.HOME);
+				history.push(ROUTES.HOME);
 			}
 		} else {
 			setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
-			safeNavigateNoSameURLMemo(ROUTES.LOGIN);
+			history.push(ROUTES.LOGIN);
 		}
 	}, [isLoggedInState, pathname, user, isOldRoute, currentRoute, location]);
 
-	// global event listener for NAVIGATE event
-	// This will provide access to useNavigation hook from outside of components
-	useGlobalEventListener(
-		'SAFE_NAVIGATE',
-		(event: CustomEvent) => {
-			const { to, options } = event.detail;
-			if (to) {
-				safeNavigate(to, options);
-			}
-		},
-		{
-			passive: true,
-		},
-	);
-
-	useGlobalEventListener(
-		'UNSAFE_NAVIGATE',
-		(event: CustomEvent) => {
-			const { to, options } = event.detail;
-			if (to) {
-				unsafeNavigate(to, options);
-			}
-		},
-		{
-			passive: true,
-		},
-	);
-
 	// NOTE: disabling this rule as there is no need to have div
 	// eslint-disable-next-line react/jsx-no-useless-fragment
 	return <>{children}</>;
 
@@ -1,4 +1,3 @@
 import * as Sentry from '@sentry/react';
 import { ConfigProvider } from 'antd';
 import getLocalStorageApi from 'api/browser/localstorage/get';
 import setLocalStorageApi from 'api/browser/localstorage/set';
@@ -15,7 +14,7 @@ import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
 import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
 import { NotificationProvider } from 'hooks/useNotifications';
 import { ResourceProvider } from 'hooks/useResourceAttribute';
-import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
+import history from 'lib/history';
 import posthog from 'posthog-js';
 import AlertRuleProvider from 'providers/Alert';
 import { useAppContext } from 'providers/App/App';
@@ -23,11 +22,10 @@ import { IUser } from 'providers/App/types';
 import { DashboardProvider } from 'providers/Dashboard/Dashboard';
 import { QueryBuilderProvider } from 'providers/QueryBuilder';
 import { Suspense, useCallback, useEffect, useState } from 'react';
-import { BrowserRouter, Route, Routes } from 'react-router';
+import { Route, Router, Switch } from 'react-router-dom';
+import { CompatRouter } from 'react-router-dom-v5-compat';
 import { extractDomain } from 'utils/app';
-import { safeNavigateNoSameURLMemo } from 'utils/navigate';
 
-import { Home } from './pageComponents';
 import PrivateRoute from './Private';
 import defaultRoutes, {
 	AppRoutes,
@@ -47,6 +45,7 @@ function App(): JSX.Element {
 		activeLicenseV3,
 		isFetchingActiveLicenseV3,
 		userFetchError,
+		licensesFetchError,
 		featureFlagsFetchError,
 		isLoggedIn: isLoggedInState,
 		featureFlags,
@@ -56,7 +55,10 @@ function App(): JSX.Element {
 
 	const { hostname, pathname } = window.location;
 
-	const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
+	const {
+		isCloudUser: isCloudUserVal,
+		isEECloudUser: isEECloudUserVal,
+	} = useGetTenantLicense();
 
 	const enableAnalytics = useCallback(
 		(user: IUser): void => {
@@ -166,7 +168,7 @@ function App(): JSX.Element {
 
 		let updatedRoutes = defaultRoutes;
 		// if the user is a cloud user
-		if (isCloudUser || isEnterpriseSelfHostedUser) {
+		if (isCloudUserVal || isEECloudUserVal) {
 			// if the user is on basic plan then remove billing
 			if (isOnBasicPlan) {
 				updatedRoutes = updatedRoutes.filter(
@@ -188,10 +190,10 @@ function App(): JSX.Element {
 		isLoggedInState,
 		user,
 		licenses,
-		isCloudUser,
-		isEnterpriseSelfHostedUser,
+		isCloudUserVal,
 		isFetchingLicenses,
 		isFetchingUser,
+		isEECloudUserVal,
 	]);
 
 	useEffect(() => {
@@ -206,7 +208,6 @@ function App(): JSX.Element {
 		}
 	}, [pathname]);
 
-	// eslint-disable-next-line sonarjs/cognitive-complexity
 	useEffect(() => {
 		// feature flag shouldn't be loading and featureFlags or fetchError any one of this should be true indicating that req is complete
 		// licenses should also be present. there is no check for licenses for loading and error as that is mandatory if not present then routing
@@ -232,12 +233,7 @@ function App(): JSX.Element {
 		const showAddCreditCardModal =
 			!isPremiumSupportEnabled && !trialInfo?.trialConvertedToSubscription;
 
-		if (
-			isLoggedInState &&
-			isChatSupportEnabled &&
-			!showAddCreditCardModal &&
-			(isCloudUser || isEnterpriseSelfHostedUser)
-		) {
+		if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
 			window.Intercom('boot', {
 				app_id: process.env.INTERCOM_APP_ID,
 				email: user?.email || '',
@@ -256,53 +252,13 @@ function App(): JSX.Element {
 		licenses,
 		activeLicenseV3,
 		trialInfo,
-		isCloudUser,
-		isEnterpriseSelfHostedUser,
 	]);
 
 	useEffect(() => {
-		if (!isFetchingUser && isCloudUser && user && user.email) {
+		if (!isFetchingUser && isCloudUserVal && user && user.email) {
 			enableAnalytics(user);
 		}
-	}, [user, isFetchingUser, isCloudUser, enableAnalytics]);
-
-	useEffect(() => {
-		if (isCloudUser || isEnterpriseSelfHostedUser) {
-			if (process.env.POSTHOG_KEY) {
-				posthog.init(process.env.POSTHOG_KEY, {
-					api_host: 'https://us.i.posthog.com',
-					person_profiles: 'identified_only', // or 'always' to create profiles for anonymous users as well
-				});
-			}
-
-			Sentry.init({
-				dsn: process.env.SENTRY_DSN,
-				tunnel: process.env.TUNNEL_URL,
-				environment: 'production',
-				integrations: [
-					Sentry.browserTracingIntegration(),
-					Sentry.replayIntegration({
-						maskAllText: false,
-						blockAllMedia: false,
-					}),
-				],
-				// Performance Monitoring
-				tracesSampleRate: 1.0, // Capture 100% of the transactions
-				// Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
-				tracePropagationTargets: [],
-				// Session Replay
-				replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
-				replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
-			});
-		} else {
-			posthog.reset();
-			Sentry.close();
-
-			if (window.cioanalytics && typeof window.cioanalytics.reset === 'function') {
-				window.cioanalytics.reset();
-			}
-		}
-	}, [isCloudUser, isEnterpriseSelfHostedUser]);
+	}, [user, isFetchingUser, isCloudUserVal, enableAnalytics]);
 
 	// if the user is in logged in state
 	if (isLoggedInState) {
@@ -314,20 +270,27 @@ function App(): JSX.Element {
 		// if the required calls fails then return a something went wrong error
 		// this needs to be on top of data missing error because if there is an error, data will never be loaded and it will
 		// move to indefinitive loading
-		if (userFetchError && pathname !== ROUTES.SOMETHING_WENT_WRONG) {
-			safeNavigateNoSameURLMemo(ROUTES.SOMETHING_WENT_WRONG);
+		if (
+			(userFetchError || licensesFetchError) &&
+			pathname !== ROUTES.SOMETHING_WENT_WRONG
+		) {
+			history.replace(ROUTES.SOMETHING_WENT_WRONG);
 		}
 
 		// if all of the data is not set then return a spinner, this is required because there is some gap between loading states and data setting
-		if ((!licenses || !user.email || !featureFlags) && !userFetchError) {
+		if (
+			(!licenses || !user.email || !featureFlags) &&
+			!userFetchError &&
+			!licensesFetchError
+		) {
 			return <Spinner tip="Loading..." />;
 		}
 	}
 
 	return (
 		<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
-		<ConfigProvider theme={themeConfig}>
-			<BrowserRouter>
+			<ConfigProvider theme={themeConfig}>
+				<Router history={history}>
+					<CompatRouter>
 				<NotificationProvider>
 					<PrivateRoute>
 						<ResourceProvider>
@@ -337,13 +300,18 @@ function App(): JSX.Element {
 							<AlertRuleProvider>
 								<AppLayout>
 									<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
-										<Routes>
-											{routes.map(({ path, element }) => (
-												<Route key={`${path}`} path={path} element={element} />
+										<Switch>
+											{routes.map(({ path, component, exact }) => (
+												<Route
+													key={`${path}`}
+													exact={exact}
+													path={path}
+													component={component}
+												/>
 											))}
-											<Route path="/" element={<Home />} />
-											<Route path="*" element={<NotFound />} />
-										</Routes>
+
+											<Route path="*" component={NotFound} />
+										</Switch>
 									</Suspense>
 								</AppLayout>
 							</AlertRuleProvider>
@@ -353,9 +321,9 @@ function App(): JSX.Element {
 						</ResourceProvider>
 					</PrivateRoute>
 				</NotificationProvider>
-			</BrowserRouter>
-		</ConfigProvider>
+					</CompatRouter>
+				</Router>
+			</ConfigProvider>
 		</Sentry.ErrorBoundary>
 	);
 }
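For readers following the App.tsx hunk above, here is a minimal sketch of the v5-style mounting it moves to. This is illustrative only: the page components are hypothetical stand-ins, and the real tree wires in the providers shown in the diff.

import { createBrowserHistory } from 'history';
import React from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';

// A shared history singleton, the same shape `lib/history` implies, so
// non-component code can call history.push() imperatively.
const history = createBrowserHistory();

// Hypothetical pages standing in for the real route table.
function HomePage(): JSX.Element {
	return <div>home</div>;
}
function NotFoundPage(): JSX.Element {
	return <div>not found</div>;
}

function Shell(): JSX.Element {
	return (
		// v5: <Switch> renders the first matching <Route>, and matching is by
		// prefix unless `exact` is set — which is why every route entry in the
		// table below regains an `exact` flag. <CompatRouter> lets v6-style
		// hooks keep working inside the v5 tree during a gradual migration.
		// Note the v5 matchPath call shape is also flipped relative to v6/v7:
		// matchPath(pathname, { path, exact }) instead of matchPath({ path }, pathname).
		<Router history={history}>
			<CompatRouter>
				<Switch>
					<Route exact path="/" component={HomePage} />
					<Route path="*" component={NotFoundPage} />
				</Switch>
			</CompatRouter>
		</Router>
	);
}

export default Shell;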
@@ -1,6 +1,6 @@
 import ROUTES from 'constants/routes';
 import MessagingQueues from 'pages/MessagingQueues';
-import { RouteProps } from 'react-router';
+import { RouteProps } from 'react-router-dom';
 
 import {
 	AlertHistory,
@@ -65,383 +65,443 @@ import {
 
 const routes: AppRoutes[] = [
 	{
+		component: SignupPage,
 		path: ROUTES.SIGN_UP,
-		element: <SignupPage />,
+		exact: true,
 		isPrivate: false,
 		key: 'SIGN_UP',
 	},
 	{
 		path: ROUTES.GET_STARTED,
-		// exact: false
-		element: <Onboarding />,
+		exact: false,
+		component: Onboarding,
 		isPrivate: true,
 		key: 'GET_STARTED',
 	},
 	{
 		path: ROUTES.GET_STARTED_WITH_CLOUD,
-		// exact: false
-		element: <OnboardingV2 />,
+		exact: false,
+		component: OnboardingV2,
 		isPrivate: true,
 		key: 'GET_STARTED_WITH_CLOUD',
 	},
 	{
 		path: ROUTES.HOME,
-		element: <Home />,
+		exact: true,
+		component: Home,
 		isPrivate: true,
 		key: 'HOME',
 	},
 	{
 		path: ROUTES.ONBOARDING,
-		// exact: false
-		element: <OrgOnboarding />,
+		exact: false,
+		component: OrgOnboarding,
 		isPrivate: true,
 		key: 'ONBOARDING',
 	},
 	{
+		component: LogsIndexToFields,
 		path: ROUTES.LOGS_INDEX_FIELDS,
-		element: <LogsIndexToFields />,
+		exact: true,
 		isPrivate: true,
 		key: 'LOGS_INDEX_FIELDS',
 	},
 	{
+		component: ServicesTablePage,
 		path: ROUTES.APPLICATION,
-		element: <ServicesTablePage />,
+		exact: true,
 		isPrivate: true,
 		key: 'APPLICATION',
 	},
 	{
 		path: ROUTES.SERVICE_METRICS,
-		element: <ServiceMetricsPage />,
+		exact: true,
+		component: ServiceMetricsPage,
 		isPrivate: true,
 		key: 'SERVICE_METRICS',
 	},
 	{
 		path: ROUTES.SERVICE_TOP_LEVEL_OPERATIONS,
-		element: <ServiceTopLevelOperationsPage />,
+		exact: true,
+		component: ServiceTopLevelOperationsPage,
 		isPrivate: true,
 		key: 'SERVICE_TOP_LEVEL_OPERATIONS',
 	},
 	{
 		path: ROUTES.SERVICE_MAP,
-		element: <ServiceMapPage />,
+		component: ServiceMapPage,
 		isPrivate: true,
+		exact: true,
 		key: 'SERVICE_MAP',
 	},
 	{
 		path: ROUTES.LOGS_SAVE_VIEWS,
-		element: <LogsSaveViews />,
+		component: LogsSaveViews,
 		isPrivate: true,
+		exact: true,
 		key: 'LOGS_SAVE_VIEWS',
 	},
 	{
 		path: ROUTES.TRACE_DETAIL,
-		element: <TraceDetail />,
+		exact: true,
+		component: TraceDetail,
 		isPrivate: true,
 		key: 'TRACE_DETAIL',
 	},
 	{
 		path: ROUTES.SETTINGS,
-		element: <SettingsPage />,
+		exact: true,
+		component: SettingsPage,
 		isPrivate: true,
 		key: 'SETTINGS',
 	},
 	{
 		path: ROUTES.USAGE_EXPLORER,
-		element: <UsageExplorerPage />,
+		exact: true,
+		component: UsageExplorerPage,
 		isPrivate: true,
 		key: 'USAGE_EXPLORER',
 	},
 	{
 		path: ROUTES.ALL_DASHBOARD,
-		element: <DashboardPage />,
+		exact: true,
+		component: DashboardPage,
 		isPrivate: true,
 		key: 'ALL_DASHBOARD',
 	},
 	{
 		path: ROUTES.DASHBOARD,
-		element: <NewDashboardPage />,
+		exact: true,
+		component: NewDashboardPage,
 		isPrivate: true,
 		key: 'DASHBOARD',
 	},
 	{
 		path: ROUTES.DASHBOARD_WIDGET,
-		element: <DashboardWidget />,
+		exact: true,
+		component: DashboardWidget,
 		isPrivate: true,
 		key: 'DASHBOARD_WIDGET',
 	},
 	{
 		path: ROUTES.EDIT_ALERTS,
-		element: <EditRulesPage />,
+		exact: true,
+		component: EditRulesPage,
 		isPrivate: true,
 		key: 'EDIT_ALERTS',
 	},
 	{
 		path: ROUTES.LIST_ALL_ALERT,
-		element: <ListAllALertsPage />,
+		exact: true,
+		component: ListAllALertsPage,
 		isPrivate: true,
 		key: 'LIST_ALL_ALERT',
 	},
 	{
 		path: ROUTES.ALERTS_NEW,
-		element: <CreateNewAlerts />,
+		exact: true,
+		component: CreateNewAlerts,
 		isPrivate: true,
 		key: 'ALERTS_NEW',
 	},
 	{
 		path: ROUTES.ALERT_HISTORY,
-		element: <AlertHistory />,
+		exact: true,
+		component: AlertHistory,
 		isPrivate: true,
 		key: 'ALERT_HISTORY',
 	},
 	{
 		path: ROUTES.ALERT_OVERVIEW,
-		element: <AlertOverview />,
+		exact: true,
+		component: AlertOverview,
 		isPrivate: true,
 		key: 'ALERT_OVERVIEW',
 	},
 	{
 		path: ROUTES.TRACE,
-		element: <TraceFilter />,
+		exact: true,
+		component: TraceFilter,
 		isPrivate: true,
 		key: 'TRACE',
 	},
 	{
 		path: ROUTES.TRACES_EXPLORER,
-		element: <TracesExplorer />,
+		exact: true,
+		component: TracesExplorer,
 		isPrivate: true,
 		key: 'TRACES_EXPLORER',
 	},
 	{
 		path: ROUTES.TRACES_SAVE_VIEWS,
-		element: <TracesSaveViews />,
+		exact: true,
+		component: TracesSaveViews,
 		isPrivate: true,
 		key: 'TRACES_SAVE_VIEWS',
 	},
 	{
 		path: ROUTES.TRACES_FUNNELS,
-		element: <TracesFunnels />,
+		exact: true,
+		component: TracesFunnels,
 		isPrivate: true,
 		key: 'TRACES_FUNNELS',
 	},
 	{
 		path: ROUTES.TRACES_FUNNELS_DETAIL,
-		element: <TracesFunnelDetails />,
+		exact: true,
+		component: TracesFunnelDetails,
 		isPrivate: true,
 		key: 'TRACES_FUNNELS_DETAIL',
 	},
 	{
 		path: ROUTES.CHANNELS_NEW,
-		element: <CreateAlertChannelAlerts />,
+		exact: true,
+		component: CreateAlertChannelAlerts,
 		isPrivate: true,
 		key: 'CHANNELS_NEW',
 	},
 	{
 		path: ROUTES.CHANNELS_EDIT,
-		element: <EditAlertChannelsAlerts />,
+		exact: true,
+		component: EditAlertChannelsAlerts,
 		isPrivate: true,
 		key: 'CHANNELS_EDIT',
 	},
 	{
 		path: ROUTES.ALL_CHANNELS,
-		element: <AllAlertChannels />,
+		exact: true,
+		component: AllAlertChannels,
 		isPrivate: true,
 		key: 'ALL_CHANNELS',
 	},
 	{
 		path: ROUTES.ALL_ERROR,
-		element: <AllErrors />,
+		exact: true,
 		isPrivate: true,
+		component: AllErrors,
 		key: 'ALL_ERROR',
 	},
 	{
 		path: ROUTES.ERROR_DETAIL,
-		element: <ErrorDetails />,
+		exact: true,
+		component: ErrorDetails,
 		isPrivate: true,
 		key: 'ERROR_DETAIL',
 	},
 	{
 		path: ROUTES.VERSION,
-		element: <StatusPage />,
+		exact: true,
+		component: StatusPage,
 		isPrivate: true,
 		key: 'VERSION',
 	},
 	{
 		path: ROUTES.ORG_SETTINGS,
-		element: <OrganizationSettings />,
+		exact: true,
+		component: OrganizationSettings,
 		isPrivate: true,
 		key: 'ORG_SETTINGS',
 	},
 	{
 		path: ROUTES.INGESTION_SETTINGS,
-		element: <IngestionSettings />,
+		exact: true,
+		component: IngestionSettings,
 		isPrivate: true,
 		key: 'INGESTION_SETTINGS',
 	},
 	{
 		path: ROUTES.API_KEYS,
-		element: <APIKeys />,
+		exact: true,
+		component: APIKeys,
 		isPrivate: true,
 		key: 'API_KEYS',
 	},
 	{
 		path: ROUTES.MY_SETTINGS,
-		element: <MySettings />,
+		exact: true,
+		component: MySettings,
 		isPrivate: true,
 		key: 'MY_SETTINGS',
 	},
 	{
 		path: ROUTES.CUSTOM_DOMAIN_SETTINGS,
-		element: <CustomDomainSettings />,
+		exact: true,
+		component: CustomDomainSettings,
 		isPrivate: true,
 		key: 'CUSTOM_DOMAIN_SETTINGS',
 	},
 	{
 		path: ROUTES.LOGS,
-		element: <Logs />,
+		exact: true,
+		component: Logs,
 		key: 'LOGS',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.LOGS_EXPLORER,
-		element: <LogsExplorer />,
+		exact: true,
+		component: LogsExplorer,
 		key: 'LOGS_EXPLORER',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.OLD_LOGS_EXPLORER,
-		element: <OldLogsExplorer />,
+		exact: true,
+		component: OldLogsExplorer,
 		key: 'OLD_LOGS_EXPLORER',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.LIVE_LOGS,
-		element: <LiveLogs />,
+		exact: true,
+		component: LiveLogs,
 		key: 'LIVE_LOGS',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.LOGS_PIPELINES,
-		element: <PipelinePage />,
+		exact: true,
+		component: PipelinePage,
 		key: 'LOGS_PIPELINES',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.LOGIN,
-		element: <Login />,
+		exact: true,
+		component: Login,
 		isPrivate: false,
 		key: 'LOGIN',
 	},
 	{
 		path: ROUTES.UN_AUTHORIZED,
-		element: <UnAuthorized />,
+		exact: true,
+		component: UnAuthorized,
 		key: 'UN_AUTHORIZED',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.PASSWORD_RESET,
-		element: <PasswordReset />,
+		exact: true,
+		component: PasswordReset,
 		key: 'PASSWORD_RESET',
 		isPrivate: false,
 	},
 	{
 		path: ROUTES.SOMETHING_WENT_WRONG,
-		element: <SomethingWentWrong />,
+		exact: true,
+		component: SomethingWentWrong,
 		key: 'SOMETHING_WENT_WRONG',
 		isPrivate: false,
 	},
 	{
 		path: ROUTES.BILLING,
-		element: <BillingPage />,
+		exact: true,
+		component: BillingPage,
 		key: 'BILLING',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.WORKSPACE_LOCKED,
-		element: <WorkspaceBlocked />,
+		exact: true,
+		component: WorkspaceBlocked,
 		isPrivate: true,
 		key: 'WORKSPACE_LOCKED',
 	},
 	{
 		path: ROUTES.WORKSPACE_SUSPENDED,
-		element: <WorkspaceSuspended />,
+		exact: true,
+		component: WorkspaceSuspended,
 		isPrivate: true,
 		key: 'WORKSPACE_SUSPENDED',
 	},
 	{
 		path: ROUTES.WORKSPACE_ACCESS_RESTRICTED,
-		element: <WorkspaceAccessRestricted />,
+		exact: true,
+		component: WorkspaceAccessRestricted,
 		isPrivate: true,
 		key: 'WORKSPACE_ACCESS_RESTRICTED',
 	},
 	{
 		path: ROUTES.SHORTCUTS,
-		element: <ShortcutsPage />,
+		exact: true,
+		component: ShortcutsPage,
 		isPrivate: true,
 		key: 'SHORTCUTS',
 	},
 	{
 		path: ROUTES.INTEGRATIONS,
-		element: <InstalledIntegrations />,
+		exact: true,
+		component: InstalledIntegrations,
 		isPrivate: true,
 		key: 'INTEGRATIONS',
 	},
 	{
 		path: ROUTES.MESSAGING_QUEUES_KAFKA,
-		element: <MessagingQueues />,
+		exact: true,
+		component: MessagingQueues,
 		key: 'MESSAGING_QUEUES_KAFKA',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.MESSAGING_QUEUES_CELERY_TASK,
-		element: <MessagingQueues />,
+		exact: true,
+		component: MessagingQueues,
 		key: 'MESSAGING_QUEUES_CELERY_TASK',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.MESSAGING_QUEUES_OVERVIEW,
-		element: <MessagingQueues />,
+		exact: true,
+		component: MessagingQueues,
 		key: 'MESSAGING_QUEUES_OVERVIEW',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.MESSAGING_QUEUES_KAFKA_DETAIL,
-		element: <MessagingQueues />,
+		exact: true,
+		component: MessagingQueues,
 		key: 'MESSAGING_QUEUES_KAFKA_DETAIL',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
-		element: <InfrastructureMonitoring />,
+		exact: true,
+		component: InfrastructureMonitoring,
 		key: 'INFRASTRUCTURE_MONITORING_HOSTS',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.INFRASTRUCTURE_MONITORING_KUBERNETES,
-		element: <InfrastructureMonitoring />,
+		exact: true,
+		component: InfrastructureMonitoring,
 		key: 'INFRASTRUCTURE_MONITORING_KUBERNETES',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.METRICS_EXPLORER,
-		element: <MetricsExplorer />,
+		exact: true,
+		component: MetricsExplorer,
 		key: 'METRICS_EXPLORER',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.METRICS_EXPLORER_EXPLORER,
-		element: <MetricsExplorer />,
+		exact: true,
+		component: MetricsExplorer,
 		key: 'METRICS_EXPLORER_EXPLORER',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.METRICS_EXPLORER_VIEWS,
-		element: <MetricsExplorer />,
+		exact: true,
+		component: MetricsExplorer,
 		key: 'METRICS_EXPLORER_VIEWS',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.API_MONITORING,
-		element: <ApiMonitoring />,
+		exact: true,
+		component: ApiMonitoring,
 		key: 'API_MONITORING',
 		isPrivate: true,
 	},
@@ -449,14 +509,16 @@ const routes: AppRoutes[] = [
 
 export const SUPPORT_ROUTE: AppRoutes = {
 	path: ROUTES.SUPPORT,
-	element: <SupportPage />,
+	exact: true,
+	component: SupportPage,
 	key: 'SUPPORT',
 	isPrivate: true,
 };
 
 export const LIST_LICENSES: AppRoutes = {
 	path: ROUTES.LIST_LICENSES,
-	element: <LicensePage />,
+	exact: true,
+	component: LicensePage,
 	isPrivate: true,
 	key: 'LIST_LICENSES',
 };
@@ -487,8 +549,9 @@ export const ROUTES_NOT_TO_BE_OVERRIDEN: string[] = [
 ];
 
 export interface AppRoutes {
-	element: RouteProps['element'];
+	component: RouteProps['component'];
 	path: RouteProps['path'];
+	exact: RouteProps['exact'];
 	isPrivate: boolean;
 	key: keyof typeof ROUTES;
 }
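The bulk of the table above is the same mechanical change repeated once per route. As a reference, a sketch of the two descriptor shapes involved — paraphrased from the AppRoutes interface in the diff; the type names here are illustrative, not from the codebase:

import { ComponentType, ReactNode } from 'react';

// react-router v7 shape: a pre-rendered element, and matching is always
// exact, so no flag is needed.
interface RouteEntryV7 {
	path: string;
	element: ReactNode;
	isPrivate: boolean;
	key: string;
}

// react-router v5 shape: a component reference, and prefix matching is the
// default, so `exact` must be stated per route.
interface RouteEntryV5 {
	path: string;
	component: ComponentType;
	exact: boolean;
	isPrivate: boolean;
	key: string;
}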
@@ -1,4 +1,3 @@
-import { isEmpty } from 'lodash-es';
 import { ErrorResponse, SuccessResponse } from 'types/api';
 import { PayloadProps, Props } from 'types/api/alerts/save';
 
@@ -8,7 +7,7 @@ import put from './put';
 const save = async (
 	props: Props,
 ): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
-	if (props.id && !isEmpty(props.id)) {
+	if (props.id && props.id > 0) {
 		return put({ ...props });
 	}
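The guard swap in save.ts is easy to misread: lodash's isEmpty checks length/size, so it reports every number as empty, and the old check only worked while rule ids were strings. A small sketch of the behaviour (the helper name is hypothetical, not from the PR):

import { isEmpty } from 'lodash-es';

isEmpty('rule-id'); // false — a non-empty string
isEmpty(42); // true — numbers have no size, so lodash treats them as empty

// With numeric ids, a plain comparison expresses the real intent:
const isExistingRule = (id?: number): boolean => Boolean(id && id > 0);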
@@ -11,12 +11,9 @@ const logEvent = async (
 	rateLimited?: boolean,
 ): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
 	try {
-		// add tenant_url to attributes
-		const { hostname } = window.location;
-		const updatedAttributes = { ...attributes, tenant_url: hostname };
 		const response = await axios.post('/event', {
 			eventName,
-			attributes: updatedAttributes,
+			attributes,
 			eventType: eventType || 'track',
 			rateLimited: rateLimited || false, // TODO: Update this once we have a proper way to handle rate limiting
 		});
@@ -1,7 +1,7 @@
import deleteLocalStorageKey from 'api/browser/localstorage/remove';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import { safeNavigateNoSameURLMemo } from 'utils/navigate';
import history from 'lib/history';

export const Logout = (): void => {
	deleteLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN);
@@ -23,5 +23,5 @@ export const Logout = (): void => {
		window.Intercom('shutdown');
	}

	safeNavigateNoSameURLMemo(ROUTES.LOGIN);
	history.push(ROUTES.LOGIN);
};
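This Logout hunk is the first instance of the pattern that repeats through the rest of the diff: one side navigates through a v6-style helper built on `NavigateFunction`, the other through the v5 `history` singleton from `lib/history`. A minimal sketch of the two styles side by side (the hook body below is an assumption for illustration, not SigNoz's actual `useSafeNavigate` implementation):

```tsx
import { useNavigate } from 'react-router'; // v6 style
import { createBrowserHistory } from 'history'; // v5 style

export const history = createBrowserHistory();

// v5: any module can import the history singleton and push imperatively.
export function goToLogin(): void {
	history.push('/login');
}

// v6 (hypothetical wrapper): navigation comes from a hook, so it only works
// inside components; a "safe" wrapper can skip pushes to the current URL.
export function useSafeNavigate(): { safeNavigate: (to: string) => void } {
	const navigate = useNavigate();
	return {
		safeNavigate: (to: string): void => {
			if (to !== window.location.pathname) navigate(to);
		},
	};
}
```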
@@ -8,9 +8,8 @@ import {
import { useCeleryFilterOptions } from 'components/CeleryTask/useCeleryFilterOptions';
import { SelectMaxTagPlaceholder } from 'components/MessagingQueues/MQCommon/MQCommon';
import { QueryParams } from 'constants/query';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { useLocation } from 'react-router';
import { useHistory, useLocation } from 'react-router-dom';

interface SelectOptionConfig {
	placeholder: string;
@@ -28,7 +27,7 @@ function FilterSelect({
	);

	const urlQuery = useUrlQuery();
	const { safeNavigate } = useSafeNavigate();
	const history = useHistory();
	const location = useLocation();

	return (
@@ -56,13 +55,7 @@ function FilterSelect({
			}
			onChange={(value): void => {
				handleSearch('');
				setQueryParamsFromOptions(
					value,
					urlQuery,
					safeNavigate,
					location,
					queryParam,
				);
				setQueryParamsFromOptions(value, urlQuery, history, location, queryParam);
			}}
		/>
	);

@@ -3,9 +3,8 @@ import './CeleryTaskConfigOptions.styles.scss';
import { Select, Spin, Typography } from 'antd';
import { SelectMaxTagPlaceholder } from 'components/MessagingQueues/MQCommon/MQCommon';
import { QueryParams } from 'constants/query';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { useLocation } from 'react-router';
import { useHistory, useLocation } from 'react-router-dom';

import {
	getValuesFromQueryParams,
@@ -17,7 +16,7 @@ function CeleryTaskConfigOptions(): JSX.Element {
	const { handleSearch, isFetching, options } = useCeleryFilterOptions(
		'celery.task_name',
	);
	const { safeNavigate } = useSafeNavigate();
	const history = useHistory();
	const location = useLocation();

	const urlQuery = useUrlQuery();
@@ -53,7 +52,7 @@ function CeleryTaskConfigOptions(): JSX.Element {
				setQueryParamsFromOptions(
					value,
					urlQuery,
					safeNavigate,
					history,
					location,
					QueryParams.taskName,
				);

@@ -7,13 +7,12 @@ import { ViewMenuAction } from 'container/GridCardLayout/config';
import GridCard from 'container/GridCardLayout/GridCard';
import { Card } from 'container/GridCardLayout/styles';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { isEmpty } from 'lodash-es';
import { getStartAndEndTimesInMilliseconds } from 'pages/MessagingQueues/MessagingQueuesUtils';
import { useCallback, useMemo, useState } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router';
import { useHistory, useLocation } from 'react-router-dom';
import { UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import { Widgets } from 'types/api/dashboard/getAll';
@@ -50,7 +49,7 @@ function CeleryTaskBar({
	queryEnabled: boolean;
	checkIfDataExists?: (isDataAvailable: boolean) => void;
}): JSX.Element {
	const { safeNavigate } = useSafeNavigate();
	const history = useHistory();
	const { pathname } = useLocation();
	const dispatch = useDispatch();
	const urlQuery = useUrlQuery();
@@ -68,13 +67,13 @@ function CeleryTaskBar({
			urlQuery.set(QueryParams.startTime, startTimestamp.toString());
			urlQuery.set(QueryParams.endTime, endTimestamp.toString());
			const generatedUrl = `${pathname}?${urlQuery.toString()}`;
			safeNavigate(generatedUrl);
			history.push(generatedUrl);

			if (startTimestamp !== endTimestamp) {
				dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
			}
		},
		[dispatch, pathname, urlQuery, safeNavigate],
		[dispatch, history, pathname, urlQuery],
	);

	const [barState, setBarState] = useState<CeleryTaskState>(CeleryTaskState.All);

@@ -5,13 +5,12 @@ import { ViewMenuAction } from 'container/GridCardLayout/config';
import GridCard from 'container/GridCardLayout/GridCard';
import { Card } from 'container/GridCardLayout/styles';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { getStartAndEndTimesInMilliseconds } from 'pages/MessagingQueues/MessagingQueuesUtils';
import { useCallback, useMemo } from 'react';
import { useDispatch } from 'react-redux';
import { useLocation } from 'react-router';
import { useHistory, useLocation } from 'react-router-dom';
import { UpdateTimeInterval } from 'store/actions';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
@@ -54,7 +53,7 @@ function CeleryTaskGraph({
	checkIfDataExists?: (isDataAvailable: boolean) => void;
	analyticsEvent?: string;
}): JSX.Element {
	const { safeNavigate } = useSafeNavigate();
	const history = useHistory();
	const { pathname } = useLocation();
	const dispatch = useDispatch();
	const urlQuery = useUrlQuery();
@@ -83,13 +82,13 @@ function CeleryTaskGraph({
			urlQuery.set(QueryParams.startTime, startTimestamp.toString());
			urlQuery.set(QueryParams.endTime, endTimestamp.toString());
			const generatedUrl = `${pathname}?${urlQuery.toString()}`;
			safeNavigate(generatedUrl);
			history.push(generatedUrl);

			if (startTimestamp !== endTimestamp) {
				dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
			}
		},
		[dispatch, pathname, urlQuery, safeNavigate],
		[dispatch, history, pathname, urlQuery],
	);

	return (

@@ -10,13 +10,12 @@ import { Card } from 'container/GridCardLayout/styles';
import { Button } from 'container/MetricsApplication/Tabs/styles';
import { useGraphClickHandler } from 'container/MetricsApplication/Tabs/util';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { getStartAndEndTimesInMilliseconds } from 'pages/MessagingQueues/MessagingQueuesUtils';
import { useCallback, useMemo, useState } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router';
import { useHistory, useLocation } from 'react-router-dom';
import { UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import { DataSource } from 'types/common/queryBuilder';
@@ -48,7 +47,7 @@ function CeleryTaskLatencyGraph({
	queryEnabled: boolean;
	checkIfDataExists?: (isDataAvailable: boolean) => void;
}): JSX.Element {
	const { safeNavigate } = useSafeNavigate();
	const history = useHistory();
	const { pathname } = useLocation();
	const dispatch = useDispatch();
	const urlQuery = useUrlQuery();
@@ -80,13 +79,13 @@ function CeleryTaskLatencyGraph({
			urlQuery.set(QueryParams.startTime, startTimestamp.toString());
			urlQuery.set(QueryParams.endTime, endTimestamp.toString());
			const generatedUrl = `${pathname}?${urlQuery.toString()}`;
			safeNavigate(generatedUrl);
			history.push(generatedUrl);

			if (startTimestamp !== endTimestamp) {
				dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
			}
		},
		[dispatch, pathname, urlQuery, safeNavigate],
		[dispatch, history, pathname, urlQuery],
	);

	const selectedFilters = useMemo(

@@ -1,6 +1,6 @@
import { QueryParams } from 'constants/query';
import { History, Location } from 'history';
import getRenderer from 'lib/uPlotLib/utils/getRenderer';
import type { Location, NavigateFunction } from 'react-router';
import { Widgets } from 'types/api/dashboard/getAll';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
@@ -17,13 +17,13 @@ export function getValuesFromQueryParams(
export function setQueryParamsFromOptions(
	value: string[],
	urlQuery: URLSearchParams,
	safeNavigate: NavigateFunction,
	history: History<unknown>,
	location: Location<unknown>,
	queryParams: QueryParams,
): void {
	urlQuery.set(queryParams, value.join(','));
	const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
	safeNavigate(generatedUrl, { replace: true });
	history.replace(generatedUrl);
}

export function getFiltersFromQueryParams(
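The `setQueryParamsFromOptions` hunk above is the signature change that drives most of the call-site edits in this diff: the helper receives either a v6 `NavigateFunction` or a v5 `History` object, and replaces the current history entry either via an options bag or a dedicated method. A condensed sketch of the two flavours (types simplified for illustration):

```ts
import type { NavigateFunction } from 'react-router'; // v6
import type { History } from 'history'; // v5

// v6 flavour: "replace" is an option on the navigate call.
function setParamsV6(url: string, navigate: NavigateFunction): void {
	navigate(url, { replace: true });
}

// v5 flavour: "replace" is its own method on the history object.
function setParamsV5(url: string, history: History<unknown>): void {
	history.replace(url);
}
```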
@@ -6,7 +6,7 @@ import { useNotifications } from 'hooks/useNotifications';
import { CreditCard, X } from 'lucide-react';
import { useState } from 'react';
import { useMutation } from 'react-query';
import { useLocation } from 'react-router';
import { useLocation } from 'react-router-dom';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';

@@ -45,7 +45,6 @@ export default function ChatSupportGateway(): JSX.Element {
		},
	);
	const { pathname } = useLocation();

	const handleAddCreditCard = (): void => {
		logEvent('Add Credit card modal: Clicked', {
			source: `intercom icon`,
@@ -53,7 +52,7 @@ export default function ChatSupportGateway(): JSX.Element {
		});

		updateCreditCard({
			url: window.location.origin,
			url: window.location.href,
		});
	};

@@ -26,7 +26,7 @@ import {
	useMemo,
	useState,
} from 'react';
import { useLocation } from 'react-router';
import { useLocation } from 'react-router-dom';
import { popupContainer } from 'utils/selectPopupContainer';

import CustomTimePickerPopoverContent from './CustomTimePickerPopoverContent';

@@ -14,7 +14,7 @@ import {
import { Clock, PenLine } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { Dispatch, SetStateAction, useMemo } from 'react';
import { useLocation } from 'react-router';
import { useLocation } from 'react-router-dom';

import RangePickerModal from './RangePickerModal';
import TimezonePicker from './TimezonePicker';

@@ -194,7 +194,7 @@ function ExplorerCard({
				showSearch
				placeholder="Select a view"
				dropdownStyle={DropDownOverlay}
				popupMatchSelectWidth={false}
				dropdownMatchSelectWidth={false}
				optionLabelProp="value"
				value={viewName || undefined}
			>

@@ -13,7 +13,7 @@ import { CreditCard, HelpCircle, X } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useMemo, useState } from 'react';
import { useMutation } from 'react-query';
import { useLocation } from 'react-router';
import { useLocation } from 'react-router-dom';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';

@@ -153,7 +153,7 @@ function LaunchChatSupport({
		});

		updateCreditCard({
			url: window.location.origin,
			url: window.location.href,
		});
	};
@@ -1,16 +1,16 @@
import { ComponentType, lazy, LazyExoticComponent } from 'react';
import { lazyRetry } from 'utils/lazyWithRetries';

type LazyComponent = ComponentType<Record<string, unknown>>;

type LoadableProps = Promise<{
	default: LazyComponent;
}>;

function Loadable(importPath: {
	(): LoadableProps;
}): LazyExoticComponent<LazyComponent> {
	return lazy(() => lazyRetry(() => importPath()));
}

type LazyComponent = ComponentType<Record<string, unknown>>;

type LoadableProps = Promise<{
	default: LazyComponent;
}>;

export default Loadable;
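`lazyRetry` (from `utils/lazyWithRetries`) is the interesting piece of the `Loadable` hunk: it retries a failed dynamic `import()` so a stale chunk after a redeploy does not permanently break a lazy route. Its implementation is not shown in this diff; the version below is only a guess at the common shape of such a helper, with a one-shot reload fallback:

```ts
// Hypothetical sketch of a lazy-with-retry helper; not the actual
// utils/lazyWithRetries implementation from this repository.
type ModuleLoader<T> = () => Promise<{ default: T }>;

export function lazyRetry<T>(loader: ModuleLoader<T>): Promise<{ default: T }> {
	return loader().catch((error: Error) => {
		const key = 'chunk-reload-attempted';
		// Retry once via a full reload, which refetches the asset manifest.
		if (sessionStorage.getItem(key) !== 'true') {
			sessionStorage.setItem(key, 'true');
			window.location.reload();
		}
		throw error; // second failure: surface the original error
	});
}
```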
@@ -15,15 +15,15 @@ import {
import { OnboardingStatusResponse } from 'api/messagingQueues/onboarding/getOnboardingStatus';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { History } from 'history';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { Bolt, Check, OctagonAlert, X } from 'lucide-react';
import {
	KAFKA_SETUP_DOC_LINK,
	MessagingQueueHealthCheckService,
} from 'pages/MessagingQueues/MessagingQueuesUtils';
import { ReactNode, useEffect, useState } from 'react';
import type { NavigateFunction } from 'react-router';
import { useHistory } from 'react-router-dom';
import { v4 as uuid } from 'uuid';

interface AttributeCheckListProps {
@@ -46,7 +46,7 @@ export enum AttributesFilters {
function ErrorTitleAndKey({
	title,
	parentTitle,
	safeNavigate,
	history,
	isCloudUserVal,
	errorMsg,
	isLeaf,
@@ -54,7 +54,7 @@ function ErrorTitleAndKey({
	title: string;
	parentTitle: string;
	isCloudUserVal: boolean;
	safeNavigate: NavigateFunction;
	history: History<unknown>;
	errorMsg?: string;
	isLeaf?: boolean;
}): TreeDataNode {
@@ -76,7 +76,7 @@ function ErrorTitleAndKey({
	}

	if (isCloudUserVal && !!link) {
		safeNavigate(link);
		history.push(link);
	} else {
		window.open(KAFKA_SETUP_DOC_LINK, '_blank');
	}
@@ -146,7 +146,7 @@ function generateTreeDataNodes(
	response: OnboardingStatusResponse['data'],
	parentTitle: string,
	isCloudUserVal: boolean,
	safeNavigate: NavigateFunction,
	history: History<unknown>,
): TreeDataNode[] {
	return response
		.map((item) => {
@@ -159,7 +159,7 @@ function generateTreeDataNodes(
				title: item.attribute,
				errorMsg: item.error_message || '',
				parentTitle,
				safeNavigate,
				history,
				isCloudUserVal,
			});
		}
@@ -182,7 +182,7 @@ function AttributeCheckList({
		setFilter(value);
	};
	const { isCloudUser: isCloudUserVal } = useGetTenantLicense();
	const { safeNavigate } = useSafeNavigate();
	const history = useHistory();

	useEffect(() => {
		const filteredData = onboardingStatusResponses.map((response) => {
@@ -192,7 +192,7 @@ function AttributeCheckList({
					errorMsg: response.errorMsg,
					isLeaf: true,
					parentTitle: response.title,
					safeNavigate,
					history,
					isCloudUserVal,
				});
			}
@@ -210,7 +210,7 @@ function AttributeCheckList({
					filteredData,
					response.title,
					isCloudUserVal,
					safeNavigate,
					history,
				),
			};
		});

@@ -1,4 +1,4 @@
import { Link } from 'react-router';
import { Link } from 'react-router-dom';
import styled from 'styled-components';

export const Button = styled(Link)`

@@ -1,5 +1,4 @@
import { Tabs, TabsProps } from 'antd';
import { safeNavigateNoSameURLMemo } from 'utils/navigate';

import { RouteTabProps } from './types';

@@ -7,6 +6,7 @@ function RouteTab({
	routes,
	activeKey,
	onChangeHandler,
	history,
	...rest
}: RouteTabProps & TabsProps): JSX.Element {
	const onChange = (activeRoute: string): void => {
@@ -17,7 +17,7 @@ function RouteTab({
		const selectedRoute = routes.find((e) => e.key === activeRoute);

		if (selectedRoute) {
			safeNavigateNoSameURLMemo(selectedRoute.route);
			history.push(selectedRoute.route);
		}
	};

@@ -1,5 +1,5 @@
import { TabsProps } from 'antd';
// import type { NavigateFunction } from 'react-router';
import { History } from 'history';

export type TabRoutes = {
	name: React.ReactNode;
@@ -12,5 +12,5 @@ export interface RouteTabProps {
	routes: TabRoutes[];
	activeKey: TabsProps['activeKey'];
	onChangeHandler?: (key: string) => void;
	// safeNavigate: NavigateFunction;
	history: History<unknown>;
}
@@ -1,12 +1,28 @@
// keep this consistent with backend constants.go
export enum FeatureKeys {
	SSO = 'SSO',
	ENTERPRISE_PLAN = 'ENTERPRISE_PLAN',
	BASIC_PLAN = 'BASIC_PLAN',
	ALERT_CHANNEL_SLACK = 'ALERT_CHANNEL_SLACK',
	ALERT_CHANNEL_WEBHOOK = 'ALERT_CHANNEL_WEBHOOK',
	ALERT_CHANNEL_PAGERDUTY = 'ALERT_CHANNEL_PAGERDUTY',
	ALERT_CHANNEL_OPSGENIE = 'ALERT_CHANNEL_OPSGENIE',
	ALERT_CHANNEL_MSTEAMS = 'ALERT_CHANNEL_MSTEAMS',
	DurationSort = 'DurationSort',
	TimestampSort = 'TimestampSort',
	SMART_TRACE_DETAIL = 'SMART_TRACE_DETAIL',
	CUSTOM_METRICS_FUNCTION = 'CUSTOM_METRICS_FUNCTION',
	QUERY_BUILDER_PANELS = 'QUERY_BUILDER_PANELS',
	QUERY_BUILDER_ALERTS = 'QUERY_BUILDER_ALERTS',
	DISABLE_UPSELL = 'DISABLE_UPSELL',
	USE_SPAN_METRICS = 'USE_SPAN_METRICS',
	OSS = 'OSS',
	ONBOARDING = 'ONBOARDING',
	CHAT_SUPPORT = 'CHAT_SUPPORT',
	GATEWAY = 'GATEWAY',
	PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
	QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
	ANOMALY_DETECTION = 'ANOMALY_DETECTION',
	AWS_INTEGRATION = 'AWS_INTEGRATION',
	ONBOARDING_V3 = 'ONBOARDING_V3',
	TRACE_FUNNELS = 'TRACE_FUNNELS',
}

@@ -7,7 +7,7 @@ import ROUTES from 'constants/routes';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { DraftingCompass } from 'lucide-react';
import React from 'react';
import { Link } from 'react-router';
import { Link } from 'react-router-dom';

type Props = {
	children: React.ReactNode;

@@ -3,10 +3,10 @@ import './TopContributorsCard.styles.scss';
import { Color } from '@signozhq/design-tokens';
import { Button } from 'antd';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import history from 'lib/history';
import { ArrowRight } from 'lucide-react';
import { useMemo, useState } from 'react';
import { useLocation } from 'react-router';
import { useLocation } from 'react-router-dom';

import TopContributorsContent from './TopContributorsContent';
import { TopContributorsCardProps } from './types';
@@ -17,7 +17,6 @@ function TopContributorsCard({
	totalCurrentTriggers,
}: TopContributorsCardProps): JSX.Element {
	const { search } = useLocation();
	const { safeNavigate } = useSafeNavigate();
	const searchParams = useMemo(() => new URLSearchParams(search), [search]);

	const viewAllTopContributorsParam = searchParams.get('viewAllTopContributors');
@@ -44,7 +43,7 @@ function TopContributorsCard({

			return newState;
		});
		safeNavigate({ search: searchParams.toString() });
		history.push({ search: searchParams.toString() });
	};

	return (

@@ -3,8 +3,8 @@ import Uplot from 'components/Uplot';
import { QueryParams } from 'constants/query';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import heatmapPlugin from 'lib/uPlotLib/plugins/heatmapPlugin';
import timelinePlugin from 'lib/uPlotLib/plugins/timelinePlugin';
import { uPlotXAxisValuesFormat } from 'lib/uPlotLib/utils/constants';
@@ -28,8 +28,6 @@ function HorizontalTimelineGraph({
	isDarkMode: boolean;
	data: AlertRuleTimelineGraphResponse[];
}): JSX.Element {
	const { safeNavigate } = useSafeNavigate();

	const transformedData: AlignedData = useMemo(() => {
		if (!data?.length) {
			return [[], []];
@@ -104,7 +102,7 @@ function HorizontalTimelineGraph({
				urlQuery.set(QueryParams.startTime, startTimestamp.toString());
				urlQuery.set(QueryParams.endTime, endTimestamp.toString());

				safeNavigate({
				history.push({
					search: urlQuery.toString(),
				});
			}
@@ -133,7 +131,6 @@ function HorizontalTimelineGraph({
			urlQuery,
			dispatch,
			timezone.value,
			safeNavigate,
		],
	);
	return <Uplot data={transformedData} options={options} />;
@@ -2,11 +2,11 @@ import './TabsAndFilters.styles.scss';

import { Color } from '@signozhq/design-tokens';
import { TimelineFilter, TimelineTab } from 'container/AlertHistory/types';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import history from 'lib/history';
import { Info } from 'lucide-react';
import Tabs2 from 'periscope/components/Tabs2';
import { useMemo } from 'react';
import { useLocation } from 'react-router';
import { useLocation } from 'react-router-dom';

function ComingSoon(): JSX.Element {
	return (
@@ -41,8 +41,6 @@ function TimelineTabs(): JSX.Element {

function TimelineFilters(): JSX.Element {
	const { search } = useLocation();
	const { safeNavigate } = useSafeNavigate();

	const searchParams = useMemo(() => new URLSearchParams(search), [search]);

	const initialSelectedTab = useMemo(
@@ -52,7 +50,7 @@ function TimelineFilters(): JSX.Element {

	const handleFilter = (value: TimelineFilter): void => {
		searchParams.set('timelineFilter', value);
		safeNavigate({ search: searchParams.toString() });
		history.push({ search: searchParams.toString() });
	};

	const tabs = [

@@ -5,11 +5,11 @@ import { ResizeTable } from 'components/ResizeTable';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import history from 'lib/history';
import { useAppContext } from 'providers/App/App';
import { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { generatePath } from 'react-router';
import { generatePath } from 'react-router-dom';
import { Channels, PayloadProps } from 'types/api/channels/getAll';

import Delete from './Delete';
@@ -17,22 +17,17 @@ import Delete from './Delete';
function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
	const { t } = useTranslation(['channels']);
	const { notifications } = useNotifications();
	const { safeNavigate } = useSafeNavigate();
	const [channels, setChannels] = useState<Channels[]>(allChannels);
	const { user } = useAppContext();
	const [action] = useComponentPermission(['new_alert_action'], user.role);

	const onClickEditHandler = useCallback(
		(id: string) => {
			safeNavigate(
				generatePath(ROUTES.CHANNELS_EDIT, {
					id,
				}),
				{ replace: true },
			);
		},
		[safeNavigate],
	);
	const onClickEditHandler = useCallback((id: string) => {
		history.replace(
			generatePath(ROUTES.CHANNELS_EDIT, {
				id,
			}),
		);
	}, []);

	const columns: ColumnsType<Channels> = [
		{
@@ -31,10 +31,6 @@ jest.mock('hooks/useNotifications', () => ({
	})),
}));

jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
	MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));

describe('Create Alert Channel', () => {
	afterEach(() => {
		jest.clearAllMocks();

@@ -18,10 +18,6 @@ import { render, screen } from 'tests/test-utils';

import { testLabelInputAndHelpValue } from './testUtils';

jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
	MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));

describe('Create Alert Channel (Normal User)', () => {
	afterEach(() => {
		jest.clearAllMocks();

@@ -20,10 +20,6 @@ jest.mock('hooks/useNotifications', () => ({
	})),
}));

jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
	MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));

describe('Should check if the edit alert channel is properly displayed ', () => {
	beforeEach(() => {
		render(<EditAlertChannels initialValue={editAlertChannelInitialValue} />);

@@ -7,7 +7,7 @@ import TextToolTip from 'components/TextToolTip';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import useFetch from 'hooks/useFetch';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import history from 'lib/history';
import { isUndefined } from 'lodash-es';
import { useAppContext } from 'providers/App/App';
import { useCallback, useEffect } from 'react';
@@ -21,14 +21,13 @@ const { Paragraph } = Typography;
function AlertChannels(): JSX.Element {
	const { t } = useTranslation(['channels']);
	const { user } = useAppContext();
	const { safeNavigate } = useSafeNavigate();
	const [addNewChannelPermission] = useComponentPermission(
		['add_new_channel'],
		user.role,
	);
	const onToggleHandler = useCallback(() => {
		safeNavigate(ROUTES.CHANNELS_NEW);
	}, [safeNavigate]);
		history.push(ROUTES.CHANNELS_NEW);
	}, []);

	const { loading, payload, error, errorMessage } = useFetch(getAll);
@@ -21,17 +21,17 @@ import ROUTES from 'constants/routes';
import { useNotifications } from 'hooks/useNotifications';
import useResourceAttribute from 'hooks/useResourceAttribute';
import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import { isUndefined } from 'lodash-es';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { useTranslation } from 'react-i18next';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { Link, useLocation } from 'react-router';
import { Link, useLocation } from 'react-router-dom';
import { AppState } from 'store/reducers';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { Exception, PayloadProps } from 'types/api/errors/getAll';
@@ -66,7 +66,6 @@ function AllErrors(): JSX.Element {
		(state) => state.globalTime,
	);
	const { pathname } = useLocation();
	const { safeNavigate } = useSafeNavigate();
	const params = useUrlQuery();
	const { t } = useTranslation(['common']);
	const {
@@ -203,9 +202,7 @@ function AllErrors(): JSX.Element {
				queryParams.serviceName = serviceFilterValue;
			}

			safeNavigate(`${pathname}?${createQueryParams(queryParams)}`, {
				replace: true,
			});
			history.replace(`${pathname}?${createQueryParams(queryParams)}`);
			confirm();
		},
		[
@@ -216,7 +213,6 @@ function AllErrors(): JSX.Element {
			getUpdatedServiceName,
			pathname,
			updatedOrder,
			safeNavigate,
		],
	);
@@ -418,7 +414,7 @@ function AllErrors(): JSX.Element {
					serviceName: getFilterString(params.get(urlKey.serviceName)),
					exceptionType: getFilterString(params.get(urlKey.exceptionType)),
				});
				safeNavigate(
				history.replace(
					`${pathname}?${createQueryParams({
						order: updatedOrder,
						offset: (current - 1) * pageSize,
@@ -427,11 +423,10 @@ function AllErrors(): JSX.Element {
						exceptionType,
						serviceName,
					})}`,
					{ replace: true },
				);
			}
		},
		[pathname, safeNavigate],
		[pathname],
	);

	const logEventCalledRef = useRef(false);

@@ -1,6 +1,5 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Select, Spin, Table, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
@@ -152,7 +151,6 @@ function AllEndPoints({
	if (groupBy.length === 0) {
		setSelectedEndPointName(record.endpointName); // this will open up the endpoint details tab
		setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
		logEvent('API Monitoring: Endpoint name row clicked', {});
	} else {
		handleGroupByRowClick(record); // this will prepare the nested query payload
	}
@@ -392,39 +392,6 @@
	gap: 20px;
	padding-top: 20px;

	.endpoint-meta-data {
		display: flex;
		gap: 8px;
		.endpoint-meta-data-pill {
			display: flex;
			align-items: flex-start;
			border-radius: 4px;
			border: 1px solid var(--bg-slate-300);
			width: fit-content;
			.endpoint-meta-data-label {
				display: flex;
				padding: 6px 8px;
				align-items: center;
				gap: 4px;
				border-right: 1px solid var(--bg-slate-300);
				color: var(--text-vanilla-100);
				background: var(--bg-slate-500);
				height: calc(100% - 12px);
			}

			.endpoint-meta-data-value {
				display: flex;
				padding: 6px 8px;
				justify-content: center;
				align-items: center;
				gap: 10px;
				color: var(--text-vanilla-400);
				background: var(--bg-slate-400);
				height: calc(100% - 12px);
			}
		}
	}

	.endpoint-details-filters-container {
		display: flex;
		flex-direction: row;
@@ -438,13 +405,6 @@
		}
	}

	.ant-select-item,
	.ant-select-item-option-content {
		flex: auto;
		white-space: normal;
		overflow-wrap: break-word;
	}

	.status-code-table-container {
		border-radius: 3px;
		border: 1px solid var(--bg-slate-500);
@@ -849,13 +809,6 @@
			width: 100%;
		}
	}

	.ant-select-item,
	.ant-select-item-option-content {
		flex: auto;
		white-space: normal;
		overflow-wrap: break-word;
	}
}

.lightMode {
@@ -964,20 +917,6 @@
		}
	}

	.endpoint-meta-data {
		.endpoint-meta-data-pill {
			.endpoint-meta-data-label {
				color: var(--text-ink-300);
				background: var(--bg-vanilla-100);
			}

			.endpoint-meta-data-value {
				color: var(--text-ink-300);
				background: var(--bg-vanilla-100);
			}
		}
	}

	.status-code-table-container {
		.ant-table {
			.ant-table-thead > tr > th {
@@ -19,14 +19,12 @@ function DomainDetails({
	selectedDomainIndex,
	setSelectedDomainIndex,
	domainListLength,
	domainListFilters,
}: {
	domainData: any;
	handleClose: () => void;
	selectedDomainIndex: number;
	setSelectedDomainIndex: (index: number) => void;
	domainListLength: number;
	domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
	const [selectedView, setSelectedView] = useState<VIEWS>(VIEWS.ALL_ENDPOINTS);
	const [selectedEndPointName, setSelectedEndPointName] = useState<string>('');
@@ -134,7 +132,6 @@ function DomainDetails({
					domainName={domainData.domainName}
					endPointName={selectedEndPointName}
					setSelectedEndPointName={setSelectedEndPointName}
					domainListFilters={domainListFilters}
				/>
			)}
		</>

@@ -2,10 +2,7 @@ import { ENTITY_VERSION_V4 } from 'constants/app';
import { initialQueriesMap } from 'constants/queryBuilder';
import {
	END_POINT_DETAILS_QUERY_KEYS_ARRAY,
	extractPortAndEndpoint,
	getEndPointDetailsQueryPayload,
	getLatencyOverTimeWidgetData,
	getRateOverTimeWidgetData,
} from 'container/ApiMonitoring/utils';
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
@@ -30,12 +27,10 @@ function EndPointDetails({
	domainName,
	endPointName,
	setSelectedEndPointName,
	domainListFilters,
}: {
	domainName: string;
	endPointName: string;
	setSelectedEndPointName: (value: string) => void;
	domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
@@ -106,6 +101,8 @@ function EndPointDetails({
	const [
		endPointMetricsDataQuery,
		endPointStatusCodeDataQuery,
		endPointRateOverTimeDataQuery,
		endPointLatencyOverTimeDataQuery,
		endPointDropDownDataQuery,
		endPointDependentServicesDataQuery,
		endPointStatusCodeBarChartsDataQuery,
@@ -118,29 +115,12 @@ function EndPointDetails({
			endPointDetailsDataQueries[3],
			endPointDetailsDataQueries[4],
			endPointDetailsDataQueries[5],
			endPointDetailsDataQueries[6],
			endPointDetailsDataQueries[7],
		],
		[endPointDetailsDataQueries],
	);

	const { endpoint, port } = useMemo(
		() => extractPortAndEndpoint(endPointName),
		[endPointName],
	);

	const [rateOverTimeWidget, latencyOverTimeWidget] = useMemo(
		() => [
			getRateOverTimeWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
			getLatencyOverTimeWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
		],
		[domainName, endPointName, filters, domainListFilters],
	);

	return (
		<div className="endpoint-details-container">
			<div className="endpoint-details-filters-container">
@@ -149,8 +129,6 @@ function EndPointDetails({
					selectedEndPointName={endPointName}
					setSelectedEndPointName={setSelectedEndPointName}
					endPointDropDownDataQuery={endPointDropDownDataQuery}
					parentContainerDiv=".endpoint-details-filters-container"
					dropdownStyle={{ width: 'calc(100% - 36px)' }}
				/>
			</div>
			<div className="endpoint-details-filters-container-search">
@@ -163,16 +141,6 @@ function EndPointDetails({
					/>
				</div>
			</div>
			<div className="endpoint-meta-data">
				<div className="endpoint-meta-data-pill">
					<div className="endpoint-meta-data-label">Endpoint</div>
					<div className="endpoint-meta-data-value">{endpoint || '-'}</div>
				</div>
				<div className="endpoint-meta-data-pill">
					<div className="endpoint-meta-data-label">Port</div>
					<div className="endpoint-meta-data-value">{port || '-'}</div>
				</div>
			</div>
			<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
			{!isServicesFilterApplied && (
				<DependentServices
@@ -184,14 +152,18 @@ function EndPointDetails({
				endPointStatusCodeLatencyBarChartsDataQuery={
					endPointStatusCodeLatencyBarChartsDataQuery
				}
				domainName={domainName}
				endPointName={endPointName}
				domainListFilters={domainListFilters}
				filters={filters}
			/>
			<StatusCodeTable endPointStatusCodeDataQuery={endPointStatusCodeDataQuery} />
			<MetricOverTimeGraph widget={rateOverTimeWidget} />
			<MetricOverTimeGraph widget={latencyOverTimeWidget} />
			<MetricOverTimeGraph
				metricOverTimeDataQuery={endPointRateOverTimeDataQuery}
				widgetInfoIndex={0}
				endPointName={endPointName}
			/>
			<MetricOverTimeGraph
				metricOverTimeDataQuery={endPointLatencyOverTimeDataQuery}
				widgetInfoIndex={1}
				endPointName={endPointName}
			/>
		</div>
	);
}

@@ -8,7 +8,6 @@ import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';

import EndPointDetailsZeroState from './components/EndPointDetailsZeroState';
@@ -18,12 +17,10 @@ function EndPointDetailsWrapper({
	domainName,
	endPointName,
	setSelectedEndPointName,
	domainListFilters,
}: {
	domainName: string;
	endPointName: string;
	setSelectedEndPointName: (value: string) => void;
	domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
@@ -72,7 +69,6 @@ function EndPointDetailsWrapper({
			domainName={domainName}
			endPointName={endPointName}
			setSelectedEndPointName={setSelectedEndPointName}
			domainListFilters={domainListFilters}
		/>
	);
}

@@ -28,8 +28,6 @@ function EndPointDetailsZeroState({
				<EndPointsDropDown
					setSelectedEndPointName={setSelectedEndPointName}
					endPointDropDownDataQuery={endPointDropDownDataQuery}
					parentContainerDiv=".end-point-details-zero-state-wrapper"
					dropdownStyle={{ width: '60%' }}
				/>
			</div>
		</div>

@@ -70,7 +70,7 @@ function EndPointMetrics({
				<Skeleton.Button active size="small" />
			) : (
				<Tooltip title={metricsData?.rate}>
					<span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
					<span className="round-metric-tag">{metricsData?.rate}/sec</span>
				</Tooltip>
			)}
		</Typography.Text>

@@ -8,22 +8,16 @@ interface EndPointsDropDownProps {
	selectedEndPointName?: string;
	setSelectedEndPointName: (value: string) => void;
	endPointDropDownDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
	parentContainerDiv?: string;
	dropdownStyle?: React.CSSProperties;
}

const defaultProps = {
	selectedEndPointName: '',
	parentContainerDiv: '',
	dropdownStyle: {},
};

function EndPointsDropDown({
	selectedEndPointName,
	setSelectedEndPointName,
	endPointDropDownDataQuery,
	parentContainerDiv,
	dropdownStyle,
}: EndPointsDropDownProps): JSX.Element {
	const { data, isLoading, isFetching } = endPointDropDownDataQuery;

@@ -45,13 +39,6 @@ function EndPointsDropDown({
			style={{ width: '100%' }}
			onChange={handleChange}
			options={formattedData}
			getPopupContainer={
				parentContainerDiv
					? (): HTMLElement =>
							document.querySelector(parentContainerDiv) as HTMLElement
					: (triggerNode): HTMLElement => triggerNode.parentNode as HTMLElement
			}
			dropdownStyle={dropdownStyle}
		/>
	);
}
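The removed `getPopupContainer` block just above is worth a note: antd renders Select dropdowns into `document.body` by default, so pinning them to a specific ancestor keeps the popup aligned when that ancestor scrolls. A minimal standalone example of the same antd API (the `.filters-panel` selector is hypothetical, for illustration only):

```tsx
import { Select } from 'antd';

// Render the dropdown inside the element matching `.filters-panel`
// instead of document.body, so it scrolls with the panel.
function Example(): JSX.Element {
	return (
		<Select
			options={[{ value: 'a' }, { value: 'b' }]}
			getPopupContainer={(): HTMLElement =>
				document.querySelector('.filters-panel') as HTMLElement
			}
		/>
	);
}
```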
@@ -1,7 +1,6 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table } from 'antd';
import { ColumnType } from 'antd/lib/table';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
@@ -115,7 +114,6 @@ function ExpandedRow({
	onClick: (): void => {
		setSelectedEndPointName(record.endpointName);
		setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
		logEvent('API Monitoring: Endpoint name row clicked', {});
	},
	className: 'expanded-clickable-row',
})}
@@ -1,18 +1,110 @@
import { Card } from 'antd';
import GridCard from 'container/GridCardLayout/GridCard';
import { Widgets } from 'types/api/dashboard/getAll';
import { Card, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import Uplot from 'components/Uplot';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
	apiWidgetInfo,
	extractPortAndEndpoint,
	getFormattedChartData,
} from 'container/ApiMonitoring/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useMemo, useRef } from 'react';
import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { GlobalReducer } from 'types/reducer/globalTime';
import { Options } from 'uplot';

import ErrorState from './ErrorState';

function MetricOverTimeGraph({
	metricOverTimeDataQuery,
	widgetInfoIndex,
	endPointName,
}: {
	metricOverTimeDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
	widgetInfoIndex: number;
	endPointName: string;
}): JSX.Element {
	const { data } = metricOverTimeDataQuery;

	const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const graphRef = useRef<HTMLDivElement>(null);
	const dimensions = useResizeObserver(graphRef);

	const { endpoint } = extractPortAndEndpoint(endPointName);

	const formattedChartData = useMemo(
		() => getFormattedChartData(data?.payload, [endpoint]),
		[data?.payload, endpoint],
	);

	const chartData = useMemo(() => getUPlotChartData(formattedChartData), [
		formattedChartData,
	]);

	const isDarkMode = useIsDarkMode();

	const options = useMemo(
		() =>
			getUPlotChartOptions({
				apiResponse: formattedChartData,
				isDarkMode,
				dimensions,
				yAxisUnit: apiWidgetInfo[widgetInfoIndex].yAxisUnit,
				softMax: null,
				softMin: null,
				minTimeScale: Math.floor(minTime / 1e9),
				maxTimeScale: Math.floor(maxTime / 1e9),
				panelType: PANEL_TYPES.TIME_SERIES,
			}),
		[
			formattedChartData,
			minTime,
			maxTime,
			widgetInfoIndex,
			dimensions,
			isDarkMode,
		],
	);

	const renderCardContent = useCallback(
		(query: UseQueryResult<SuccessResponse<any>, unknown>): JSX.Element => {
			if (query.isLoading) {
				return <Skeleton />;
			}

			if (query.error) {
				return <ErrorState refetch={query.refetch} />;
			}

			return (
				<div
					className={cx('chart-container', {
						'no-data-container':
							!query.isLoading && !query?.data?.payload?.data?.result?.length,
					})}
				>
					<Uplot options={options as Options} data={chartData} />
				</div>
			);
		},
		[options, chartData],
	);

function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
	return (
		<div>
			<Card bordered className="endpoint-details-card">
				<div className="graph-container">
					<GridCard
						widget={widget}
						isQueryEnabled
						onDragSelect={(): void => {}}
						customOnDragSelect={(): void => {}}
					/>
				<Typography.Text>{apiWidgetInfo[widgetInfoIndex].title}</Typography.Text>
				<div className="graph-container" ref={graphRef}>
					{renderCardContent(metricOverTimeDataQuery)}
				</div>
			</Card>
		</div>
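The MetricOverTimeGraph rewrite above swaps a dashboard `GridCard` for a hand-rolled uPlot panel, which is why it now needs `useResizeObserver`: uPlot takes explicit pixel dimensions rather than sizing itself. A generic sketch of that pattern (a simplified stand-in, not the repository's `hooks/useDimensions` implementation):

```tsx
import { RefObject, useEffect, useRef, useState } from 'react';

// Generic resize-observer hook: reports the pixel box of a container so a
// canvas-based chart like uPlot can be given explicit width/height options.
function useDimensions<T extends HTMLElement>(): [
	RefObject<T>,
	{ width: number; height: number },
] {
	const ref = useRef<T>(null);
	const [size, setSize] = useState({ width: 0, height: 0 });

	useEffect(() => {
		if (!ref.current) return undefined;
		const observer = new ResizeObserver(([entry]) =>
			setSize({
				width: entry.contentRect.width,
				height: entry.contentRect.height,
			}),
		);
		observer.observe(ref.current);
		return (): void => observer.disconnect();
	}, []);

	return [ref, size];
}
```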
@@ -1,22 +1,13 @@
import { Color } from '@signozhq/design-tokens';
import { Button, Card, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import { useGetGraphCustomSeries } from 'components/CeleryTask/useGetGraphCustomSeries';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import Uplot from 'components/Uplot';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
	getCustomFiltersForBarChart,
	getFormattedEndPointStatusCodeChartData,
	getStatusCodeBarChartWidgetData,
	statusCodeWidgetInfo,
} from 'container/ApiMonitoring/utils';
import { handleGraphClick } from 'container/GridCardLayout/GridCard/utils';
import { useGraphClickToShowButton } from 'container/GridCardLayout/useGraphClickToShowButton';
import useNavigateToExplorerPages from 'container/GridCardLayout/useNavigateToExplorerPages';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useNotifications } from 'hooks/useNotifications';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useMemo, useRef, useState } from 'react';
@@ -24,8 +15,6 @@ import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';
import { Options } from 'uplot';

@@ -34,10 +23,6 @@ import ErrorState from './ErrorState';
function StatusCodeBarCharts({
	endPointStatusCodeBarChartsDataQuery,
	endPointStatusCodeLatencyBarChartsDataQuery,
	domainName,
	endPointName,
	domainListFilters,
	filters,
}: {
	endPointStatusCodeBarChartsDataQuery: UseQueryResult<
		SuccessResponse<any>,
@@ -47,10 +32,6 @@ function StatusCodeBarCharts({
		SuccessResponse<any>,
		unknown
	>;
	domainName: string;
	endPointName: string;
	domainListFilters: IBuilderQuery['filters'];
	filters: IBuilderQuery['filters'];
}): JSX.Element {
	// 0 : Status Code Count
	// 1 : Status Code Latency
@@ -104,72 +85,6 @@ function StatusCodeBarCharts({

	const isDarkMode = useIsDarkMode();

	const graphClick = useGraphClickToShowButton({
		graphRef,
		isButtonEnabled: true,
		buttonClassName: 'view-onclick-show-button',
	});

	const navigateToExplorer = useNavigateToExplorer();

	const navigateToExplorerPages = useNavigateToExplorerPages();
	const { notifications } = useNotifications();

	const { getCustomSeries } = useGetGraphCustomSeries({
		isDarkMode,
		drawStyle: 'bars',
		colorMapping: {
			'200-299': Color.BG_FOREST_500,
			'300-399': Color.BG_AMBER_400,
			'400-499': Color.BG_CHERRY_500,
			'500-599': Color.BG_ROBIN_500,
			Other: Color.BG_SIENNA_500,
		},
	});

	const widget = useMemo<Widgets>(
		() =>
			getStatusCodeBarChartWidgetData(domainName, endPointName, {
				items: [...domainListFilters.items, ...filters.items],
				op: filters.op,
			}),
		[domainName, endPointName, domainListFilters, filters],
	);

	const graphClickHandler = useCallback(
		(
			xValue: number,
			yValue: number,
			mouseX: number,
			mouseY: number,
			metric?: { [key: string]: string },
			queryData?: { queryName: string; inFocusOrNot: boolean },
		): void => {
			const customFilters = getCustomFiltersForBarChart(metric);
			handleGraphClick({
				xValue,
				yValue,
				mouseX,
				mouseY,
				metric,
				queryData,
				widget,
				navigateToExplorerPages,
				navigateToExplorer,
				notifications,
				graphClick,
				customFilters,
			});
		},
		[
			widget,
			navigateToExplorerPages,
			navigateToExplorer,
			notifications,
			graphClick,
		],
	);

	const options = useMemo(
		() =>
			getUPlotChartOptions({
@@ -185,8 +100,6 @@ function StatusCodeBarCharts({
				minTimeScale: Math.floor(minTime / 1e9),
				maxTimeScale: Math.floor(maxTime / 1e9),
				panelType: PANEL_TYPES.BAR,
				onClickHandler: graphClickHandler,
				customSeries: getCustomSeries,
			}),
		[
			minTime,
@@ -196,8 +109,6 @@ function StatusCodeBarCharts({
			formattedEndPointStatusCodeBarChartsDataPayload,
			formattedEndPointStatusCodeLatencyBarChartsDataPayload,
			isDarkMode,
			graphClickHandler,
			getCustomSeries,
		],
	);
@@ -3,7 +3,6 @@ import '../Explorer.styles.scss';
import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table, Typography } from 'antd';
import axios from 'api';
import logEvent from 'api/common/logEvent';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import cx from 'classnames';
@@ -131,7 +130,6 @@ function DomainList({
		(item) => item.key === record.key,
	);
	setSelectedDomainIndex(dataIndex);
	logEvent('API Monitoring: Domain name row clicked', {});
	}
},
className: 'expanded-clickable-row',
@@ -149,7 +147,6 @@ function DomainList({
					handleClose={(): void => {
						setSelectedDomainIndex(-1);
					}}
					domainListFilters={query?.filters}
				/>
			)}
		</section>

@@ -3,14 +3,13 @@ import './Explorer.styles.scss';
import { FilterOutlined } from '@ant-design/icons';
import * as Sentry from '@sentry/react';
import { Switch, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import QuickFilters from 'components/QuickFilters/QuickFilters';
import { QuickFiltersSource } from 'components/QuickFilters/types';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useEffect, useMemo, useState } from 'react';
import { useMemo, useState } from 'react';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

@@ -22,10 +21,6 @@ function Explorer(): JSX.Element {

	const { currentQuery } = useQueryBuilder();

	useEffect(() => {
		logEvent('API Monitoring: Landing page visited', {});
	}, []);

	const { handleChangeQueryData } = useQueryOperations({
		index: 0,
		query: currentQuery.builder.queryData[0],
@@ -69,12 +64,7 @@ function Explorer(): JSX.Element {
	style={{ marginLeft: 'auto' }}
	checked={showIP}
	onClick={(): void => {
		setShowIP((showIP): boolean => {
			logEvent('API Monitoring: Show IP addresses clicked', {
				showIP: !showIP,
			});
			return !showIP;
		});
		setShowIP((showIP) => !showIP);
	}}
/>
</div>
Some files were not shown because too many files have changed in this diff.