Compare commits

1 commit: v0.78.1 ... multiselec

| Author | SHA1 | Date |
|---|---|---|
|  | e0a6adf177 |  |

.github/workflows/build-community.yaml (vendored, 91 changed lines)

@@ -1,91 +0,0 @@
name: build-community

on:
  push:
    branches:
      - main
    tags:
      - v*

defaults:
  run:
    shell: bash

env:
  PRIMUS_HOME: .primus
  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk

jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
      version: ${{ steps.build-info.outputs.version }}
      hash: ${{ steps.build-info.outputs.hash }}
      time: ${{ steps.build-info.outputs.time }}
      branch: ${{ steps.build-info.outputs.branch }}
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - id: token
        name: github-token-gen
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PRIMUS_APP_ID }}
          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - name: primus-checkout
        uses: actions/checkout@v4
        with:
          repository: signoz/primus
          ref: ${{ inputs.PRIMUS_REF }}
          path: .primus
          token: ${{ steps.token.outputs.token }}
      - name: build-info
        run: |
          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
      - name: set-docker-providers
        id: set-docker-providers
        run: |
          if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
            echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
          else
            echo "providers=gcp" >> $GITHUB_OUTPUT
          fi
  js-build:
    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
    needs: prepare
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
      JS_OUTPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
    needs: [prepare, js-build]
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_NAME: signoz-community
      GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./pkg/query-service
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=community
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
      DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}
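The -X linker flags above overwrite package-level string variables in github.com/SigNoz/signoz/pkg/version at build time. A minimal sketch of how such flags are typically consumed — the real package may differ, and the variable names are simply taken from the -X flags in this workflow:

```go
// Hypothetical sketch only: plain string variables that the Go linker can
// overwrite via -ldflags "-X <import-path>.<var>=<value>", as the workflow
// above does for version, variant, hash, time and branch.
package main

import "fmt"

var (
	version   = "dev"     // set via -X ...version.version
	variant   = "unknown" // set via -X ...version.variant
	hash      = "unknown" // set via -X ...version.hash
	buildTime = "unknown" // corresponds to -X ...version.time (renamed here to avoid clashing with the time package)
	branch    = "unknown" // set via -X ...version.branch
)

func main() {
	fmt.Printf("signoz %s (%s) commit=%s built=%s branch=%s\n", version, variant, hash, buildTime, branch)
}
```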
.github/workflows/build-enterprise.yaml (vendored, 115 changed lines)

@@ -1,115 +0,0 @@
name: build-enterprise

on:
  push:
    branches:
      - main
    tags:
      - v*

defaults:
  run:
    shell: bash

env:
  PRIMUS_HOME: .primus
  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk

jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
      version: ${{ steps.build-info.outputs.version }}
      hash: ${{ steps.build-info.outputs.hash }}
      time: ${{ steps.build-info.outputs.time }}
      branch: ${{ steps.build-info.outputs.branch }}
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - id: token
        name: github-token-gen
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PRIMUS_APP_ID }}
          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - name: primus-checkout
        uses: actions/checkout@v4
        with:
          repository: signoz/primus
          ref: ${{ inputs.PRIMUS_REF }}
          path: .primus
          token: ${{ steps.token.outputs.token }}
      - name: build-info
        id: build-info
        run: |
          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
      - name: set-docker-providers
        id: set-docker-providers
        run: |
          if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
            echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
          else
            echo "providers=gcp" >> $GITHUB_OUTPUT
          fi
      - name: create-dotenv
        run: |
          mkdir -p frontend
          echo 'CI=1' > frontend/.env
          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
          path: frontend/.env
          key: enterprise-dotenv-${{ github.sha }}
  js-build:
    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
    needs: prepare
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
      JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
      JS_INPUT_ARTIFACT_PATH: frontend/.env
      JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
    needs: [prepare, js-build]
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./ee/query-service
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
      DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}
.github/workflows/build.yaml (vendored, new file, 122 lines)

@@ -0,0 +1,122 @@
name: build

on:
  push:
    branches:
      - main
    tags:
      - v*

jobs:
  enterprise:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: setup
        uses: actions/setup-go@v5
        with:
          go-version: "1.22"
      - name: setup-qemu
        uses: docker/setup-qemu-action@v3
      - name: setup-buildx
        uses: docker/setup-buildx-action@v3
        with:
          version: latest
      - name: docker-login
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: create-env-file
        run: |
          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
      - name: github-ref-info
        shell: bash
        run: |
          GH_REF=${{ github.ref }}
          if [[ "${{ github.ref_type }}" == "tag" ]]; then
            PREFIX="refs/tags/"
            echo "GH_IS_TAG=true" >> $GITHUB_ENV
            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          else
            PREFIX="refs/heads/"
            echo "GH_IS_TAG=false" >> $GITHUB_ENV
            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          fi
      - name: set-version
        run: |
          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
            echo "VERSION=latest" >> $GITHUB_ENV
          else
            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
          fi
      - name: cross-compilation-tools
        run: |
          set -ex
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
      - name: publish
        run: make docker-buildx-enterprise

  community:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: setup-go
        uses: actions/setup-go@v5
        with:
          go-version: "1.22"
      - name: setup-qemu
        uses: docker/setup-qemu-action@v3
      - name: setup-buildx
        uses: docker/setup-buildx-action@v3
        with:
          version: latest
      - name: docker-login
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: github-ref-info
        shell: bash
        run: |
          GH_REF=${{ github.ref }}
          if [[ "${{ github.ref_type }}" == "tag" ]]; then
            PREFIX="refs/tags/"
            echo "GH_IS_TAG=true" >> $GITHUB_ENV
            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          else
            PREFIX="refs/heads/"
            echo "GH_IS_TAG=false" >> $GITHUB_ENV
            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          fi
      - name: set-version
        run: |
          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
            echo "VERSION=latest" >> $GITHUB_ENV
          else
            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
          fi
      - name: cross-compilation-tools
        run: |
          set -ex
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
      - name: publish
        run: make docker-buildx-community
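The github-ref-info and set-version steps above reduce to simple string handling: strip the refs/tags/ or refs/heads/ prefix from github.ref, then map a tag to its own name, main to latest, and any other branch to its branch name. A hedged, standalone re-statement in Go (inputs mirror github.ref and github.ref_type; this is not repository code):

```go
package main

import (
	"fmt"
	"strings"
)

// resolveVersion mirrors the set-version step: tag name for tags,
// "latest" for main, and the branch name otherwise.
func resolveVersion(ref, refType string) string {
	if refType == "tag" {
		return strings.TrimPrefix(ref, "refs/tags/") // refs/tags/v0.76.2 -> v0.76.2
	}
	branch := strings.TrimPrefix(ref, "refs/heads/")
	if branch == "main" {
		return "latest"
	}
	return branch
}

func main() {
	fmt.Println(resolveVersion("refs/tags/v0.76.2", "tag"))  // v0.76.2
	fmt.Println(resolveVersion("refs/heads/main", "branch")) // latest
	fmt.Println(resolveVersion("refs/heads/dev", "branch"))  // dev
}
```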
.gitignore (vendored, 1 changed line)

@@ -54,7 +54,6 @@ ee/query-service/tests/test-deploy/data/
bin/
.local/
*/query-service/queries.active
ee/query-service/db

# e2e
@@ -1,17 +0,0 @@
#### Auto generated by make docker-version-alpine. DO NOT EDIT! ####
amd64=029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85
unknown=5fea95373b9ec85974843f31446fa6a9df4492dddae4e1cb056193c34a20a5be
arm=b4aef1a899e0271f06d948c9a8fa626ecdb2202d3a178bc14775dd559e23df8e
unknown=a4d1e27e63a9d6353046eb25a2f0ec02945012b217f4364cd83a73fe6dfb0b15
arm=4fdafe217d0922f3c3e2b4f64cf043f8403a4636685cd9c51fea2cbd1f419740
unknown=7f21ac2018d95b2c51a5779c1d5ca6c327504adc3b0fdc747a6725d30b3f13c2
arm64=ea3c5a9671f7b3f7eb47eab06f73bc6591df978b0d5955689a9e6f943aa368c0
unknown=a8ba68c1a9e6eea8041b4b8f996c235163440808b9654a865976fdcbede0f433
386=dea9f02e103e837849f984d5679305c758aba7fea1b95b7766218597f61a05ab
unknown=3c6629bec05c8273a927d46b77428bf4a378dad911a0ae284887becdc149b734
ppc64le=0880443bffa028dfbbc4094a32dd6b7ac25684e4c0a3d50da9e0acae355c5eaf
unknown=bb48308f976b266e3ab39bbf9af84521959bd9c295d3c763690cf41f8df2a626
riscv64=d76e6fbe348ff20c2931bb7f101e49379648e026de95dd37f96e00ce1909dcf7
unknown=dd807544365f6dc187cbe6de0806adce2ea9de3e7124717d1d8e8b7a18b77b64
s390x=b815fadf80495594eb6296a6af0bc647ae5f193e0044e07acec7e5b378c9ce2d
unknown=74681be74a280a88abb53ff1e048eb1fb624b30d0066730df6d8afd02ba82e01
@@ -77,4 +77,4 @@ Need assistance? Join our Slack community:
## Where do I go from here?

- Set up your [development environment](docs/contributing/development.md)
- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md)
- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo/otel-demo-docs.md)
Makefile (4 changed lines)

@@ -74,10 +74,6 @@ go-run-enterprise: ## Runs the enterprise go backend server
		--use-logs-new-schema true \
		--use-trace-new-schema true

.PHONY: go-test
go-test: ## Runs go unit tests
	@go test -race ./...

.PHONY: go-run-community
go-run-community: ## Runs the community go backend server
	@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
@@ -72,6 +72,7 @@ sqlstore:
  # The path to the SQLite database file.
  path: /var/lib/signoz/signoz.db


##################### APIServer #####################
apiserver:
  timeout:

@@ -90,29 +91,20 @@ apiserver:
      - /api/v1/version
      - /


##################### TelemetryStore #####################
telemetrystore:
  # Specifies the telemetrystore provider to use.
  provider: clickhouse
  # Maximum number of idle connections in the connection pool.
  max_idle_conns: 50
  # Maximum number of open connections to the database.
  max_open_conns: 100
  # Maximum time to wait for a connection to be established.
  dial_timeout: 5s
  # Specifies the telemetrystore provider to use.
  provider: clickhouse
  clickhouse:
    # The DSN to use for clickhouse.
    dsn: tcp://localhost:9000

##################### Prometheus #####################
prometheus:
  active_query_tracker:
    # Whether to enable the active query tracker.
    enabled: true
    # The path to use for the active query tracker.
    path: ""
    # The maximum number of concurrent queries.
    max_concurrent: 20
  # The DSN to use for ClickHouse.
  dsn: http://localhost:9000

@@ -125,7 +117,7 @@ alertmanager:
  # The poll interval for periodically syncing the alertmanager with the config in the store.
  poll_interval: 1m
  # The URL under which Alertmanager is externally reachable (for example, if Alertmanager is served via a reverse proxy). Used for generating relative and absolute links back to Alertmanager itself.
  external_url: http://localhost:8080
  external_url: http://localhost:9093
  # The global configuration for the alertmanager. All the exahustive fields can be found in the upstream: https://github.com/prometheus/alertmanager/blob/efa05feffd644ba4accb526e98a8c6545d26a783/config/config.go#L833
  global:
    # ResolveTimeout is the time after which an alert is declared resolved if it has not been updated.
@@ -174,7 +174,7 @@ services:
  #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
    image: signoz/signoz:v0.78.1
    image: signoz/signoz:v0.76.2
    command:
      - --config=/root/config/prometheus.yml
      - --use-logs-new-schema=true

@@ -208,7 +208,7 @@ services:
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
    image: signoz/signoz-otel-collector:v0.111.38
    image: signoz/signoz-otel-collector:v0.111.34
    command:
      - --config=/etc/otel-collector-config.yaml
      - --manager-config=/etc/manager-config.yaml

@@ -232,7 +232,7 @@ services:
      - signoz
  schema-migrator:
    !!merge <<: *common
    image: signoz/signoz-schema-migrator:v0.111.38
    image: signoz/signoz-schema-migrator:v0.111.34
    deploy:
      restart_policy:
        condition: on-failure

@@ -110,7 +110,7 @@ services:
  #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
    image: signoz/signoz:v0.78.1
    image: signoz/signoz:v0.76.2
    command:
      - --config=/root/config/prometheus.yml
      - --use-logs-new-schema=true

@@ -143,7 +143,7 @@ services:
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
    image: signoz/signoz-otel-collector:v0.111.38
    image: signoz/signoz-otel-collector:v0.111.34
    command:
      - --config=/etc/otel-collector-config.yaml
      - --manager-config=/etc/manager-config.yaml

@@ -167,7 +167,7 @@ services:
      - signoz
  schema-migrator:
    !!merge <<: *common
    image: signoz/signoz-schema-migrator:v0.111.38
    image: signoz/signoz-schema-migrator:v0.111.34
    deploy:
      restart_policy:
        condition: on-failure

@@ -177,7 +177,7 @@ services:
  #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
    image: signoz/signoz:${VERSION:-v0.78.1}
    image: signoz/signoz:${VERSION:-v0.76.2}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml

@@ -212,7 +212,7 @@ services:
  # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
  otel-collector:
    !!merge <<: *db-depend
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
    container_name: signoz-otel-collector
    command:
      - --config=/etc/otel-collector-config.yaml

@@ -238,7 +238,7 @@ services:
        condition: service_healthy
  schema-migrator-sync:
    !!merge <<: *common
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-sync
    command:
      - sync

@@ -249,7 +249,7 @@ services:
        condition: service_healthy
  schema-migrator-async:
    !!merge <<: *db-depend
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-async
    command:
      - async

@@ -110,7 +110,7 @@ services:
  #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
    image: signoz/signoz:${VERSION:-v0.78.1}
    image: signoz/signoz:${VERSION:-v0.76.2}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml

@@ -146,7 +146,7 @@ services:
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
    container_name: signoz-otel-collector
    command:
      - --config=/etc/otel-collector-config.yaml

@@ -168,7 +168,7 @@ services:
        condition: service_healthy
  schema-migrator-sync:
    !!merge <<: *common
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-sync
    command:
      - sync

@@ -180,7 +180,7 @@ services:
    restart: on-failure
  schema-migrator-async:
    !!merge <<: *db-depend
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-async
    command:
      - async

@@ -110,7 +110,7 @@ services:
  #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
    image: signoz/signoz:${VERSION:-v0.78.1}
    image: signoz/signoz:${VERSION:-v0.76.2}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml

@@ -144,7 +144,7 @@ services:
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
    container_name: signoz-otel-collector
    command:
      - --config=/etc/otel-collector-config.yaml

@@ -166,7 +166,7 @@ services:
        condition: service_healthy
  schema-migrator-sync:
    !!merge <<: *common
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-sync
    command:
      - sync

@@ -178,7 +178,7 @@ services:
    restart: on-failure
  schema-migrator-async:
    !!merge <<: *db-depend
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
    container_name: schema-migrator-async
    command:
      - async
@@ -4,7 +4,6 @@ import (
    "net/http"
    "time"

    eeTypes "github.com/SigNoz/signoz/ee/types"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"

@@ -25,7 +24,7 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        var values []string
        var patToken string
        var pat eeTypes.StorablePersonalAccessToken
        var pat types.StorablePersonalAccessToken

        for _, header := range p.headers {
            values = append(values, r.Header.Get(header))

@@ -18,4 +18,4 @@ COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz

ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]
CMD ["-config", "/root/config/prometheus.yml"]
@@ -1,22 +0,0 @@
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"

FROM alpine@sha256:${ALPINE_SHA}
LABEL maintainer="signoz"
WORKDIR /root

ARG OS="linux"
ARG ARCH

RUN apk update && \
    apk add ca-certificates && \
    rm -rf /var/cache/apk/*

COPY ./target/${OS}-${ARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]
@@ -28,10 +28,11 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
    }

    dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
        Reader: dp.reader,
        Cache: dp.cache,
        KeyGenerator: queryBuilder.NewKeyGenerator(),
        FluxInterval: dp.fluxInterval,
        Reader: dp.reader,
        Cache: dp.cache,
        KeyGenerator: queryBuilder.NewKeyGenerator(),
        FluxInterval: dp.fluxInterval,
        FeatureLookup: dp.ff,
    })

    return dp

@@ -28,10 +28,11 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
    }

    hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
        Reader: hp.reader,
        Cache: hp.cache,
        KeyGenerator: queryBuilder.NewKeyGenerator(),
        FluxInterval: hp.fluxInterval,
        Reader: hp.reader,
        Cache: hp.cache,
        KeyGenerator: queryBuilder.NewKeyGenerator(),
        FluxInterval: hp.fluxInterval,
        FeatureLookup: hp.ff,
    })

    return hp

@@ -38,6 +38,12 @@ func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericPr
    }
}

func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
    return func(p T) {
        p.GetBaseSeasonalProvider().ff = ff
    }
}

func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
    return func(p T) {
        p.GetBaseSeasonalProvider().reader = reader

@@ -50,6 +56,7 @@ type BaseSeasonalProvider struct {
    fluxInterval time.Duration
    cache cache.Cache
    keyGenerator cache.KeyGenerator
    ff interfaces.FeatureLookup
}

func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {

@@ -306,9 +313,6 @@ func (p *BaseSeasonalProvider) getScore(
    series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
) float64 {
    expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
    if expectedValue < 0 {
        expectedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
    }
    return (value - expectedValue) / p.getStdDev(weekSeries)
}
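The getScore body above is essentially a z-score: the observed value minus the expected value, divided by the standard deviation of the reference (weekly) series, with a moving-average fallback when the expected value is negative. A standalone illustration with made-up numbers (not repository code):

```go
package main

import "fmt"

// score mirrors the final expression in getScore: how many standard
// deviations the observed value sits away from the expected value.
func score(value, expected, stdDev float64) float64 {
	return (value - expected) / stdDev
}

func main() {
	// observed 130 req/s against an expected 100 req/s with stddev 15 -> 2.0
	fmt.Println(score(130, 100, 15))
}
```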
@@ -27,10 +27,11 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
    }

    wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
        Reader: wp.reader,
        Cache: wp.cache,
        KeyGenerator: queryBuilder.NewKeyGenerator(),
        FluxInterval: wp.fluxInterval,
        Reader: wp.reader,
        Cache: wp.cache,
        KeyGenerator: queryBuilder.NewKeyGenerator(),
        FluxInterval: wp.fluxInterval,
        FeatureLookup: wp.ff,
    })

    return wp
@@ -11,8 +11,6 @@ import (
    "github.com/SigNoz/signoz/ee/query-service/license"
    "github.com/SigNoz/signoz/ee/query-service/usage"
    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/modules/preference"
    preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
    baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
    "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
    "github.com/SigNoz/signoz/pkg/query-service/app/integrations"

@@ -23,7 +21,6 @@ import (
    rules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/preferencetypes"
    "github.com/SigNoz/signoz/pkg/version"
    "github.com/gorilla/mux"
)

@@ -57,7 +54,6 @@ type APIHandler struct {

// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
    preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))

    baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
        Reader: opts.DataConnector,

@@ -75,7 +71,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
        UseTraceNewSchema: opts.UseTraceNewSchema,
        AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
        Signoz: signoz,
        Preference: preference,
    })

    if err != nil {

@@ -162,6 +157,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
    router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
    router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
    router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)

    // PAT APIs
    router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)

@@ -11,7 +11,7 @@ import (
    "time"

    "github.com/SigNoz/signoz/ee/query-service/constants"
    eeTypes "github.com/SigNoz/signoz/ee/types"
    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/pkg/query-service/auth"
    baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
    "github.com/SigNoz/signoz/pkg/query-service/dao"

@@ -135,12 +135,19 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
        zap.String("cloudProvider", cloudProvider),
    )

    newPAT := eeTypes.NewGettablePAT(
        integrationPATName,
        baseconstants.ViewerGroup,
        integrationUser.ID,
        0,
    )
    newPAT := model.PAT{
        StorablePersonalAccessToken: types.StorablePersonalAccessToken{
            Token: generatePATToken(),
            UserID: integrationUser.ID,
            Name: integrationPATName,
            Role: baseconstants.ViewerGroup,
            ExpiresAt: 0,
            TimeAuditable: types.TimeAuditable{
                CreatedAt: time.Now(),
                UpdatedAt: time.Now(),
            },
        },
    }
    integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
    if err != nil {
        return "", basemodel.InternalError(fmt.Errorf(
@@ -2,24 +2,31 @@ package api

import (
    "context"
    "crypto/rand"
    "encoding/base64"
    "encoding/json"
    "fmt"
    "net/http"
    "time"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/ee/types"
    eeTypes "github.com/SigNoz/signoz/ee/types"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/http/render"
    "github.com/SigNoz/signoz/pkg/query-service/auth"
    baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/gorilla/mux"
    "go.uber.org/zap"
)

func generatePATToken() string {
    // Generate a 32-byte random token.
    token := make([]byte, 32)
    rand.Read(token)
    // Encode the token in base64.
    encodedToken := base64.StdEncoding.EncodeToString(token)
    return encodedToken
}

func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
    ctx := context.Background()

@@ -36,18 +43,31 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
        }, nil)
        return
    }
    pat := eeTypes.NewGettablePAT(
        req.Name,
        req.Role,
        user.ID,
        req.ExpiresInDays,
    )
    pat := model.PAT{
        StorablePersonalAccessToken: types.StorablePersonalAccessToken{
            Name: req.Name,
            Role: req.Role,
            ExpiresAt: req.ExpiresInDays,
        },
    }
    err = validatePATRequest(pat)
    if err != nil {
        RespondError(w, model.BadRequest(err), nil)
        return
    }

    // All the PATs are associated with the user creating the PAT.
    pat.UserID = user.ID
    pat.CreatedAt = time.Now()
    pat.UpdatedAt = time.Now()
    pat.LastUsed = 0
    pat.Token = generatePATToken()

    if pat.ExpiresAt != 0 {
        // convert expiresAt to unix timestamp from days
        pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
    }

    zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
    var apierr basemodel.BaseApiError
    if pat, apierr = ah.AppDao().CreatePAT(ctx, user.OrgID, pat); apierr != nil {

@@ -58,7 +78,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
    ah.Respond(w, &pat)
}

func validatePATRequest(req types.GettablePAT) error {
func validatePATRequest(req model.PAT) error {
    if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
        return fmt.Errorf("valid role is required")
    }

@@ -74,7 +94,7 @@ func validatePATRequest(req types.GettablePAT) error {
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
    ctx := context.Background()

    req := types.GettablePAT{}
    req := model.PAT{}
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        RespondError(w, model.BadRequest(err), nil)
        return

@@ -96,12 +116,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
    }

    req.UpdatedByUserID = user.ID
    idStr := mux.Vars(r)["id"]
    id, err := valuer.NewUUID(idStr)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
        return
    }
    id := mux.Vars(r)["id"]
    req.UpdatedAt = time.Now()
    zap.L().Info("Got Update PAT request", zap.Any("pat", req))
    var apierr basemodel.BaseApiError

@@ -134,12 +149,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {

func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
    ctx := context.Background()
    idStr := mux.Vars(r)["id"]
    id, err := valuer.NewUUID(idStr)
    if err != nil {
        render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
        return
    }
    id := mux.Vars(r)["id"]
    user, err := auth.GetUserFromReqContext(r.Context())
    if err != nil {
        RespondError(w, &model.ApiError{

@@ -149,7 +159,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
        return
    }

    zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
    zap.L().Info("Revoke PAT with id", zap.String("id", id))
    if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
        RespondError(w, apierr, nil)
        return
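The ExpiresAt handling in createPAT above converts an expiry given in days into an absolute unix timestamp, with 0 meaning "never expires". A short standalone restatement of that arithmetic (not repository code):

```go
package main

import (
	"fmt"
	"time"
)

// expiryUnix mirrors the conversion in createPAT: days-from-now becomes an
// absolute unix timestamp; 0 is passed through and means "no expiry".
func expiryUnix(expiresInDays int64, now time.Time) int64 {
	if expiresInDays == 0 {
		return 0
	}
	return now.Unix() + expiresInDays*24*60*60
}

func main() {
	now := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC)
	fmt.Println(expiryUnix(30, now)) // unix timestamp 30 days after 2025-01-01
	fmt.Println(expiryUnix(0, now))  // 0, i.e. never expires
}
```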
@@ -88,24 +88,28 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
            anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
            anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
            anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
            anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
        )
    case anomaly.SeasonalityDaily:
        provider = anomaly.NewDailyProvider(
            anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
            anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
            anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
            anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
        )
    case anomaly.SeasonalityHourly:
        provider = anomaly.NewHourlyProvider(
            anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
            anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
            anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
            anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
        )
    default:
        provider = anomaly.NewDailyProvider(
            anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
            anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
            anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
            anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
        )
    }
    anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})

ee/query-service/app/api/traces.go (new file, 33 lines)
@@ -0,0 +1,33 @@
package api

import (
    "net/http"

    "github.com/SigNoz/signoz/ee/query-service/app/db"
    "github.com/SigNoz/signoz/ee/query-service/model"
    baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    "go.uber.org/zap"
)

func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {

    if !ah.CheckFeature(basemodel.SmartTraceDetail) {
        zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
        ah.APIHandler.SearchTraces(w, r)
        return
    }
    searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
    if err != nil {
        RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
        return
    }

    result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
    if ah.HandleError(w, err, http.StatusBadRequest) {
        return
    }

    ah.WriteJSON(w, r, result)

}
@@ -5,33 +5,38 @@ import (

    "github.com/ClickHouse/clickhouse-go/v2"

    "github.com/jmoiron/sqlx"

    "github.com/SigNoz/signoz/pkg/cache"
    "github.com/SigNoz/signoz/pkg/prometheus"
    basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/query-service/interfaces"
)

type ClickhouseReader struct {
    conn clickhouse.Conn
    appdb sqlstore.SQLStore
    appdb *sqlx.DB
    *basechr.ClickHouseReader
}

func NewDataConnector(
    sqlDB sqlstore.SQLStore,
    telemetryStore telemetrystore.TelemetryStore,
    prometheus prometheus.Prometheus,
    localDB *sqlx.DB,
    ch clickhouse.Conn,
    promConfigPath string,
    lm interfaces.FeatureLookup,
    cluster string,
    useLogsNewSchema bool,
    useTraceNewSchema bool,
    fluxIntervalForTraceDetail time.Duration,
    cache cache.Cache,
) *ClickhouseReader {
    chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
    chReader := basechr.NewReader(localDB, ch, promConfigPath, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
    return &ClickhouseReader{
        conn: telemetryStore.ClickhouseDB(),
        appdb: sqlDB,
        conn: ch,
        appdb: localDB,
        ClickHouseReader: chReader,
    }
}

func (r *ClickhouseReader) Start(readerReady chan bool) {
    r.ClickHouseReader.Start(readerReady)
}
@@ -1,16 +1,17 @@
package smart
package db

import (
    "errors"
    "strconv"

    "github.com/SigNoz/signoz/ee/query-service/model"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    "go.uber.org/zap"
)

// SmartTraceAlgorithm is an algorithm to find the target span and build a tree of spans around it with the given levelUp and levelDown parameters and the given spanLimit
func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) {
    var spans []*SpanForTraceDetails
    var spans []*model.SpanForTraceDetails

    // if targetSpanId is null or not present then randomly select a span as targetSpanId
    if (targetSpanId == "" || targetSpanId == "null") && len(payload) > 0 {

@@ -23,7 +24,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
    if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
        parentID = spanItem.References[0].SpanId
    }
    span := &SpanForTraceDetails{
    span := &model.SpanForTraceDetails{
        TimeUnixNano: spanItem.TimeUnixNano,
        SpanID: spanItem.SpanID,
        TraceID: spanItem.TraceID,

@@ -44,7 +45,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
    if err != nil {
        return nil, err
    }
    targetSpan := &SpanForTraceDetails{}
    targetSpan := &model.SpanForTraceDetails{}

    // Find the target span in the span trees
    for _, root := range roots {

@@ -64,7 +65,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
    }

    // Build the final result
    parents := []*SpanForTraceDetails{}
    parents := []*model.SpanForTraceDetails{}

    // Get the parent spans of the target span up to the given levelUp parameter and spanLimit
    preParent := targetSpan

@@ -89,11 +90,11 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
    }

    // Get the child spans of the target span until the given levelDown and spanLimit
    preParents := []*SpanForTraceDetails{targetSpan}
    children := []*SpanForTraceDetails{}
    preParents := []*model.SpanForTraceDetails{targetSpan}
    children := []*model.SpanForTraceDetails{}

    for i := 0; i < levelDown && len(preParents) != 0 && spanLimit > 0; i++ {
        parents := []*SpanForTraceDetails{}
        parents := []*model.SpanForTraceDetails{}
        for _, parent := range preParents {
            if spanLimit-len(parent.Children) <= 0 {
                children = append(children, parent.Children[:spanLimit]...)

@@ -107,7 +108,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
    }

    // Store the final list of spans in the resultSpanSet map to avoid duplicates
    resultSpansSet := make(map[*SpanForTraceDetails]struct{})
    resultSpansSet := make(map[*model.SpanForTraceDetails]struct{})
    resultSpansSet[targetSpan] = struct{}{}
    for _, parent := range parents {
        resultSpansSet[parent] = struct{}{}

@@ -168,12 +169,12 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
}

// buildSpanTrees builds trees of spans from a list of spans.
func buildSpanTrees(spansPtr *[]*SpanForTraceDetails) ([]*SpanForTraceDetails, error) {
func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTraceDetails, error) {

    // Build a map of spanID to span for fast lookup
    var roots []*SpanForTraceDetails
    var roots []*model.SpanForTraceDetails
    spans := *spansPtr
    mapOfSpans := make(map[string]*SpanForTraceDetails, len(spans))
    mapOfSpans := make(map[string]*model.SpanForTraceDetails, len(spans))

    for _, span := range spans {
        if span.ParentID == "" {

@@ -205,8 +206,8 @@ func buildSpanTrees(spansPtr *[]*SpanForTraceDetails) ([]*SpanForTraceDetails, e
}

// breadthFirstSearch performs a breadth-first search on the span tree to find the target span.
func breadthFirstSearch(spansPtr *SpanForTraceDetails, targetId string) (*SpanForTraceDetails, error) {
    queue := []*SpanForTraceDetails{spansPtr}
func breadthFirstSearch(spansPtr *model.SpanForTraceDetails, targetId string) (*model.SpanForTraceDetails, error) {
    queue := []*model.SpanForTraceDetails{spansPtr}
    visited := make(map[string]bool)

    for len(queue) > 0 {
@@ -18,14 +18,13 @@ import (
    "github.com/SigNoz/signoz/ee/query-service/constants"
    "github.com/SigNoz/signoz/ee/query-service/dao"
    "github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
    "github.com/SigNoz/signoz/ee/query-service/interfaces"
    "github.com/SigNoz/signoz/ee/query-service/rules"
    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    "github.com/SigNoz/signoz/pkg/prometheus"
    "github.com/SigNoz/signoz/pkg/query-service/auth"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/web"

@@ -44,11 +43,13 @@ import (
    "github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
    "github.com/SigNoz/signoz/pkg/query-service/app/opamp"
    opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
    "github.com/SigNoz/signoz/pkg/query-service/app/preferences"
    "github.com/SigNoz/signoz/pkg/query-service/cache"
    baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
    "github.com/SigNoz/signoz/pkg/query-service/healthcheck"
    baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    pqle "github.com/SigNoz/signoz/pkg/query-service/pqlEngine"
    baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/query-service/telemetry"
    "github.com/SigNoz/signoz/pkg/query-service/utils"

@@ -115,6 +116,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
        return nil, err
    }

    if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
        return nil, err
    }

    if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
        return nil, err
    }

@@ -132,22 +137,27 @@
    // set license manager as feature flag provider in dao
    modelDao.SetFlagProvider(lm)
    readerReady := make(chan bool)

    fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
    if err != nil {
        return nil, err
    }

    reader := db.NewDataConnector(
        serverOptions.SigNoz.SQLStore,
        serverOptions.SigNoz.TelemetryStore,
        serverOptions.SigNoz.Prometheus,
    var reader interfaces.DataConnector
    qb := db.NewDataConnector(
        serverOptions.SigNoz.SQLStore.SQLxDB(),
        serverOptions.SigNoz.TelemetryStore.ClickHouseDB(),
        serverOptions.PromConfigPath,
        lm,
        serverOptions.Cluster,
        serverOptions.UseLogsNewSchema,
        serverOptions.UseTraceNewSchema,
        fluxIntervalForTraceDetail,
        serverOptions.SigNoz.Cache,
    )
    go qb.Start(readerReady)
    reader = qb

    skipConfig := &basemodel.SkipConfig{}
    if serverOptions.SkipTopLvlOpsPath != "" {

@@ -166,18 +176,19 @@
        c = cache.NewCache(cacheOpts)
    }

    <-readerReady
    rm, err := makeRulesManager(
        serverOptions.PromConfigPath,
        serverOptions.RuleRepoURL,
        serverOptions.SigNoz.SQLStore.SQLxDB(),
        reader,
        c,
        serverOptions.DisableRules,
        lm,
        serverOptions.UseLogsNewSchema,
        serverOptions.UseTraceNewSchema,
        serverOptions.SigNoz.Alertmanager,
        serverOptions.SigNoz.SQLStore,
        serverOptions.SigNoz.TelemetryStore,
        serverOptions.SigNoz.Prometheus,
    )

    if err != nil {

@@ -222,7 +233,7 @@
    }

    // start the usagemanager
    usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.Config.TelemetryStore.Clickhouse.DSN)
    usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickHouseDB(), serverOptions.Config.TelemetryStore.ClickHouse.DSN)
    if err != nil {
        return nil, err
    }

@@ -293,7 +304,7 @@
        &opAmpModel.AllAgents, agentConfMgr,
    )

    errorList := reader.PreloadMetricsMetadata(context.Background())
    errorList := qb.PreloadMetricsMetadata(context.Background())
    for _, er := range errorList {
        zap.L().Error("failed to preload metrics metadata", zap.Error(er))
    }

@@ -526,27 +537,33 @@ func (s *Server) Stop() error {
}

func makeRulesManager(
    promConfigPath,
    ruleRepoURL string,
    db *sqlx.DB,
    ch baseint.Reader,
    cache cache.Cache,
    disableRules bool,
    fm baseint.FeatureLookup,
    useLogsNewSchema bool,
    useTraceNewSchema bool,
    alertmanager alertmanager.Alertmanager,
    sqlstore sqlstore.SQLStore,
    telemetryStore telemetrystore.TelemetryStore,
    prometheus prometheus.Prometheus,
) (*baserules.Manager, error) {
    // create engine
    pqle, err := pqle.FromConfigPath(promConfigPath)
    if err != nil {
        return nil, fmt.Errorf("failed to create pql engine : %v", err)
    }

    // create manager opts
    managerOpts := &baserules.ManagerOptions{
        TelemetryStore: telemetryStore,
        Prometheus: prometheus,
        PqlEngine: pqle,
        RepoURL: ruleRepoURL,
        DBConn: db,
        Context: context.Background(),
        Logger: zap.L(),
        DisableRules: disableRules,
        FeatureFlags: fm,
        Reader: ch,
        Cache: cache,
        EvalDelay: baseconst.GetEvalDelay(),
@@ -4,13 +4,13 @@ import (
    "context"
    "net/url"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/ee/types"
    basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
    baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    ossTypes "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/google/uuid"
    "github.com/uptrace/bun"
)

@@ -36,11 +36,11 @@ type ModelDao interface {
    DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
    GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)

    CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
    UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
    GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
    GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
    CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError)
    UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError
    GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
    GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError)
    GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
    ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
    RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
    ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError)
    RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError
}
@@ -6,53 +6,45 @@ import (
    "time"

    "github.com/SigNoz/signoz/ee/query-service/model"
    "github.com/SigNoz/signoz/ee/types"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    ossTypes "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"

    "github.com/SigNoz/signoz/pkg/types"
    "go.uber.org/zap"
)

func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError) {
    p.StorablePersonalAccessToken.OrgID = orgID
    p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
    _, err := m.DB().NewInsert().
        Model(&p.StorablePersonalAccessToken).
        Returning("id").
        Exec(ctx)
    if err != nil {
        zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
        return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
        return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
    }

    createdByUser, _ := m.GetUser(ctx, p.UserID)
    if createdByUser == nil {
        p.CreatedByUser = types.PatUser{
        p.CreatedByUser = model.User{
            NotFound: true,
        }
    } else {
        p.CreatedByUser = types.PatUser{
            User: ossTypes.User{
                ID: createdByUser.ID,
                Name: createdByUser.Name,
                Email: createdByUser.Email,
                TimeAuditable: ossTypes.TimeAuditable{
                    CreatedAt: createdByUser.CreatedAt,
                    UpdatedAt: createdByUser.UpdatedAt,
                },
                ProfilePictureURL: createdByUser.ProfilePictureURL,
            },
            NotFound: false,
        p.CreatedByUser = model.User{
            Id: createdByUser.ID,
            Name: createdByUser.Name,
            Email: createdByUser.Email,
            CreatedAt: createdByUser.CreatedAt.Unix(),
            ProfilePictureURL: createdByUser.ProfilePictureURL,
            NotFound: false,
        }
    }
    return p, nil
}

func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError {
    _, err := m.DB().NewUpdate().
        Model(&p.StorablePersonalAccessToken).
        Column("role", "name", "updated_at", "updated_by_user_id").
        Where("id = ?", id.StringValue()).
        Where("id = ?", id).
        Where("org_id = ?", orgID).
        Where("revoked = false").
        Exec(ctx)

@@ -63,7 +55,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.Gettable
    return nil
}

func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError) {
    pats := []types.StorablePersonalAccessToken{}

    if err := m.DB().NewSelect().

@@ -76,51 +68,41 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
        return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
    }

    patsWithUsers := []types.GettablePAT{}
    patsWithUsers := []model.PAT{}
    for i := range pats {
        patWithUser := types.GettablePAT{
        patWithUser := model.PAT{
            StorablePersonalAccessToken: pats[i],
        }

        createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
        if createdByUser == nil {
            patWithUser.CreatedByUser = types.PatUser{
            patWithUser.CreatedByUser = model.User{
                NotFound: true,
            }
        } else {
            patWithUser.CreatedByUser = types.PatUser{
                User: ossTypes.User{
                    ID: createdByUser.ID,
                    Name: createdByUser.Name,
                    Email: createdByUser.Email,
                    TimeAuditable: ossTypes.TimeAuditable{
                        CreatedAt: createdByUser.CreatedAt,
                        UpdatedAt: createdByUser.UpdatedAt,
                    },
                    ProfilePictureURL: createdByUser.ProfilePictureURL,
                },
                NotFound: false,
            patWithUser.CreatedByUser = model.User{
                Id: createdByUser.ID,
                Name: createdByUser.Name,
                Email: createdByUser.Email,
                CreatedAt: createdByUser.CreatedAt.Unix(),
                ProfilePictureURL: createdByUser.ProfilePictureURL,
                NotFound: false,
            }
        }

        updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
        if updatedByUser == nil {
            patWithUser.UpdatedByUser = types.PatUser{
            patWithUser.UpdatedByUser = model.User{
                NotFound: true,
            }
        } else {
            patWithUser.UpdatedByUser = types.PatUser{
                User: ossTypes.User{
                    ID: updatedByUser.ID,
                    Name: updatedByUser.Name,
                    Email: updatedByUser.Email,
                    TimeAuditable: ossTypes.TimeAuditable{
                        CreatedAt: updatedByUser.CreatedAt,
                        UpdatedAt: updatedByUser.UpdatedAt,
                    },
                    ProfilePictureURL: updatedByUser.ProfilePictureURL,
                },
                NotFound: false,
            patWithUser.UpdatedByUser = model.User{
                Id: updatedByUser.ID,
                Name: updatedByUser.Name,
                Email: updatedByUser.Email,
                CreatedAt: updatedByUser.CreatedAt.Unix(),
                ProfilePictureURL: updatedByUser.ProfilePictureURL,
                NotFound: false,
            }
        }

@@ -129,14 +111,14 @@
    return patsWithUsers, nil
}

func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError {
    updatedAt := time.Now().Unix()
|
||||
_, err := m.DB().NewUpdate().
|
||||
Model(&types.StorablePersonalAccessToken{}).
|
||||
Set("revoked = ?", true).
|
||||
Set("updated_by_user_id = ?", userID).
|
||||
Set("updated_at = ?", updatedAt).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Where("id = ?", id).
|
||||
Where("org_id = ?", orgID).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
@@ -146,7 +128,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID,
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
|
||||
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
|
||||
pats := []types.StorablePersonalAccessToken{}
|
||||
|
||||
if err := m.DB().NewSelect().
|
||||
@@ -164,19 +146,19 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT
|
||||
}
|
||||
}
|
||||
|
||||
patWithUser := types.GettablePAT{
|
||||
patWithUser := model.PAT{
|
||||
StorablePersonalAccessToken: pats[0],
|
||||
}
|
||||
|
||||
return &patWithUser, nil
|
||||
}
|
||||
|
||||
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
|
||||
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError) {
|
||||
pats := []types.StorablePersonalAccessToken{}
|
||||
|
||||
if err := m.DB().NewSelect().
|
||||
Model(&pats).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Where("id = ?", id).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("revoked = false").
|
||||
Scan(ctx); err != nil {
|
||||
@@ -190,7 +172,7 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID)
|
||||
}
|
||||
}
|
||||
|
||||
patWithUser := types.GettablePAT{
|
||||
patWithUser := model.PAT{
|
||||
StorablePersonalAccessToken: pats[0],
|
||||
}
|
||||
|
||||
@@ -198,8 +180,8 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID)
|
||||
}
|
||||
|
||||
// deprecated
|
||||
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError) {
|
||||
users := []ossTypes.GettableUser{}
|
||||
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*types.GettableUser, basemodel.BaseApiError) {
|
||||
users := []types.GettableUser{}
|
||||
|
||||
if err := m.DB().NewSelect().
|
||||
Model(&users).
|
||||
|
||||
@@ -7,5 +7,6 @@ import (

// Connector defines methods for interaction
// with o11y data, for example ClickHouse.
type DataConnector interface {
	Start(readerReady chan bool)
	baseint.Reader
}
|
||||
|
||||
@@ -7,17 +7,17 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/app"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/config"
|
||||
"github.com/SigNoz/signoz/pkg/config/envprovider"
|
||||
"github.com/SigNoz/signoz/pkg/config/fileprovider"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
|
||||
prommodel "github.com/prometheus/common/model"
|
||||
|
||||
"go.uber.org/zap"
|
||||
"go.uber.org/zap/zapcore"
|
||||
)
|
||||
@@ -30,6 +30,10 @@ func initZapLog() *zap.Logger {
|
||||
return logger
|
||||
}
|
||||
|
||||
func init() {
|
||||
prommodel.NameValidationScheme = prommodel.UTF8Validation
|
||||
}
|
||||
|
||||
func main() {
|
||||
var promConfigPath, skipTopLvlOpsPath string
|
||||
|
||||
@@ -83,7 +87,6 @@ func main() {
|
||||
MaxIdleConns: maxIdleConns,
|
||||
MaxOpenConns: maxOpenConns,
|
||||
DialTimeout: dialTimeout,
|
||||
Config: promConfigPath,
|
||||
})
|
||||
if err != nil {
|
||||
zap.L().Fatal("Failed to create config", zap.Error(err))
|
||||
@@ -91,21 +94,16 @@ func main() {
|
||||
|
||||
version.Info.PrettyPrint(config.Version)
|
||||
|
||||
sqlStoreFactories := signoz.NewSQLStoreProviderFactories()
|
||||
if err := sqlStoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
|
||||
zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
|
||||
}
|
||||
|
||||
signoz, err := signoz.New(
|
||||
context.Background(),
|
||||
config,
|
||||
signoz.NewCacheProviderFactories(),
|
||||
signoz.NewWebProviderFactories(),
|
||||
sqlStoreFactories,
|
||||
signoz.NewSQLStoreProviderFactories(),
|
||||
signoz.NewTelemetryStoreProviderFactories(),
|
||||
)
|
||||
if err != nil {
|
||||
zap.L().Fatal("Failed to create signoz", zap.Error(err))
|
||||
zap.L().Fatal("Failed to create signoz struct", zap.Error(err))
|
||||
}
|
||||
|
||||
jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
|
||||
|
||||
@@ -157,6 +157,8 @@ func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
|
||||
}
|
||||
|
||||
switch planName {
|
||||
case PlanNameTeams:
|
||||
features = append(features, ProPlan...)
|
||||
case PlanNameEnterprise:
|
||||
features = append(features, EnterprisePlan...)
|
||||
case PlanNameBasic:
|
||||
|
||||
@@ -74,21 +74,21 @@ func TestNewLicenseV3(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "Parse the entire license properly",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
"name": "TEAMS",
|
||||
},
|
||||
"category": "FREE",
|
||||
"status": "ACTIVE",
|
||||
"valid_from": float64(1730899309),
|
||||
"valid_until": float64(-1),
|
||||
},
|
||||
PlanName: PlanNameEnterprise,
|
||||
PlanName: PlanNameTeams,
|
||||
ValidFrom: 1730899309,
|
||||
ValidUntil: -1,
|
||||
Status: "ACTIVE",
|
||||
@@ -98,14 +98,14 @@ func TestNewLicenseV3(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "Fallback to basic plan if license status is invalid",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
"name": "TEAMS",
|
||||
},
|
||||
"category": "FREE",
|
||||
"status": "INVALID",
|
||||
@@ -122,21 +122,21 @@ func TestNewLicenseV3(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "fallback states for validFrom and validUntil",
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
|
||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
|
||||
pass: true,
|
||||
expected: &LicenseV3{
|
||||
ID: "does-not-matter",
|
||||
Key: "does-not-matter-key",
|
||||
Data: map[string]interface{}{
|
||||
"plan": map[string]interface{}{
|
||||
"name": "ENTERPRISE",
|
||||
"name": "TEAMS",
|
||||
},
|
||||
"valid_from": 1234.456,
|
||||
"valid_until": 5678.567,
|
||||
"category": "FREE",
|
||||
"status": "ACTIVE",
|
||||
},
|
||||
PlanName: PlanNameEnterprise,
|
||||
PlanName: PlanNameTeams,
|
||||
ValidFrom: 1234,
|
||||
ValidUntil: 5678,
|
||||
Status: "ACTIVE",
|
||||
|
||||
@@ -1,7 +1,25 @@
|
||||
package model

import "github.com/SigNoz/signoz/pkg/types"

type User struct {
	Id                string `json:"id" db:"id"`
	Name              string `json:"name" db:"name"`
	Email             string `json:"email" db:"email"`
	CreatedAt         int64  `json:"createdAt" db:"created_at"`
	ProfilePictureURL string `json:"profilePictureURL" db:"profile_picture_url"`
	NotFound          bool   `json:"notFound"`
}

type CreatePATRequestBody struct {
	Name          string `json:"name"`
	Role          string `json:"role"`
	ExpiresInDays int64  `json:"expiresInDays"`
}

type PAT struct {
	CreatedByUser User `json:"createdByUser"`
	UpdatedByUser User `json:"updatedByUser"`

	types.StorablePersonalAccessToken
}
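As a hedged sketch, constructing the wrapper type above by hand might look like this. The field values are placeholders and the function is an assumption; only the field names come from this diff.

// examplePAT builds a PAT with an embedded storable token; time and types imports assumed.
func examplePAT() PAT {
	return PAT{
		CreatedByUser: User{
			Id:        "u1",
			Name:      "Jane Doe",
			Email:     "jane@example.com",
			CreatedAt: time.Now().Unix(),
		},
		StorablePersonalAccessToken: types.StorablePersonalAccessToken{
			Name:   "ci-token",
			Role:   "ADMIN",
			UserID: "u1",
		},
	}
}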
|
||||
|
||||
@@ -1,26 +1,30 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
)
|
||||
|
||||
const SSO = "SSO"
const Basic = "BASIC_PLAN"
const Pro = "PRO_PLAN"
const Enterprise = "ENTERPRISE_PLAN"

var (
	PlanNameEnterprise = "ENTERPRISE"
	PlanNameTeams      = "TEAMS"
	PlanNameBasic      = "BASIC"
)

var (
	MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
	MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameTeams: Pro, PlanNameEnterprise: Enterprise}
)
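A small, hedged illustration of the mapping above, translating a plan name into the legacy plan key. The helper and the fallback to Basic are assumptions, not part of the diff.

// legacyPlanKey is illustrative only; it uses the constants and map defined above.
func legacyPlanKey(planName string) string {
	if key, ok := MapOldPlanKeyToNewPlanName[planName]; ok {
		return key // e.g. PlanNameTeams maps to "PRO_PLAN" after this change
	}
	return Basic // assumed fallback for unknown plan names
}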
|
||||
|
||||
var (
|
||||
LicenseStatusInvalid = "INVALID"
|
||||
)
|
||||
|
||||
const DisableUpsell = "DISABLE_UPSELL"
|
||||
const Onboarding = "ONBOARDING"
|
||||
const ChatSupport = "CHAT_SUPPORT"
|
||||
const Gateway = "GATEWAY"
|
||||
@@ -34,6 +38,90 @@ var BasicPlan = basemodel.FeatureSet{
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.OSS,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: DisableUpsell,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.SmartTraceDetail,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.CustomMetricsFunction,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.QueryBuilderPanels,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.QueryBuilderAlerts,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelSlack,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelWebhook,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelPagerduty,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelOpsgenie,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelEmail,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelMsTeams,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
@@ -63,12 +151,134 @@ var BasicPlan = basemodel.FeatureSet{
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.TraceFunnels,
|
||||
Name: basemodel.HostsInfraMonitoring,
|
||||
Active: constants.EnableHostsInfraMonitoring(),
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
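For context, a hedged sketch of how a feature set like BasicPlan above might be queried. The helper itself is an assumption; only the Name and Active fields and the feature constants are taken from this diff.

// alertsFeatureActive scans the plan's feature set for query builder alerts.
func alertsFeatureActive() bool {
	for _, f := range BasicPlan {
		if f.Name == basemodel.QueryBuilderAlerts {
			return f.Active
		}
	}
	return false
}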
|
||||
|
||||
var ProPlan = basemodel.FeatureSet{
|
||||
basemodel.Feature{
|
||||
Name: SSO,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.OSS,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.SmartTraceDetail,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.CustomMetricsFunction,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.QueryBuilderPanels,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.QueryBuilderAlerts,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelSlack,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelWebhook,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelPagerduty,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelOpsgenie,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelEmail,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelMsTeams,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: Gateway,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: PremiumSupport,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AnomalyDetection,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.HostsInfraMonitoring,
|
||||
Active: constants.EnableHostsInfraMonitoring(),
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
}
|
||||
|
||||
var EnterprisePlan = basemodel.FeatureSet{
|
||||
@@ -79,6 +289,83 @@ var EnterprisePlan = basemodel.FeatureSet{
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.OSS,
|
||||
Active: false,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.SmartTraceDetail,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.CustomMetricsFunction,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.QueryBuilderPanels,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.QueryBuilderAlerts,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelSlack,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelWebhook,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelPagerduty,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelOpsgenie,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelEmail,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.AlertChannelMsTeams,
|
||||
Active: true,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.UseSpanMetrics,
|
||||
Active: false,
|
||||
@@ -122,8 +409,8 @@ var EnterprisePlan = basemodel.FeatureSet{
|
||||
Route: "",
|
||||
},
|
||||
basemodel.Feature{
|
||||
Name: basemodel.TraceFunnels,
|
||||
Active: false,
|
||||
Name: basemodel.HostsInfraMonitoring,
|
||||
Active: constants.EnableHostsInfraMonitoring(),
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package smart
|
||||
package model
|
||||
|
||||
type SpanForTraceDetails struct {
|
||||
TimeUnixNano uint64 `json:"timestamp"`
|
||||
@@ -15,3 +15,8 @@ type SpanForTraceDetails struct {
|
||||
HasError bool `json:"hasError"`
|
||||
Children []*SpanForTraceDetails `json:"children"`
|
||||
}
|
||||
|
||||
type GetSpansSubQueryDBResponse struct {
|
||||
SpanID string `ch:"spanID"`
|
||||
TraceID string `ch:"traceID"`
|
||||
}
|
||||
@@ -53,6 +53,7 @@ type AnomalyRule struct {
|
||||
func NewAnomalyRule(
|
||||
id string,
|
||||
p *baserules.PostableRule,
|
||||
featureFlags interfaces.FeatureLookup,
|
||||
reader interfaces.Reader,
|
||||
cache cache.Cache,
|
||||
opts ...baserules.RuleOption,
|
||||
@@ -88,9 +89,10 @@ func NewAnomalyRule(
|
||||
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))
|
||||
|
||||
querierOptsV2 := querierV2.QuerierOptions{
|
||||
Reader: reader,
|
||||
Cache: cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
Reader: reader,
|
||||
Cache: cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FeatureLookup: featureFlags,
|
||||
}
|
||||
|
||||
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
|
||||
@@ -100,18 +102,21 @@ func NewAnomalyRule(
|
||||
anomaly.WithCache[*anomaly.HourlyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.HourlyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
|
||||
)
|
||||
} else if t.seasonality == anomaly.SeasonalityDaily {
|
||||
t.provider = anomaly.NewDailyProvider(
|
||||
anomaly.WithCache[*anomaly.DailyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.DailyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
|
||||
)
|
||||
} else if t.seasonality == anomaly.SeasonalityWeekly {
|
||||
t.provider = anomaly.NewWeeklyProvider(
|
||||
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
|
||||
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
|
||||
)
|
||||
}
|
||||
return &t, nil
|
||||
|
||||
@@ -23,6 +23,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
tr, err := baserules.NewThresholdRule(
|
||||
ruleId,
|
||||
opts.Rule,
|
||||
opts.FF,
|
||||
opts.Reader,
|
||||
opts.UseLogsNewSchema,
|
||||
opts.UseTraceNewSchema,
|
||||
@@ -47,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
opts.Rule,
|
||||
opts.Logger,
|
||||
opts.Reader,
|
||||
opts.ManagerOpts.Prometheus,
|
||||
opts.ManagerOpts.PqlEngine,
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
)
|
||||
|
||||
@@ -65,6 +66,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
ar, err := NewAnomalyRule(
|
||||
ruleId,
|
||||
opts.Rule,
|
||||
opts.FF,
|
||||
opts.Reader,
|
||||
opts.Cache,
|
||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||
@@ -121,6 +123,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
rule, err = baserules.NewThresholdRule(
|
||||
alertname,
|
||||
parsedRule,
|
||||
opts.FF,
|
||||
opts.Reader,
|
||||
opts.UseLogsNewSchema,
|
||||
opts.UseTraceNewSchema,
|
||||
@@ -142,7 +145,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
parsedRule,
|
||||
opts.Logger,
|
||||
opts.Reader,
|
||||
opts.ManagerOpts.Prometheus,
|
||||
opts.ManagerOpts.PqlEngine,
|
||||
baserules.WithSendAlways(),
|
||||
baserules.WithSendUnmatched(),
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
@@ -157,6 +160,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
rule, err = NewAnomalyRule(
|
||||
alertname,
|
||||
parsedRule,
|
||||
opts.FF,
|
||||
opts.Reader,
|
||||
opts.Cache,
|
||||
baserules.WithSendAlways(),
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
type GettablePAT struct {
|
||||
CreatedByUser PatUser `json:"createdByUser"`
|
||||
UpdatedByUser PatUser `json:"updatedByUser"`
|
||||
|
||||
StorablePersonalAccessToken
|
||||
}
|
||||
|
||||
type PatUser struct {
|
||||
types.User
|
||||
NotFound bool `json:"notFound"`
|
||||
}
|
||||
|
||||
func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
|
||||
return GettablePAT{
|
||||
StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
|
||||
}
|
||||
}
|
||||
|
||||
type StorablePersonalAccessToken struct {
|
||||
bun.BaseModel `bun:"table:personal_access_token"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
|
||||
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
|
||||
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
|
||||
Token string `json:"token" bun:"token,type:text,notnull,unique"`
|
||||
Name string `json:"name" bun:"name,type:text,notnull"`
|
||||
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
|
||||
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
|
||||
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
|
||||
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
|
||||
}
|
||||
|
||||
func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
|
||||
now := time.Now()
|
||||
if expiresAt != 0 {
|
||||
// convert expiresAt to unix timestamp from days
|
||||
expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
|
||||
}
|
||||
|
||||
// Generate a 32-byte random token.
|
||||
token := make([]byte, 32)
|
||||
rand.Read(token)
|
||||
// Encode the token in base64.
|
||||
encodedToken := base64.StdEncoding.EncodeToString(token)
|
||||
|
||||
return StorablePersonalAccessToken{
|
||||
Token: encodedToken,
|
||||
Name: name,
|
||||
Role: role,
|
||||
UserID: userID,
|
||||
ExpiresAt: expiresAt,
|
||||
LastUsed: 0,
|
||||
Revoked: false,
|
||||
UpdatedByUserID: "",
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
},
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
}
|
||||
}
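A hedged usage sketch of the constructor above; the arithmetic only restates the conversion already visible in the code (expiry is passed in days and added to the current unix time). The wrapper function is an assumption.

// exampleThirtyDayToken creates a token that expires in 30 days.
func exampleThirtyDayToken() StorablePersonalAccessToken {
	// Passing 30 stores ExpiresAt as now.Unix() + 30*24*60*60, per the conversion above.
	pat := NewStorablePersonalAccessToken("ci-token", "ADMIN", "user-id", 30)
	// pat.Token holds 32 random bytes, base64-encoded (about 44 characters).
	return pat
}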
|
||||
@@ -18,13 +18,6 @@
|
||||
"field_send_resolved": "Send resolved alerts",
|
||||
"field_channel_type": "Type",
|
||||
"field_webhook_url": "Webhook URL",
|
||||
"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
|
||||
"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
|
||||
"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
|
||||
"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
|
||||
"tooltip_email_to": "Enter email addresses separated by commas.",
|
||||
"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
|
||||
|
||||
"field_slack_recipient": "Recipient",
|
||||
"field_slack_title": "Title",
|
||||
"field_slack_description": "Description",
|
||||
|
||||
@@ -18,12 +18,6 @@
|
||||
"field_send_resolved": "Send resolved alerts",
|
||||
"field_channel_type": "Type",
|
||||
"field_webhook_url": "Webhook URL",
|
||||
"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
|
||||
"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
|
||||
"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
|
||||
"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
|
||||
"tooltip_email_to": "Enter email addresses separated by commas.",
|
||||
"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
|
||||
"field_slack_recipient": "Recipient",
|
||||
"field_slack_title": "Title",
|
||||
"field_slack_description": "Description",
|
||||
|
||||
@@ -60,14 +60,10 @@
|
||||
"INTEGRATIONS": "SigNoz | Integrations",
|
||||
"ALERT_HISTORY": "SigNoz | Alert Rule History",
|
||||
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
|
||||
"MESSAGING_QUEUES_OVERVIEW": "SigNoz | Messaging Queues",
|
||||
"MESSAGING_QUEUES_KAFKA": "SigNoz | Messaging Queues | Kafka",
|
||||
"MESSAGING_QUEUES_KAFKA_DETAIL": "SigNoz | Messaging Queues | Kafka",
|
||||
"MESSAGING_QUEUES_CELERY_TASK": "SigNoz | Messaging Queues | Celery",
|
||||
"MESSAGING_QUEUES": "SigNoz | Messaging Queues",
|
||||
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
|
||||
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
|
||||
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
|
||||
"API_MONITORING": "SigNoz | API Monitoring"
|
||||
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer"
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { ConfigProvider } from 'antd';
|
||||
import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
@@ -16,7 +15,6 @@ import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
|
||||
import { NotificationProvider } from 'hooks/useNotifications';
|
||||
import { ResourceProvider } from 'hooks/useResourceAttribute';
|
||||
import history from 'lib/history';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import posthog from 'posthog-js';
|
||||
import AlertRuleProvider from 'providers/Alert';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
@@ -28,7 +26,6 @@ import { Route, Router, Switch } from 'react-router-dom';
|
||||
import { CompatRouter } from 'react-router-dom-v5-compat';
|
||||
import { extractDomain } from 'utils/app';
|
||||
|
||||
import { Home } from './pageComponents';
|
||||
import PrivateRoute from './Private';
|
||||
import defaultRoutes, {
|
||||
AppRoutes,
|
||||
@@ -48,6 +45,7 @@ function App(): JSX.Element {
|
||||
activeLicenseV3,
|
||||
isFetchingActiveLicenseV3,
|
||||
userFetchError,
|
||||
licensesFetchError,
|
||||
featureFlagsFetchError,
|
||||
isLoggedIn: isLoggedInState,
|
||||
featureFlags,
|
||||
@@ -57,7 +55,10 @@ function App(): JSX.Element {
|
||||
|
||||
const { hostname, pathname } = window.location;
|
||||
|
||||
const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
|
||||
const {
|
||||
isCloudUser: isCloudUserVal,
|
||||
isEECloudUser: isEECloudUserVal,
|
||||
} = useGetTenantLicense();
|
||||
|
||||
const enableAnalytics = useCallback(
|
||||
(user: IUser): void => {
|
||||
@@ -167,7 +168,7 @@ function App(): JSX.Element {
|
||||
|
||||
let updatedRoutes = defaultRoutes;
|
||||
// if the user is a cloud user
|
||||
if (isCloudUser || isEnterpriseSelfHostedUser) {
|
||||
if (isCloudUserVal || isEECloudUserVal) {
|
||||
// if the user is on basic plan then remove billing
|
||||
if (isOnBasicPlan) {
|
||||
updatedRoutes = updatedRoutes.filter(
|
||||
@@ -189,10 +190,10 @@ function App(): JSX.Element {
|
||||
isLoggedInState,
|
||||
user,
|
||||
licenses,
|
||||
isCloudUser,
|
||||
isEnterpriseSelfHostedUser,
|
||||
isCloudUserVal,
|
||||
isFetchingLicenses,
|
||||
isFetchingUser,
|
||||
isEECloudUserVal,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -207,7 +208,6 @@ function App(): JSX.Element {
|
||||
}
|
||||
}, [pathname]);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
useEffect(() => {
|
||||
// feature flag shouldn't be loading and featureFlags or fetchError any one of this should be true indicating that req is complete
|
||||
// licenses should also be present. there is no check for licenses for loading and error as that is mandatory if not present then routing
|
||||
@@ -233,12 +233,7 @@ function App(): JSX.Element {
|
||||
const showAddCreditCardModal =
|
||||
!isPremiumSupportEnabled && !trialInfo?.trialConvertedToSubscription;
|
||||
|
||||
if (
|
||||
isLoggedInState &&
|
||||
isChatSupportEnabled &&
|
||||
!showAddCreditCardModal &&
|
||||
(isCloudUser || isEnterpriseSelfHostedUser)
|
||||
) {
|
||||
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
|
||||
window.Intercom('boot', {
|
||||
app_id: process.env.INTERCOM_APP_ID,
|
||||
email: user?.email || '',
|
||||
@@ -257,53 +252,13 @@ function App(): JSX.Element {
|
||||
licenses,
|
||||
activeLicenseV3,
|
||||
trialInfo,
|
||||
isCloudUser,
|
||||
isEnterpriseSelfHostedUser,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isFetchingUser && isCloudUser && user && user.email) {
|
||||
if (!isFetchingUser && isCloudUserVal && user && user.email) {
|
||||
enableAnalytics(user);
|
||||
}
|
||||
}, [user, isFetchingUser, isCloudUser, enableAnalytics]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isCloudUser || isEnterpriseSelfHostedUser) {
|
||||
if (process.env.POSTHOG_KEY) {
|
||||
posthog.init(process.env.POSTHOG_KEY, {
|
||||
api_host: 'https://us.i.posthog.com',
|
||||
person_profiles: 'identified_only', // or 'always' to create profiles for anonymous users as well
|
||||
});
|
||||
}
|
||||
|
||||
Sentry.init({
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
tunnel: process.env.TUNNEL_URL,
|
||||
environment: 'production',
|
||||
integrations: [
|
||||
Sentry.browserTracingIntegration(),
|
||||
Sentry.replayIntegration({
|
||||
maskAllText: false,
|
||||
blockAllMedia: false,
|
||||
}),
|
||||
],
|
||||
// Performance Monitoring
|
||||
tracesSampleRate: 1.0, // Capture 100% of the transactions
|
||||
// Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
|
||||
tracePropagationTargets: [],
|
||||
// Session Replay
|
||||
replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
|
||||
replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
|
||||
});
|
||||
} else {
|
||||
posthog.reset();
|
||||
Sentry.close();
|
||||
|
||||
if (window.cioanalytics && typeof window.cioanalytics.reset === 'function') {
|
||||
window.cioanalytics.reset();
|
||||
}
|
||||
}
|
||||
}, [isCloudUser, isEnterpriseSelfHostedUser]);
|
||||
}, [user, isFetchingUser, isCloudUserVal, enableAnalytics]);
|
||||
|
||||
// if the user is in logged in state
|
||||
if (isLoggedInState) {
|
||||
@@ -315,55 +270,60 @@ function App(): JSX.Element {
|
||||
// if the required calls fails then return a something went wrong error
|
||||
// this needs to be on top of data missing error because if there is an error, data will never be loaded and it will
|
||||
// move to indefinitive loading
|
||||
if (userFetchError && pathname !== ROUTES.SOMETHING_WENT_WRONG) {
|
||||
if (
|
||||
(userFetchError || licensesFetchError) &&
|
||||
pathname !== ROUTES.SOMETHING_WENT_WRONG
|
||||
) {
|
||||
history.replace(ROUTES.SOMETHING_WENT_WRONG);
|
||||
}
|
||||
|
||||
// if all of the data is not set then return a spinner, this is required because there is some gap between loading states and data setting
|
||||
if ((!licenses || !user.email || !featureFlags) && !userFetchError) {
|
||||
if (
|
||||
(!licenses || !user.email || !featureFlags) &&
|
||||
!userFetchError &&
|
||||
!licensesFetchError
|
||||
) {
|
||||
return <Spinner tip="Loading..." />;
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
<CompatRouter>
|
||||
<NotificationProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<Route exact path="/" component={Home} />
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</NotificationProvider>
|
||||
</CompatRouter>
|
||||
</Router>
|
||||
</ConfigProvider>
|
||||
</Sentry.ErrorBoundary>
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
<CompatRouter>
|
||||
<NotificationProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</NotificationProvider>
|
||||
</CompatRouter>
|
||||
</Router>
|
||||
</ConfigProvider>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -207,7 +207,7 @@ export const PasswordReset = Loadable(
|
||||
export const SomethingWentWrong = Loadable(
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "ErrorBoundaryFallback" */ 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'
|
||||
/* webpackChunkName: "SomethingWentWrong" */ 'pages/SomethingWentWrong'
|
||||
),
|
||||
);
|
||||
|
||||
@@ -299,3 +299,10 @@ export const MetricsExplorer = Loadable(
|
||||
export const ApiMonitoring = Loadable(
|
||||
() => import(/* webpackChunkName: "ApiMonitoring" */ 'pages/ApiMonitoring'),
|
||||
);
|
||||
|
||||
export const DynamicVariableTest = Loadable(
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "DynamicVariableTest" */ 'pages/DynamicVariableTest'
|
||||
),
|
||||
);
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
CustomDomainSettings,
|
||||
DashboardPage,
|
||||
DashboardWidget,
|
||||
DynamicVariableTest,
|
||||
EditAlertChannelsAlerts,
|
||||
EditRulesPage,
|
||||
ErrorDetails,
|
||||
@@ -505,6 +506,13 @@ const routes: AppRoutes[] = [
|
||||
key: 'API_MONITORING',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.DYNAMIC_VARIABLE_TEST,
|
||||
exact: true,
|
||||
component: DynamicVariableTest,
|
||||
key: 'DYNAMIC_VARIABLE_TEST',
|
||||
isPrivate: true,
|
||||
},
|
||||
];
|
||||
|
||||
export const SUPPORT_ROUTE: AppRoutes = {
|
||||
|
||||
@@ -521,7 +521,7 @@ export default function CeleryOverviewTable({
|
||||
locale={{
|
||||
emptyText: isLoading ? null : <Typography.Text>No data</Typography.Text>,
|
||||
}}
|
||||
scroll={{ x: 'max-content' }}
|
||||
scroll={{ x: true }}
|
||||
showSorterTooltip
|
||||
onDragColumn={handleDragColumn}
|
||||
onRow={(record): { onClick: () => void; className: string } => ({
|
||||
|
||||
@@ -18,7 +18,6 @@ function CopyClipboardHOC({
|
||||
|
||||
notifications.success({
|
||||
message: notificationMessage,
|
||||
key: notificationMessage,
|
||||
});
|
||||
}
|
||||
}, [value, notifications, entityKey]);
|
||||
|
||||
frontend/src/components/MultiSelect/MultiSelect.styles.scss (new file, 231 lines)
@@ -0,0 +1,231 @@
|
||||
.multi-select-container {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto,
|
||||
'Helvetica Neue', Arial, sans-serif;
|
||||
}
|
||||
|
||||
.multi-select-label {
|
||||
margin-bottom: 4px;
|
||||
font-size: 14px;
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
}
|
||||
|
||||
.multi-select-input {
|
||||
width: 100%;
|
||||
min-height: 40px;
|
||||
border: 1px solid #d9d9d9;
|
||||
border-radius: 6px;
|
||||
padding: 4px 8px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
cursor: text;
|
||||
background-color: #fff;
|
||||
transition: all 0.3s;
|
||||
|
||||
&:hover {
|
||||
border-color: #40a9ff;
|
||||
}
|
||||
|
||||
&:focus,
|
||||
&.multi-select-input-focused {
|
||||
border-color: #40a9ff;
|
||||
box-shadow: 0 0 0 2px rgba(24, 144, 255, 0.2);
|
||||
outline: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-chips {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
width: 100%;
|
||||
gap: 4px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.multi-select-chip {
|
||||
background-color: #f0f0f0;
|
||||
border-radius: 4px;
|
||||
padding: 2px 8px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
max-width: 200px;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
font-size: 14px;
|
||||
line-height: 22px;
|
||||
|
||||
.multi-select-chip-remove {
|
||||
cursor: pointer;
|
||||
font-size: 12px;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
|
||||
&:hover {
|
||||
color: #ff4d4f;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-search {
|
||||
flex: 1;
|
||||
min-width: 50px;
|
||||
border: none;
|
||||
outline: none;
|
||||
background: transparent;
|
||||
|
||||
&:focus {
|
||||
border: none;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
// Override Ant Design's styles
|
||||
.ant-input {
|
||||
background: transparent;
|
||||
|
||||
&:focus {
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-clear-all {
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 4px;
|
||||
cursor: pointer;
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
|
||||
&:hover {
|
||||
color: #ff4d4f;
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-dropdown {
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
max-height: 400px;
|
||||
border: 1px solid #d9d9d9;
|
||||
border-radius: 6px;
|
||||
margin-top: 4px;
|
||||
background-color: #fff;
|
||||
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.15);
|
||||
z-index: 1050;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.multi-select-option {
|
||||
padding: 8px 12px;
|
||||
cursor: pointer;
|
||||
|
||||
&:hover {
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-divider {
|
||||
height: 1px;
|
||||
background-color: #f0f0f0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.multi-select-section-label {
|
||||
padding: 8px 12px;
|
||||
font-weight: 500;
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
font-size: 12px;
|
||||
background-color: #fafafa;
|
||||
}
|
||||
|
||||
.multi-select-loading {
|
||||
padding: 12px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.multi-select-no-results {
|
||||
padding: 12px;
|
||||
text-align: center;
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.multi-select-options-container,
|
||||
.multi-select-section-content {
|
||||
overflow-y: auto;
|
||||
|
||||
/* For WebKit browsers */
|
||||
&::-webkit-scrollbar {
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background: #f0f0f0;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: #ccc;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb:hover {
|
||||
background: #aaa;
|
||||
}
|
||||
|
||||
/* For Firefox */
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: #ccc #f0f0f0;
|
||||
}
|
||||
|
||||
.multi-select-error {
|
||||
border-color: #ff4d4f;
|
||||
|
||||
&:hover,
|
||||
&:focus,
|
||||
&.multi-select-input-focused {
|
||||
border-color: #ff7875;
|
||||
box-shadow: 0 0 0 2px rgba(255, 77, 79, 0.2);
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-error-text {
|
||||
color: #ff4d4f;
|
||||
font-size: 14px;
|
||||
line-height: 1.5;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.multi-select-disabled {
|
||||
.multi-select-input {
|
||||
background-color: #f5f5f5;
|
||||
cursor: not-allowed;
|
||||
color: rgba(0, 0, 0, 0.25);
|
||||
border-color: #d9d9d9;
|
||||
|
||||
&:hover {
|
||||
border-color: #d9d9d9;
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-chip {
|
||||
color: rgba(0, 0, 0, 0.25);
|
||||
background-color: #eee;
|
||||
}
|
||||
}
|
||||
frontend/src/components/MultiSelect/MultiSelect.tsx (new file, 595 lines)
@@ -0,0 +1,595 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import './MultiSelect.styles.scss';
|
||||
|
||||
import { CloseOutlined, SearchOutlined } from '@ant-design/icons';
|
||||
import { Checkbox, Input, Spin } from 'antd';
|
||||
import { CheckboxChangeEvent } from 'antd/es/checkbox';
|
||||
import { InputRef } from 'antd/lib/input';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
|
||||
export interface MultiSelectOption {
|
||||
label: string;
|
||||
value: string;
|
||||
selected?: boolean;
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
export interface MultiSelectSection {
|
||||
title: string;
|
||||
options: MultiSelectOption[];
|
||||
}
|
||||
|
||||
export interface MultiSelectProps {
|
||||
/** Array of options to display in the dropdown */
|
||||
options: MultiSelectOption[];
|
||||
/** Callback when selected values change */
|
||||
onChange: (selectedValues: string[]) => void;
|
||||
/** Currently selected values */
|
||||
value?: string[];
|
||||
/** Placeholder text for the search input */
|
||||
placeholder?: string;
|
||||
/** Whether the component is in loading state */
|
||||
loading?: boolean;
|
||||
/** Allow users to add custom values */
|
||||
allowCustomValues?: boolean;
|
||||
/** Callback when search text changes - can be used for server filtering */
|
||||
onSearch?: (searchText: string) => void;
|
||||
/** Custom class name */
|
||||
className?: string;
|
||||
/** Additional sections to display (e.g., "Related Values") */
|
||||
additionalSections?: MultiSelectSection[];
|
||||
/** Show "Select All" option */
|
||||
showSelectAll?: boolean;
|
||||
/** Maximum height of dropdown in pixels */
|
||||
dropdownMaxHeight?: number;
|
||||
/** Maximum width of dropdown in pixels (defaults to matching input width) */
|
||||
dropdownMaxWidth?: number;
|
||||
/** Disable the component */
|
||||
disabled?: boolean;
|
||||
/** Error message to display */
|
||||
error?: string;
|
||||
/** Label text */
|
||||
label?: string;
|
||||
/** Allow users to clear all selections */
|
||||
allowClear?: boolean;
|
||||
/** Maximum height of a section */
|
||||
sectionMaxHeight?: number;
|
||||
}
|
||||
|
||||
function MultiSelect({
|
||||
options,
|
||||
onChange,
|
||||
value = [],
|
||||
placeholder = 'Search...',
|
||||
loading = false,
|
||||
allowCustomValues = true,
|
||||
onSearch,
|
||||
className = '',
|
||||
additionalSections = [],
|
||||
showSelectAll = true,
|
||||
dropdownMaxHeight = 400,
|
||||
dropdownMaxWidth,
|
||||
disabled = false,
|
||||
error,
|
||||
label,
|
||||
allowClear = true,
|
||||
sectionMaxHeight = 150,
|
||||
}: MultiSelectProps): JSX.Element {
|
||||
const [isDropdownOpen, setIsDropdownOpen] = useState<boolean>(false);
|
||||
const [searchText, setSearchText] = useState<string>('');
|
||||
const [selectedValues, setSelectedValues] = useState<string[]>(value);
|
||||
const [displayOptions, setDisplayOptions] = useState<MultiSelectOption[]>(
|
||||
options,
|
||||
);
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const inputRef = useRef<InputRef>(null);
|
||||
const dropdownRef = useRef<HTMLDivElement>(null);
|
||||
const [focusedChipIndex, setFocusedChipIndex] = useState<number>(-1);
|
||||
const chipRefs = useRef<(HTMLDivElement | null)[]>([]);
|
||||
|
||||
// Handle save action - memoize with useCallback
|
||||
const handleSave = useCallback((): void => {
|
||||
setIsDropdownOpen(false);
|
||||
setSearchText('');
|
||||
onChange(selectedValues);
|
||||
}, [onChange, selectedValues]);
|
||||
|
||||
// Synchronize value prop with internal state
|
||||
useEffect(() => {
|
||||
setSelectedValues(value);
|
||||
}, [value]);
|
||||
|
||||
// Filter and sort options based on search text
|
||||
useEffect(() => {
|
||||
// Filter options based on search text
|
||||
const filteredOptions = options.filter((option) =>
|
||||
option.label.toLowerCase().includes(searchText.toLowerCase()),
|
||||
);
|
||||
|
||||
// Add custom value option if no matches found and allowCustomValues is true
|
||||
if (
|
||||
allowCustomValues &&
|
||||
searchText &&
|
||||
!filteredOptions.some(
|
||||
(option) => option.label.toLowerCase() === searchText.toLowerCase(),
|
||||
) &&
|
||||
!filteredOptions.some(
|
||||
(option) => option.value.toLowerCase() === searchText.toLowerCase(),
|
||||
)
|
||||
) {
|
||||
filteredOptions.unshift({
|
||||
label: `Add "${searchText}"`,
|
||||
value: searchText,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort options: selected first, then matching search term
|
||||
const sortedOptions = [...filteredOptions].sort((a, b) => {
|
||||
// First by selection status
|
||||
if (selectedValues.includes(a.value) && !selectedValues.includes(b.value))
|
||||
return -1;
|
||||
if (!selectedValues.includes(a.value) && selectedValues.includes(b.value))
|
||||
return 1;
|
||||
|
||||
// Then by match position (exact matches or starts with come first)
|
||||
const aLower = a.label.toLowerCase();
|
||||
const bLower = b.label.toLowerCase();
|
||||
const searchLower = searchText.toLowerCase();
|
||||
|
||||
if (aLower === searchLower && bLower !== searchLower) return -1;
|
||||
if (aLower !== searchLower && bLower === searchLower) return 1;
|
||||
if (aLower.startsWith(searchLower) && !bLower.startsWith(searchLower))
|
||||
return -1;
|
||||
if (!aLower.startsWith(searchLower) && bLower.startsWith(searchLower))
|
||||
return 1;
|
||||
|
||||
return 0;
|
||||
});
|
||||
|
||||
setDisplayOptions(sortedOptions);
|
||||
}, [options, searchText, selectedValues, allowCustomValues]);
|
||||
|
||||
// Close dropdown when clicking outside
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event: MouseEvent): void => {
|
||||
if (
|
||||
containerRef.current &&
|
||||
!containerRef.current.contains(event.target as Node)
|
||||
) {
|
||||
handleSave();
|
||||
}
|
||||
};
|
||||
|
||||
document.addEventListener('mousedown', handleClickOutside);
|
||||
return (): void => {
|
||||
document.removeEventListener('mousedown', handleClickOutside);
|
||||
};
|
||||
}, [selectedValues, handleSave]);
|
||||
|
||||
// Adjust dropdown position if needed
|
||||
useEffect(() => {
|
||||
if (isDropdownOpen && dropdownRef.current && containerRef.current) {
|
||||
const containerRect = containerRef.current.getBoundingClientRect();
|
||||
const dropdownHeight = dropdownRef.current.offsetHeight;
|
||||
const viewportHeight = window.innerHeight;
|
||||
|
||||
// Check if dropdown extends beyond viewport bottom
|
||||
if (
|
||||
containerRect.bottom + dropdownHeight > viewportHeight &&
|
||||
containerRect.top > dropdownHeight
|
||||
) {
|
||||
dropdownRef.current.style.top = 'auto';
|
||||
dropdownRef.current.style.bottom = '100%';
|
||||
dropdownRef.current.style.marginTop = '0';
|
||||
dropdownRef.current.style.marginBottom = '4px';
|
||||
}
|
||||
}
|
||||
}, [isDropdownOpen, displayOptions]);
|
||||
|
||||
	// Handle search input change
	const handleSearchChange = (e: React.ChangeEvent<HTMLInputElement>): void => {
		const text = e.target.value;
		setSearchText(text);
		if (onSearch) {
			onSearch(text);
		}
	};

	// Handle selection change
	const handleSelectionChange = (
		option: MultiSelectOption,
		e: CheckboxChangeEvent,
	): void => {
		const { checked } = e.target;
		let newSelectedValues: string[];

		if (checked) {
			newSelectedValues = [...selectedValues, option.value];
		} else {
			newSelectedValues = selectedValues.filter((val) => val !== option.value);
		}

		setSelectedValues(newSelectedValues);
	};

	// Handle "All" checkbox change
	const handleSelectAll = (e: CheckboxChangeEvent): void => {
		if (e.target.checked) {
			const allValues = options
				.filter((option) => !option.disabled)
				.map((option) => option.value);
			setSelectedValues(allValues);
		} else {
			setSelectedValues([]);
		}
	};

	// Remove a selected item
	const handleRemoveItem = useCallback(
		(value: string): void => {
			const newSelectedValues = selectedValues.filter((val) => val !== value);
			setSelectedValues(newSelectedValues);
		},
		[selectedValues],
	);

	// Handle clicking the input area
	const handleInputClick = (): void => {
		if (!disabled) {
			setIsDropdownOpen(true);
			inputRef.current?.focus();
		}
	};

	// Handle clear all selections
	const handleClearAll = (): void => {
		setSelectedValues([]);
		setSearchText('');
		inputRef.current?.focus();
	};

	// Get display value of a selection (chips)
	const getSelectedOptions = (): MultiSelectOption[] =>
		selectedValues.map((value) => {
			const option = options.find((opt) => opt.value === value);
			return {
				label: option?.label || value,
				value,
			};
		});

	const selectedOptions = getSelectedOptions();
	const allSelectableOptions = options.filter((option) => !option.disabled);
	const allSelected =
		allSelectableOptions.length > 0 &&
		selectedValues.length === allSelectableOptions.length;

	const containerClasses = [
		'multi-select-container',
		className,
		disabled ? 'multi-select-disabled' : '',
		error ? 'multi-select-error' : '',
	]
		.filter(Boolean)
		.join(' ');

	const inputClasses = [
		'multi-select-input',
		isDropdownOpen ? 'multi-select-input-focused' : '',
	]
		.filter(Boolean)
		.join(' ');

	// Reset chip refs array when selected options change
	useEffect(() => {
		chipRefs.current = Array(selectedOptions.length).fill(null);
	}, [selectedOptions.length]);

	// Handle chip keyboard navigation
	const handleChipKeyDown = useCallback(
		(e: React.KeyboardEvent, index: number) => {
			e.stopPropagation(); // Prevent bubbling to container

			switch (e.key) {
				case 'ArrowLeft':
					e.preventDefault();
					// Move focus to previous chip
					if (index > 0) {
						setFocusedChipIndex(index - 1);
					}
					break;

				case 'ArrowRight':
					e.preventDefault();
					// Move focus to next chip or input
					if (index < selectedOptions.length - 1) {
						setFocusedChipIndex(index + 1);
					} else {
						// Focus the input when at the last chip
						setFocusedChipIndex(-1);
						inputRef.current?.focus();
					}
					break;

				case 'Delete':
				case 'Backspace':
					e.preventDefault();
					// Remove current chip
					handleRemoveItem(selectedOptions[index].value);

					// Adjust focus after deletion
					if (selectedOptions.length > 1) {
						// Focus previous chip if not at beginning
						const newIndex = Math.min(index, selectedOptions.length - 2);
						setFocusedChipIndex(newIndex);
					} else {
						// If this was the last chip, focus input
						setFocusedChipIndex(-1);
						inputRef.current?.focus();
					}
					break;

				case 'Escape':
					e.preventDefault();
					// Return focus to input
					setFocusedChipIndex(-1);
					inputRef.current?.focus();
					break;
				default:
					// No-op for unhandled keys
					break;
			}
		},
		[selectedOptions, handleRemoveItem],
	);

	// Handle key events in the input
	const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>): void => {
		if (e.key === 'Enter') {
			// Add custom value on Enter if it doesn't exist
			if (
				allowCustomValues &&
				searchText &&
				!options.some(
					(option) => option.value.toLowerCase() === searchText.toLowerCase(),
				) &&
				!options.some(
					(option) => option.label.toLowerCase() === searchText.toLowerCase(),
				)
			) {
				const newSelectedValues = [...selectedValues, searchText];
				setSelectedValues(newSelectedValues);
				setSearchText('');
			} else if (isDropdownOpen) {
				handleSave();
			}
		} else if (e.key === 'Escape') {
			handleSave();
		} else if (
			e.key === 'Backspace' &&
			!searchText &&
			selectedValues.length > 0
		) {
			// Remove the last selected item when pressing backspace in an empty input
			const newSelectedValues = [...selectedValues];
			newSelectedValues.pop();
			setSelectedValues(newSelectedValues);
		} else if (e.key === 'Tab' && isDropdownOpen) {
			// Close dropdown but keep focus within component
			e.preventDefault();
			handleSave();
		}

		// Add navigation TO chips when in input field
		if (e.key === 'ArrowLeft' && !searchText && selectedOptions.length > 0) {
			e.preventDefault();
			setFocusedChipIndex(selectedOptions.length - 1);
		}
	};

	// Focus the appropriate chip when focusedChipIndex changes
	useEffect(() => {
		if (focusedChipIndex >= 0 && chipRefs.current[focusedChipIndex]) {
			chipRefs.current[focusedChipIndex]?.focus();
		}
	}, [focusedChipIndex]);

	return (
		<div className={containerClasses} ref={containerRef}>
			{label && <div className="multi-select-label">{label}</div>}
			<div
				className={inputClasses}
				onClick={handleInputClick}
				onKeyDown={(e): void => {
					if (e.key === 'Enter' || e.key === ' ') {
						handleInputClick();
					}
				}}
				role="combobox"
				aria-expanded={isDropdownOpen}
				aria-haspopup="listbox"
				aria-controls="multi-select-dropdown"
				aria-owns="multi-select-dropdown"
				tabIndex={disabled ? -1 : 0}
			>
				<div className="multi-select-chips">
					{selectedOptions.map((option, index) => (
						<div
							key={option.value}
							className={`multi-select-chip ${
								focusedChipIndex === index ? 'multi-select-chip-focused' : ''
							}`}
							ref={(el): void => {
								chipRefs.current[index] = el;
							}}
							role="button"
							tabIndex={0}
							onKeyDown={(e): void => handleChipKeyDown(e, index)}
							onFocus={(): void => setFocusedChipIndex(index)}
							onClick={(e): void => e.stopPropagation()}
							aria-label={`Selected option: ${option.label}`}
						>
							{option.label}
							{!disabled && (
								<button
									type="button"
									className="multi-select-chip-remove"
									onClick={(e): void => {
										e.stopPropagation();
										handleRemoveItem(option.value);
									}}
									aria-label={`Remove ${option.label}`}
									tabIndex={-1} // Don't make the inner button tabbable
								>
									<CloseOutlined />
								</button>
							)}
						</div>
					))}
					<Input
						ref={inputRef}
						className="multi-select-search"
						placeholder={selectedOptions.length === 0 ? placeholder : ''}
						value={searchText}
						onChange={handleSearchChange}
						onKeyDown={handleKeyDown}
						onFocus={(): void => setIsDropdownOpen(true)}
						suffix={<SearchOutlined />}
						bordered={false}
						disabled={disabled}
					/>
					{allowClear && selectedValues.length > 0 && !disabled && (
						<button
							type="button"
							className="multi-select-clear-all"
							onClick={(e): void => {
								e.stopPropagation();
								handleClearAll();
							}}
							aria-label="Clear all selections"
						>
							<CloseOutlined />
						</button>
					)}
				</div>
			</div>

			{error && <div className="multi-select-error-text">{error}</div>}

			{isDropdownOpen && !disabled && (
				<div
					className="multi-select-dropdown"
					ref={dropdownRef}
					style={{
						maxHeight: `${dropdownMaxHeight}px`,
						maxWidth: dropdownMaxWidth ? `${dropdownMaxWidth}px` : undefined,
					}}
					id="multi-select-dropdown"
					role="listbox"
					aria-multiselectable="true"
				>
					{loading ? (
						<div className="multi-select-loading">
							<Spin size="small" />
							<span>We are updating the values ...</span>
						</div>
					) : (
						<>
							{showSelectAll && (
								<>
									<div className="multi-select-option">
										<Checkbox checked={allSelected} onChange={handleSelectAll}>
											ALL
										</Checkbox>
									</div>
									<div className="multi-select-divider" />
								</>
							)}
							<div
								className="multi-select-options-container"
								style={{ maxHeight: `${sectionMaxHeight}px` }}
							>
								{displayOptions.length > 0 ? (
									displayOptions.map((option) => (
										<div
											key={option.value}
											className="multi-select-option"
											role="option"
											aria-selected={selectedValues.includes(option.value)}
										>
											<Checkbox
												checked={selectedValues.includes(option.value)}
												onChange={(e): void => handleSelectionChange(option, e)}
												disabled={option.disabled}
											>
												{option.label}
											</Checkbox>
										</div>
									))
								) : (
									<div className="multi-select-no-results">
										{allowCustomValues && searchText
											? `Add "${searchText}"`
											: 'No results found'}
									</div>
								)}
							</div>

							{additionalSections.map(
								(section) =>
									section.options.length > 0 && (
										<div key={`section-${section.title}`}>
											<div className="multi-select-divider" />
											<div className="multi-select-section-label">{section.title}</div>
											<div
												className="multi-select-section-content"
												style={{ maxHeight: `${sectionMaxHeight}px` }}
											>
												{section.options.map((option) => (
													<div
														key={option.value}
														className="multi-select-option"
														role="option"
														aria-selected={selectedValues.includes(option.value)}
													>
														<Checkbox
															checked={selectedValues.includes(option.value)}
															onChange={(e): void => handleSelectionChange(option, e)}
															disabled={option.disabled}
														>
															{option.label}
														</Checkbox>
													</div>
												))}
											</div>
										</div>
									),
							)}
						</>
					)}
				</div>
			)}
		</div>
	);
}

// Define defaultProps to fix linter warnings
MultiSelect.defaultProps = {
	value: [],
	placeholder: 'Search...',
	loading: false,
	allowCustomValues: true,
	onSearch: undefined,
	className: '',
	additionalSections: [],
	showSelectAll: true,
	dropdownMaxHeight: 400,
	dropdownMaxWidth: undefined,
	disabled: false,
	error: undefined,
	label: undefined,
	allowClear: true,
	sectionMaxHeight: 150,
};

export default MultiSelect;
frontend/src/components/MultiSelect/index.ts (new file, 8 additions)
@@ -0,0 +1,8 @@
import MultiSelect from './MultiSelect';

export type {
	MultiSelectOption,
	MultiSelectProps,
	MultiSelectSection,
} from './MultiSelect';
export default MultiSelect;
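For reference, a minimal usage sketch of the new component. This is an assumption-laden illustration, not part of the diff: the `onChange` callback and the exact prop surface beyond what appears in the implementation above (options, value, label, placeholder, allowCustomValues) are inferred, not confirmed here.

// Sketch only: assumes MultiSelect accepts `options`, `value`, `onChange`,
// and the props shown in its defaultProps above.
import { useState } from 'react';
import MultiSelect, { MultiSelectOption } from 'components/MultiSelect';

const serviceOptions: MultiSelectOption[] = [
	{ label: 'frontend', value: 'frontend' },
	{ label: 'order-service', value: 'order-service' },
];

function ServicePicker(): JSX.Element {
	const [selected, setSelected] = useState<string[]>([]);

	return (
		<MultiSelect
			label="Services"
			options={serviceOptions}
			value={selected}
			onChange={setSelected} // assumed callback, not shown in this hunk
			placeholder="Search services..."
			allowCustomValues
		/>
	);
}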
@@ -1,5 +1,3 @@
|
||||
import './ResizeTable.styles.scss';
|
||||
|
||||
import { SyntheticEvent, useMemo } from 'react';
|
||||
import { Resizable, ResizeCallbackData } from 'react-resizable';
|
||||
|
||||
@@ -12,8 +10,8 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
|
||||
const handle = useMemo(
|
||||
() => (
|
||||
<SpanStyle
|
||||
className="react-resizable-handle"
|
||||
onClick={(e): void => e.stopPropagation()}
|
||||
className="resize-handle"
|
||||
/>
|
||||
),
|
||||
[],
|
||||
@@ -21,7 +19,7 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
|
||||
|
||||
if (!width) {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
return <th {...restProps} className="resizable-header" />;
|
||||
return <th {...restProps} />;
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -31,10 +29,9 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
|
||||
handle={handle}
|
||||
onResize={onResize}
|
||||
draggableOpts={enableUserSelectHack}
|
||||
minConstraints={[150, 0]}
|
||||
>
|
||||
{/* eslint-disable-next-line react/jsx-props-no-spreading */}
|
||||
<th {...restProps} className="resizable-header" />
|
||||
<th {...restProps} />
|
||||
</Resizable>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
.resizable-header {
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
position: relative;
|
||||
|
||||
.ant-table-column-title {
|
||||
white-space: normal;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
}
|
||||
|
||||
.resize-main-table {
|
||||
.ant-table-body {
|
||||
.ant-table-tbody {
|
||||
.ant-table-row {
|
||||
.ant-table-cell {
|
||||
.ant-typography {
|
||||
white-space: unset;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.logs-table,
|
||||
.traces-table {
|
||||
.resize-table {
|
||||
.resize-handle {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
inset-inline-end: -5px;
|
||||
width: 10px;
|
||||
cursor: col-resize;
|
||||
|
||||
&::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
width: 1px;
|
||||
height: 1.6em;
|
||||
background-color: var(--bg-slate-200);
|
||||
transition: background-color 0.2s;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,63 +2,35 @@
|
||||
|
||||
import { Table } from 'antd';
|
||||
import { ColumnsType } from 'antd/lib/table';
|
||||
import cx from 'classnames';
|
||||
import { dragColumnParams } from 'hooks/useDragColumns/configs';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { debounce, set } from 'lodash-es';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { set } from 'lodash-es';
|
||||
import {
|
||||
SyntheticEvent,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import ReactDragListView from 'react-drag-listview';
|
||||
import { ResizeCallbackData } from 'react-resizable';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
|
||||
import ResizableHeader from './ResizableHeader';
|
||||
import { DragSpanStyle } from './styles';
|
||||
import { ResizeTableProps } from './types';
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function ResizeTable({
|
||||
columns,
|
||||
onDragColumn,
|
||||
pagination,
|
||||
widgetId,
|
||||
shouldPersistColumnWidths = false,
|
||||
...restProps
|
||||
}: ResizeTableProps): JSX.Element {
|
||||
const [columnsData, setColumns] = useState<ColumnsType>([]);
|
||||
const { setColumnWidths, selectedDashboard } = useDashboard();
|
||||
|
||||
const columnWidths = shouldPersistColumnWidths
|
||||
? (selectedDashboard?.data?.widgets?.find(
|
||||
(widget) => widget.id === widgetId,
|
||||
) as Widgets)?.columnWidths
|
||||
: undefined;
|
||||
|
||||
const updateAllColumnWidths = useRef(
|
||||
debounce((widthsConfig: Record<string, number>) => {
|
||||
if (!widgetId || !shouldPersistColumnWidths) return;
|
||||
setColumnWidths?.((prev) => ({
|
||||
...prev,
|
||||
[widgetId]: widthsConfig,
|
||||
}));
|
||||
}, 1000),
|
||||
).current;
|
||||
|
||||
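The column-width persistence above is funneled through a debounced function held in a ref, so a burst of resize events collapses into a single dashboard update. A standalone sketch of that pattern follows; `persistWidths` is an illustrative stand-in for the real setter, not the actual API.

// Sketch of the debounced-persist pattern used above.
import { debounce } from 'lodash-es';
import { useRef } from 'react';

function useDebouncedWidths(
	persistWidths: (widths: Record<string, number>) => void,
): (widths: Record<string, number>) => void {
	// useRef keeps one debounced instance across re-renders, so
	// intermediate resize events within the 1s window are dropped.
	return useRef(debounce(persistWidths, 1000)).current;
}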
const handleResize = useCallback(
|
||||
(index: number) => (
|
||||
e: SyntheticEvent<Element>,
|
||||
_e: SyntheticEvent<Element>,
|
||||
{ size }: ResizeCallbackData,
|
||||
): void => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
|
||||
const newColumns = [...columnsData];
|
||||
newColumns[index] = {
|
||||
...newColumns[index],
|
||||
@@ -93,7 +65,6 @@ function ResizeTable({
|
||||
...restProps,
|
||||
components: { header: { cell: ResizableHeader } },
|
||||
columns: mergedColumns,
|
||||
className: cx('resize-main-table', restProps.className),
|
||||
};
|
||||
|
||||
set(
|
||||
@@ -107,39 +78,9 @@ function ResizeTable({
|
||||
|
||||
useEffect(() => {
|
||||
if (columns) {
|
||||
// Apply stored column widths from widget configuration
|
||||
const columnsWithStoredWidths = columns.map((col) => {
|
||||
const dataIndex = (col as RowData).dataIndex as string;
|
||||
if (dataIndex && columnWidths && columnWidths[dataIndex]) {
|
||||
return {
|
||||
...col,
|
||||
width: columnWidths[dataIndex], // Apply stored width
|
||||
};
|
||||
}
|
||||
return col;
|
||||
});
|
||||
|
||||
setColumns(columnsWithStoredWidths);
|
||||
setColumns(columns);
|
||||
}
|
||||
}, [columns, columnWidths]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!shouldPersistColumnWidths) return;
|
||||
// Collect all column widths in a single object
|
||||
const newColumnWidths: Record<string, number> = {};
|
||||
|
||||
mergedColumns.forEach((col) => {
|
||||
if (col.width && (col as RowData).dataIndex) {
|
||||
const dataIndex = (col as RowData).dataIndex as string;
|
||||
newColumnWidths[dataIndex] = col.width as number;
|
||||
}
|
||||
});
|
||||
|
||||
// Only update if there are actual widths to set
|
||||
if (Object.keys(newColumnWidths).length > 0) {
|
||||
updateAllColumnWidths(newColumnWidths);
|
||||
}
|
||||
}, [mergedColumns, updateAllColumnWidths, shouldPersistColumnWidths]);
|
||||
}, [columns]);
|
||||
|
||||
return onDragColumn ? (
|
||||
<ReactDragListView.DragColumn {...dragColumnParams} onDragEnd={onDragColumn}>
|
||||
|
||||
@@ -8,8 +8,6 @@ export const SpanStyle = styled.span`
|
||||
width: 0.625rem;
|
||||
height: 100%;
|
||||
cursor: col-resize;
|
||||
margin-left: 4px;
|
||||
margin-right: 4px;
|
||||
`;
|
||||
|
||||
export const DragSpanStyle = styled.span`
|
||||
|
||||
@@ -9,8 +9,6 @@ import { TableDataSource } from './contants';
|
||||
|
||||
export interface ResizeTableProps extends TableProps<any> {
|
||||
onDragColumn?: (fromIndex: number, toIndex: number) => void;
|
||||
widgetId?: string;
|
||||
shouldPersistColumnWidths?: boolean;
|
||||
}
|
||||
export interface DynamicColumnTableProps extends TableProps<any> {
|
||||
tablesource: typeof TableDataSource[keyof typeof TableDataSource];
|
||||
|
||||
@@ -1,13 +1,28 @@
// keep this consistent with backend constants.go
export enum FeatureKeys {
	SSO = 'SSO',
	ENTERPRISE_PLAN = 'ENTERPRISE_PLAN',
	BASIC_PLAN = 'BASIC_PLAN',
	ALERT_CHANNEL_SLACK = 'ALERT_CHANNEL_SLACK',
	ALERT_CHANNEL_WEBHOOK = 'ALERT_CHANNEL_WEBHOOK',
	ALERT_CHANNEL_PAGERDUTY = 'ALERT_CHANNEL_PAGERDUTY',
	ALERT_CHANNEL_OPSGENIE = 'ALERT_CHANNEL_OPSGENIE',
	ALERT_CHANNEL_MSTEAMS = 'ALERT_CHANNEL_MSTEAMS',
	DurationSort = 'DurationSort',
	TimestampSort = 'TimestampSort',
	SMART_TRACE_DETAIL = 'SMART_TRACE_DETAIL',
	CUSTOM_METRICS_FUNCTION = 'CUSTOM_METRICS_FUNCTION',
	QUERY_BUILDER_PANELS = 'QUERY_BUILDER_PANELS',
	QUERY_BUILDER_ALERTS = 'QUERY_BUILDER_ALERTS',
	DISABLE_UPSELL = 'DISABLE_UPSELL',
	USE_SPAN_METRICS = 'USE_SPAN_METRICS',
	OSS = 'OSS',
	ONBOARDING = 'ONBOARDING',
	CHAT_SUPPORT = 'CHAT_SUPPORT',
	GATEWAY = 'GATEWAY',
	PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
	QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
	ANOMALY_DETECTION = 'ANOMALY_DETECTION',
	AWS_INTEGRATION = 'AWS_INTEGRATION',
	ONBOARDING_V3 = 'ONBOARDING_V3',
	THIRD_PARTY_API = 'THIRD_PARTY_API',
	TRACE_FUNNELS = 'TRACE_FUNNELS',
}

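A small illustration of how an enum key like this is typically consumed; the `activeFeatures` list and helper are hypothetical, since the real feature-flag plumbing is not part of this hunk.

// Hypothetical check against a list of enabled feature keys.
function isFeatureEnabled(
	activeFeatures: FeatureKeys[],
	key: FeatureKeys,
): boolean {
	return activeFeatures.includes(key);
}

// e.g. isFeatureEnabled(activeFeatures, FeatureKeys.TRACE_FUNNELS)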
@@ -75,6 +75,7 @@ const ROUTES = {
	METRICS_EXPLORER_BASE: '/metrics-explorer',
	WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',
	HOME_PAGE: '/',
	DYNAMIC_VARIABLE_TEST: '/dynamic-variable-test',
} as const;

export default ROUTES;
|
||||
@@ -1,4 +1,3 @@
|
||||
import ROUTES from 'constants/routes';
|
||||
import AlertChannels from 'container/AllAlertChannels';
|
||||
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
|
||||
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
|
||||
@@ -21,13 +20,6 @@ jest.mock('hooks/useNotifications', () => ({
|
||||
})),
|
||||
}));
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useLocation: (): { pathname: string } => ({
|
||||
pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('Alert Channels Settings List page', () => {
|
||||
beforeEach(() => {
|
||||
render(<AlertChannels />);
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import ROUTES from 'constants/routes';
|
||||
import AlertChannels from 'container/AllAlertChannels';
|
||||
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
|
||||
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
|
||||
@@ -26,13 +25,6 @@ jest.mock('hooks/useComponentPermission', () => ({
|
||||
default: jest.fn().mockImplementation(() => [false]),
|
||||
}));
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useLocation: (): { pathname: string } => ({
|
||||
pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('Alert Channels Settings List page (Normal User)', () => {
|
||||
beforeEach(() => {
|
||||
render(<AlertChannels />);
|
||||
|
||||
@@ -31,10 +31,6 @@ jest.mock('hooks/useNotifications', () => ({
|
||||
})),
|
||||
}));
|
||||
|
||||
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
|
||||
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
|
||||
}));
|
||||
|
||||
describe('Create Alert Channel', () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
@@ -18,10 +18,6 @@ import { render, screen } from 'tests/test-utils';
|
||||
|
||||
import { testLabelInputAndHelpValue } from './testUtils';
|
||||
|
||||
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
|
||||
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
|
||||
}));
|
||||
|
||||
describe('Create Alert Channel (Normal User)', () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
@@ -20,10 +20,6 @@ jest.mock('hooks/useNotifications', () => ({
|
||||
})),
|
||||
}));
|
||||
|
||||
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
|
||||
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
|
||||
}));
|
||||
|
||||
describe('Should check if the edit alert channel is properly displayed ', () => {
|
||||
beforeEach(() => {
|
||||
render(<EditAlertChannels initialValue={editAlertChannelInitialValue} />);
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Select, Spin, Table, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
@@ -152,7 +151,6 @@ function AllEndPoints({
|
||||
if (groupBy.length === 0) {
|
||||
setSelectedEndPointName(record.endpointName); // this will open up the endpoint details tab
|
||||
setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
|
||||
logEvent('API Monitoring: Endpoint name row clicked', {});
|
||||
} else {
|
||||
handleGroupByRowClick(record); // this will prepare the nested query payload
|
||||
}
|
||||
|
||||
@@ -392,39 +392,6 @@
|
||||
gap: 20px;
|
||||
padding-top: 20px;
|
||||
|
||||
.endpoint-meta-data {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
.endpoint-meta-data-pill {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
border-radius: 4px;
|
||||
border: 1px solid var(--bg-slate-300);
|
||||
width: fit-content;
|
||||
.endpoint-meta-data-label {
|
||||
display: flex;
|
||||
padding: 6px 8px;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
border-right: 1px solid var(--bg-slate-300);
|
||||
color: var(--text-vanilla-100);
|
||||
background: var(--bg-slate-500);
|
||||
height: calc(100% - 12px);
|
||||
}
|
||||
|
||||
.endpoint-meta-data-value {
|
||||
display: flex;
|
||||
padding: 6px 8px;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
color: var(--text-vanilla-400);
|
||||
background: var(--bg-slate-400);
|
||||
height: calc(100% - 12px);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.endpoint-details-filters-container {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
@@ -438,13 +405,6 @@
|
||||
}
|
||||
}
|
||||
|
||||
.ant-select-item,
|
||||
.ant-select-item-option-content {
|
||||
flex: auto;
|
||||
white-space: normal;
|
||||
overflow-wrap: break-word;
|
||||
}
|
||||
|
||||
.status-code-table-container {
|
||||
border-radius: 3px;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
@@ -849,13 +809,6 @@
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
.ant-select-item,
|
||||
.ant-select-item-option-content {
|
||||
flex: auto;
|
||||
white-space: normal;
|
||||
overflow-wrap: break-word;
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
@@ -964,20 +917,6 @@
|
||||
}
|
||||
}
|
||||
|
||||
.endpoint-meta-data {
|
||||
.endpoint-meta-data-pill {
|
||||
.endpoint-meta-data-label {
|
||||
color: var(--text-ink-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
}
|
||||
|
||||
.endpoint-meta-data-value {
|
||||
color: var(--text-ink-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.status-code-table-container {
|
||||
.ant-table {
|
||||
.ant-table-thead > tr > th {
|
||||
|
||||
@@ -19,14 +19,12 @@ function DomainDetails({
|
||||
selectedDomainIndex,
|
||||
setSelectedDomainIndex,
|
||||
domainListLength,
|
||||
domainListFilters,
|
||||
}: {
|
||||
domainData: any;
|
||||
handleClose: () => void;
|
||||
selectedDomainIndex: number;
|
||||
setSelectedDomainIndex: (index: number) => void;
|
||||
domainListLength: number;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const [selectedView, setSelectedView] = useState<VIEWS>(VIEWS.ALL_ENDPOINTS);
|
||||
const [selectedEndPointName, setSelectedEndPointName] = useState<string>('');
|
||||
@@ -134,7 +132,6 @@ function DomainDetails({
|
||||
domainName={domainData.domainName}
|
||||
endPointName={selectedEndPointName}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
domainListFilters={domainListFilters}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
|
||||
@@ -2,10 +2,7 @@ import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
extractPortAndEndpoint,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getLatencyOverTimeWidgetData,
|
||||
getRateOverTimeWidgetData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
@@ -30,12 +27,10 @@ function EndPointDetails({
|
||||
domainName,
|
||||
endPointName,
|
||||
setSelectedEndPointName,
|
||||
domainListFilters,
|
||||
}: {
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
setSelectedEndPointName: (value: string) => void;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
@@ -106,6 +101,8 @@ function EndPointDetails({
|
||||
const [
|
||||
endPointMetricsDataQuery,
|
||||
endPointStatusCodeDataQuery,
|
||||
endPointRateOverTimeDataQuery,
|
||||
endPointLatencyOverTimeDataQuery,
|
||||
endPointDropDownDataQuery,
|
||||
endPointDependentServicesDataQuery,
|
||||
endPointStatusCodeBarChartsDataQuery,
|
||||
@@ -118,29 +115,12 @@ function EndPointDetails({
|
||||
endPointDetailsDataQueries[3],
|
||||
endPointDetailsDataQueries[4],
|
||||
endPointDetailsDataQueries[5],
|
||||
endPointDetailsDataQueries[6],
|
||||
endPointDetailsDataQueries[7],
|
||||
],
|
||||
[endPointDetailsDataQueries],
|
||||
);
|
||||
|
||||
const { endpoint, port } = useMemo(
|
||||
() => extractPortAndEndpoint(endPointName),
|
||||
[endPointName],
|
||||
);
|
||||
|
||||
const [rateOverTimeWidget, latencyOverTimeWidget] = useMemo(
|
||||
() => [
|
||||
getRateOverTimeWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
getLatencyOverTimeWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
],
|
||||
[domainName, endPointName, filters, domainListFilters],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="endpoint-details-container">
|
||||
<div className="endpoint-details-filters-container">
|
||||
@@ -149,8 +129,6 @@ function EndPointDetails({
|
||||
selectedEndPointName={endPointName}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
endPointDropDownDataQuery={endPointDropDownDataQuery}
|
||||
parentContainerDiv=".endpoint-details-filters-container"
|
||||
dropdownStyle={{ width: 'calc(100% - 36px)' }}
|
||||
/>
|
||||
</div>
|
||||
<div className="endpoint-details-filters-container-search">
|
||||
@@ -163,16 +141,6 @@ function EndPointDetails({
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="endpoint-meta-data">
|
||||
<div className="endpoint-meta-data-pill">
|
||||
<div className="endpoint-meta-data-label">Endpoint</div>
|
||||
<div className="endpoint-meta-data-value">{endpoint || '-'}</div>
|
||||
</div>
|
||||
<div className="endpoint-meta-data-pill">
|
||||
<div className="endpoint-meta-data-label">Port</div>
|
||||
<div className="endpoint-meta-data-value">{port || '-'}</div>
|
||||
</div>
|
||||
</div>
|
||||
<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
|
||||
{!isServicesFilterApplied && (
|
||||
<DependentServices
|
||||
@@ -184,14 +152,18 @@ function EndPointDetails({
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={
|
||||
endPointStatusCodeLatencyBarChartsDataQuery
|
||||
}
|
||||
domainName={domainName}
|
||||
endPointName={endPointName}
|
||||
domainListFilters={domainListFilters}
|
||||
filters={filters}
|
||||
/>
|
||||
<StatusCodeTable endPointStatusCodeDataQuery={endPointStatusCodeDataQuery} />
|
||||
<MetricOverTimeGraph widget={rateOverTimeWidget} />
|
||||
<MetricOverTimeGraph widget={latencyOverTimeWidget} />
|
||||
<MetricOverTimeGraph
|
||||
metricOverTimeDataQuery={endPointRateOverTimeDataQuery}
|
||||
widgetInfoIndex={0}
|
||||
endPointName={endPointName}
|
||||
/>
|
||||
<MetricOverTimeGraph
|
||||
metricOverTimeDataQuery={endPointLatencyOverTimeDataQuery}
|
||||
widgetInfoIndex={1}
|
||||
endPointName={endPointName}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import EndPointDetailsZeroState from './components/EndPointDetailsZeroState';
|
||||
@@ -18,12 +17,10 @@ function EndPointDetailsWrapper({
|
||||
domainName,
|
||||
endPointName,
|
||||
setSelectedEndPointName,
|
||||
domainListFilters,
|
||||
}: {
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
setSelectedEndPointName: (value: string) => void;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
@@ -72,7 +69,6 @@ function EndPointDetailsWrapper({
|
||||
domainName={domainName}
|
||||
endPointName={endPointName}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
domainListFilters={domainListFilters}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -28,8 +28,6 @@ function EndPointDetailsZeroState({
|
||||
<EndPointsDropDown
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
endPointDropDownDataQuery={endPointDropDownDataQuery}
|
||||
parentContainerDiv=".end-point-details-zero-state-wrapper"
|
||||
dropdownStyle={{ width: '60%' }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -70,7 +70,7 @@ function EndPointMetrics({
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={metricsData?.rate}>
|
||||
<span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
|
||||
<span className="round-metric-tag">{metricsData?.rate}/sec</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
|
||||
@@ -8,22 +8,16 @@ interface EndPointsDropDownProps {
|
||||
selectedEndPointName?: string;
|
||||
setSelectedEndPointName: (value: string) => void;
|
||||
endPointDropDownDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
|
||||
parentContainerDiv?: string;
|
||||
dropdownStyle?: React.CSSProperties;
|
||||
}
|
||||
|
||||
const defaultProps = {
|
||||
selectedEndPointName: '',
|
||||
parentContainerDiv: '',
|
||||
dropdownStyle: {},
|
||||
};
|
||||
|
||||
function EndPointsDropDown({
|
||||
selectedEndPointName,
|
||||
setSelectedEndPointName,
|
||||
endPointDropDownDataQuery,
|
||||
parentContainerDiv,
|
||||
dropdownStyle,
|
||||
}: EndPointsDropDownProps): JSX.Element {
|
||||
const { data, isLoading, isFetching } = endPointDropDownDataQuery;
|
||||
|
||||
@@ -45,13 +39,6 @@ function EndPointsDropDown({
|
||||
style={{ width: '100%' }}
|
||||
onChange={handleChange}
|
||||
options={formattedData}
|
||||
getPopupContainer={
|
||||
parentContainerDiv
|
||||
? (): HTMLElement =>
|
||||
document.querySelector(parentContainerDiv) as HTMLElement
|
||||
: (triggerNode): HTMLElement => triggerNode.parentNode as HTMLElement
|
||||
}
|
||||
dropdownStyle={dropdownStyle}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Spin, Table } from 'antd';
|
||||
import { ColumnType } from 'antd/lib/table';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
@@ -115,7 +114,6 @@ function ExpandedRow({
|
||||
onClick: (): void => {
|
||||
setSelectedEndPointName(record.endpointName);
|
||||
setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
|
||||
logEvent('API Monitoring: Endpoint name row clicked', {});
|
||||
},
|
||||
className: 'expanded-clickable-row',
|
||||
})}
|
||||
|
||||
@@ -1,18 +1,110 @@
|
||||
import { Card } from 'antd';
|
||||
import GridCard from 'container/GridCardLayout/GridCard';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { Card, Skeleton, Typography } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import Uplot from 'components/Uplot';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import {
|
||||
apiWidgetInfo,
|
||||
extractPortAndEndpoint,
|
||||
getFormattedChartData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useResizeObserver } from 'hooks/useDimensions';
|
||||
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
|
||||
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
|
||||
import { useCallback, useMemo, useRef } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { Options } from 'uplot';
|
||||
|
||||
import ErrorState from './ErrorState';
|
||||
|
||||
function MetricOverTimeGraph({
|
||||
metricOverTimeDataQuery,
|
||||
widgetInfoIndex,
|
||||
endPointName,
|
||||
}: {
|
||||
metricOverTimeDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
|
||||
widgetInfoIndex: number;
|
||||
endPointName: string;
|
||||
}): JSX.Element {
|
||||
const { data } = metricOverTimeDataQuery;
|
||||
|
||||
const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const graphRef = useRef<HTMLDivElement>(null);
|
||||
const dimensions = useResizeObserver(graphRef);
|
||||
|
||||
const { endpoint } = extractPortAndEndpoint(endPointName);
|
||||
|
||||
const formattedChartData = useMemo(
|
||||
() => getFormattedChartData(data?.payload, [endpoint]),
|
||||
[data?.payload, endpoint],
|
||||
);
|
||||
|
||||
const chartData = useMemo(() => getUPlotChartData(formattedChartData), [
|
||||
formattedChartData,
|
||||
]);
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const options = useMemo(
|
||||
() =>
|
||||
getUPlotChartOptions({
|
||||
apiResponse: formattedChartData,
|
||||
isDarkMode,
|
||||
dimensions,
|
||||
yAxisUnit: apiWidgetInfo[widgetInfoIndex].yAxisUnit,
|
||||
softMax: null,
|
||||
softMin: null,
|
||||
minTimeScale: Math.floor(minTime / 1e9),
|
||||
maxTimeScale: Math.floor(maxTime / 1e9),
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
}),
|
||||
[
|
||||
formattedChartData,
|
||||
minTime,
|
||||
maxTime,
|
||||
widgetInfoIndex,
|
||||
dimensions,
|
||||
isDarkMode,
|
||||
],
|
||||
);
|
||||
|
||||
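Note that `minTime`/`maxTime` from the global time store are nanosecond timestamps, while uPlot's time scale expects unix seconds, which is why the options above divide by 1e9. As a standalone sketch of that conversion (helper name is illustrative):

// Global time range is stored in nanoseconds; uPlot scales use unix seconds.
const toUnixSeconds = (timestampNs: number): number =>
	Math.floor(timestampNs / 1e9);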
const renderCardContent = useCallback(
|
||||
(query: UseQueryResult<SuccessResponse<any>, unknown>): JSX.Element => {
|
||||
if (query.isLoading) {
|
||||
return <Skeleton />;
|
||||
}
|
||||
|
||||
if (query.error) {
|
||||
return <ErrorState refetch={query.refetch} />;
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cx('chart-container', {
|
||||
'no-data-container':
|
||||
!query.isLoading && !query?.data?.payload?.data?.result?.length,
|
||||
})}
|
||||
>
|
||||
<Uplot options={options as Options} data={chartData} />
|
||||
</div>
|
||||
);
|
||||
},
|
||||
[options, chartData],
|
||||
);
|
||||
|
||||
function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
|
||||
return (
|
||||
<div>
|
||||
<Card bordered className="endpoint-details-card">
|
||||
<div className="graph-container">
|
||||
<GridCard
|
||||
widget={widget}
|
||||
isQueryEnabled
|
||||
onDragSelect={(): void => {}}
|
||||
customOnDragSelect={(): void => {}}
|
||||
/>
|
||||
<Typography.Text>{apiWidgetInfo[widgetInfoIndex].title}</Typography.Text>
|
||||
<div className="graph-container" ref={graphRef}>
|
||||
{renderCardContent(metricOverTimeDataQuery)}
|
||||
</div>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
@@ -1,22 +1,13 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Button, Card, Skeleton, Typography } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import { useGetGraphCustomSeries } from 'components/CeleryTask/useGetGraphCustomSeries';
|
||||
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
||||
import Uplot from 'components/Uplot';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import {
|
||||
getCustomFiltersForBarChart,
|
||||
getFormattedEndPointStatusCodeChartData,
|
||||
getStatusCodeBarChartWidgetData,
|
||||
statusCodeWidgetInfo,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { handleGraphClick } from 'container/GridCardLayout/GridCard/utils';
|
||||
import { useGraphClickToShowButton } from 'container/GridCardLayout/useGraphClickToShowButton';
|
||||
import useNavigateToExplorerPages from 'container/GridCardLayout/useNavigateToExplorerPages';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useResizeObserver } from 'hooks/useDimensions';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
|
||||
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
|
||||
import { useCallback, useMemo, useRef, useState } from 'react';
|
||||
@@ -24,8 +15,6 @@ import { UseQueryResult } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { Options } from 'uplot';
|
||||
|
||||
@@ -34,10 +23,6 @@ import ErrorState from './ErrorState';
|
||||
function StatusCodeBarCharts({
|
||||
endPointStatusCodeBarChartsDataQuery,
|
||||
endPointStatusCodeLatencyBarChartsDataQuery,
|
||||
domainName,
|
||||
endPointName,
|
||||
domainListFilters,
|
||||
filters,
|
||||
}: {
|
||||
endPointStatusCodeBarChartsDataQuery: UseQueryResult<
|
||||
SuccessResponse<any>,
|
||||
@@ -47,10 +32,6 @@ function StatusCodeBarCharts({
|
||||
SuccessResponse<any>,
|
||||
unknown
|
||||
>;
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
filters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
// 0 : Status Code Count
|
||||
// 1 : Status Code Latency
|
||||
@@ -104,72 +85,6 @@ function StatusCodeBarCharts({
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const graphClick = useGraphClickToShowButton({
|
||||
graphRef,
|
||||
isButtonEnabled: true,
|
||||
buttonClassName: 'view-onclick-show-button',
|
||||
});
|
||||
|
||||
const navigateToExplorer = useNavigateToExplorer();
|
||||
|
||||
const navigateToExplorerPages = useNavigateToExplorerPages();
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const { getCustomSeries } = useGetGraphCustomSeries({
|
||||
isDarkMode,
|
||||
drawStyle: 'bars',
|
||||
colorMapping: {
|
||||
'200-299': Color.BG_FOREST_500,
|
||||
'300-399': Color.BG_AMBER_400,
|
||||
'400-499': Color.BG_CHERRY_500,
|
||||
'500-599': Color.BG_ROBIN_500,
|
||||
Other: Color.BG_SIENNA_500,
|
||||
},
|
||||
});
|
||||
|
||||
const widget = useMemo<Widgets>(
|
||||
() =>
|
||||
getStatusCodeBarChartWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
[domainName, endPointName, domainListFilters, filters],
|
||||
);
|
||||
|
||||
const graphClickHandler = useCallback(
|
||||
(
|
||||
xValue: number,
|
||||
yValue: number,
|
||||
mouseX: number,
|
||||
mouseY: number,
|
||||
metric?: { [key: string]: string },
|
||||
queryData?: { queryName: string; inFocusOrNot: boolean },
|
||||
): void => {
|
||||
const customFilters = getCustomFiltersForBarChart(metric);
|
||||
handleGraphClick({
|
||||
xValue,
|
||||
yValue,
|
||||
mouseX,
|
||||
mouseY,
|
||||
metric,
|
||||
queryData,
|
||||
widget,
|
||||
navigateToExplorerPages,
|
||||
navigateToExplorer,
|
||||
notifications,
|
||||
graphClick,
|
||||
customFilters,
|
||||
});
|
||||
},
|
||||
[
|
||||
widget,
|
||||
navigateToExplorerPages,
|
||||
navigateToExplorer,
|
||||
notifications,
|
||||
graphClick,
|
||||
],
|
||||
);
|
||||
|
||||
const options = useMemo(
|
||||
() =>
|
||||
getUPlotChartOptions({
|
||||
@@ -185,8 +100,6 @@ function StatusCodeBarCharts({
|
||||
minTimeScale: Math.floor(minTime / 1e9),
|
||||
maxTimeScale: Math.floor(maxTime / 1e9),
|
||||
panelType: PANEL_TYPES.BAR,
|
||||
onClickHandler: graphClickHandler,
|
||||
customSeries: getCustomSeries,
|
||||
}),
|
||||
[
|
||||
minTime,
|
||||
@@ -196,8 +109,6 @@ function StatusCodeBarCharts({
|
||||
formattedEndPointStatusCodeBarChartsDataPayload,
|
||||
formattedEndPointStatusCodeLatencyBarChartsDataPayload,
|
||||
isDarkMode,
|
||||
graphClickHandler,
|
||||
getCustomSeries,
|
||||
],
|
||||
);
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@ import '../Explorer.styles.scss';
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Spin, Table, Typography } from 'antd';
|
||||
import axios from 'api';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import cx from 'classnames';
|
||||
@@ -131,7 +130,6 @@ function DomainList({
|
||||
(item) => item.key === record.key,
|
||||
);
|
||||
setSelectedDomainIndex(dataIndex);
|
||||
logEvent('API Monitoring: Domain name row clicked', {});
|
||||
}
|
||||
},
|
||||
className: 'expanded-clickable-row',
|
||||
@@ -149,7 +147,6 @@ function DomainList({
|
||||
handleClose={(): void => {
|
||||
setSelectedDomainIndex(-1);
|
||||
}}
|
||||
domainListFilters={query?.filters}
|
||||
/>
|
||||
)}
|
||||
</section>
|
||||
|
||||
@@ -3,14 +3,13 @@ import './Explorer.styles.scss';
|
||||
import { FilterOutlined } from '@ant-design/icons';
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Switch, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import QuickFilters from 'components/QuickFilters/QuickFilters';
|
||||
import { QuickFiltersSource } from 'components/QuickFilters/types';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
@@ -22,10 +21,6 @@ function Explorer(): JSX.Element {
|
||||
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
|
||||
useEffect(() => {
|
||||
logEvent('API Monitoring: Landing page visited', {});
|
||||
}, []);
|
||||
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
index: 0,
|
||||
query: currentQuery.builder.queryData[0],
|
||||
@@ -69,12 +64,7 @@ function Explorer(): JSX.Element {
|
||||
style={{ marginLeft: 'auto' }}
|
||||
checked={showIP}
|
||||
onClick={(): void => {
|
||||
setShowIP((showIP): boolean => {
|
||||
logEvent('API Monitoring: Show IP addresses clicked', {
|
||||
showIP: !showIP,
|
||||
});
|
||||
return !showIP;
|
||||
});
|
||||
setShowIP((showIP) => !showIP);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -8,23 +8,16 @@ import {
|
||||
} from 'components/QuickFilters/types';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { GraphClickMetaData } from 'container/GridCardLayout/useNavigateToExplorerPages';
|
||||
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
|
||||
import dayjs from 'dayjs';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { ArrowUpDown, ChevronDown, ChevronRight } from 'lucide-react';
|
||||
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
TagFilterItem,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { QueryData } from 'types/api/widgets/getQuery';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
@@ -135,15 +128,12 @@ export const columnsConfig: ColumnType<APIDomainsRowData>[] = [
|
||||
sorter: false,
|
||||
align: 'right',
|
||||
className: `column`,
|
||||
render: (lastUsed: number | string): string =>
|
||||
lastUsed === 'n/a' || lastUsed === '-'
|
||||
? '-'
|
||||
: getLastUsedRelativeTime(lastUsed as number),
|
||||
render: (lastUsed: number): string => getLastUsedRelativeTime(lastUsed),
|
||||
},
|
||||
{
|
||||
title: (
|
||||
<div>
|
||||
Rate <span className="round-metric-tag">ops/s</span>
|
||||
Rate <span className="round-metric-tag">/s</span>
|
||||
</div>
|
||||
),
|
||||
dataIndex: 'rate',
|
||||
@@ -165,26 +155,21 @@ export const columnsConfig: ColumnType<APIDomainsRowData>[] = [
|
||||
sorter: false,
|
||||
align: 'right',
|
||||
className: `column`,
|
||||
render: (errorRate: number | string): React.ReactNode => {
|
||||
if (errorRate === 'n/a' || errorRate === '-') {
|
||||
return '-';
|
||||
}
|
||||
return (
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(((errorRate as number) * 100).toFixed(1))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(((errorRate as number) * 100).toFixed(1));
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar error-rate"
|
||||
/>
|
||||
);
|
||||
},
|
||||
render: (errorRate: number): React.ReactNode => (
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number((errorRate * 100).toFixed(1))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number((errorRate * 100).toFixed(1));
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar error-rate"
|
||||
/>
|
||||
),
|
||||
},
|
||||
{
|
||||
title: (
|
||||
@@ -232,9 +217,9 @@ interface APIMonitoringResponseRow {
|
||||
data: {
|
||||
endpoints: number;
|
||||
error_rate: number;
|
||||
lastseen: number | string;
|
||||
lastseen: number;
|
||||
[domainNameKey]: string;
|
||||
p99: number | string;
|
||||
p99: number;
|
||||
rps: number;
|
||||
};
|
||||
}
|
||||
@@ -247,12 +232,12 @@ interface EndPointsResponseRow {
|
||||
|
||||
export interface APIDomainsRowData {
|
||||
key: string;
|
||||
domainName: string;
|
||||
endpointCount: number | string;
|
||||
rate: number | string;
|
||||
errorRate: number | string;
|
||||
latency: number | string;
|
||||
lastUsed: string;
|
||||
domainName: React.ReactNode;
|
||||
endpointCount: React.ReactNode;
|
||||
rate: React.ReactNode;
|
||||
errorRate: React.ReactNode;
|
||||
latency: React.ReactNode;
|
||||
lastUsed: React.ReactNode;
|
||||
}
|
||||
|
||||
// Rename this to a proper name
|
||||
@@ -261,20 +246,12 @@ export const formatDataForTable = (
|
||||
): APIDomainsRowData[] =>
|
||||
data?.map((domain) => ({
|
||||
key: v4(),
|
||||
domainName: domain?.data[domainNameKey] || '-',
|
||||
endpointCount: domain?.data?.endpoints || '-',
|
||||
rate: domain.data.rps || '-',
|
||||
errorRate: domain.data.error_rate || '-',
|
||||
latency:
|
||||
domain.data.p99 === 'n/a'
|
||||
? '-'
|
||||
: Math.round(Number(domain.data.p99) / 1000000), // Convert from nanoseconds to milliseconds
|
||||
lastUsed:
|
||||
domain.data.lastseen === 'n/a'
|
||||
? '-'
|
||||
: new Date(
|
||||
Math.floor(Number(domain.data.lastseen) / 1000000),
|
||||
).toISOString(), // Convert from nanoseconds to milliseconds
|
||||
domainName: domain.data[domainNameKey] || '',
|
||||
endpointCount: domain.data.endpoints,
|
||||
rate: domain.data.rps,
|
||||
errorRate: domain.data.error_rate,
|
||||
latency: Math.round(domain.data.p99 / 1000000), // Convert from nanoseconds to milliseconds
|
||||
lastUsed: new Date(Math.floor(domain.data.lastseen / 1000000)).toISOString(), // Convert from nanoseconds to milliseconds
|
||||
}));
|
||||
|
||||
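Both `p99` and `lastseen` arrive from the backend in nanoseconds and are converted before display. A sketch of the two conversions inlined above; the helper names are illustrative, not part of the file.

// Helper names are illustrative; the table code above inlines these conversions.
const nanoToMilli = (ns: number): number => Math.round(ns / 1_000_000);

const nanoToISOString = (ns: number): string =>
	new Date(Math.floor(ns / 1_000_000)).toISOString();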
// Rename this to a proper name
|
||||
@@ -491,6 +468,7 @@ export const extractPortAndEndpoint = (
|
||||
}
|
||||
};
|
||||
|
||||
// Add icons in the below column headers
|
||||
export const getEndPointsColumnsConfig = (
|
||||
isGroupedByAttribute: boolean,
|
||||
expandedRowKeys: React.Key[],
|
||||
@@ -598,7 +576,7 @@ export const formatEndPointsDataForTable = (
|
||||
);
|
||||
return {
|
||||
key: v4(),
|
||||
endpointName: (endpoint.data['http.url'] as string) || '-',
|
||||
endpointName: (endpoint.data['http.url'] as string) || '',
|
||||
port,
|
||||
callCount: endpoint.data.A || '-',
|
||||
latency:
|
||||
@@ -615,6 +593,7 @@ export const formatEndPointsDataForTable = (
|
||||
|
||||
const groupedByAttributeData = groupBy.map((attribute) => attribute.key);
|
||||
|
||||
// TODO: Use tags to show the concatenated attribute values
|
||||
return data?.map((endpoint) => {
|
||||
const newEndpointName = groupedByAttributeData
|
||||
.map((attribute) => endpoint.data[attribute])
|
||||
@@ -660,7 +639,7 @@ export const createFiltersForSelectedRowData = (
|
||||
type: null,
|
||||
},
|
||||
op: '=',
|
||||
value: groupedByMeta[key] || '',
|
||||
value: groupedByMeta[key],
|
||||
id: key,
|
||||
})),
|
||||
);
|
||||
@@ -670,10 +649,12 @@ export const createFiltersForSelectedRowData = (
|
||||
|
||||
// First query payload for endpoint metrics
|
||||
// Second query payload for endpoint status code
|
||||
// Third query payload for endpoint dropdown selection
|
||||
// Fourth query payload for endpoint dependant services
|
||||
// Fifth query payload for endpoint response status count bar chart
|
||||
// Sixth query payload for endpoint response status code latency bar chart
|
||||
// Third query payload for endpoint rate over time graph
|
||||
// Fourth query payload for endpoint latency over time graph
|
||||
// Fifth query payload for endpoint dropdown selection
|
||||
// Sixth query payload for endpoint dependant services
|
||||
// Seventh query payload for endpoint response status count bar chart
|
||||
// Eighth query payload for endpoint response status code latency bar chart
|
||||
export const getEndPointDetailsQueryPayload = (
|
||||
domainName: string,
|
||||
endPointName: string,
|
||||
@@ -1120,6 +1101,205 @@ export const getEndPointDetailsQueryPayload = (
|
||||
end,
|
||||
step: 60,
|
||||
},
|
||||
{
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
graphType: PANEL_TYPES.TIME_SERIES,
|
||||
query: {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.String,
|
||||
id: '------false',
|
||||
isColumn: false,
|
||||
key: '',
|
||||
type: '',
|
||||
},
|
||||
aggregateOperator: 'rate',
|
||||
dataSource: DataSource.TRACES,
|
||||
disabled: false,
|
||||
expression: 'B',
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
id: '3c76fe0b',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'net.peer.name--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'net.peer.name',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
},
|
||||
{
|
||||
id: '30710f04',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...filters.items,
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
functions: [],
|
||||
groupBy: [
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
],
|
||||
having: [],
|
||||
legend: '',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'B',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'rate',
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
disabled: false,
|
||||
legend: '',
|
||||
name: 'A',
|
||||
query: '',
|
||||
},
|
||||
],
|
||||
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
|
||||
promql: [
|
||||
{
|
||||
disabled: false,
|
||||
legend: '',
|
||||
name: 'A',
|
||||
query: '',
|
||||
},
|
||||
],
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
},
|
||||
variables: {},
|
||||
formatForWeb: false,
|
||||
start,
|
||||
end,
|
||||
step: 60,
|
||||
},
|
||||
{
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
graphType: PANEL_TYPES.TIME_SERIES,
|
||||
query: {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
id: 'duration_nano--float64----true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
key: 'duration_nano',
|
||||
type: '',
|
||||
},
|
||||
aggregateOperator: 'p99',
|
||||
dataSource: DataSource.TRACES,
|
||||
disabled: false,
|
||||
expression: 'B',
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
id: '63adb3ff',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'net.peer.name--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'net.peer.name',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
},
|
||||
{
|
||||
id: '50142500',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...filters.items,
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
functions: [],
|
||||
groupBy: [
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
],
|
||||
having: [],
|
||||
legend: '',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'B',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'p99',
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
disabled: false,
|
||||
legend: '',
|
||||
name: 'A',
|
||||
query: '',
|
||||
},
|
||||
],
|
||||
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
|
||||
promql: [
|
||||
{
|
||||
disabled: false,
|
||||
legend: '',
|
||||
name: 'A',
|
||||
query: '',
|
||||
},
|
||||
],
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
},
|
||||
variables: {},
|
||||
formatForWeb: false,
|
||||
start,
|
||||
end,
|
||||
step: 60,
|
||||
},
|
||||
{
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
graphType: PANEL_TYPES.TABLE,
|
||||
@@ -1621,7 +1801,7 @@ interface EndPointMetricsData {
interface EndPointStatusCodeData {
	key: string;
	statusCode: string;
	count: number | string;
	count: number;
	p99Latency: number | string;
}

@@ -1644,8 +1824,8 @@ export const getFormattedEndPointStatusCodeData = (
): EndPointStatusCodeData[] =>
	data?.map((row) => ({
		key: v4(),
		statusCode: row.data.response_status_code || '-',
		count: row.data.A || '-',
		statusCode: row.data.response_status_code,
		count: row.data.A,
		p99Latency:
			row.data.B === 'n/a' ? '-' : Math.round(Number(row.data.B) / 1000000), // Convert from nanoseconds to milliseconds
	}));
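// Illustrative sketch, not part of this diff: the p99Latency mapping above converts the
// nanosecond value returned for query B into milliseconds and guards the 'n/a' case.
// The sample values below are assumptions for the example only.
const toP99Millis = (raw: string): number | string =>
	raw === 'n/a' ? '-' : Math.round(Number(raw) / 1000000);

toP99Millis('n/a'); // '-'
toP99Millis('2500000'); // 3 (2.5 ms rounded)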
@@ -1677,6 +1857,11 @@ export const endPointStatusCodeColumns: ColumnType<EndPointStatusCodeData>[] = [
	},
];

export const apiWidgetInfo = [
	{ title: 'Rate over time', yAxisUnit: 'ops/s' },
	{ title: 'Latency over time', yAxisUnit: 'ns' },
];

export const statusCodeWidgetInfo = [
	{ yAxisUnit: 'calls' },
	{ yAxisUnit: 'ns' },
@@ -1700,8 +1885,8 @@ export const getFormattedEndPointDropDownData = (
): EndPointDropDownData[] =>
	data?.map((row) => ({
		key: v4(),
		label: row.data['http.url'] || '-',
		value: row.data['http.url'] || '-',
		label: row.data['http.url'],
		value: row.data['http.url'],
	}));

interface DependentServicesResponseRow {
@@ -1718,7 +1903,6 @@ interface DependentServicesData {
	percentage: number;
}

// Discuss once about type safety of this function
export const getFormattedDependentServicesData = (
	data: DependentServicesResponseRow[],
): DependentServicesData[] => {
@@ -1799,7 +1983,7 @@ export const groupStatusCodes = (

		// Track all timestamps
		series.values.forEach((value) => {
			allTimestamps.add(Number(value[0]));
			allTimestamps.add(value[0]);
		});

		// Initialize or update the grouped series
@@ -1865,114 +2049,8 @@ export const groupStatusCodes = (
		});
	});

	// Define the order of status code ranges
	const statusCodeOrder = ['200-299', '300-399', '400-499', '500-599', 'Other'];

	// Return the grouped series in the specified order
	return statusCodeOrder
		.filter((code) => groupedSeries[code]) // Only include codes that exist in the data
		.map((code) => groupedSeries[code]);
	return Object.values(groupedSeries);
};

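// Illustrative sketch, not part of this diff: the two return statements above differ only
// in ordering. Object.values() yields buckets in insertion order, while the statusCodeOrder
// variant pins the series to a fixed 2xx-5xx order. The sample input is an assumption.
const groupedSeriesExample: Record<string, { label: string }> = {
	'500-599': { label: '500-599' },
	'200-299': { label: '200-299' },
};
const statusCodeOrderExample = ['200-299', '300-399', '400-499', '500-599', 'Other'];
const orderedSeries = statusCodeOrderExample
	.filter((code) => groupedSeriesExample[code])
	.map((code) => groupedSeriesExample[code]);
// orderedSeries: [{ label: '200-299' }, { label: '500-599' }]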
export const getStatusCodeBarChartWidgetData = (
|
||||
domainName: string,
|
||||
endPointName: string,
|
||||
filters: IBuilderQuery['filters'],
|
||||
): Widgets => ({
|
||||
query: {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.String,
|
||||
id: '------false',
|
||||
isColumn: false,
|
||||
key: '',
|
||||
type: '',
|
||||
},
|
||||
aggregateOperator: 'count',
|
||||
dataSource: DataSource.TRACES,
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
id: 'c6724407',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'net.peer.name--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'net.peer.name',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
},
|
||||
{
|
||||
id: '8b1be6f0',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...filters.items,
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: '',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'rate',
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
disabled: false,
|
||||
legend: '',
|
||||
name: 'A',
|
||||
query: '',
|
||||
},
|
||||
],
|
||||
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
|
||||
promql: [
|
||||
{
|
||||
disabled: false,
|
||||
legend: '',
|
||||
name: 'A',
|
||||
query: '',
|
||||
},
|
||||
],
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
},
|
||||
description: '',
|
||||
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
|
||||
isStacked: false,
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
title: '',
|
||||
opacity: '',
|
||||
nullZeroValues: '',
|
||||
timePreferance: 'GLOBAL_TIME',
|
||||
softMin: null,
|
||||
softMax: null,
|
||||
selectedLogFields: null,
|
||||
selectedTracesFields: null,
|
||||
});
|
||||
interface EndPointStatusCodePayloadData {
|
||||
data: {
|
||||
result: QueryData[];
|
||||
@@ -2007,277 +2085,3 @@ export const END_POINT_DETAILS_QUERY_KEYS_ARRAY = [
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA,
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA,
|
||||
];
|
||||
|
||||
export const getRateOverTimeWidgetData = (
|
||||
domainName: string,
|
||||
endPointName: string,
|
||||
filters: IBuilderQuery['filters'],
|
||||
): Widgets => {
|
||||
const { endpoint, port } = extractPortAndEndpoint(endPointName);
|
||||
const legend = `${
|
||||
port !== '-' && port !== 'n/a' ? `${port}:` : ''
|
||||
}${endpoint}`;
|
||||
return getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
title: 'Rate Over Time',
|
||||
description: 'Rate over time.',
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.String,
|
||||
id: '------false',
|
||||
isColumn: false,
|
||||
key: '',
|
||||
type: '',
|
||||
},
|
||||
aggregateOperator: 'rate',
|
||||
dataSource: DataSource.TRACES,
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
id: '3c76fe0b',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'net.peer.name--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'net.peer.name',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
},
|
||||
{
|
||||
id: '30710f04',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...filters.items,
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
functions: [],
|
||||
groupBy: [
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
],
|
||||
having: [],
|
||||
legend,
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'rate',
|
||||
},
|
||||
],
|
||||
yAxisUnit: 'ops/s',
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
export const getLatencyOverTimeWidgetData = (
|
||||
domainName: string,
|
||||
endPointName: string,
|
||||
filters: IBuilderQuery['filters'],
|
||||
): Widgets => {
|
||||
const { endpoint, port } = extractPortAndEndpoint(endPointName);
|
||||
const legend = `${port}:${endpoint}`;
|
||||
return getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
title: 'Latency Over Time',
|
||||
description: 'Latency over time.',
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
id: 'duration_nano--float64----true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
key: 'duration_nano',
|
||||
type: '',
|
||||
},
|
||||
aggregateOperator: 'p99',
|
||||
dataSource: DataSource.TRACES,
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
id: '63adb3ff',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'net.peer.name--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'net.peer.name',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
},
|
||||
{
|
||||
id: '50142500',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...filters.items,
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
functions: [],
|
||||
groupBy: [
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
id: 'http.url--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'tag',
|
||||
},
|
||||
],
|
||||
having: [],
|
||||
legend,
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'p99',
|
||||
},
|
||||
],
|
||||
yAxisUnit: 'ns',
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
/**
 * Helper function to get the start and end status codes from a status code range string
 * @param value Status code range string (e.g. '200-299') or boolean
 * @returns Tuple of [startStatusCode, endStatusCode] as strings
 */
const getStartAndEndStatusCode = (
	value: string | boolean,
): [string, string] => {
	if (!value) {
		return ['', ''];
	}

	switch (value) {
		case '100-199':
			return ['100', '199'];
		case '200-299':
			return ['200', '299'];
		case '300-399':
			return ['300', '399'];
		case '400-499':
			return ['400', '499'];
		case '500-599':
			return ['500', '599'];
		default:
			return ['', ''];
	}
};

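// Illustrative sketch, not part of this diff: how the helper above maps a bucket label to
// its numeric bounds; unrecognised strings and boolean inputs fall back to ['', ''].
const rangeExamples: Array<string | boolean> = ['200-299', '500-599', 'Other', true];
rangeExamples.map(getStartAndEndStatusCode);
// [['200', '299'], ['500', '599'], ['', ''], ['', '']]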
/**
 * Creates filter items for bar chart based on group by fields and request data
 * Used specifically for filtering status code ranges in bar charts
 * @param groupBy Array of group by fields to create filters for
 * @param requestData Data from graph click containing values to filter on
 * @returns Array of TagFilterItems with >= and <= operators for status code ranges
 */
export const createGroupByFiltersForBarChart = (
	groupBy: BaseAutocompleteData[],
	requestData: GraphClickMetaData,
): TagFilterItem[] =>
	groupBy
		.map((gb) => {
			const value = requestData[gb.key];
			const [startStatusCode, endStatusCode] = getStartAndEndStatusCode(value);
			return value
				? [
						{
							id: v4(),
							key: gb,
							op: '>=',
							value: startStatusCode,
						},
						{
							id: v4(),
							key: gb,
							op: '<=',
							value: endStatusCode,
						},
				  ]
				: [];
		})
		.flat();

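// Illustrative sketch, not part of this diff: clicking the '400-499' bar of a chart grouped
// by response_status_code would produce a >= / <= filter pair. The group-by attribute and
// click payload below are assumptions for the example only.
const statusCodeGroupBy = {
	dataType: DataTypes.String,
	id: 'response_status_code--string--tag--false',
	isColumn: false,
	isJSON: false,
	key: 'response_status_code',
	type: 'tag',
} as BaseAutocompleteData;
const clickMeta: GraphClickMetaData = {
	queryName: 'A',
	inFocusOrNot: true,
	response_status_code: '400-499',
};
createGroupByFiltersForBarChart([statusCodeGroupBy], clickMeta);
// => two TagFilterItems: { op: '>=', value: '400' } and { op: '<=', value: '499' }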
export const getCustomFiltersForBarChart = (
	metric:
		| {
				[key: string]: string;
		  }
		| undefined,
): TagFilterItem[] => {
	if (!metric?.response_status_code) {
		return [];
	}
	const [startStatusCode, endStatusCode] = getStartAndEndStatusCode(
		metric.response_status_code,
	);
	return [
		{
			id: v4(),
			key: {
				dataType: DataTypes.String,
				id: 'response_status_code--string--tag--false',
				isColumn: false,
				isJSON: false,
				key: 'response_status_code',
				type: 'tag',
			},
			op: '>=',
			value: startStatusCode,
		},
		{
			id: v4(),
			key: {
				dataType: DataTypes.String,
				id: 'response_status_code--string--tag--false',
				isColumn: false,
				isJSON: false,
				key: 'response_status_code',
				type: 'tag',
			},
			op: '<=',
			value: endStatusCode,
		},
	];
};

@@ -61,7 +61,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
isLoading: isLoadingDeploymentsData,
|
||||
isFetching: isFetchingDeploymentsData,
|
||||
refetch: refetchDeploymentsData,
|
||||
} = useGetDeploymentsData(true);
|
||||
} = useGetDeploymentsData();
|
||||
|
||||
const {
|
||||
mutate: updateSubDomain,
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Form, Input } from 'antd';
|
||||
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||
import React from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
@@ -10,20 +9,7 @@ function MsTeams({ setSelectedConfig }: MsTeamsProps): JSX.Element {
|
||||
|
||||
return (
|
||||
<>
|
||||
<Form.Item
|
||||
name="webhook_url"
|
||||
label={t('field_webhook_url')}
|
||||
tooltip={{
|
||||
title: (
|
||||
<MarkdownRenderer
|
||||
markdownContent={t('tooltip_ms_teams_url')}
|
||||
variables={{}}
|
||||
/>
|
||||
),
|
||||
overlayInnerStyle: { maxWidth: 400 },
|
||||
placement: 'right',
|
||||
}}
|
||||
>
|
||||
<Form.Item name="webhook_url" label={t('field_webhook_url')}>
|
||||
<Input
|
||||
onChange={(event): void => {
|
||||
setSelectedConfig((value) => ({
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Form, Input } from 'antd';
|
||||
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import { OpsgenieChannel } from '../../CreateAlertChannels/config';
|
||||
@@ -20,21 +19,7 @@ function OpsgenieForm({ setSelectedConfig }: OpsgenieFormProps): JSX.Element {
|
||||
|
||||
return (
|
||||
<>
|
||||
<Form.Item
|
||||
name="api_key"
|
||||
label={t('field_opsgenie_api_key')}
|
||||
tooltip={{
|
||||
title: (
|
||||
<MarkdownRenderer
|
||||
markdownContent={t('tooltip_opsgenie_api_key')}
|
||||
variables={{}}
|
||||
/>
|
||||
),
|
||||
overlayInnerStyle: { maxWidth: 400 },
|
||||
placement: 'right',
|
||||
}}
|
||||
required
|
||||
>
|
||||
<Form.Item name="api_key" label={t('field_opsgenie_api_key')} required>
|
||||
<Input
|
||||
onChange={handleInputChange('api_key')}
|
||||
data-testid="opsgenie-api-key-textbox"
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Form, Input } from 'antd';
|
||||
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||
import { Dispatch, SetStateAction } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
@@ -11,20 +10,7 @@ function PagerForm({ setSelectedConfig }: PagerFormProps): JSX.Element {
|
||||
const { t } = useTranslation('channels');
|
||||
return (
|
||||
<>
|
||||
<Form.Item
|
||||
name="routing_key"
|
||||
label={t('field_pager_routing_key')}
|
||||
tooltip={{
|
||||
title: (
|
||||
<MarkdownRenderer
|
||||
markdownContent={t('tooltip_pager_routing_key')}
|
||||
variables={{}}
|
||||
/>
|
||||
),
|
||||
overlayInnerStyle: { maxWidth: 400 },
|
||||
placement: 'right',
|
||||
}}
|
||||
>
|
||||
<Form.Item name="routing_key" label={t('field_pager_routing_key')} required>
|
||||
<Input
|
||||
onChange={(event): void => {
|
||||
setSelectedConfig((value) => ({
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Form, Input } from 'antd';
|
||||
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||
import { Dispatch, SetStateAction } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
@@ -12,20 +11,7 @@ function Slack({ setSelectedConfig }: SlackProps): JSX.Element {
|
||||
|
||||
return (
|
||||
<>
|
||||
<Form.Item
|
||||
name="api_url"
|
||||
label={t('field_webhook_url')}
|
||||
tooltip={{
|
||||
title: (
|
||||
<MarkdownRenderer
|
||||
markdownContent={t('tooltip_slack_url')}
|
||||
variables={{}}
|
||||
/>
|
||||
),
|
||||
overlayInnerStyle: { maxWidth: 400 },
|
||||
placement: 'right',
|
||||
}}
|
||||
>
|
||||
<Form.Item name="api_url" label={t('field_webhook_url')}>
|
||||
<Input
|
||||
onChange={(event): void => {
|
||||
setSelectedConfig((value) => ({
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Form, Input } from 'antd';
|
||||
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||
import { Dispatch, SetStateAction } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
@@ -10,20 +9,7 @@ function WebhookSettings({ setSelectedConfig }: WebhookProps): JSX.Element {
|
||||
|
||||
return (
|
||||
<>
|
||||
<Form.Item
|
||||
name="api_url"
|
||||
label={t('field_webhook_url')}
|
||||
tooltip={{
|
||||
title: (
|
||||
<MarkdownRenderer
|
||||
markdownContent={t('tooltip_webhook_url')}
|
||||
variables={{}}
|
||||
/>
|
||||
),
|
||||
overlayInnerStyle: { maxWidth: 400 },
|
||||
placement: 'right',
|
||||
}}
|
||||
>
|
||||
<Form.Item name="api_url" label={t('field_webhook_url')}>
|
||||
<Input
|
||||
onChange={(event): void => {
|
||||
setSelectedConfig((value) => ({
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { Form, FormInstance, Input, Select, Switch, Typography } from 'antd';
|
||||
import { Store } from 'antd/lib/form/interface';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import ROUTES from 'constants/routes';
|
||||
import {
|
||||
ChannelType,
|
||||
@@ -10,8 +11,11 @@ import {
|
||||
WebhookChannel,
|
||||
} from 'container/CreateAlertChannels/config';
|
||||
import history from 'lib/history';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { Dispatch, ReactElement, SetStateAction } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { FeatureFlagProps } from 'types/api/features/getFeaturesFlags';
|
||||
import { isFeatureKeys } from 'utils/app';
|
||||
|
||||
import EmailSettings from './Settings/Email';
|
||||
import MsTeamsSettings from './Settings/MsTeams';
|
||||
@@ -35,6 +39,17 @@ function FormAlertChannels({
	editing = false,
}: FormAlertChannelsProps): JSX.Element {
	const { t } = useTranslation('channels');
	const { featureFlags } = useAppContext();

	const feature = `ALERT_CHANNEL_${type.toUpperCase()}`;

	const featureKey = isFeatureKeys(feature)
		? feature
		: FeatureKeys.ALERT_CHANNEL_SLACK;

	const hasFeature = featureFlags?.find(
		(flag: FeatureFlagProps) => flag.name === featureKey,
	);

	const renderSettings = (): ReactElement | null => {
		switch (type) {
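// Illustrative sketch, not part of this diff: the channel type drives the feature-flag
// lookup above. The channel type and derived flag name below are assumptions for the example.
const channelType = 'msteams';
const derivedFeature = `ALERT_CHANNEL_${channelType.toUpperCase()}`; // 'ALERT_CHANNEL_MSTEAMS'
// If the derived name is not a known FeatureKeys entry, the component falls back to
// FeatureKeys.ALERT_CHANNEL_SLACK before searching featureFlags for a matching flag.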
@@ -131,7 +146,7 @@ function FormAlertChannels({
|
||||
|
||||
<Form.Item>
|
||||
<Button
|
||||
disabled={savingState}
|
||||
disabled={savingState || !hasFeature}
|
||||
loading={savingState}
|
||||
type="primary"
|
||||
onClick={(): void => onSaveHandler(type)}
|
||||
@@ -139,7 +154,7 @@ function FormAlertChannels({
|
||||
{t('button_save_channel')}
|
||||
</Button>
|
||||
<Button
|
||||
disabled={testingState}
|
||||
disabled={testingState || !hasFeature}
|
||||
loading={testingState}
|
||||
onClick={(): void => onTestHandler(type)}
|
||||
>
|
||||
|
||||
@@ -467,6 +467,10 @@ function FormAlertRules({
|
||||
panelType,
|
||||
]);
|
||||
|
||||
const isAlertAvailable =
|
||||
!featureFlags?.find((flag) => flag.name === FeatureKeys.QUERY_BUILDER_ALERTS)
|
||||
?.active || false;
|
||||
|
||||
const saveRule = useCallback(async () => {
|
||||
if (!isFormValid()) {
|
||||
return;
|
||||
@@ -684,6 +688,11 @@ function FormAlertRules({
|
||||
|
||||
const isAlertNameMissing = !formInstance.getFieldValue('alert');
|
||||
|
||||
const isAlertAvailableToSave =
|
||||
isAlertAvailable &&
|
||||
currentQuery.queryType === EQueryType.QUERY_BUILDER &&
|
||||
alertType !== AlertTypes.METRICS_BASED_ALERT;
|
||||
|
||||
const onUnitChangeHandler = (value: string): void => {
|
||||
setYAxisUnit(value);
|
||||
// reset target unit
|
||||
@@ -856,6 +865,7 @@ function FormAlertRules({
|
||||
icon={<SaveOutlined />}
|
||||
disabled={
|
||||
isAlertNameMissing ||
|
||||
isAlertAvailableToSave ||
|
||||
!isChannelConfigurationValid ||
|
||||
queryStatus === 'error'
|
||||
}
|
||||
|
||||
@@ -49,7 +49,6 @@ function FullView({
|
||||
isDependedDataLoaded = false,
|
||||
onToggleModelHandler,
|
||||
onClickHandler,
|
||||
customOnDragSelect,
|
||||
setCurrentGraphRef,
|
||||
}: FullViewProps): JSX.Element {
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
@@ -253,7 +252,7 @@ function FullView({
|
||||
onToggleModelHandler={onToggleModelHandler}
|
||||
setGraphVisibility={setGraphsVisibilityStates}
|
||||
graphVisibility={graphsVisibilityStates}
|
||||
onDragSelect={customOnDragSelect ?? onDragSelect}
|
||||
onDragSelect={onDragSelect}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
searchTerm={searchTerm}
|
||||
onClickHandler={onClickHandler}
|
||||
|
||||
@@ -50,7 +50,6 @@ export interface FullViewProps {
|
||||
widget: Widgets;
|
||||
fullViewOptions?: boolean;
|
||||
onClickHandler?: OnClickPluginOpts['onClick'];
|
||||
customOnDragSelect?: (start: number, end: number) => void;
|
||||
name: string;
|
||||
tableProcessedDataRef: MutableRefObject<RowData[]>;
|
||||
version?: string;
|
||||
|
||||
@@ -50,7 +50,6 @@ function WidgetGraphComponent({
|
||||
setRequestData,
|
||||
onClickHandler,
|
||||
onDragSelect,
|
||||
customOnDragSelect,
|
||||
customTooltipElement,
|
||||
openTracesButton,
|
||||
onOpenTraceBtnClick,
|
||||
@@ -328,7 +327,6 @@ function WidgetGraphComponent({
|
||||
onToggleModelHandler={onToggleModelHandler}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
onClickHandler={onClickHandler ?? graphClickHandler}
|
||||
customOnDragSelect={customOnDragSelect}
|
||||
setCurrentGraphRef={setCurrentGraphRef}
|
||||
/>
|
||||
</Modal>
|
||||
|
||||
@@ -36,7 +36,6 @@ function GridCardGraph({
|
||||
version,
|
||||
onClickHandler,
|
||||
onDragSelect,
|
||||
customOnDragSelect,
|
||||
customTooltipElement,
|
||||
dataAvailable,
|
||||
getGraphData,
|
||||
@@ -273,7 +272,6 @@ function GridCardGraph({
|
||||
setRequestData={setRequestData}
|
||||
onClickHandler={onClickHandler}
|
||||
onDragSelect={onDragSelect}
|
||||
customOnDragSelect={customOnDragSelect}
|
||||
customTooltipElement={customTooltipElement}
|
||||
openTracesButton={openTracesButton}
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
|
||||
@@ -33,7 +33,6 @@ export interface WidgetGraphComponentProps {
|
||||
setRequestData?: Dispatch<SetStateAction<GetQueryResultsProps>>;
|
||||
onClickHandler?: OnClickPluginOpts['onClick'];
|
||||
onDragSelect: (start: number, end: number) => void;
|
||||
customOnDragSelect?: (start: number, end: number) => void;
|
||||
customTooltipElement?: HTMLDivElement;
|
||||
openTracesButton?: boolean;
|
||||
onOpenTraceBtnClick?: (record: RowData) => void;
|
||||
@@ -50,7 +49,6 @@ export interface GridCardGraphProps {
|
||||
variables?: Dashboard['data']['variables'];
|
||||
version?: string;
|
||||
onDragSelect: (start: number, end: number) => void;
|
||||
customOnDragSelect?: (start: number, end: number) => void;
|
||||
customTooltipElement?: HTMLDivElement;
|
||||
dataAvailable?: (isDataAvailable: boolean) => void;
|
||||
getGraphData?: (graphData?: MetricRangePayloadProps['data']) => void;
|
||||
|
||||
@@ -178,7 +178,6 @@ interface HandleGraphClickParams {
|
||||
navigateToExplorer: (props: NavigateToExplorerProps) => void;
|
||||
notifications: NotificationInstance;
|
||||
graphClick: (props: GraphClickProps) => void;
|
||||
customFilters?: TagFilterItem[];
|
||||
}
|
||||
|
||||
export const handleGraphClick = async ({
|
||||
@@ -193,7 +192,6 @@ export const handleGraphClick = async ({
|
||||
navigateToExplorer,
|
||||
notifications,
|
||||
graphClick,
|
||||
customFilters,
|
||||
}: HandleGraphClickParams): Promise<void> => {
|
||||
const { stepInterval } = widget?.query?.builder?.queryData?.[0] ?? {};
|
||||
|
||||
@@ -223,7 +221,7 @@ export const handleGraphClick = async ({
|
||||
}: ${key}`,
|
||||
onClick: (): void =>
|
||||
navigateToExplorer({
|
||||
filters: [...result[key].filters, ...(customFilters || [])],
|
||||
filters: result[key].filters,
|
||||
dataSource: result[key].dataSource as DataSource,
|
||||
startTime: xValue,
|
||||
endTime: xValue + (stepInterval ?? 60),
|
||||
|
||||
@@ -44,10 +44,7 @@ import { EditMenuAction, ViewMenuAction } from './config';
|
||||
import DashboardEmptyState from './DashboardEmptyState/DashboardEmptyState';
|
||||
import GridCard from './GridCard';
|
||||
import { Card, CardContainer, ReactGridLayout } from './styles';
|
||||
import {
|
||||
hasColumnWidthsChanged,
|
||||
removeUndefinedValuesFromLayout,
|
||||
} from './utils';
|
||||
import { removeUndefinedValuesFromLayout } from './utils';
|
||||
import { MenuItemKeys } from './WidgetHeader/contants';
|
||||
import { WidgetRowHeader } from './WidgetRow';
|
||||
|
||||
@@ -71,7 +68,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
|
||||
setDashboardQueryRangeCalled,
|
||||
setSelectedRowWidgetId,
|
||||
isDashboardFetching,
|
||||
columnWidths,
|
||||
} = useDashboard();
|
||||
const { data } = selectedDashboard || {};
|
||||
const { pathname } = useLocation();
|
||||
@@ -166,7 +162,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
|
||||
logEventCalledRef.current = true;
|
||||
}
|
||||
}, [data]);
|
||||
|
||||
const onSaveHandler = (): void => {
|
||||
if (!selectedDashboard) return;
|
||||
|
||||
@@ -176,15 +171,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
|
||||
...selectedDashboard.data,
|
||||
panelMap: { ...currentPanelMap },
|
||||
layout: dashboardLayout.filter((e) => e.i !== PANEL_TYPES.EMPTY_WIDGET),
|
||||
widgets: selectedDashboard?.data?.widgets?.map((widget) => {
|
||||
if (columnWidths?.[widget.id]) {
|
||||
return {
|
||||
...widget,
|
||||
columnWidths: columnWidths[widget.id],
|
||||
};
|
||||
}
|
||||
return widget;
|
||||
}),
|
||||
},
|
||||
uuid: selectedDashboard.uuid,
|
||||
};
|
||||
@@ -241,31 +227,20 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
isDashboardLocked ||
|
||||
!saveLayoutPermission ||
|
||||
updateDashboardMutation.isLoading ||
|
||||
isDashboardFetching
|
||||
dashboardLayout &&
|
||||
Array.isArray(dashboardLayout) &&
|
||||
dashboardLayout.length > 0 &&
|
||||
!isEqual(layouts, dashboardLayout) &&
|
||||
!isDashboardLocked &&
|
||||
saveLayoutPermission &&
|
||||
!updateDashboardMutation.isLoading &&
|
||||
!isDashboardFetching
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const shouldSaveLayout =
|
||||
dashboardLayout &&
|
||||
Array.isArray(dashboardLayout) &&
|
||||
dashboardLayout.length > 0 &&
|
||||
!isEqual(layouts, dashboardLayout);
|
||||
|
||||
const shouldSaveColumnWidths =
|
||||
dashboardLayout &&
|
||||
Array.isArray(dashboardLayout) &&
|
||||
dashboardLayout.length > 0 &&
|
||||
hasColumnWidthsChanged(columnWidths, selectedDashboard);
|
||||
|
||||
if (shouldSaveLayout || shouldSaveColumnWidths) {
|
||||
onSaveHandler();
|
||||
}
|
||||
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [dashboardLayout, columnWidths]);
|
||||
}, [dashboardLayout]);
|
||||
|
||||
const onSettingsModalSubmit = (): void => {
|
||||
const newTitle = form.getFieldValue('title');
|
||||
|
||||
@@ -12,7 +12,7 @@ import { v4 } from 'uuid';

import { extractQueryNamesFromExpression } from './utils';

export type GraphClickMetaData = {
type GraphClickMetaData = {
	[key: string]: string | boolean;
	queryName: string;
	inFocusOrNot: boolean;

@@ -1,7 +1,5 @@
import { FORMULA_REGEXP } from 'constants/regExp';
import { isEmpty, isEqual } from 'lodash-es';
import { Layout } from 'react-grid-layout';
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';

export const removeUndefinedValuesFromLayout = (layout: Layout[]): Layout[] =>
	layout.map((obj) =>
@@ -27,27 +25,3 @@ export function extractQueryNamesFromExpression(expression: string): string[] {
	// Extract matches and deduplicate
	return [...new Set(expression.match(queryNameRegex) || [])];
}

export const hasColumnWidthsChanged = (
	columnWidths: Record<string, Record<string, number>>,
	selectedDashboard?: Dashboard,
): boolean => {
	// If no column widths stored, no changes
	if (isEmpty(columnWidths) || !selectedDashboard) return false;

	// Check each widget's column widths
	return Object.keys(columnWidths).some((widgetId) => {
		const dashboardWidget = selectedDashboard?.data?.widgets?.find(
			(widget) => widget.id === widgetId,
		) as Widgets;

		const newWidths = columnWidths[widgetId];
		const existingWidths = dashboardWidget?.columnWidths;

		// If both are empty/undefined, no change
		if (isEmpty(newWidths) || isEmpty(existingWidths)) return false;

		// Compare stored column widths with dashboard widget's column widths
		return !isEqual(newWidths, existingWidths);
	});
};

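// Illustrative sketch, not part of this diff: hasColumnWidthsChanged only reports a change
// when a widget already has stored widths that differ from the new ones; widgets without
// existing widths are ignored. The dashboard shape below is an assumption for the example.
const storedWidths = { 'widget-1': { timestamp: 180, body: 420 } };
const dashboardExample = {
	data: { widgets: [{ id: 'widget-1', columnWidths: { timestamp: 160, body: 420 } }] },
} as unknown as Dashboard;
hasColumnWidthsChanged(storedWidths, dashboardExample); // true (180 !== 160)
hasColumnWidthsChanged({}, dashboardExample); // false (nothing stored yet)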
@@ -43,7 +43,6 @@ function GridTableComponent({
|
||||
sticky,
|
||||
openTracesButton,
|
||||
onOpenTraceBtnClick,
|
||||
widgetId,
|
||||
...props
|
||||
}: GridTableComponentProps): JSX.Element {
|
||||
const { t } = useTranslation(['valueGraph']);
|
||||
@@ -230,7 +229,6 @@ function GridTableComponent({
|
||||
columns={openTracesButton ? columnDataWithOpenTracesButton : newColumnData}
|
||||
dataSource={dataSource}
|
||||
sticky={sticky}
|
||||
widgetId={widgetId}
|
||||
onRow={
|
||||
openTracesButton
|
||||
? (record): React.HTMLAttributes<HTMLElement> => ({
|
||||
|
||||
@@ -17,7 +17,6 @@ export type GridTableComponentProps = {
|
||||
searchTerm?: string;
|
||||
openTracesButton?: boolean;
|
||||
onOpenTraceBtnClick?: (record: RowData) => void;
|
||||
widgetId?: string;
|
||||
} & Pick<LogsExplorerTableProps, 'data'> &
|
||||
Omit<TableProps<RowData>, 'columns' | 'dataSource'>;
|
||||
|
||||
|
||||
@@ -23,13 +23,10 @@ function DataSourceInfo({
|
||||
|
||||
const notSendingData = !dataSentToSigNoz;
|
||||
|
||||
const isEnabled =
|
||||
activeLicenseV3 && activeLicenseV3.platform === LicensePlatform.CLOUD;
|
||||
|
||||
const {
|
||||
data: deploymentsData,
|
||||
isError: isErrorDeploymentsData,
|
||||
} = useGetDeploymentsData(isEnabled || false);
|
||||
} = useGetDeploymentsData();
|
||||
|
||||
const [region, setRegion] = useState<string>('');
|
||||
const [url, setUrl] = useState<string>('');
|
||||
|
||||
@@ -293,7 +293,7 @@ function MultiIngestionSettings(): JSX.Element {
|
||||
isLoading: isLoadingDeploymentsData,
|
||||
isFetching: isFetchingDeploymentsData,
|
||||
isError: isErrorDeploymentsData,
|
||||
} = useGetDeploymentsData(true);
|
||||
} = useGetDeploymentsData();
|
||||
|
||||
const {
|
||||
mutate: createIngestionKey,
|
||||
|
||||
@@ -10,6 +10,7 @@ import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQue
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import useDebouncedFn from 'hooks/useDebouncedFunction';
|
||||
import { useEventSourceEvent } from 'hooks/useEventSourceEvent';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { prepareQueryRangePayload } from 'lib/dashboard/prepareQueryRangePayload';
|
||||
import { useEventSource } from 'providers/EventSource';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
@@ -37,6 +38,8 @@ function LiveLogsContainer(): JSX.Element {
|
||||
|
||||
const batchedEventsRef = useRef<ILog[]>([]);
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const { selectedTime: globalSelectedTime } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
@@ -47,8 +50,6 @@ function LiveLogsContainer(): JSX.Element {
|
||||
handleCloseConnection,
|
||||
initialLoading,
|
||||
isConnectionLoading,
|
||||
isConnectionError,
|
||||
reconnectDueToError,
|
||||
} = useEventSource();
|
||||
|
||||
const compositeQuery = useGetCompositeQueryParam();
|
||||
@@ -85,8 +86,8 @@ function LiveLogsContainer(): JSX.Element {
|
||||
);
|
||||
|
||||
const handleError = useCallback(() => {
|
||||
console.error('Sorry, something went wrong');
|
||||
}, []);
|
||||
notifications.error({ message: 'Sorry, something went wrong' });
|
||||
}, [notifications]);
|
||||
|
||||
useEventSourceEvent('message', handleGetLiveLogs);
|
||||
useEventSourceEvent('error', handleError);
|
||||
@@ -152,23 +153,6 @@ function LiveLogsContainer(): JSX.Element {
|
||||
handleStartNewConnection,
|
||||
]);
|
||||
|
||||
useEffect((): (() => void) | undefined => {
|
||||
if (isConnectionError && reconnectDueToError && compositeQuery) {
|
||||
// Small delay to prevent immediate reconnection attempts
|
||||
const reconnectTimer = setTimeout(() => {
|
||||
handleStartNewConnection(compositeQuery);
|
||||
}, 1000);
|
||||
|
||||
return (): void => clearTimeout(reconnectTimer);
|
||||
}
|
||||
return undefined;
|
||||
}, [
|
||||
isConnectionError,
|
||||
reconnectDueToError,
|
||||
compositeQuery,
|
||||
handleStartNewConnection,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
const prefetchedList = queryLocationState?.listQueryPayload[0]?.list;
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import './LogsPanelComponent.styles.scss';
|
||||
|
||||
import { Table } from 'antd';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
import { VIEW_TYPES } from 'components/LogDetail/constants';
|
||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import Controls from 'container/Controls';
|
||||
@@ -79,14 +79,9 @@ function LogsPanelComponent({
|
||||
|
||||
const { formatTimezoneAdjustedTimestamp } = useTimezone();
|
||||
|
||||
const columns = useMemo(
|
||||
() =>
|
||||
getLogPanelColumnsList(
|
||||
widget.selectedLogFields,
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[widget.selectedLogFields],
|
||||
const columns = getLogPanelColumnsList(
|
||||
widget.selectedLogFields,
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
);
|
||||
|
||||
const dataLength =
|
||||
@@ -221,18 +216,16 @@ function LogsPanelComponent({
|
||||
<div className="logs-table">
|
||||
<div className="resize-table">
|
||||
<OverlayScrollbar>
|
||||
<ResizeTable
|
||||
<Table
|
||||
pagination={false}
|
||||
tableLayout="fixed"
|
||||
scroll={{ x: `max-content` }}
|
||||
scroll={{ x: `calc(50vw - 10px)` }}
|
||||
sticky
|
||||
loading={queryResponse.isFetching}
|
||||
style={tableStyles}
|
||||
dataSource={flattenLogData}
|
||||
columns={columns}
|
||||
onRow={handleRow}
|
||||
widgetId={widget.id}
|
||||
shouldPersistColumnWidths
|
||||
/>
|
||||
</OverlayScrollbar>
|
||||
</div>
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.