Compare commits
33 Commits
add-valida
...
v0.79.0-18
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
18b89242c1 | ||
|
|
c396d24968 | ||
|
|
0138d757c8 | ||
|
|
844195b84f | ||
|
|
8ff05b2e8f | ||
|
|
c8c56c544e | ||
|
|
1c43655336 | ||
|
|
c269c8c6b8 | ||
|
|
3142b6cc6d | ||
|
|
58e141685a | ||
|
|
e17f63a50c | ||
|
|
838ef5dcc5 | ||
|
|
e53d3d1269 | ||
|
|
2330420c0d | ||
|
|
65ac277074 | ||
|
|
b7982ca348 | ||
|
|
2748b49a44 | ||
|
|
7345027762 | ||
|
|
68f874e433 | ||
|
|
54a82b1664 | ||
|
|
93dc585145 | ||
|
|
6a143efd2c | ||
|
|
0116eb20ab | ||
|
|
79e9d1b357 | ||
|
|
b89ce82e25 | ||
|
|
b43a198fd8 | ||
|
|
b40ca4baf3 | ||
|
|
8df77c9221 | ||
|
|
f67555576f | ||
|
|
f0a4c37073 | ||
|
|
7972261237 | ||
|
|
3b4a8e5e0f | ||
|
|
5ef3b8ee3f |
87
.github/workflows/build-community.yaml
vendored
Normal file
87
.github/workflows/build-community.yaml
vendored
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
name: build-community
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
tags:
|
||||||
|
- v*
|
||||||
|
|
||||||
|
env:
|
||||||
|
PRIMUS_HOME: .primus
|
||||||
|
MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
prepare:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
|
||||||
|
version: ${{ steps.build-info.outputs.version }}
|
||||||
|
hash: ${{ steps.build-info.outputs.hash }}
|
||||||
|
time: ${{ steps.build-info.outputs.time }}
|
||||||
|
branch: ${{ steps.build-info.outputs.branch }}
|
||||||
|
steps:
|
||||||
|
- name: self-checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- id: token
|
||||||
|
name: github-token-gen
|
||||||
|
uses: actions/create-github-app-token@v1
|
||||||
|
with:
|
||||||
|
app-id: ${{ secrets.PRIMUS_APP_ID }}
|
||||||
|
private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
|
||||||
|
owner: ${{ github.repository_owner }}
|
||||||
|
- name: primus-checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: signoz/primus
|
||||||
|
ref: ${{ inputs.PRIMUS_REF }}
|
||||||
|
path: .primus
|
||||||
|
token: ${{ steps.token.outputs.token }}
|
||||||
|
- name: build-info
|
||||||
|
run: |
|
||||||
|
echo "version=$(eval $MAKE info-version)" >> $GITHUB_OUTPUT
|
||||||
|
echo "hash=$(eval $MAKE info-commit-short)" >> $GITHUB_OUTPUT
|
||||||
|
echo "time=$(eval $MAKE info-timestamp)" >> $GITHUB_OUTPUT
|
||||||
|
echo "branch=$(eval $MAKE info-branch)" >> $GITHUB_OUTPUT
|
||||||
|
- name: set-docker-providers
|
||||||
|
id: set-docker-providers
|
||||||
|
run: |
|
||||||
|
if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
|
||||||
|
echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "providers=gcp" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
js-build:
|
||||||
|
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
|
||||||
|
needs: prepare
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
PRIMUS_REF: main
|
||||||
|
JS_SRC: frontend
|
||||||
|
JS_OUTPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
|
||||||
|
JS_OUTPUT_ARTIFACT_PATH: frontend/build
|
||||||
|
DOCKER_BUILD: false
|
||||||
|
DOCKER_MANIFEST: false
|
||||||
|
go-build:
|
||||||
|
uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
|
||||||
|
needs: [prepare, js-build]
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
PRIMUS_REF: main
|
||||||
|
GO_NAME: signoz-community
|
||||||
|
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
|
||||||
|
GO_INPUT_ARTIFACT_PATH: frontend/build
|
||||||
|
GO_BUILD_CONTEXT: ./pkg/query-service
|
||||||
|
GO_BUILD_FLAGS: >-
|
||||||
|
-tags timetzdata
|
||||||
|
-ldflags='-linkmode external -extldflags \"-static\" -s -w
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.Version=${{ needs.prepare.outputs.version }}
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.variant=community
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
|
||||||
|
GO_CGO_ENABLED: 1
|
||||||
|
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
|
||||||
|
DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
|
||||||
|
DOCKER_MANIFEST: true
|
||||||
|
DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}
|
||||||
110
.github/workflows/build-enterprise.yaml
vendored
Normal file
110
.github/workflows/build-enterprise.yaml
vendored
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
name: build-enterprise
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
tags:
|
||||||
|
- v*
|
||||||
|
|
||||||
|
env:
|
||||||
|
PRIMUS_HOME: .primus
|
||||||
|
MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
prepare:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
|
||||||
|
version: ${{ steps.build-info.outputs.version }}
|
||||||
|
hash: ${{ steps.build-info.outputs.hash }}
|
||||||
|
time: ${{ steps.build-info.outputs.time }}
|
||||||
|
branch: ${{ steps.build-info.outputs.branch }}
|
||||||
|
steps:
|
||||||
|
- name: self-checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- id: token
|
||||||
|
name: github-token-gen
|
||||||
|
uses: actions/create-github-app-token@v1
|
||||||
|
with:
|
||||||
|
app-id: ${{ secrets.PRIMUS_APP_ID }}
|
||||||
|
private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
|
||||||
|
owner: ${{ github.repository_owner }}
|
||||||
|
- name: primus-checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: signoz/primus
|
||||||
|
ref: ${{ inputs.PRIMUS_REF }}
|
||||||
|
path: .primus
|
||||||
|
token: ${{ steps.token.outputs.token }}
|
||||||
|
- name: build-info
|
||||||
|
run: |
|
||||||
|
echo "version=$(eval $MAKE info-version)" >> $GITHUB_OUTPUT
|
||||||
|
echo "hash=$(eval $MAKE info-commit-short)" >> $GITHUB_OUTPUT
|
||||||
|
echo "time=$(eval $MAKE info-timestamp)" >> $GITHUB_OUTPUT
|
||||||
|
echo "branch=$(eval $MAKE info-branch)" >> $GITHUB_OUTPUT
|
||||||
|
- name: set-docker-providers
|
||||||
|
id: set-docker-providers
|
||||||
|
run: |
|
||||||
|
if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
|
||||||
|
echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "providers=gcp" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
- name: create-dotenv
|
||||||
|
run: |
|
||||||
|
mkdir -p frontend
|
||||||
|
echo 'CI=1' > frontend/.env
|
||||||
|
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
|
||||||
|
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
|
||||||
|
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
|
||||||
|
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
|
||||||
|
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
|
||||||
|
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
|
||||||
|
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
|
||||||
|
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
|
||||||
|
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
|
||||||
|
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
|
||||||
|
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
|
||||||
|
- name: cache-dotenv
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: frontend/.env
|
||||||
|
key: enterprise-dotenv-${{ github.sha }}
|
||||||
|
js-build:
|
||||||
|
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
|
||||||
|
needs: prepare
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
PRIMUS_REF: main
|
||||||
|
JS_SRC: frontend
|
||||||
|
JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
|
||||||
|
JS_INPUT_ARTIFACT_PATH: frontend/.env
|
||||||
|
JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
|
||||||
|
JS_OUTPUT_ARTIFACT_PATH: frontend/build
|
||||||
|
DOCKER_BUILD: false
|
||||||
|
DOCKER_MANIFEST: false
|
||||||
|
go-build:
|
||||||
|
uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
|
||||||
|
needs: [prepare, js-build]
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
PRIMUS_REF: main
|
||||||
|
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
|
||||||
|
GO_INPUT_ARTIFACT_PATH: frontend/build
|
||||||
|
GO_BUILD_CONTEXT: ./ee/query-service
|
||||||
|
GO_BUILD_FLAGS: >-
|
||||||
|
-tags timetzdata
|
||||||
|
-ldflags='-linkmode external -extldflags \"-static\" -s -w
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.Version=${{ needs.prepare.outputs.version }}
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
|
||||||
|
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
|
||||||
|
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
|
||||||
|
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
|
||||||
|
GO_CGO_ENABLED: 1
|
||||||
|
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
|
||||||
|
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
|
||||||
|
DOCKER_MANIFEST: true
|
||||||
|
DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}
|
||||||
122
.github/workflows/build.yaml
vendored
122
.github/workflows/build.yaml
vendored
@@ -1,122 +0,0 @@
|
|||||||
name: build
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
tags:
|
|
||||||
- v*
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
enterprise:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: setup
|
|
||||||
uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
go-version: "1.22"
|
|
||||||
- name: setup-qemu
|
|
||||||
uses: docker/setup-qemu-action@v3
|
|
||||||
- name: setup-buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
with:
|
|
||||||
version: latest
|
|
||||||
- name: docker-login
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
- name: create-env-file
|
|
||||||
run: |
|
|
||||||
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
|
|
||||||
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
|
|
||||||
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
|
|
||||||
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
|
|
||||||
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
|
|
||||||
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
|
|
||||||
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
|
|
||||||
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
|
|
||||||
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
|
|
||||||
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
|
|
||||||
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
|
|
||||||
- name: github-ref-info
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
GH_REF=${{ github.ref }}
|
|
||||||
if [[ "${{ github.ref_type }}" == "tag" ]]; then
|
|
||||||
PREFIX="refs/tags/"
|
|
||||||
echo "GH_IS_TAG=true" >> $GITHUB_ENV
|
|
||||||
echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
PREFIX="refs/heads/"
|
|
||||||
echo "GH_IS_TAG=false" >> $GITHUB_ENV
|
|
||||||
echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
- name: set-version
|
|
||||||
run: |
|
|
||||||
if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
|
|
||||||
echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
|
|
||||||
elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
|
|
||||||
echo "VERSION=latest" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
- name: cross-compilation-tools
|
|
||||||
run: |
|
|
||||||
set -ex
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
|
|
||||||
- name: publish
|
|
||||||
run: make docker-buildx-enterprise
|
|
||||||
|
|
||||||
community:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: setup-go
|
|
||||||
uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
go-version: "1.22"
|
|
||||||
- name: setup-qemu
|
|
||||||
uses: docker/setup-qemu-action@v3
|
|
||||||
- name: setup-buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
with:
|
|
||||||
version: latest
|
|
||||||
- name: docker-login
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
- name: github-ref-info
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
GH_REF=${{ github.ref }}
|
|
||||||
if [[ "${{ github.ref_type }}" == "tag" ]]; then
|
|
||||||
PREFIX="refs/tags/"
|
|
||||||
echo "GH_IS_TAG=true" >> $GITHUB_ENV
|
|
||||||
echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
PREFIX="refs/heads/"
|
|
||||||
echo "GH_IS_TAG=false" >> $GITHUB_ENV
|
|
||||||
echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
- name: set-version
|
|
||||||
run: |
|
|
||||||
if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
|
|
||||||
echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
|
|
||||||
elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
|
|
||||||
echo "VERSION=latest" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
- name: cross-compilation-tools
|
|
||||||
run: |
|
|
||||||
set -ex
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
|
|
||||||
- name: publish
|
|
||||||
run: make docker-buildx-community
|
|
||||||
17
.versions/alpine
Normal file
17
.versions/alpine
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
#### Auto generated by make docker-version-alpine. DO NOT EDIT! ####
|
||||||
|
amd64=029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85
|
||||||
|
unknown=5fea95373b9ec85974843f31446fa6a9df4492dddae4e1cb056193c34a20a5be
|
||||||
|
arm=b4aef1a899e0271f06d948c9a8fa626ecdb2202d3a178bc14775dd559e23df8e
|
||||||
|
unknown=a4d1e27e63a9d6353046eb25a2f0ec02945012b217f4364cd83a73fe6dfb0b15
|
||||||
|
arm=4fdafe217d0922f3c3e2b4f64cf043f8403a4636685cd9c51fea2cbd1f419740
|
||||||
|
unknown=7f21ac2018d95b2c51a5779c1d5ca6c327504adc3b0fdc747a6725d30b3f13c2
|
||||||
|
arm64=ea3c5a9671f7b3f7eb47eab06f73bc6591df978b0d5955689a9e6f943aa368c0
|
||||||
|
unknown=a8ba68c1a9e6eea8041b4b8f996c235163440808b9654a865976fdcbede0f433
|
||||||
|
386=dea9f02e103e837849f984d5679305c758aba7fea1b95b7766218597f61a05ab
|
||||||
|
unknown=3c6629bec05c8273a927d46b77428bf4a378dad911a0ae284887becdc149b734
|
||||||
|
ppc64le=0880443bffa028dfbbc4094a32dd6b7ac25684e4c0a3d50da9e0acae355c5eaf
|
||||||
|
unknown=bb48308f976b266e3ab39bbf9af84521959bd9c295d3c763690cf41f8df2a626
|
||||||
|
riscv64=d76e6fbe348ff20c2931bb7f101e49379648e026de95dd37f96e00ce1909dcf7
|
||||||
|
unknown=dd807544365f6dc187cbe6de0806adce2ea9de3e7124717d1d8e8b7a18b77b64
|
||||||
|
s390x=b815fadf80495594eb6296a6af0bc647ae5f193e0044e07acec7e5b378c9ce2d
|
||||||
|
unknown=74681be74a280a88abb53ff1e048eb1fb624b30d0066730df6d8afd02ba82e01
|
||||||
@@ -174,7 +174,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:v0.76.2
|
image: signoz/signoz:v0.78.0
|
||||||
command:
|
command:
|
||||||
- --config=/root/config/prometheus.yml
|
- --config=/root/config/prometheus.yml
|
||||||
- --use-logs-new-schema=true
|
- --use-logs-new-schema=true
|
||||||
@@ -208,7 +208,7 @@ services:
|
|||||||
retries: 3
|
retries: 3
|
||||||
otel-collector:
|
otel-collector:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz-otel-collector:v0.111.34
|
image: signoz/signoz-otel-collector:v0.111.38
|
||||||
command:
|
command:
|
||||||
- --config=/etc/otel-collector-config.yaml
|
- --config=/etc/otel-collector-config.yaml
|
||||||
- --manager-config=/etc/manager-config.yaml
|
- --manager-config=/etc/manager-config.yaml
|
||||||
@@ -232,7 +232,7 @@ services:
|
|||||||
- signoz
|
- signoz
|
||||||
schema-migrator:
|
schema-migrator:
|
||||||
!!merge <<: *common
|
!!merge <<: *common
|
||||||
image: signoz/signoz-schema-migrator:v0.111.34
|
image: signoz/signoz-schema-migrator:v0.111.38
|
||||||
deploy:
|
deploy:
|
||||||
restart_policy:
|
restart_policy:
|
||||||
condition: on-failure
|
condition: on-failure
|
||||||
|
|||||||
@@ -110,7 +110,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:v0.76.2
|
image: signoz/signoz:v0.78.0
|
||||||
command:
|
command:
|
||||||
- --config=/root/config/prometheus.yml
|
- --config=/root/config/prometheus.yml
|
||||||
- --use-logs-new-schema=true
|
- --use-logs-new-schema=true
|
||||||
@@ -143,7 +143,7 @@ services:
|
|||||||
retries: 3
|
retries: 3
|
||||||
otel-collector:
|
otel-collector:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz-otel-collector:v0.111.34
|
image: signoz/signoz-otel-collector:v0.111.38
|
||||||
command:
|
command:
|
||||||
- --config=/etc/otel-collector-config.yaml
|
- --config=/etc/otel-collector-config.yaml
|
||||||
- --manager-config=/etc/manager-config.yaml
|
- --manager-config=/etc/manager-config.yaml
|
||||||
@@ -167,7 +167,7 @@ services:
|
|||||||
- signoz
|
- signoz
|
||||||
schema-migrator:
|
schema-migrator:
|
||||||
!!merge <<: *common
|
!!merge <<: *common
|
||||||
image: signoz/signoz-schema-migrator:v0.111.34
|
image: signoz/signoz-schema-migrator:v0.111.38
|
||||||
deploy:
|
deploy:
|
||||||
restart_policy:
|
restart_policy:
|
||||||
condition: on-failure
|
condition: on-failure
|
||||||
|
|||||||
@@ -177,7 +177,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:${VERSION:-v0.76.2}
|
image: signoz/signoz:${VERSION:-v0.78.0}
|
||||||
container_name: signoz
|
container_name: signoz
|
||||||
command:
|
command:
|
||||||
- --config=/root/config/prometheus.yml
|
- --config=/root/config/prometheus.yml
|
||||||
@@ -212,7 +212,7 @@ services:
|
|||||||
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
|
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
|
||||||
otel-collector:
|
otel-collector:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: signoz-otel-collector
|
container_name: signoz-otel-collector
|
||||||
command:
|
command:
|
||||||
- --config=/etc/otel-collector-config.yaml
|
- --config=/etc/otel-collector-config.yaml
|
||||||
@@ -238,7 +238,7 @@ services:
|
|||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
schema-migrator-sync:
|
schema-migrator-sync:
|
||||||
!!merge <<: *common
|
!!merge <<: *common
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: schema-migrator-sync
|
container_name: schema-migrator-sync
|
||||||
command:
|
command:
|
||||||
- sync
|
- sync
|
||||||
@@ -249,7 +249,7 @@ services:
|
|||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
schema-migrator-async:
|
schema-migrator-async:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: schema-migrator-async
|
container_name: schema-migrator-async
|
||||||
command:
|
command:
|
||||||
- async
|
- async
|
||||||
|
|||||||
@@ -110,7 +110,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:${VERSION:-v0.76.2}
|
image: signoz/signoz:${VERSION:-v0.78.0}
|
||||||
container_name: signoz
|
container_name: signoz
|
||||||
command:
|
command:
|
||||||
- --config=/root/config/prometheus.yml
|
- --config=/root/config/prometheus.yml
|
||||||
@@ -146,7 +146,7 @@ services:
|
|||||||
retries: 3
|
retries: 3
|
||||||
otel-collector:
|
otel-collector:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: signoz-otel-collector
|
container_name: signoz-otel-collector
|
||||||
command:
|
command:
|
||||||
- --config=/etc/otel-collector-config.yaml
|
- --config=/etc/otel-collector-config.yaml
|
||||||
@@ -168,7 +168,7 @@ services:
|
|||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
schema-migrator-sync:
|
schema-migrator-sync:
|
||||||
!!merge <<: *common
|
!!merge <<: *common
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: schema-migrator-sync
|
container_name: schema-migrator-sync
|
||||||
command:
|
command:
|
||||||
- sync
|
- sync
|
||||||
@@ -180,7 +180,7 @@ services:
|
|||||||
restart: on-failure
|
restart: on-failure
|
||||||
schema-migrator-async:
|
schema-migrator-async:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: schema-migrator-async
|
container_name: schema-migrator-async
|
||||||
command:
|
command:
|
||||||
- async
|
- async
|
||||||
|
|||||||
@@ -110,7 +110,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:${VERSION:-v0.76.2}
|
image: signoz/signoz:${VERSION:-v0.78.0}
|
||||||
container_name: signoz
|
container_name: signoz
|
||||||
command:
|
command:
|
||||||
- --config=/root/config/prometheus.yml
|
- --config=/root/config/prometheus.yml
|
||||||
@@ -144,7 +144,7 @@ services:
|
|||||||
retries: 3
|
retries: 3
|
||||||
otel-collector:
|
otel-collector:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: signoz-otel-collector
|
container_name: signoz-otel-collector
|
||||||
command:
|
command:
|
||||||
- --config=/etc/otel-collector-config.yaml
|
- --config=/etc/otel-collector-config.yaml
|
||||||
@@ -166,7 +166,7 @@ services:
|
|||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
schema-migrator-sync:
|
schema-migrator-sync:
|
||||||
!!merge <<: *common
|
!!merge <<: *common
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: schema-migrator-sync
|
container_name: schema-migrator-sync
|
||||||
command:
|
command:
|
||||||
- sync
|
- sync
|
||||||
@@ -178,7 +178,7 @@ services:
|
|||||||
restart: on-failure
|
restart: on-failure
|
||||||
schema-migrator-async:
|
schema-migrator-async:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
|
||||||
container_name: schema-migrator-async
|
container_name: schema-migrator-async
|
||||||
command:
|
command:
|
||||||
- async
|
- async
|
||||||
|
|||||||
@@ -18,4 +18,4 @@ COPY frontend/build/ /etc/signoz/web/
|
|||||||
RUN chmod 755 /root /root/signoz
|
RUN chmod 755 /root /root/signoz
|
||||||
|
|
||||||
ENTRYPOINT ["./signoz"]
|
ENTRYPOINT ["./signoz"]
|
||||||
CMD ["-config", "/root/config/prometheus.yml"]
|
CMD ["-config", "/root/config/prometheus.yml"]
|
||||||
22
ee/query-service/Dockerfile.multi-arch
Normal file
22
ee/query-service/Dockerfile.multi-arch
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"
|
||||||
|
|
||||||
|
FROM alpine@sha256:${ALPINE_SHA}
|
||||||
|
LABEL maintainer="signoz"
|
||||||
|
WORKDIR /root
|
||||||
|
|
||||||
|
ARG OS="linux"
|
||||||
|
ARG ARCH
|
||||||
|
|
||||||
|
RUN apk update && \
|
||||||
|
apk add ca-certificates && \
|
||||||
|
rm -rf /var/cache/apk/*
|
||||||
|
|
||||||
|
COPY ./target/${OS}-${ARCH}/signoz /root/signoz
|
||||||
|
COPY ./conf/prometheus.yml /root/config/prometheus.yml
|
||||||
|
COPY ./templates/email /root/templates
|
||||||
|
COPY frontend/build/ /etc/signoz/web/
|
||||||
|
|
||||||
|
RUN chmod 755 /root /root/signoz
|
||||||
|
|
||||||
|
ENTRYPOINT ["./signoz"]
|
||||||
|
CMD ["-config", "/root/config/prometheus.yml"]
|
||||||
@@ -28,11 +28,10 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
|
|||||||
}
|
}
|
||||||
|
|
||||||
dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
|
dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
|
||||||
Reader: dp.reader,
|
Reader: dp.reader,
|
||||||
Cache: dp.cache,
|
Cache: dp.cache,
|
||||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||||
FluxInterval: dp.fluxInterval,
|
FluxInterval: dp.fluxInterval,
|
||||||
FeatureLookup: dp.ff,
|
|
||||||
})
|
})
|
||||||
|
|
||||||
return dp
|
return dp
|
||||||
|
|||||||
@@ -28,11 +28,10 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
|
|||||||
}
|
}
|
||||||
|
|
||||||
hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
|
hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
|
||||||
Reader: hp.reader,
|
Reader: hp.reader,
|
||||||
Cache: hp.cache,
|
Cache: hp.cache,
|
||||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||||
FluxInterval: hp.fluxInterval,
|
FluxInterval: hp.fluxInterval,
|
||||||
FeatureLookup: hp.ff,
|
|
||||||
})
|
})
|
||||||
|
|
||||||
return hp
|
return hp
|
||||||
|
|||||||
@@ -38,12 +38,6 @@ func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericPr
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
|
|
||||||
return func(p T) {
|
|
||||||
p.GetBaseSeasonalProvider().ff = ff
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
|
func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
|
||||||
return func(p T) {
|
return func(p T) {
|
||||||
p.GetBaseSeasonalProvider().reader = reader
|
p.GetBaseSeasonalProvider().reader = reader
|
||||||
@@ -56,7 +50,6 @@ type BaseSeasonalProvider struct {
|
|||||||
fluxInterval time.Duration
|
fluxInterval time.Duration
|
||||||
cache cache.Cache
|
cache cache.Cache
|
||||||
keyGenerator cache.KeyGenerator
|
keyGenerator cache.KeyGenerator
|
||||||
ff interfaces.FeatureLookup
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
|
func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
|
||||||
|
|||||||
@@ -27,11 +27,10 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
|
|||||||
}
|
}
|
||||||
|
|
||||||
wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
|
wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
|
||||||
Reader: wp.reader,
|
Reader: wp.reader,
|
||||||
Cache: wp.cache,
|
Cache: wp.cache,
|
||||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||||
FluxInterval: wp.fluxInterval,
|
FluxInterval: wp.fluxInterval,
|
||||||
FeatureLookup: wp.ff,
|
|
||||||
})
|
})
|
||||||
|
|
||||||
return wp
|
return wp
|
||||||
|
|||||||
@@ -11,6 +11,8 @@ import (
|
|||||||
"github.com/SigNoz/signoz/ee/query-service/license"
|
"github.com/SigNoz/signoz/ee/query-service/license"
|
||||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||||
|
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||||
|
preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
|
||||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||||
@@ -21,6 +23,7 @@ import (
|
|||||||
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||||
"github.com/SigNoz/signoz/pkg/signoz"
|
"github.com/SigNoz/signoz/pkg/signoz"
|
||||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||||
"github.com/SigNoz/signoz/pkg/version"
|
"github.com/SigNoz/signoz/pkg/version"
|
||||||
"github.com/gorilla/mux"
|
"github.com/gorilla/mux"
|
||||||
)
|
)
|
||||||
@@ -54,6 +57,7 @@ type APIHandler struct {
|
|||||||
|
|
||||||
// NewAPIHandler returns an APIHandler
|
// NewAPIHandler returns an APIHandler
|
||||||
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
|
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
|
||||||
|
preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
|
||||||
|
|
||||||
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
|
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
|
||||||
Reader: opts.DataConnector,
|
Reader: opts.DataConnector,
|
||||||
@@ -71,6 +75,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
|
|||||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||||
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
||||||
Signoz: signoz,
|
Signoz: signoz,
|
||||||
|
Preference: preference,
|
||||||
})
|
})
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -157,7 +162,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
|
|||||||
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
|
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
|
||||||
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
|
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
|
||||||
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
|
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
|
||||||
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
|
|
||||||
|
|
||||||
// PAT APIs
|
// PAT APIs
|
||||||
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
|
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
|
||||||
|
|||||||
@@ -10,9 +10,12 @@ import (
|
|||||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||||
"github.com/SigNoz/signoz/ee/types"
|
"github.com/SigNoz/signoz/ee/types"
|
||||||
eeTypes "github.com/SigNoz/signoz/ee/types"
|
eeTypes "github.com/SigNoz/signoz/ee/types"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||||
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
|
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/gorilla/mux"
|
"github.com/gorilla/mux"
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
)
|
)
|
||||||
@@ -93,7 +96,12 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
req.UpdatedByUserID = user.ID
|
req.UpdatedByUserID = user.ID
|
||||||
id := mux.Vars(r)["id"]
|
idStr := mux.Vars(r)["id"]
|
||||||
|
id, err := valuer.NewUUID(idStr)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||||
|
return
|
||||||
|
}
|
||||||
req.UpdatedAt = time.Now()
|
req.UpdatedAt = time.Now()
|
||||||
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
|
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
|
||||||
var apierr basemodel.BaseApiError
|
var apierr basemodel.BaseApiError
|
||||||
@@ -126,7 +134,12 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
|
|||||||
|
|
||||||
func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
|
func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
id := mux.Vars(r)["id"]
|
idStr := mux.Vars(r)["id"]
|
||||||
|
id, err := valuer.NewUUID(idStr)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||||
|
return
|
||||||
|
}
|
||||||
user, err := auth.GetUserFromReqContext(r.Context())
|
user, err := auth.GetUserFromReqContext(r.Context())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
RespondError(w, &model.ApiError{
|
RespondError(w, &model.ApiError{
|
||||||
@@ -136,7 +149,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info("Revoke PAT with id", zap.String("id", id))
|
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
|
||||||
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
|
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
|
||||||
RespondError(w, apierr, nil)
|
RespondError(w, apierr, nil)
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -88,28 +88,24 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
|||||||
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
|
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
|
||||||
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
||||||
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
|
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
|
||||||
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
|
|
||||||
)
|
)
|
||||||
case anomaly.SeasonalityDaily:
|
case anomaly.SeasonalityDaily:
|
||||||
provider = anomaly.NewDailyProvider(
|
provider = anomaly.NewDailyProvider(
|
||||||
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
|
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
|
||||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||||
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
|
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
|
||||||
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
|
|
||||||
)
|
)
|
||||||
case anomaly.SeasonalityHourly:
|
case anomaly.SeasonalityHourly:
|
||||||
provider = anomaly.NewHourlyProvider(
|
provider = anomaly.NewHourlyProvider(
|
||||||
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
|
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
|
||||||
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
||||||
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
|
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
|
||||||
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
|
|
||||||
)
|
)
|
||||||
default:
|
default:
|
||||||
provider = anomaly.NewDailyProvider(
|
provider = anomaly.NewDailyProvider(
|
||||||
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
|
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
|
||||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||||
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
|
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
|
||||||
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
|
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
|
||||||
|
|||||||
@@ -1,33 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/app/db"
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
|
||||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
|
||||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
|
||||||
"go.uber.org/zap"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
|
|
||||||
|
|
||||||
if !ah.CheckFeature(basemodel.SmartTraceDetail) {
|
|
||||||
zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
|
|
||||||
ah.APIHandler.SearchTraces(w, r)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
|
|
||||||
if err != nil {
|
|
||||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
|
|
||||||
if ah.HandleError(w, err, http.StatusBadRequest) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
ah.WriteJSON(w, r, result)
|
|
||||||
|
|
||||||
}
|
|
||||||
@@ -5,36 +5,33 @@ import (
|
|||||||
|
|
||||||
"github.com/ClickHouse/clickhouse-go/v2"
|
"github.com/ClickHouse/clickhouse-go/v2"
|
||||||
|
|
||||||
"github.com/jmoiron/sqlx"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/cache"
|
"github.com/SigNoz/signoz/pkg/cache"
|
||||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||||
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ClickhouseReader struct {
|
type ClickhouseReader struct {
|
||||||
conn clickhouse.Conn
|
conn clickhouse.Conn
|
||||||
appdb *sqlx.DB
|
appdb sqlstore.SQLStore
|
||||||
*basechr.ClickHouseReader
|
*basechr.ClickHouseReader
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewDataConnector(
|
func NewDataConnector(
|
||||||
localDB *sqlx.DB,
|
sqlDB sqlstore.SQLStore,
|
||||||
telemetryStore telemetrystore.TelemetryStore,
|
telemetryStore telemetrystore.TelemetryStore,
|
||||||
prometheus prometheus.Prometheus,
|
prometheus prometheus.Prometheus,
|
||||||
lm interfaces.FeatureLookup,
|
|
||||||
cluster string,
|
cluster string,
|
||||||
useLogsNewSchema bool,
|
useLogsNewSchema bool,
|
||||||
useTraceNewSchema bool,
|
useTraceNewSchema bool,
|
||||||
fluxIntervalForTraceDetail time.Duration,
|
fluxIntervalForTraceDetail time.Duration,
|
||||||
cache cache.Cache,
|
cache cache.Cache,
|
||||||
) *ClickhouseReader {
|
) *ClickhouseReader {
|
||||||
chReader := basechr.NewReader(localDB, telemetryStore, prometheus, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
|
chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
|
||||||
return &ClickhouseReader{
|
return &ClickhouseReader{
|
||||||
conn: telemetryStore.ClickhouseDB(),
|
conn: telemetryStore.ClickhouseDB(),
|
||||||
appdb: localDB,
|
appdb: sqlDB,
|
||||||
ClickHouseReader: chReader,
|
ClickHouseReader: chReader,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -44,7 +44,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||||
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||||
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
|
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
|
||||||
@@ -116,10 +115,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
|
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -144,10 +139,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
reader := db.NewDataConnector(
|
reader := db.NewDataConnector(
|
||||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
serverOptions.SigNoz.SQLStore,
|
||||||
serverOptions.SigNoz.TelemetryStore,
|
serverOptions.SigNoz.TelemetryStore,
|
||||||
serverOptions.SigNoz.Prometheus,
|
serverOptions.SigNoz.Prometheus,
|
||||||
lm,
|
|
||||||
serverOptions.Cluster,
|
serverOptions.Cluster,
|
||||||
serverOptions.UseLogsNewSchema,
|
serverOptions.UseLogsNewSchema,
|
||||||
serverOptions.UseTraceNewSchema,
|
serverOptions.UseTraceNewSchema,
|
||||||
@@ -178,7 +172,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
|||||||
reader,
|
reader,
|
||||||
c,
|
c,
|
||||||
serverOptions.DisableRules,
|
serverOptions.DisableRules,
|
||||||
lm,
|
|
||||||
serverOptions.UseLogsNewSchema,
|
serverOptions.UseLogsNewSchema,
|
||||||
serverOptions.UseTraceNewSchema,
|
serverOptions.UseTraceNewSchema,
|
||||||
serverOptions.SigNoz.Alertmanager,
|
serverOptions.SigNoz.Alertmanager,
|
||||||
@@ -538,7 +531,6 @@ func makeRulesManager(
|
|||||||
ch baseint.Reader,
|
ch baseint.Reader,
|
||||||
cache cache.Cache,
|
cache cache.Cache,
|
||||||
disableRules bool,
|
disableRules bool,
|
||||||
fm baseint.FeatureLookup,
|
|
||||||
useLogsNewSchema bool,
|
useLogsNewSchema bool,
|
||||||
useTraceNewSchema bool,
|
useTraceNewSchema bool,
|
||||||
alertmanager alertmanager.Alertmanager,
|
alertmanager alertmanager.Alertmanager,
|
||||||
@@ -555,7 +547,6 @@ func makeRulesManager(
|
|||||||
Context: context.Background(),
|
Context: context.Background(),
|
||||||
Logger: zap.L(),
|
Logger: zap.L(),
|
||||||
DisableRules: disableRules,
|
DisableRules: disableRules,
|
||||||
FeatureFlags: fm,
|
|
||||||
Reader: ch,
|
Reader: ch,
|
||||||
Cache: cache,
|
Cache: cache,
|
||||||
EvalDelay: baseconst.GetEvalDelay(),
|
EvalDelay: baseconst.GetEvalDelay(),
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import (
|
|||||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
ossTypes "github.com/SigNoz/signoz/pkg/types"
|
ossTypes "github.com/SigNoz/signoz/pkg/types"
|
||||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/uptrace/bun"
|
"github.com/uptrace/bun"
|
||||||
)
|
)
|
||||||
@@ -36,10 +37,10 @@ type ModelDao interface {
|
|||||||
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)
|
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)
|
||||||
|
|
||||||
CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
|
CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
|
||||||
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id string) basemodel.BaseApiError
|
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
|
||||||
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
|
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
|
||||||
GetPATByID(ctx context.Context, orgID string, id string) (*types.GettablePAT, basemodel.BaseApiError)
|
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
|
||||||
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
|
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
|
||||||
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
|
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
|
||||||
RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError
|
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,12 +9,14 @@ import (
|
|||||||
"github.com/SigNoz/signoz/ee/types"
|
"github.com/SigNoz/signoz/ee/types"
|
||||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
ossTypes "github.com/SigNoz/signoz/pkg/types"
|
ossTypes "github.com/SigNoz/signoz/pkg/types"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
|
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
|
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
|
||||||
p.StorablePersonalAccessToken.OrgID = orgID
|
p.StorablePersonalAccessToken.OrgID = orgID
|
||||||
|
p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
|
||||||
_, err := m.DB().NewInsert().
|
_, err := m.DB().NewInsert().
|
||||||
Model(&p.StorablePersonalAccessToken).
|
Model(&p.StorablePersonalAccessToken).
|
||||||
Exec(ctx)
|
Exec(ctx)
|
||||||
@@ -46,11 +48,11 @@ func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.Gettable
|
|||||||
return p, nil
|
return p, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id string) basemodel.BaseApiError {
|
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
|
||||||
_, err := m.DB().NewUpdate().
|
_, err := m.DB().NewUpdate().
|
||||||
Model(&p.StorablePersonalAccessToken).
|
Model(&p.StorablePersonalAccessToken).
|
||||||
Column("role", "name", "updated_at", "updated_by_user_id").
|
Column("role", "name", "updated_at", "updated_by_user_id").
|
||||||
Where("id = ?", id).
|
Where("id = ?", id.StringValue()).
|
||||||
Where("org_id = ?", orgID).
|
Where("org_id = ?", orgID).
|
||||||
Where("revoked = false").
|
Where("revoked = false").
|
||||||
Exec(ctx)
|
Exec(ctx)
|
||||||
@@ -127,14 +129,14 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
|
|||||||
return patsWithUsers, nil
|
return patsWithUsers, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError {
|
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
|
||||||
updatedAt := time.Now().Unix()
|
updatedAt := time.Now().Unix()
|
||||||
_, err := m.DB().NewUpdate().
|
_, err := m.DB().NewUpdate().
|
||||||
Model(&types.StorablePersonalAccessToken{}).
|
Model(&types.StorablePersonalAccessToken{}).
|
||||||
Set("revoked = ?", true).
|
Set("revoked = ?", true).
|
||||||
Set("updated_by_user_id = ?", userID).
|
Set("updated_by_user_id = ?", userID).
|
||||||
Set("updated_at = ?", updatedAt).
|
Set("updated_at = ?", updatedAt).
|
||||||
Where("id = ?", id).
|
Where("id = ?", id.StringValue()).
|
||||||
Where("org_id = ?", orgID).
|
Where("org_id = ?", orgID).
|
||||||
Exec(ctx)
|
Exec(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -169,12 +171,12 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT
|
|||||||
return &patWithUser, nil
|
return &patWithUser, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*types.GettablePAT, basemodel.BaseApiError) {
|
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
|
||||||
pats := []types.StorablePersonalAccessToken{}
|
pats := []types.StorablePersonalAccessToken{}
|
||||||
|
|
||||||
if err := m.DB().NewSelect().
|
if err := m.DB().NewSelect().
|
||||||
Model(&pats).
|
Model(&pats).
|
||||||
Where("id = ?", id).
|
Where("id = ?", id.StringValue()).
|
||||||
Where("org_id = ?", orgID).
|
Where("org_id = ?", orgID).
|
||||||
Where("revoked = false").
|
Where("revoked = false").
|
||||||
Scan(ctx); err != nil {
|
Scan(ctx); err != nil {
|
||||||
|
|||||||
@@ -157,8 +157,6 @@ func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
switch planName {
|
switch planName {
|
||||||
case PlanNameTeams:
|
|
||||||
features = append(features, ProPlan...)
|
|
||||||
case PlanNameEnterprise:
|
case PlanNameEnterprise:
|
||||||
features = append(features, EnterprisePlan...)
|
features = append(features, EnterprisePlan...)
|
||||||
case PlanNameBasic:
|
case PlanNameBasic:
|
||||||
|
|||||||
@@ -74,21 +74,21 @@ func TestNewLicenseV3(t *testing.T) {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Parse the entire license properly",
|
name: "Parse the entire license properly",
|
||||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
|
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||||
pass: true,
|
pass: true,
|
||||||
expected: &LicenseV3{
|
expected: &LicenseV3{
|
||||||
ID: "does-not-matter",
|
ID: "does-not-matter",
|
||||||
Key: "does-not-matter-key",
|
Key: "does-not-matter-key",
|
||||||
Data: map[string]interface{}{
|
Data: map[string]interface{}{
|
||||||
"plan": map[string]interface{}{
|
"plan": map[string]interface{}{
|
||||||
"name": "TEAMS",
|
"name": "ENTERPRISE",
|
||||||
},
|
},
|
||||||
"category": "FREE",
|
"category": "FREE",
|
||||||
"status": "ACTIVE",
|
"status": "ACTIVE",
|
||||||
"valid_from": float64(1730899309),
|
"valid_from": float64(1730899309),
|
||||||
"valid_until": float64(-1),
|
"valid_until": float64(-1),
|
||||||
},
|
},
|
||||||
PlanName: PlanNameTeams,
|
PlanName: PlanNameEnterprise,
|
||||||
ValidFrom: 1730899309,
|
ValidFrom: 1730899309,
|
||||||
ValidUntil: -1,
|
ValidUntil: -1,
|
||||||
Status: "ACTIVE",
|
Status: "ACTIVE",
|
||||||
@@ -98,14 +98,14 @@ func TestNewLicenseV3(t *testing.T) {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Fallback to basic plan if license status is invalid",
|
name: "Fallback to basic plan if license status is invalid",
|
||||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
|
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
|
||||||
pass: true,
|
pass: true,
|
||||||
expected: &LicenseV3{
|
expected: &LicenseV3{
|
||||||
ID: "does-not-matter",
|
ID: "does-not-matter",
|
||||||
Key: "does-not-matter-key",
|
Key: "does-not-matter-key",
|
||||||
Data: map[string]interface{}{
|
Data: map[string]interface{}{
|
||||||
"plan": map[string]interface{}{
|
"plan": map[string]interface{}{
|
||||||
"name": "TEAMS",
|
"name": "ENTERPRISE",
|
||||||
},
|
},
|
||||||
"category": "FREE",
|
"category": "FREE",
|
||||||
"status": "INVALID",
|
"status": "INVALID",
|
||||||
@@ -122,21 +122,21 @@ func TestNewLicenseV3(t *testing.T) {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "fallback states for validFrom and validUntil",
|
name: "fallback states for validFrom and validUntil",
|
||||||
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
|
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
|
||||||
pass: true,
|
pass: true,
|
||||||
expected: &LicenseV3{
|
expected: &LicenseV3{
|
||||||
ID: "does-not-matter",
|
ID: "does-not-matter",
|
||||||
Key: "does-not-matter-key",
|
Key: "does-not-matter-key",
|
||||||
Data: map[string]interface{}{
|
Data: map[string]interface{}{
|
||||||
"plan": map[string]interface{}{
|
"plan": map[string]interface{}{
|
||||||
"name": "TEAMS",
|
"name": "ENTERPRISE",
|
||||||
},
|
},
|
||||||
"valid_from": 1234.456,
|
"valid_from": 1234.456,
|
||||||
"valid_until": 5678.567,
|
"valid_until": 5678.567,
|
||||||
"category": "FREE",
|
"category": "FREE",
|
||||||
"status": "ACTIVE",
|
"status": "ACTIVE",
|
||||||
},
|
},
|
||||||
PlanName: PlanNameTeams,
|
PlanName: PlanNameEnterprise,
|
||||||
ValidFrom: 1234,
|
ValidFrom: 1234,
|
||||||
ValidUntil: 5678,
|
ValidUntil: 5678,
|
||||||
Status: "ACTIVE",
|
Status: "ACTIVE",
|
||||||
|
|||||||
@@ -1,30 +1,26 @@
|
|||||||
package model
|
package model
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
|
||||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
const SSO = "SSO"
|
const SSO = "SSO"
|
||||||
const Basic = "BASIC_PLAN"
|
const Basic = "BASIC_PLAN"
|
||||||
const Pro = "PRO_PLAN"
|
|
||||||
const Enterprise = "ENTERPRISE_PLAN"
|
const Enterprise = "ENTERPRISE_PLAN"
|
||||||
|
|
||||||
var (
|
var (
|
||||||
PlanNameEnterprise = "ENTERPRISE"
|
PlanNameEnterprise = "ENTERPRISE"
|
||||||
PlanNameTeams = "TEAMS"
|
|
||||||
PlanNameBasic = "BASIC"
|
PlanNameBasic = "BASIC"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameTeams: Pro, PlanNameEnterprise: Enterprise}
|
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
LicenseStatusInvalid = "INVALID"
|
LicenseStatusInvalid = "INVALID"
|
||||||
)
|
)
|
||||||
|
|
||||||
const DisableUpsell = "DISABLE_UPSELL"
|
|
||||||
const Onboarding = "ONBOARDING"
|
const Onboarding = "ONBOARDING"
|
||||||
const ChatSupport = "CHAT_SUPPORT"
|
const ChatSupport = "CHAT_SUPPORT"
|
||||||
const Gateway = "GATEWAY"
|
const Gateway = "GATEWAY"
|
||||||
@@ -38,90 +34,6 @@ var BasicPlan = basemodel.FeatureSet{
|
|||||||
UsageLimit: -1,
|
UsageLimit: -1,
|
||||||
Route: "",
|
Route: "",
|
||||||
},
|
},
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.OSS,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: DisableUpsell,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.SmartTraceDetail,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.CustomMetricsFunction,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.QueryBuilderPanels,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.QueryBuilderAlerts,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelSlack,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelWebhook,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelPagerduty,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelOpsgenie,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelEmail,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelMsTeams,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
basemodel.Feature{
|
||||||
Name: basemodel.UseSpanMetrics,
|
Name: basemodel.UseSpanMetrics,
|
||||||
Active: false,
|
Active: false,
|
||||||
@@ -150,142 +62,6 @@ var BasicPlan = basemodel.FeatureSet{
|
|||||||
UsageLimit: -1,
|
UsageLimit: -1,
|
||||||
Route: "",
|
Route: "",
|
||||||
},
|
},
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.HostsInfraMonitoring,
|
|
||||||
Active: constants.EnableHostsInfraMonitoring(),
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.TraceFunnels,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
var ProPlan = basemodel.FeatureSet{
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: SSO,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.OSS,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.SmartTraceDetail,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.CustomMetricsFunction,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.QueryBuilderPanels,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.QueryBuilderAlerts,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelSlack,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelWebhook,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelPagerduty,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelOpsgenie,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelEmail,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelMsTeams,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.UseSpanMetrics,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: Gateway,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: PremiumSupport,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AnomalyDetection,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.HostsInfraMonitoring,
|
|
||||||
Active: constants.EnableHostsInfraMonitoring(),
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
basemodel.Feature{
|
||||||
Name: basemodel.TraceFunnels,
|
Name: basemodel.TraceFunnels,
|
||||||
Active: false,
|
Active: false,
|
||||||
@@ -303,83 +79,6 @@ var EnterprisePlan = basemodel.FeatureSet{
|
|||||||
UsageLimit: -1,
|
UsageLimit: -1,
|
||||||
Route: "",
|
Route: "",
|
||||||
},
|
},
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.OSS,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.SmartTraceDetail,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.CustomMetricsFunction,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.QueryBuilderPanels,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.QueryBuilderAlerts,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelSlack,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelWebhook,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelPagerduty,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelOpsgenie,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelEmail,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.AlertChannelMsTeams,
|
|
||||||
Active: true,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
basemodel.Feature{
|
||||||
Name: basemodel.UseSpanMetrics,
|
Name: basemodel.UseSpanMetrics,
|
||||||
Active: false,
|
Active: false,
|
||||||
@@ -422,13 +121,6 @@ var EnterprisePlan = basemodel.FeatureSet{
|
|||||||
UsageLimit: -1,
|
UsageLimit: -1,
|
||||||
Route: "",
|
Route: "",
|
||||||
},
|
},
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.HostsInfraMonitoring,
|
|
||||||
Active: constants.EnableHostsInfraMonitoring(),
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
basemodel.Feature{
|
basemodel.Feature{
|
||||||
Name: basemodel.TraceFunnels,
|
Name: basemodel.TraceFunnels,
|
||||||
Active: false,
|
Active: false,
|
||||||
|
|||||||
@@ -53,7 +53,6 @@ type AnomalyRule struct {
|
|||||||
func NewAnomalyRule(
|
func NewAnomalyRule(
|
||||||
id string,
|
id string,
|
||||||
p *baserules.PostableRule,
|
p *baserules.PostableRule,
|
||||||
featureFlags interfaces.FeatureLookup,
|
|
||||||
reader interfaces.Reader,
|
reader interfaces.Reader,
|
||||||
cache cache.Cache,
|
cache cache.Cache,
|
||||||
opts ...baserules.RuleOption,
|
opts ...baserules.RuleOption,
|
||||||
@@ -89,10 +88,9 @@ func NewAnomalyRule(
|
|||||||
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))
|
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))
|
||||||
|
|
||||||
querierOptsV2 := querierV2.QuerierOptions{
|
querierOptsV2 := querierV2.QuerierOptions{
|
||||||
Reader: reader,
|
Reader: reader,
|
||||||
Cache: cache,
|
Cache: cache,
|
||||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||||
FeatureLookup: featureFlags,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
|
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
|
||||||
@@ -102,21 +100,18 @@ func NewAnomalyRule(
|
|||||||
anomaly.WithCache[*anomaly.HourlyProvider](cache),
|
anomaly.WithCache[*anomaly.HourlyProvider](cache),
|
||||||
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
||||||
anomaly.WithReader[*anomaly.HourlyProvider](reader),
|
anomaly.WithReader[*anomaly.HourlyProvider](reader),
|
||||||
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
|
|
||||||
)
|
)
|
||||||
} else if t.seasonality == anomaly.SeasonalityDaily {
|
} else if t.seasonality == anomaly.SeasonalityDaily {
|
||||||
t.provider = anomaly.NewDailyProvider(
|
t.provider = anomaly.NewDailyProvider(
|
||||||
anomaly.WithCache[*anomaly.DailyProvider](cache),
|
anomaly.WithCache[*anomaly.DailyProvider](cache),
|
||||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||||
anomaly.WithReader[*anomaly.DailyProvider](reader),
|
anomaly.WithReader[*anomaly.DailyProvider](reader),
|
||||||
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
|
|
||||||
)
|
)
|
||||||
} else if t.seasonality == anomaly.SeasonalityWeekly {
|
} else if t.seasonality == anomaly.SeasonalityWeekly {
|
||||||
t.provider = anomaly.NewWeeklyProvider(
|
t.provider = anomaly.NewWeeklyProvider(
|
||||||
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
|
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
|
||||||
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
||||||
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
|
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
|
||||||
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
return &t, nil
|
return &t, nil
|
||||||
|
|||||||
@@ -23,7 +23,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
|||||||
tr, err := baserules.NewThresholdRule(
|
tr, err := baserules.NewThresholdRule(
|
||||||
ruleId,
|
ruleId,
|
||||||
opts.Rule,
|
opts.Rule,
|
||||||
opts.FF,
|
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.UseLogsNewSchema,
|
opts.UseLogsNewSchema,
|
||||||
opts.UseTraceNewSchema,
|
opts.UseTraceNewSchema,
|
||||||
@@ -66,7 +65,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
|||||||
ar, err := NewAnomalyRule(
|
ar, err := NewAnomalyRule(
|
||||||
ruleId,
|
ruleId,
|
||||||
opts.Rule,
|
opts.Rule,
|
||||||
opts.FF,
|
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.Cache,
|
opts.Cache,
|
||||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||||
@@ -123,7 +121,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
rule, err = baserules.NewThresholdRule(
|
rule, err = baserules.NewThresholdRule(
|
||||||
alertname,
|
alertname,
|
||||||
parsedRule,
|
parsedRule,
|
||||||
opts.FF,
|
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.UseLogsNewSchema,
|
opts.UseLogsNewSchema,
|
||||||
opts.UseTraceNewSchema,
|
opts.UseTraceNewSchema,
|
||||||
@@ -160,7 +157,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
rule, err = NewAnomalyRule(
|
rule, err = NewAnomalyRule(
|
||||||
alertname,
|
alertname,
|
||||||
parsedRule,
|
parsedRule,
|
||||||
opts.FF,
|
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.Cache,
|
opts.Cache,
|
||||||
baserules.WithSendAlways(),
|
baserules.WithSendAlways(),
|
||||||
|
|||||||
@@ -4,10 +4,28 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"reflect"
|
"reflect"
|
||||||
|
"slices"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/uptrace/bun"
|
"github.com/uptrace/bun"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
Identity = "id"
|
||||||
|
Integer = "bigint"
|
||||||
|
Text = "text"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
Org = "org"
|
||||||
|
User = "user"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
||||||
|
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||||
|
)
|
||||||
|
|
||||||
type dialect struct {
|
type dialect struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -175,7 +193,10 @@ func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table inte
|
|||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
|
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, references []string, cb func(context.Context) error) error {
|
||||||
|
if len(references) == 0 {
|
||||||
|
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
|
||||||
|
}
|
||||||
exists, err := dialect.TableExists(ctx, bun, newModel)
|
exists, err := dialect.TableExists(ctx, bun, newModel)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -184,12 +205,25 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = bun.
|
var fkReferences []string
|
||||||
|
for _, reference := range references {
|
||||||
|
if reference == Org && !slices.Contains(fkReferences, OrgReference) {
|
||||||
|
fkReferences = append(fkReferences, OrgReference)
|
||||||
|
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
|
||||||
|
fkReferences = append(fkReferences, UserReference)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
createTable := bun.
|
||||||
NewCreateTable().
|
NewCreateTable().
|
||||||
IfNotExists().
|
IfNotExists().
|
||||||
Model(newModel).
|
Model(newModel)
|
||||||
Exec(ctx)
|
|
||||||
|
|
||||||
|
for _, fk := range fkReferences {
|
||||||
|
createTable = createTable.ForeignKey(fk)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = createTable.Exec(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -218,3 +252,115 @@ func (dialect *dialect) AddNotNullDefaultToColumn(ctx context.Context, bun bun.I
|
|||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
|
||||||
|
if reference == "" {
|
||||||
|
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
|
||||||
|
}
|
||||||
|
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
|
||||||
|
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
|
||||||
|
|
||||||
|
columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if columnType == Text {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
fkReference := ""
|
||||||
|
if reference == Org {
|
||||||
|
fkReference = OrgReference
|
||||||
|
} else if reference == User {
|
||||||
|
fkReference = UserReference
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = bun.
|
||||||
|
NewCreateTable().
|
||||||
|
IfNotExists().
|
||||||
|
Model(newModel).
|
||||||
|
ForeignKey(fkReference).
|
||||||
|
Exec(ctx)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = cb(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = bun.
|
||||||
|
NewDropTable().
|
||||||
|
IfExists().
|
||||||
|
Model(oldModel).
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = bun.
|
||||||
|
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
|
||||||
|
if reference == "" {
|
||||||
|
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
|
||||||
|
}
|
||||||
|
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
|
||||||
|
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
|
||||||
|
|
||||||
|
identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if identityExists {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
fkReference := ""
|
||||||
|
if reference == Org {
|
||||||
|
fkReference = OrgReference
|
||||||
|
} else if reference == User {
|
||||||
|
fkReference = UserReference
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = bun.
|
||||||
|
NewCreateTable().
|
||||||
|
IfNotExists().
|
||||||
|
Model(newModel).
|
||||||
|
ForeignKey(fkReference).
|
||||||
|
Exec(ctx)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = cb(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = bun.
|
||||||
|
NewDropTable().
|
||||||
|
IfExists().
|
||||||
|
Model(oldModel).
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = bun.
|
||||||
|
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/types"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/uptrace/bun"
|
"github.com/uptrace/bun"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -28,11 +29,10 @@ func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type StorablePersonalAccessToken struct {
|
type StorablePersonalAccessToken struct {
|
||||||
bun.BaseModel `bun:"table:personal_access_tokens"`
|
bun.BaseModel `bun:"table:personal_access_token"`
|
||||||
|
types.Identifiable
|
||||||
types.TimeAuditable
|
types.TimeAuditable
|
||||||
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
|
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
|
||||||
ID int `json:"id" bun:"id,pk,autoincrement"`
|
|
||||||
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
|
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
|
||||||
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
|
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
|
||||||
Token string `json:"token" bun:"token,type:text,notnull,unique"`
|
Token string `json:"token" bun:"token,type:text,notnull,unique"`
|
||||||
@@ -69,5 +69,8 @@ func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64)
|
|||||||
CreatedAt: now,
|
CreatedAt: now,
|
||||||
UpdatedAt: now,
|
UpdatedAt: now,
|
||||||
},
|
},
|
||||||
|
Identifiable: types.Identifiable{
|
||||||
|
ID: valuer.GenerateUUID(),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,6 +18,13 @@
|
|||||||
"field_send_resolved": "Send resolved alerts",
|
"field_send_resolved": "Send resolved alerts",
|
||||||
"field_channel_type": "Type",
|
"field_channel_type": "Type",
|
||||||
"field_webhook_url": "Webhook URL",
|
"field_webhook_url": "Webhook URL",
|
||||||
|
"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
|
||||||
|
"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
|
||||||
|
"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
|
||||||
|
"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
|
||||||
|
"tooltip_email_to": "Enter email addresses separated by commas.",
|
||||||
|
"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
|
||||||
|
|
||||||
"field_slack_recipient": "Recipient",
|
"field_slack_recipient": "Recipient",
|
||||||
"field_slack_title": "Title",
|
"field_slack_title": "Title",
|
||||||
"field_slack_description": "Description",
|
"field_slack_description": "Description",
|
||||||
|
|||||||
@@ -18,6 +18,12 @@
|
|||||||
"field_send_resolved": "Send resolved alerts",
|
"field_send_resolved": "Send resolved alerts",
|
||||||
"field_channel_type": "Type",
|
"field_channel_type": "Type",
|
||||||
"field_webhook_url": "Webhook URL",
|
"field_webhook_url": "Webhook URL",
|
||||||
|
"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
|
||||||
|
"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
|
||||||
|
"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
|
||||||
|
"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
|
||||||
|
"tooltip_email_to": "Enter email addresses separated by commas.",
|
||||||
|
"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
|
||||||
"field_slack_recipient": "Recipient",
|
"field_slack_recipient": "Recipient",
|
||||||
"field_slack_title": "Title",
|
"field_slack_title": "Title",
|
||||||
"field_slack_description": "Description",
|
"field_slack_description": "Description",
|
||||||
|
|||||||
@@ -1,29 +1,12 @@
|
|||||||
// keep this consistent with backend constants.go
|
// keep this consistent with backend constants.go
|
||||||
export enum FeatureKeys {
|
export enum FeatureKeys {
|
||||||
SSO = 'SSO',
|
SSO = 'SSO',
|
||||||
ENTERPRISE_PLAN = 'ENTERPRISE_PLAN',
|
|
||||||
BASIC_PLAN = 'BASIC_PLAN',
|
|
||||||
ALERT_CHANNEL_SLACK = 'ALERT_CHANNEL_SLACK',
|
|
||||||
ALERT_CHANNEL_WEBHOOK = 'ALERT_CHANNEL_WEBHOOK',
|
|
||||||
ALERT_CHANNEL_PAGERDUTY = 'ALERT_CHANNEL_PAGERDUTY',
|
|
||||||
ALERT_CHANNEL_OPSGENIE = 'ALERT_CHANNEL_OPSGENIE',
|
|
||||||
ALERT_CHANNEL_MSTEAMS = 'ALERT_CHANNEL_MSTEAMS',
|
|
||||||
DurationSort = 'DurationSort',
|
|
||||||
TimestampSort = 'TimestampSort',
|
|
||||||
SMART_TRACE_DETAIL = 'SMART_TRACE_DETAIL',
|
|
||||||
CUSTOM_METRICS_FUNCTION = 'CUSTOM_METRICS_FUNCTION',
|
|
||||||
QUERY_BUILDER_PANELS = 'QUERY_BUILDER_PANELS',
|
|
||||||
QUERY_BUILDER_ALERTS = 'QUERY_BUILDER_ALERTS',
|
|
||||||
DISABLE_UPSELL = 'DISABLE_UPSELL',
|
|
||||||
USE_SPAN_METRICS = 'USE_SPAN_METRICS',
|
USE_SPAN_METRICS = 'USE_SPAN_METRICS',
|
||||||
OSS = 'OSS',
|
|
||||||
ONBOARDING = 'ONBOARDING',
|
ONBOARDING = 'ONBOARDING',
|
||||||
CHAT_SUPPORT = 'CHAT_SUPPORT',
|
CHAT_SUPPORT = 'CHAT_SUPPORT',
|
||||||
GATEWAY = 'GATEWAY',
|
GATEWAY = 'GATEWAY',
|
||||||
PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
|
PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
|
||||||
QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
|
|
||||||
ANOMALY_DETECTION = 'ANOMALY_DETECTION',
|
ANOMALY_DETECTION = 'ANOMALY_DETECTION',
|
||||||
AWS_INTEGRATION = 'AWS_INTEGRATION',
|
|
||||||
ONBOARDING_V3 = 'ONBOARDING_V3',
|
ONBOARDING_V3 = 'ONBOARDING_V3',
|
||||||
THIRD_PARTY_API = 'THIRD_PARTY_API',
|
THIRD_PARTY_API = 'THIRD_PARTY_API',
|
||||||
TRACE_FUNNELS = 'TRACE_FUNNELS',
|
TRACE_FUNNELS = 'TRACE_FUNNELS',
|
||||||
|
|||||||
@@ -31,6 +31,10 @@ jest.mock('hooks/useNotifications', () => ({
|
|||||||
})),
|
})),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
|
||||||
|
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
|
||||||
|
}));
|
||||||
|
|
||||||
describe('Create Alert Channel', () => {
|
describe('Create Alert Channel', () => {
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
jest.clearAllMocks();
|
jest.clearAllMocks();
|
||||||
|
|||||||
@@ -18,6 +18,10 @@ import { render, screen } from 'tests/test-utils';
|
|||||||
|
|
||||||
import { testLabelInputAndHelpValue } from './testUtils';
|
import { testLabelInputAndHelpValue } from './testUtils';
|
||||||
|
|
||||||
|
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
|
||||||
|
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
|
||||||
|
}));
|
||||||
|
|
||||||
describe('Create Alert Channel (Normal User)', () => {
|
describe('Create Alert Channel (Normal User)', () => {
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
jest.clearAllMocks();
|
jest.clearAllMocks();
|
||||||
|
|||||||
@@ -20,6 +20,10 @@ jest.mock('hooks/useNotifications', () => ({
|
|||||||
})),
|
})),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
|
||||||
|
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
|
||||||
|
}));
|
||||||
|
|
||||||
describe('Should check if the edit alert channel is properly displayed ', () => {
|
describe('Should check if the edit alert channel is properly displayed ', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
render(<EditAlertChannels initialValue={editAlertChannelInitialValue} />);
|
render(<EditAlertChannels initialValue={editAlertChannelInitialValue} />);
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { Form, Input } from 'antd';
|
import { Form, Input } from 'antd';
|
||||||
|
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||||
import React from 'react';
|
import React from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
@@ -9,7 +10,20 @@ function MsTeams({ setSelectedConfig }: MsTeamsProps): JSX.Element {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<Form.Item name="webhook_url" label={t('field_webhook_url')}>
|
<Form.Item
|
||||||
|
name="webhook_url"
|
||||||
|
label={t('field_webhook_url')}
|
||||||
|
tooltip={{
|
||||||
|
title: (
|
||||||
|
<MarkdownRenderer
|
||||||
|
markdownContent={t('tooltip_ms_teams_url')}
|
||||||
|
variables={{}}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
overlayInnerStyle: { maxWidth: 400 },
|
||||||
|
placement: 'right',
|
||||||
|
}}
|
||||||
|
>
|
||||||
<Input
|
<Input
|
||||||
onChange={(event): void => {
|
onChange={(event): void => {
|
||||||
setSelectedConfig((value) => ({
|
setSelectedConfig((value) => ({
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { Form, Input } from 'antd';
|
import { Form, Input } from 'antd';
|
||||||
|
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
import { OpsgenieChannel } from '../../CreateAlertChannels/config';
|
import { OpsgenieChannel } from '../../CreateAlertChannels/config';
|
||||||
@@ -19,7 +20,21 @@ function OpsgenieForm({ setSelectedConfig }: OpsgenieFormProps): JSX.Element {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<Form.Item name="api_key" label={t('field_opsgenie_api_key')} required>
|
<Form.Item
|
||||||
|
name="api_key"
|
||||||
|
label={t('field_opsgenie_api_key')}
|
||||||
|
tooltip={{
|
||||||
|
title: (
|
||||||
|
<MarkdownRenderer
|
||||||
|
markdownContent={t('tooltip_opsgenie_api_key')}
|
||||||
|
variables={{}}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
overlayInnerStyle: { maxWidth: 400 },
|
||||||
|
placement: 'right',
|
||||||
|
}}
|
||||||
|
required
|
||||||
|
>
|
||||||
<Input
|
<Input
|
||||||
onChange={handleInputChange('api_key')}
|
onChange={handleInputChange('api_key')}
|
||||||
data-testid="opsgenie-api-key-textbox"
|
data-testid="opsgenie-api-key-textbox"
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { Form, Input } from 'antd';
|
import { Form, Input } from 'antd';
|
||||||
|
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||||
import { Dispatch, SetStateAction } from 'react';
|
import { Dispatch, SetStateAction } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
@@ -10,7 +11,20 @@ function PagerForm({ setSelectedConfig }: PagerFormProps): JSX.Element {
|
|||||||
const { t } = useTranslation('channels');
|
const { t } = useTranslation('channels');
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<Form.Item name="routing_key" label={t('field_pager_routing_key')} required>
|
<Form.Item
|
||||||
|
name="routing_key"
|
||||||
|
label={t('field_pager_routing_key')}
|
||||||
|
tooltip={{
|
||||||
|
title: (
|
||||||
|
<MarkdownRenderer
|
||||||
|
markdownContent={t('tooltip_pager_routing_key')}
|
||||||
|
variables={{}}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
overlayInnerStyle: { maxWidth: 400 },
|
||||||
|
placement: 'right',
|
||||||
|
}}
|
||||||
|
>
|
||||||
<Input
|
<Input
|
||||||
onChange={(event): void => {
|
onChange={(event): void => {
|
||||||
setSelectedConfig((value) => ({
|
setSelectedConfig((value) => ({
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { Form, Input } from 'antd';
|
import { Form, Input } from 'antd';
|
||||||
|
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||||
import { Dispatch, SetStateAction } from 'react';
|
import { Dispatch, SetStateAction } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
@@ -11,7 +12,20 @@ function Slack({ setSelectedConfig }: SlackProps): JSX.Element {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<Form.Item name="api_url" label={t('field_webhook_url')}>
|
<Form.Item
|
||||||
|
name="api_url"
|
||||||
|
label={t('field_webhook_url')}
|
||||||
|
tooltip={{
|
||||||
|
title: (
|
||||||
|
<MarkdownRenderer
|
||||||
|
markdownContent={t('tooltip_slack_url')}
|
||||||
|
variables={{}}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
overlayInnerStyle: { maxWidth: 400 },
|
||||||
|
placement: 'right',
|
||||||
|
}}
|
||||||
|
>
|
||||||
<Input
|
<Input
|
||||||
onChange={(event): void => {
|
onChange={(event): void => {
|
||||||
setSelectedConfig((value) => ({
|
setSelectedConfig((value) => ({
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { Form, Input } from 'antd';
|
import { Form, Input } from 'antd';
|
||||||
|
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||||
import { Dispatch, SetStateAction } from 'react';
|
import { Dispatch, SetStateAction } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
@@ -9,7 +10,20 @@ function WebhookSettings({ setSelectedConfig }: WebhookProps): JSX.Element {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<Form.Item name="api_url" label={t('field_webhook_url')}>
|
<Form.Item
|
||||||
|
name="api_url"
|
||||||
|
label={t('field_webhook_url')}
|
||||||
|
tooltip={{
|
||||||
|
title: (
|
||||||
|
<MarkdownRenderer
|
||||||
|
markdownContent={t('tooltip_webhook_url')}
|
||||||
|
variables={{}}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
overlayInnerStyle: { maxWidth: 400 },
|
||||||
|
placement: 'right',
|
||||||
|
}}
|
||||||
|
>
|
||||||
<Input
|
<Input
|
||||||
onChange={(event): void => {
|
onChange={(event): void => {
|
||||||
setSelectedConfig((value) => ({
|
setSelectedConfig((value) => ({
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
import { Form, FormInstance, Input, Select, Switch, Typography } from 'antd';
|
import { Form, FormInstance, Input, Select, Switch, Typography } from 'antd';
|
||||||
import { Store } from 'antd/lib/form/interface';
|
import { Store } from 'antd/lib/form/interface';
|
||||||
import { FeatureKeys } from 'constants/features';
|
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
import {
|
import {
|
||||||
ChannelType,
|
ChannelType,
|
||||||
@@ -11,11 +10,8 @@ import {
|
|||||||
WebhookChannel,
|
WebhookChannel,
|
||||||
} from 'container/CreateAlertChannels/config';
|
} from 'container/CreateAlertChannels/config';
|
||||||
import history from 'lib/history';
|
import history from 'lib/history';
|
||||||
import { useAppContext } from 'providers/App/App';
|
|
||||||
import { Dispatch, ReactElement, SetStateAction } from 'react';
|
import { Dispatch, ReactElement, SetStateAction } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { FeatureFlagProps } from 'types/api/features/getFeaturesFlags';
|
|
||||||
import { isFeatureKeys } from 'utils/app';
|
|
||||||
|
|
||||||
import EmailSettings from './Settings/Email';
|
import EmailSettings from './Settings/Email';
|
||||||
import MsTeamsSettings from './Settings/MsTeams';
|
import MsTeamsSettings from './Settings/MsTeams';
|
||||||
@@ -39,17 +35,6 @@ function FormAlertChannels({
|
|||||||
editing = false,
|
editing = false,
|
||||||
}: FormAlertChannelsProps): JSX.Element {
|
}: FormAlertChannelsProps): JSX.Element {
|
||||||
const { t } = useTranslation('channels');
|
const { t } = useTranslation('channels');
|
||||||
const { featureFlags } = useAppContext();
|
|
||||||
|
|
||||||
const feature = `ALERT_CHANNEL_${type.toUpperCase()}`;
|
|
||||||
|
|
||||||
const featureKey = isFeatureKeys(feature)
|
|
||||||
? feature
|
|
||||||
: FeatureKeys.ALERT_CHANNEL_SLACK;
|
|
||||||
|
|
||||||
const hasFeature = featureFlags?.find(
|
|
||||||
(flag: FeatureFlagProps) => flag.name === featureKey,
|
|
||||||
);
|
|
||||||
|
|
||||||
const renderSettings = (): ReactElement | null => {
|
const renderSettings = (): ReactElement | null => {
|
||||||
switch (type) {
|
switch (type) {
|
||||||
@@ -146,7 +131,7 @@ function FormAlertChannels({
|
|||||||
|
|
||||||
<Form.Item>
|
<Form.Item>
|
||||||
<Button
|
<Button
|
||||||
disabled={savingState || !hasFeature}
|
disabled={savingState}
|
||||||
loading={savingState}
|
loading={savingState}
|
||||||
type="primary"
|
type="primary"
|
||||||
onClick={(): void => onSaveHandler(type)}
|
onClick={(): void => onSaveHandler(type)}
|
||||||
@@ -154,7 +139,7 @@ function FormAlertChannels({
|
|||||||
{t('button_save_channel')}
|
{t('button_save_channel')}
|
||||||
</Button>
|
</Button>
|
||||||
<Button
|
<Button
|
||||||
disabled={testingState || !hasFeature}
|
disabled={testingState}
|
||||||
loading={testingState}
|
loading={testingState}
|
||||||
onClick={(): void => onTestHandler(type)}
|
onClick={(): void => onTestHandler(type)}
|
||||||
>
|
>
|
||||||
|
|||||||
@@ -467,10 +467,6 @@ function FormAlertRules({
|
|||||||
panelType,
|
panelType,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const isAlertAvailable =
|
|
||||||
!featureFlags?.find((flag) => flag.name === FeatureKeys.QUERY_BUILDER_ALERTS)
|
|
||||||
?.active || false;
|
|
||||||
|
|
||||||
const saveRule = useCallback(async () => {
|
const saveRule = useCallback(async () => {
|
||||||
if (!isFormValid()) {
|
if (!isFormValid()) {
|
||||||
return;
|
return;
|
||||||
@@ -688,11 +684,6 @@ function FormAlertRules({
|
|||||||
|
|
||||||
const isAlertNameMissing = !formInstance.getFieldValue('alert');
|
const isAlertNameMissing = !formInstance.getFieldValue('alert');
|
||||||
|
|
||||||
const isAlertAvailableToSave =
|
|
||||||
isAlertAvailable &&
|
|
||||||
currentQuery.queryType === EQueryType.QUERY_BUILDER &&
|
|
||||||
alertType !== AlertTypes.METRICS_BASED_ALERT;
|
|
||||||
|
|
||||||
const onUnitChangeHandler = (value: string): void => {
|
const onUnitChangeHandler = (value: string): void => {
|
||||||
setYAxisUnit(value);
|
setYAxisUnit(value);
|
||||||
// reset target unit
|
// reset target unit
|
||||||
@@ -865,7 +856,6 @@ function FormAlertRules({
|
|||||||
icon={<SaveOutlined />}
|
icon={<SaveOutlined />}
|
||||||
disabled={
|
disabled={
|
||||||
isAlertNameMissing ||
|
isAlertNameMissing ||
|
||||||
isAlertAvailableToSave ||
|
|
||||||
!isChannelConfigurationValid ||
|
!isChannelConfigurationValid ||
|
||||||
queryStatus === 'error'
|
queryStatus === 'error'
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ import { WarningOutlined } from '@ant-design/icons';
|
|||||||
import { Button, Flex, Modal, Space, Typography } from 'antd';
|
import { Button, Flex, Modal, Space, Typography } from 'antd';
|
||||||
import logEvent from 'api/common/logEvent';
|
import logEvent from 'api/common/logEvent';
|
||||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||||
import { FeatureKeys } from 'constants/features';
|
|
||||||
import { QueryParams } from 'constants/query';
|
import { QueryParams } from 'constants/query';
|
||||||
import {
|
import {
|
||||||
initialQueriesMap,
|
initialQueriesMap,
|
||||||
@@ -27,7 +26,6 @@ import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
|||||||
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
|
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
|
||||||
import { Check, X } from 'lucide-react';
|
import { Check, X } from 'lucide-react';
|
||||||
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';
|
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';
|
||||||
import { useAppContext } from 'providers/App/App';
|
|
||||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||||
import {
|
import {
|
||||||
getNextWidgets,
|
getNextWidgets,
|
||||||
@@ -79,8 +77,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
|||||||
|
|
||||||
const { t } = useTranslation(['dashboard']);
|
const { t } = useTranslation(['dashboard']);
|
||||||
|
|
||||||
const { featureFlags } = useAppContext();
|
|
||||||
|
|
||||||
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
|
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
|
||||||
|
|
||||||
const {
|
const {
|
||||||
@@ -566,12 +562,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
|||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const isQueryBuilderActive =
|
|
||||||
!featureFlags?.find((flag) => flag.name === FeatureKeys.QUERY_BUILDER_PANELS)
|
|
||||||
?.active || false;
|
|
||||||
|
|
||||||
const isNewTraceLogsAvailable =
|
const isNewTraceLogsAvailable =
|
||||||
isQueryBuilderActive &&
|
|
||||||
currentQuery.queryType === EQueryType.QUERY_BUILDER &&
|
currentQuery.queryType === EQueryType.QUERY_BUILDER &&
|
||||||
currentQuery.builder.queryData.find(
|
currentQuery.builder.queryData.find(
|
||||||
(query) => query.dataSource !== DataSource.METRICS,
|
(query) => query.dataSource !== DataSource.METRICS,
|
||||||
|
|||||||
@@ -15,11 +15,12 @@ import {
|
|||||||
import logEvent from 'api/common/logEvent';
|
import logEvent from 'api/common/logEvent';
|
||||||
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
|
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
|
import useDebouncedFn from 'hooks/useDebouncedFunction';
|
||||||
import history from 'lib/history';
|
import history from 'lib/history';
|
||||||
import { isEmpty } from 'lodash-es';
|
import { isEmpty } from 'lodash-es';
|
||||||
import { ArrowRight, X } from 'lucide-react';
|
import { CheckIcon, Goal, UserPlus, X } from 'lucide-react';
|
||||||
import { useAppContext } from 'providers/App/App';
|
import { useAppContext } from 'providers/App/App';
|
||||||
import React, { useEffect, useRef, useState } from 'react';
|
import React, { useCallback, useEffect, useRef, useState } from 'react';
|
||||||
|
|
||||||
import OnboardingIngestionDetails from '../IngestionDetails/IngestionDetails';
|
import OnboardingIngestionDetails from '../IngestionDetails/IngestionDetails';
|
||||||
import InviteTeamMembers from '../InviteTeamMembers/InviteTeamMembers';
|
import InviteTeamMembers from '../InviteTeamMembers/InviteTeamMembers';
|
||||||
@@ -68,6 +69,7 @@ interface Entity {
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
tags: string[];
|
tags: string[];
|
||||||
|
relatedSearchKeywords?: string[];
|
||||||
link?: string;
|
link?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -99,8 +101,11 @@ const ONBOARDING_V3_ANALYTICS_EVENTS_MAP = {
|
|||||||
GET_EXPERT_ASSISTANCE_BUTTON_CLICKED: 'Get expert assistance clicked',
|
GET_EXPERT_ASSISTANCE_BUTTON_CLICKED: 'Get expert assistance clicked',
|
||||||
INVITE_TEAM_MEMBER_BUTTON_CLICKED: 'Invite team member clicked',
|
INVITE_TEAM_MEMBER_BUTTON_CLICKED: 'Invite team member clicked',
|
||||||
CLOSE_ONBOARDING_CLICKED: 'Close onboarding clicked',
|
CLOSE_ONBOARDING_CLICKED: 'Close onboarding clicked',
|
||||||
|
DATA_SOURCE_REQUESTED: 'Datasource requested',
|
||||||
|
DATA_SOURCE_SEARCHED: 'Searched',
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||||
function OnboardingAddDataSource(): JSX.Element {
|
function OnboardingAddDataSource(): JSX.Element {
|
||||||
const [groupedDataSources, setGroupedDataSources] = useState<{
|
const [groupedDataSources, setGroupedDataSources] = useState<{
|
||||||
[tag: string]: Entity[];
|
[tag: string]: Entity[];
|
||||||
@@ -110,6 +115,8 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
|
|
||||||
const [setupStepItems, setSetupStepItems] = useState(setupStepItemsBase);
|
const [setupStepItems, setSetupStepItems] = useState(setupStepItemsBase);
|
||||||
|
|
||||||
|
const [searchQuery, setSearchQuery] = useState<string>('');
|
||||||
|
|
||||||
const question2Ref = useRef<HTMLDivElement | null>(null);
|
const question2Ref = useRef<HTMLDivElement | null>(null);
|
||||||
const question3Ref = useRef<HTMLDivElement | null>(null);
|
const question3Ref = useRef<HTMLDivElement | null>(null);
|
||||||
const configureProdRef = useRef<HTMLDivElement | null>(null);
|
const configureProdRef = useRef<HTMLDivElement | null>(null);
|
||||||
@@ -120,8 +127,15 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
|
|
||||||
const [currentStep, setCurrentStep] = useState(1);
|
const [currentStep, setCurrentStep] = useState(1);
|
||||||
|
|
||||||
|
const [dataSourceRequest, setDataSourceRequest] = useState<string>('');
|
||||||
|
|
||||||
const [hasMoreQuestions, setHasMoreQuestions] = useState<boolean>(true);
|
const [hasMoreQuestions, setHasMoreQuestions] = useState<boolean>(true);
|
||||||
|
|
||||||
|
const [
|
||||||
|
showRequestDataSourceModal,
|
||||||
|
setShowRequestDataSourceModal,
|
||||||
|
] = useState<boolean>(false);
|
||||||
|
|
||||||
const [
|
const [
|
||||||
showInviteTeamMembersModal,
|
showInviteTeamMembersModal,
|
||||||
setShowInviteTeamMembersModal,
|
setShowInviteTeamMembersModal,
|
||||||
@@ -145,6 +159,11 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
|
|
||||||
const [selectedCategory, setSelectedCategory] = useState<string>('All');
|
const [selectedCategory, setSelectedCategory] = useState<string>('All');
|
||||||
|
|
||||||
|
const [
|
||||||
|
dataSourceRequestSubmitted,
|
||||||
|
setDataSourceRequestSubmitted,
|
||||||
|
] = useState<boolean>(false);
|
||||||
|
|
||||||
const handleScrollToStep = (ref: React.RefObject<HTMLDivElement>): void => {
|
const handleScrollToStep = (ref: React.RefObject<HTMLDivElement>): void => {
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
ref.current?.scrollIntoView({
|
ref.current?.scrollIntoView({
|
||||||
@@ -286,8 +305,10 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
setGroupedDataSources(groupedDataSources);
|
setGroupedDataSources(groupedDataSources);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const handleSearch = (e: React.ChangeEvent<HTMLInputElement>): void => {
|
const debouncedUpdate = useDebouncedFn((query) => {
|
||||||
const query = e.target.value.toLowerCase();
|
setSearchQuery(query as string);
|
||||||
|
|
||||||
|
setDataSourceRequestSubmitted(false);
|
||||||
|
|
||||||
if (query === '') {
|
if (query === '') {
|
||||||
setGroupedDataSources(
|
setGroupedDataSources(
|
||||||
@@ -298,15 +319,35 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
|
|
||||||
const filteredDataSources = onboardingConfigWithLinks.filter(
|
const filteredDataSources = onboardingConfigWithLinks.filter(
|
||||||
(dataSource) =>
|
(dataSource) =>
|
||||||
dataSource.label.toLowerCase().includes(query) ||
|
dataSource.label.toLowerCase().includes(query as string) ||
|
||||||
dataSource.tags.some((tag) => tag.toLowerCase().includes(query)),
|
dataSource.tags.some((tag) =>
|
||||||
|
tag.toLowerCase().includes(query as string),
|
||||||
|
) ||
|
||||||
|
dataSource.relatedSearchKeywords?.some((keyword) =>
|
||||||
|
keyword?.toLowerCase().includes(query as string),
|
||||||
|
),
|
||||||
);
|
);
|
||||||
|
|
||||||
setGroupedDataSources(
|
setGroupedDataSources(
|
||||||
groupDataSourcesByTags(filteredDataSources as Entity[]),
|
groupDataSourcesByTags(filteredDataSources as Entity[]),
|
||||||
);
|
);
|
||||||
};
|
|
||||||
|
|
||||||
|
logEvent(
|
||||||
|
`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.DATA_SOURCE_SEARCHED}`,
|
||||||
|
{
|
||||||
|
searchedDataSource: query,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}, 300);
|
||||||
|
|
||||||
|
const handleSearch = useCallback(
|
||||||
|
(e: React.ChangeEvent<HTMLInputElement>): void => {
|
||||||
|
const query = e.target.value.trim().toLowerCase();
|
||||||
|
|
||||||
|
debouncedUpdate(query || '');
|
||||||
|
},
|
||||||
|
[debouncedUpdate],
|
||||||
|
);
|
||||||
const handleFilterByCategory = (category: string): void => {
|
const handleFilterByCategory = (category: string): void => {
|
||||||
setSelectedDataSource(null);
|
setSelectedDataSource(null);
|
||||||
setSelectedFramework(null);
|
setSelectedFramework(null);
|
||||||
@@ -409,6 +450,129 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
setShowInviteTeamMembersModal(true);
|
setShowInviteTeamMembersModal(true);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const handleSubmitDataSourceRequest = (): void => {
|
||||||
|
logEvent(
|
||||||
|
`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.DATA_SOURCE_REQUESTED}`,
|
||||||
|
{
|
||||||
|
requestedDataSource: dataSourceRequest,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
setShowRequestDataSourceModal(false);
|
||||||
|
setDataSourceRequestSubmitted(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleRequestDataSource = (): void => {
|
||||||
|
setShowRequestDataSourceModal(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleRaiseRequest = (): void => {
|
||||||
|
logEvent(
|
||||||
|
`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.DATA_SOURCE_REQUESTED}`,
|
||||||
|
{
|
||||||
|
requestedDataSource: searchQuery,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
setDataSourceRequestSubmitted(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
const renderRequestDataSource = (): JSX.Element => {
|
||||||
|
const isSearchQueryEmpty = searchQuery.length === 0;
|
||||||
|
const isNoResultsFound = Object.keys(groupedDataSources).length === 0;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="request-data-source-container">
|
||||||
|
{!isNoResultsFound && (
|
||||||
|
<>
|
||||||
|
<Typography.Text>Can’t find what you’re looking for?</Typography.Text>
|
||||||
|
|
||||||
|
<svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
width="279"
|
||||||
|
height="2"
|
||||||
|
viewBox="0 0 279 2"
|
||||||
|
fill="none"
|
||||||
|
>
|
||||||
|
<path
|
||||||
|
d="M0 1L279 1"
|
||||||
|
stroke="#7190F9"
|
||||||
|
strokeOpacity="0.2"
|
||||||
|
strokeDasharray="4 4"
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
|
||||||
|
{!dataSourceRequestSubmitted && (
|
||||||
|
<Button
|
||||||
|
type="default"
|
||||||
|
className="periscope-btn request-data-source-btn secondary"
|
||||||
|
icon={<Goal size={16} />}
|
||||||
|
onClick={handleRequestDataSource}
|
||||||
|
>
|
||||||
|
Request Data Source
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{dataSourceRequestSubmitted && (
|
||||||
|
<Button
|
||||||
|
type="default"
|
||||||
|
className="periscope-btn request-data-source-btn success"
|
||||||
|
icon={<CheckIcon size={16} />}
|
||||||
|
>
|
||||||
|
Request raised
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{isNoResultsFound && !isSearchQueryEmpty && (
|
||||||
|
<>
|
||||||
|
<Typography.Text>
|
||||||
|
Our team can help add{' '}
|
||||||
|
<span className="request-data-source-search-query">{searchQuery}</span>{' '}
|
||||||
|
support for you
|
||||||
|
</Typography.Text>
|
||||||
|
|
||||||
|
<svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
width="279"
|
||||||
|
height="2"
|
||||||
|
viewBox="0 0 279 2"
|
||||||
|
fill="none"
|
||||||
|
>
|
||||||
|
<path
|
||||||
|
d="M0 1L279 1"
|
||||||
|
stroke="#7190F9"
|
||||||
|
strokeOpacity="0.2"
|
||||||
|
strokeDasharray="4 4"
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
|
||||||
|
{!dataSourceRequestSubmitted && (
|
||||||
|
<Button
|
||||||
|
type="default"
|
||||||
|
className="periscope-btn request-data-source-btn secondary"
|
||||||
|
icon={<Goal size={16} />}
|
||||||
|
onClick={handleRaiseRequest}
|
||||||
|
>
|
||||||
|
Raise request
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{dataSourceRequestSubmitted && (
|
||||||
|
<Button
|
||||||
|
type="default"
|
||||||
|
className="periscope-btn request-data-source-btn success"
|
||||||
|
icon={<CheckIcon size={16} />}
|
||||||
|
>
|
||||||
|
Request raised
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="onboarding-v2">
|
<div className="onboarding-v2">
|
||||||
<Layout>
|
<Layout>
|
||||||
@@ -433,6 +597,15 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="header-right-section">
|
<div className="header-right-section">
|
||||||
|
<Button
|
||||||
|
type="default"
|
||||||
|
className="periscope-btn invite-teammate-btn outlined"
|
||||||
|
onClick={handleShowInviteTeamMembersModal}
|
||||||
|
icon={<UserPlus size={16} />}
|
||||||
|
>
|
||||||
|
Invite a teammate
|
||||||
|
</Button>
|
||||||
|
|
||||||
<LaunchChatSupport
|
<LaunchChatSupport
|
||||||
attributes={{
|
attributes={{
|
||||||
dataSource: selectedDataSource?.dataSource,
|
dataSource: selectedDataSource?.dataSource,
|
||||||
@@ -442,7 +615,7 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
}}
|
}}
|
||||||
eventName={`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.GET_HELP_BUTTON_CLICKED}`}
|
eventName={`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.GET_HELP_BUTTON_CLICKED}`}
|
||||||
message=""
|
message=""
|
||||||
buttonText="Get Help"
|
buttonText="Contact Support"
|
||||||
className="periscope-btn get-help-btn outlined"
|
className="periscope-btn get-help-btn outlined"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
@@ -461,7 +634,11 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
</Header>
|
</Header>
|
||||||
|
|
||||||
<div className="onboarding-product-setup-container">
|
<div className="onboarding-product-setup-container">
|
||||||
<div className="onboarding-product-setup-container_left-section">
|
<div
|
||||||
|
className={`onboarding-product-setup-container_left-section ${
|
||||||
|
currentStep === 1 ? 'step-id-1' : 'step-id-2'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
<div className="perlian-bg" />
|
<div className="perlian-bg" />
|
||||||
|
|
||||||
{currentStep === 1 && (
|
{currentStep === 1 && (
|
||||||
@@ -491,6 +668,7 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
<div className="onboarding-data-source-search">
|
<div className="onboarding-data-source-search">
|
||||||
<Input
|
<Input
|
||||||
placeholder="Search"
|
placeholder="Search"
|
||||||
|
maxLength={20}
|
||||||
onChange={handleSearch}
|
onChange={handleSearch}
|
||||||
addonAfter={<SearchOutlined />}
|
addonAfter={<SearchOutlined />}
|
||||||
/>
|
/>
|
||||||
@@ -525,6 +703,14 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
))}
|
))}
|
||||||
|
|
||||||
|
{Object.keys(groupedDataSources).length === 0 && (
|
||||||
|
<div className="no-results-found-container">
|
||||||
|
<Typography.Text>No results for {searchQuery} :/</Typography.Text>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{!selectedDataSource && renderRequestDataSource()}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="data-source-categories-filter-container">
|
<div className="data-source-categories-filter-container">
|
||||||
@@ -534,33 +720,66 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
Filters{' '}
|
Filters{' '}
|
||||||
</Typography.Title>
|
</Typography.Title>
|
||||||
|
|
||||||
<Typography.Title
|
<div
|
||||||
level={5}
|
key="all"
|
||||||
className={`onboarding-filters-item-title ${
|
className="onboarding-data-source-category-item"
|
||||||
selectedCategory === 'All' ? 'selected' : ''
|
|
||||||
}`}
|
|
||||||
onClick={(): void => handleFilterByCategory('All')}
|
onClick={(): void => handleFilterByCategory('All')}
|
||||||
|
role="button"
|
||||||
|
tabIndex={0}
|
||||||
|
onKeyDown={(e): void => {
|
||||||
|
if (e.key === 'Enter' || e.key === ' ') {
|
||||||
|
handleFilterByCategory('All');
|
||||||
|
}
|
||||||
|
}}
|
||||||
>
|
>
|
||||||
All ({onboardingConfigWithLinks.length})
|
<Typography.Title
|
||||||
</Typography.Title>
|
level={5}
|
||||||
|
className={`onboarding-filters-item-title ${
|
||||||
|
selectedCategory === 'All' ? 'selected' : ''
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
All
|
||||||
|
</Typography.Title>
|
||||||
|
|
||||||
|
<div className="line-divider" />
|
||||||
|
|
||||||
|
<Typography.Text className="onboarding-filters-item-count">
|
||||||
|
{onboardingConfigWithLinks.length}
|
||||||
|
</Typography.Text>
|
||||||
|
</div>
|
||||||
|
|
||||||
{Object.keys(groupedDataSources).map((tag) => (
|
{Object.keys(groupedDataSources).map((tag) => (
|
||||||
<div key={tag} className="onboarding-data-source-category-item">
|
<div
|
||||||
|
key={tag}
|
||||||
|
className="onboarding-data-source-category-item"
|
||||||
|
onClick={(): void => handleFilterByCategory(tag)}
|
||||||
|
role="button"
|
||||||
|
tabIndex={0}
|
||||||
|
onKeyDown={(e): void => {
|
||||||
|
if (e.key === 'Enter' || e.key === ' ') {
|
||||||
|
handleFilterByCategory(tag);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
<Typography.Title
|
<Typography.Title
|
||||||
level={5}
|
level={5}
|
||||||
className={`onboarding-filters-item-title ${
|
className={`onboarding-filters-item-title ${
|
||||||
selectedCategory === tag ? 'selected' : ''
|
selectedCategory === tag ? 'selected' : ''
|
||||||
}`}
|
}`}
|
||||||
onClick={(): void => handleFilterByCategory(tag)}
|
|
||||||
>
|
>
|
||||||
{tag} ({groupedDataSources[tag].length})
|
{tag}
|
||||||
</Typography.Title>
|
</Typography.Title>
|
||||||
|
|
||||||
|
<div className="line-divider" />
|
||||||
|
|
||||||
|
<Typography.Text className="onboarding-filters-item-count">
|
||||||
|
{groupedDataSources[tag].length}
|
||||||
|
</Typography.Text>
|
||||||
</div>
|
</div>
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{selectedDataSource &&
|
{selectedDataSource &&
|
||||||
selectedDataSource?.question &&
|
selectedDataSource?.question &&
|
||||||
!isEmpty(selectedDataSource?.question) && (
|
!isEmpty(selectedDataSource?.question) && (
|
||||||
@@ -615,7 +834,6 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{selectedFramework &&
|
{selectedFramework &&
|
||||||
selectedFramework?.question &&
|
selectedFramework?.question &&
|
||||||
!isEmpty(selectedFramework?.question) && (
|
!isEmpty(selectedFramework?.question) && (
|
||||||
@@ -659,7 +877,6 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{!hasMoreQuestions && showConfigureProduct && (
|
{!hasMoreQuestions && showConfigureProduct && (
|
||||||
<div className="questionaire-footer" ref={configureProdRef}>
|
<div className="questionaire-footer" ref={configureProdRef}>
|
||||||
<Button
|
<Button
|
||||||
@@ -767,39 +984,6 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="onboarding-product-setup-container_right-section">
|
<div className="onboarding-product-setup-container_right-section">
|
||||||
{currentStep === 1 && (
|
|
||||||
<div className="invite-user-section-content">
|
|
||||||
<Button
|
|
||||||
type="default"
|
|
||||||
shape="round"
|
|
||||||
className="invite-user-section-content-button"
|
|
||||||
onClick={handleShowInviteTeamMembersModal}
|
|
||||||
>
|
|
||||||
Invite a team member to help with this step
|
|
||||||
<ArrowRight size={14} />
|
|
||||||
</Button>
|
|
||||||
<div className="need-help-section-content-divider">Or</div>
|
|
||||||
<div className="need-help-section-content">
|
|
||||||
<Typography.Text>
|
|
||||||
Need help with setup? Upgrade now and get expert assistance.
|
|
||||||
</Typography.Text>
|
|
||||||
|
|
||||||
<LaunchChatSupport
|
|
||||||
attributes={{
|
|
||||||
dataSource: selectedDataSource?.dataSource,
|
|
||||||
framework: selectedFramework?.label,
|
|
||||||
environment: selectedEnvironment?.label,
|
|
||||||
currentPage: setupStepItems[currentStep]?.title || '',
|
|
||||||
}}
|
|
||||||
eventName={`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.GET_EXPERT_ASSISTANCE_BUTTON_CLICKED}`}
|
|
||||||
message=""
|
|
||||||
buttonText="Get Expert Assistance"
|
|
||||||
className="periscope-btn get-help-btn rounded-btn outlined"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{currentStep === 2 && <OnboardingIngestionDetails />}
|
{currentStep === 2 && <OnboardingIngestionDetails />}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -824,6 +1008,46 @@ function OnboardingAddDataSource(): JSX.Element {
|
|||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
</Modal>
|
</Modal>
|
||||||
|
|
||||||
|
<Modal
|
||||||
|
className="request-data-source-modal"
|
||||||
|
title={<span className="title">Request Data Source</span>}
|
||||||
|
open={showRequestDataSourceModal}
|
||||||
|
closable
|
||||||
|
onCancel={(): void => setShowRequestDataSourceModal(false)}
|
||||||
|
width="640px"
|
||||||
|
footer={[
|
||||||
|
<Button
|
||||||
|
type="default"
|
||||||
|
className="periscope-btn outlined"
|
||||||
|
key="back"
|
||||||
|
onClick={(): void => setShowRequestDataSourceModal(false)}
|
||||||
|
icon={<X size={16} />}
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</Button>,
|
||||||
|
<Button
|
||||||
|
key="submit"
|
||||||
|
type="primary"
|
||||||
|
className="periscope-btn primary"
|
||||||
|
disabled={dataSourceRequest.length <= 0}
|
||||||
|
onClick={handleSubmitDataSourceRequest}
|
||||||
|
icon={<CheckIcon size={16} />}
|
||||||
|
>
|
||||||
|
Submit request
|
||||||
|
</Button>,
|
||||||
|
]}
|
||||||
|
destroyOnClose
|
||||||
|
>
|
||||||
|
<div className="request-data-source-modal-content">
|
||||||
|
<Typography.Text>Enter your request</Typography.Text>
|
||||||
|
<Input
|
||||||
|
placeholder="Eg: Kotlin"
|
||||||
|
className="request-data-source-modal-input"
|
||||||
|
onChange={(e): void => setDataSourceRequest(e.target.value)}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</Modal>
|
||||||
</Layout>
|
</Layout>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -4,14 +4,15 @@
|
|||||||
|
|
||||||
&__header {
|
&__header {
|
||||||
background: rgba(11, 12, 14, 0.7);
|
background: rgba(11, 12, 14, 0.7);
|
||||||
|
border-bottom: 1px solid var(--bg-slate-500);
|
||||||
backdrop-filter: blur(20px);
|
backdrop-filter: blur(20px);
|
||||||
padding: 16px 0px 0px 0px;
|
padding: 12px 0px;
|
||||||
|
|
||||||
&--sticky {
|
&--sticky {
|
||||||
display: flex;
|
display: flex;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
padding: 0px 1rem;
|
padding: 0px 1rem;
|
||||||
// margin-top: 16px;
|
margin-top: 12px;
|
||||||
|
|
||||||
background: rgba(11, 12, 14, 0.7);
|
background: rgba(11, 12, 14, 0.7);
|
||||||
backdrop-filter: blur(20px);
|
backdrop-filter: blur(20px);
|
||||||
@@ -323,7 +324,8 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
.get-help-btn {
|
.get-help-btn,
|
||||||
|
.invite-teammate-btn {
|
||||||
font-size: 11px;
|
font-size: 11px;
|
||||||
padding: 6px 16px;
|
padding: 6px 16px;
|
||||||
border: 1px solid var(--bg-slate-400) !important;
|
border: 1px solid var(--bg-slate-400) !important;
|
||||||
@@ -610,15 +612,61 @@
|
|||||||
display: flex;
|
display: flex;
|
||||||
|
|
||||||
.data-sources-container {
|
.data-sources-container {
|
||||||
flex: 0 0 70%;
|
flex: 0 0 80%;
|
||||||
max-width: 70%;
|
max-width: 80%;
|
||||||
|
|
||||||
margin-right: 32px;
|
margin-right: 32px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.data-source-categories-filter-container {
|
.data-source-categories-filter-container {
|
||||||
flex: 0 0 30%;
|
flex: 0 0 20%;
|
||||||
max-width: 30%;
|
max-width: 20%;
|
||||||
|
|
||||||
|
.onboarding-data-source-category {
|
||||||
|
.onboarding-data-source-category-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
|
||||||
|
margin-bottom: 8px;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.onboarding-filters-item-title {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
|
||||||
|
margin-bottom: 0px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.line-divider {
|
||||||
|
height: 1px;
|
||||||
|
margin: 0 16px;
|
||||||
|
flex-grow: 1;
|
||||||
|
border-top: 2px dotted var(--bg-slate-400);
|
||||||
|
}
|
||||||
|
|
||||||
|
.onboarding-filters-item-count {
|
||||||
|
color: var(--text-vanilla-400);
|
||||||
|
font-family: Inter;
|
||||||
|
font-size: 10px;
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: 400;
|
||||||
|
line-height: 18px; /* 150% */
|
||||||
|
|
||||||
|
background-color: var(--bg-ink-400);
|
||||||
|
border-radius: 4px;
|
||||||
|
|
||||||
|
width: 24px;
|
||||||
|
height: 24px;
|
||||||
|
border-radius: 50%;
|
||||||
|
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -643,8 +691,14 @@
|
|||||||
max-width: 70%;
|
max-width: 70%;
|
||||||
|
|
||||||
display: flex;
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
gap: 24px;
|
gap: 24px;
|
||||||
|
|
||||||
|
&.step-id-1 {
|
||||||
|
flex: 0 0 90%;
|
||||||
|
max-width: 90%;
|
||||||
|
}
|
||||||
|
|
||||||
// border-right: 1px solid var(--Greyscale-Slate-400, #1d212d);
|
// border-right: 1px solid var(--Greyscale-Slate-400, #1d212d);
|
||||||
|
|
||||||
.perlian-bg {
|
.perlian-bg {
|
||||||
@@ -678,7 +732,7 @@
|
|||||||
&_right-section {
|
&_right-section {
|
||||||
flex: 1;
|
flex: 1;
|
||||||
max-width: 30%;
|
max-width: 30%;
|
||||||
height: calc(100vh - 120px);
|
height: calc(100vh - 130px);
|
||||||
|
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
@@ -972,6 +1026,52 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.no-results-found-container {
|
||||||
|
.ant-typography {
|
||||||
|
color: rgba(192, 193, 195, 0.6);
|
||||||
|
font-family: Inter;
|
||||||
|
font-size: 11px;
|
||||||
|
font-style: normal;
|
||||||
|
line-height: 18px; /* 150% */
|
||||||
|
letter-spacing: 0.48px;
|
||||||
|
text-transform: uppercase;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.request-data-source-container {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 16px;
|
||||||
|
|
||||||
|
margin: 36px 0;
|
||||||
|
|
||||||
|
display: flex;
|
||||||
|
|
||||||
|
width: fit-content;
|
||||||
|
gap: 24px;
|
||||||
|
|
||||||
|
padding: 12px 12px 12px 16px;
|
||||||
|
border-radius: 6px;
|
||||||
|
background: rgba(171, 189, 255, 0.06);
|
||||||
|
|
||||||
|
.request-data-source-search-query {
|
||||||
|
border-radius: 2px;
|
||||||
|
border: 1px solid rgba(173, 127, 88, 0.1);
|
||||||
|
background: rgba(173, 127, 88, 0.1);
|
||||||
|
|
||||||
|
color: var(--Sienna-400, #bd9979);
|
||||||
|
font-size: 13px;
|
||||||
|
padding: 2px;
|
||||||
|
line-height: 20px; /* 142.857% */
|
||||||
|
}
|
||||||
|
|
||||||
|
.request-data-source-btn {
|
||||||
|
border-radius: 2px;
|
||||||
|
border: 1px solid var(--Slate-200, #2c3140);
|
||||||
|
background: var(--Ink-200, #23262e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
.onboarding-data-source-category-container {
|
.onboarding-data-source-category-container {
|
||||||
flex: 1;
|
flex: 1;
|
||||||
max-width: 30%;
|
max-width: 30%;
|
||||||
@@ -996,7 +1096,7 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.onboarding-configure-container {
|
.onboarding-configure-container {
|
||||||
height: calc(100vh - 120px);
|
height: calc(100vh - 130px);
|
||||||
width: 100%;
|
width: 100%;
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
@@ -1070,7 +1170,8 @@
|
|||||||
height: 18px;
|
height: 18px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.invite-team-member-modal {
|
.invite-team-member-modal,
|
||||||
|
.request-data-source-modal {
|
||||||
.ant-modal-content {
|
.ant-modal-content {
|
||||||
background-color: var(--bg-ink-500);
|
background-color: var(--bg-ink-500);
|
||||||
}
|
}
|
||||||
@@ -1079,9 +1180,26 @@
|
|||||||
background-color: var(--bg-ink-500);
|
background-color: var(--bg-ink-500);
|
||||||
}
|
}
|
||||||
|
|
||||||
.invite-team-member-modal-content {
|
.invite-team-member-modal-content,
|
||||||
|
.request-data-source-modal-content {
|
||||||
background-color: var(--bg-ink-500);
|
background-color: var(--bg-ink-500);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.request-data-source-modal-content {
|
||||||
|
padding: 12px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.request-data-source-modal-input {
|
||||||
|
margin-top: 8px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.request-data-source-modal {
|
||||||
|
.ant-modal-footer {
|
||||||
|
display: flex;
|
||||||
|
justify-content: flex-end;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
.ingestion-setup-details-links {
|
.ingestion-setup-details-links {
|
||||||
@@ -1262,7 +1380,8 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.onboarding-v2 {
|
.onboarding-v2 {
|
||||||
.get-help-btn {
|
.get-help-btn,
|
||||||
|
.invite-teammate-btn {
|
||||||
border: 1px solid var(--bg-vanilla-300) !important;
|
border: 1px solid var(--bg-vanilla-300) !important;
|
||||||
color: var(--bg-ink-300) !important;
|
color: var(--bg-ink-300) !important;
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -13,11 +13,7 @@ function OrganizationSettings(): JSX.Element {
|
|||||||
const isNotSSO =
|
const isNotSSO =
|
||||||
!featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false;
|
!featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false;
|
||||||
|
|
||||||
const isNoUpSell =
|
const isAuthDomain = !isNotSSO;
|
||||||
!featureFlags?.find((flag) => flag.name === FeatureKeys.DISABLE_UPSELL)
|
|
||||||
?.active || false;
|
|
||||||
|
|
||||||
const isAuthDomain = !isNoUpSell || (isNoUpSell && !isNotSSO);
|
|
||||||
|
|
||||||
if (!org) {
|
if (!org) {
|
||||||
return <div />;
|
return <div />;
|
||||||
|
|||||||
@@ -453,7 +453,7 @@ export const Query = memo(function Query({
|
|||||||
</Col>
|
</Col>
|
||||||
)}
|
)}
|
||||||
<Col flex="1" className="qb-search-container">
|
<Col flex="1" className="qb-search-container">
|
||||||
{[DataSource.LOGS, DataSource.TRACES].includes(query.dataSource) ? (
|
{query.dataSource === DataSource.LOGS ? (
|
||||||
<QueryBuilderSearchV2
|
<QueryBuilderSearchV2
|
||||||
query={query}
|
query={query}
|
||||||
onChange={handleChangeTagFilters}
|
onChange={handleChangeTagFilters}
|
||||||
|
|||||||
@@ -2,7 +2,6 @@
|
|||||||
import './QueryBuilderSearchV2.styles.scss';
|
import './QueryBuilderSearchV2.styles.scss';
|
||||||
|
|
||||||
import { Typography } from 'antd';
|
import { Typography } from 'antd';
|
||||||
import cx from 'classnames';
|
|
||||||
import {
|
import {
|
||||||
ArrowDown,
|
ArrowDown,
|
||||||
ArrowUp,
|
ArrowUp,
|
||||||
@@ -26,7 +25,6 @@ interface ICustomDropdownProps {
|
|||||||
exampleQueries: TagFilter[];
|
exampleQueries: TagFilter[];
|
||||||
onChange: (value: TagFilter) => void;
|
onChange: (value: TagFilter) => void;
|
||||||
currentFilterItem?: ITag;
|
currentFilterItem?: ITag;
|
||||||
isLogsDataSource: boolean;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export default function QueryBuilderSearchDropdown(
|
export default function QueryBuilderSearchDropdown(
|
||||||
@@ -40,14 +38,11 @@ export default function QueryBuilderSearchDropdown(
|
|||||||
exampleQueries,
|
exampleQueries,
|
||||||
options,
|
options,
|
||||||
onChange,
|
onChange,
|
||||||
isLogsDataSource,
|
|
||||||
} = props;
|
} = props;
|
||||||
const userOs = getUserOperatingSystem();
|
const userOs = getUserOperatingSystem();
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<div
|
<div className="content">
|
||||||
className={cx('content', { 'non-logs-data-source': !isLogsDataSource })}
|
|
||||||
>
|
|
||||||
{!currentFilterItem?.key ? (
|
{!currentFilterItem?.key ? (
|
||||||
<div className="suggested-filters">Suggested Filters</div>
|
<div className="suggested-filters">Suggested Filters</div>
|
||||||
) : !currentFilterItem?.op ? (
|
) : !currentFilterItem?.op ? (
|
||||||
|
|||||||
@@ -11,11 +11,6 @@
|
|||||||
.rc-virtual-list-holder {
|
.rc-virtual-list-holder {
|
||||||
height: 115px;
|
height: 115px;
|
||||||
}
|
}
|
||||||
&.non-logs-data-source {
|
|
||||||
.rc-virtual-list-holder {
|
|
||||||
height: 256px;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -689,29 +689,12 @@ function QueryBuilderSearchV2(
|
|||||||
})),
|
})),
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
setDropdownOptions([
|
setDropdownOptions(
|
||||||
// Add user typed option if it doesn't exist in the payload
|
data?.payload?.attributeKeys?.map((key) => ({
|
||||||
...(!isEmpty(tagKey) &&
|
|
||||||
!data?.payload?.attributeKeys?.some((val) => isEqual(val.key, tagKey))
|
|
||||||
? [
|
|
||||||
{
|
|
||||||
label: tagKey,
|
|
||||||
value: {
|
|
||||||
key: tagKey,
|
|
||||||
dataType: DataTypes.EMPTY,
|
|
||||||
type: '',
|
|
||||||
isColumn: false,
|
|
||||||
isJSON: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
: []),
|
|
||||||
// Map existing attribute keys from payload
|
|
||||||
...(data?.payload?.attributeKeys?.map((key) => ({
|
|
||||||
label: key.key,
|
label: key.key,
|
||||||
value: key,
|
value: key,
|
||||||
})) || []),
|
})) || [],
|
||||||
]);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (currentState === DropdownState.OPERATOR) {
|
if (currentState === DropdownState.OPERATOR) {
|
||||||
@@ -981,7 +964,6 @@ function QueryBuilderSearchV2(
|
|||||||
exampleQueries={suggestionsData?.payload?.example_queries || []}
|
exampleQueries={suggestionsData?.payload?.example_queries || []}
|
||||||
tags={tags}
|
tags={tags}
|
||||||
currentFilterItem={currentFilterItem}
|
currentFilterItem={currentFilterItem}
|
||||||
isLogsDataSource={isLogsDataSource}
|
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
|
|||||||
@@ -170,7 +170,11 @@ export const useOptions = (
|
|||||||
(option, index, self) =>
|
(option, index, self) =>
|
||||||
index ===
|
index ===
|
||||||
self.findIndex(
|
self.findIndex(
|
||||||
(o) => o.label === option.label && o.value === option.value, // to remove duplicate & empty options from list
|
(o) =>
|
||||||
|
// to remove duplicate & empty options from list
|
||||||
|
o.label === option.label &&
|
||||||
|
o.value === option.value &&
|
||||||
|
o.dataType?.toLowerCase() === option.dataType?.toLowerCase(), // handle case sensitivity
|
||||||
) && option.value !== '',
|
) && option.value !== '',
|
||||||
) || []
|
) || []
|
||||||
).map((option) => {
|
).map((option) => {
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
.header {
|
.traces-funnels-header {
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
gap: 4px;
|
gap: 4px;
|
||||||
|
|
||||||
&__title {
|
.traces-funnels-header-title {
|
||||||
color: var(--bg-vanilla-100);
|
color: var(--bg-vanilla-100);
|
||||||
font-size: 18px;
|
font-size: 18px;
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
@@ -13,7 +13,7 @@
|
|||||||
margin: 0;
|
margin: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
&__subtitle {
|
.traces-funnels-header-subtitle {
|
||||||
color: var(--bg-vanilla-400);
|
color: var(--bg-vanilla-400);
|
||||||
font-size: 14px;
|
font-size: 14px;
|
||||||
line-height: 20px;
|
line-height: 20px;
|
||||||
@@ -21,13 +21,13 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.lightMode {
|
.lightMode {
|
||||||
.header {
|
.traces-funnels-header {
|
||||||
&__title {
|
.traces-funnels-header-title {
|
||||||
color: var(--bg-ink-500);
|
color: var(--bg-ink-500);
|
||||||
}
|
}
|
||||||
|
|
||||||
&__subtitle {
|
.traces-funnels-header-subtitle {
|
||||||
color: var(--bg-ink-400);
|
color: var(--bg-ink-400);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
function Header(): JSX.Element {
|
function Header(): JSX.Element {
|
||||||
return (
|
return (
|
||||||
<div className="header">
|
<div className="traces-funnels-header">
|
||||||
<div className="header__title">Funnels</div>
|
<div className="traces-funnels-header-title">Funnels</div>
|
||||||
<div className="header__subtitle">Create and manage tracing funnels.</div>
|
<div className="traces-funnels-header-subtitle">
|
||||||
|
Create and manage tracing funnels.
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -45,6 +45,13 @@
|
|||||||
font-size: 11px;
|
font-size: 11px;
|
||||||
font-weight: 400;
|
font-weight: 400;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
&.success {
|
||||||
|
color: var(--bg-forest-400) !important;
|
||||||
|
border-radius: 2px;
|
||||||
|
border: 1px solid rgba(37, 225, 146, 0.1);
|
||||||
|
background: rgba(37, 225, 146, 0.1) !important;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
.periscope-tab {
|
.periscope-tab {
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ import { useQuery } from 'react-query';
|
|||||||
import { FeatureFlagProps as FeatureFlags } from 'types/api/features/getFeaturesFlags';
|
import { FeatureFlagProps as FeatureFlags } from 'types/api/features/getFeaturesFlags';
|
||||||
import { PayloadProps as LicensesResModel } from 'types/api/licenses/getAll';
|
import { PayloadProps as LicensesResModel } from 'types/api/licenses/getAll';
|
||||||
import {
|
import {
|
||||||
|
LicensePlatform,
|
||||||
LicenseState,
|
LicenseState,
|
||||||
LicenseV3ResModel,
|
LicenseV3ResModel,
|
||||||
TrialInfo,
|
TrialInfo,
|
||||||
@@ -145,7 +146,8 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
|
|||||||
).unix(),
|
).unix(),
|
||||||
onTrial: isOnTrial,
|
onTrial: isOnTrial,
|
||||||
workSpaceBlock:
|
workSpaceBlock:
|
||||||
activeLicenseV3Data.payload.state === LicenseState.EVALUATION_EXPIRED,
|
activeLicenseV3Data.payload.state === LicenseState.EVALUATION_EXPIRED &&
|
||||||
|
activeLicenseV3Data.payload.platform === LicensePlatform.CLOUD,
|
||||||
trialConvertedToSubscription:
|
trialConvertedToSubscription:
|
||||||
activeLicenseV3Data.payload.state !== LicenseState.ISSUED &&
|
activeLicenseV3Data.payload.state !== LicenseState.ISSUED &&
|
||||||
activeLicenseV3Data.payload.state !== LicenseState.EVALUATING &&
|
activeLicenseV3Data.payload.state !== LicenseState.EVALUATING &&
|
||||||
|
|||||||
@@ -186,83 +186,6 @@ export function getAppContextMock(
|
|||||||
usage_limit: -1,
|
usage_limit: -1,
|
||||||
route: '',
|
route: '',
|
||||||
},
|
},
|
||||||
{
|
|
||||||
name: FeatureKeys.OSS,
|
|
||||||
active: false,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.DISABLE_UPSELL,
|
|
||||||
active: false,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.SMART_TRACE_DETAIL,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.CUSTOM_METRICS_FUNCTION,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.QUERY_BUILDER_PANELS,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.QUERY_BUILDER_ALERTS,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.ALERT_CHANNEL_SLACK,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.ALERT_CHANNEL_WEBHOOK,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.ALERT_CHANNEL_PAGERDUTY,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.ALERT_CHANNEL_OPSGENIE,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.ALERT_CHANNEL_MSTEAMS,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
name: FeatureKeys.USE_SPAN_METRICS,
|
name: FeatureKeys.USE_SPAN_METRICS,
|
||||||
active: false,
|
active: false,
|
||||||
@@ -291,20 +214,6 @@ export function getAppContextMock(
|
|||||||
usage_limit: -1,
|
usage_limit: -1,
|
||||||
route: '',
|
route: '',
|
||||||
},
|
},
|
||||||
{
|
|
||||||
name: FeatureKeys.DurationSort,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: FeatureKeys.TimestampSort,
|
|
||||||
active: true,
|
|
||||||
usage: 0,
|
|
||||||
usage_limit: -1,
|
|
||||||
route: '',
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
name: FeatureKeys.ONBOARDING,
|
name: FeatureKeys.ONBOARDING,
|
||||||
active: true,
|
active: true,
|
||||||
|
|||||||
9
go.mod
9
go.mod
@@ -22,12 +22,13 @@ require (
|
|||||||
github.com/go-redis/redismock/v8 v8.11.5
|
github.com/go-redis/redismock/v8 v8.11.5
|
||||||
github.com/go-viper/mapstructure/v2 v2.1.0
|
github.com/go-viper/mapstructure/v2 v2.1.0
|
||||||
github.com/gojek/heimdall/v7 v7.0.3
|
github.com/gojek/heimdall/v7 v7.0.3
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.1
|
github.com/golang-jwt/jwt/v5 v5.2.2
|
||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/gorilla/handlers v1.5.1
|
github.com/gorilla/handlers v1.5.1
|
||||||
github.com/gorilla/mux v1.8.1
|
github.com/gorilla/mux v1.8.1
|
||||||
github.com/gorilla/websocket v1.5.0
|
github.com/gorilla/websocket v1.5.0
|
||||||
github.com/gosimple/slug v1.10.0
|
github.com/gosimple/slug v1.10.0
|
||||||
|
github.com/huandu/go-sqlbuilder v1.35.0
|
||||||
github.com/jackc/pgx/v5 v5.7.2
|
github.com/jackc/pgx/v5 v5.7.2
|
||||||
github.com/jmoiron/sqlx v1.3.4
|
github.com/jmoiron/sqlx v1.3.4
|
||||||
github.com/json-iterator/go v1.1.12
|
github.com/json-iterator/go v1.1.12
|
||||||
@@ -77,7 +78,6 @@ require (
|
|||||||
gopkg.in/segmentio/analytics-go.v3 v3.1.0
|
gopkg.in/segmentio/analytics-go.v3 v3.1.0
|
||||||
gopkg.in/yaml.v2 v2.4.0
|
gopkg.in/yaml.v2 v2.4.0
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
honnef.co/go/tools v0.0.1-2020.1.4
|
|
||||||
k8s.io/apimachinery v0.31.3
|
k8s.io/apimachinery v0.31.3
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -89,10 +89,10 @@ require (
|
|||||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
|
||||||
github.com/BurntSushi/toml v0.3.1 // indirect
|
|
||||||
github.com/ClickHouse/ch-go v0.61.5 // indirect
|
github.com/ClickHouse/ch-go v0.61.5 // indirect
|
||||||
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
|
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
|
||||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||||
|
github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
|
||||||
github.com/armon/go-metrics v0.4.1 // indirect
|
github.com/armon/go-metrics v0.4.1 // indirect
|
||||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||||
github.com/aws/aws-sdk-go v1.55.5 // indirect
|
github.com/aws/aws-sdk-go v1.55.5 // indirect
|
||||||
@@ -110,7 +110,7 @@ require (
|
|||||||
github.com/ebitengine/purego v0.8.0 // indirect
|
github.com/ebitengine/purego v0.8.0 // indirect
|
||||||
github.com/edsrzf/mmap-go v1.2.0 // indirect
|
github.com/edsrzf/mmap-go v1.2.0 // indirect
|
||||||
github.com/elastic/lunes v0.1.0 // indirect
|
github.com/elastic/lunes v0.1.0 // indirect
|
||||||
github.com/expr-lang/expr v1.16.9 // indirect
|
github.com/expr-lang/expr v1.17.0 // indirect
|
||||||
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
|
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
|
||||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||||
github.com/fsnotify/fsnotify v1.8.0 // indirect
|
github.com/fsnotify/fsnotify v1.8.0 // indirect
|
||||||
@@ -152,6 +152,7 @@ require (
|
|||||||
github.com/hashicorp/golang-lru v1.0.2 // indirect
|
github.com/hashicorp/golang-lru v1.0.2 // indirect
|
||||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||||
github.com/hashicorp/memberlist v0.5.1 // indirect
|
github.com/hashicorp/memberlist v0.5.1 // indirect
|
||||||
|
github.com/huandu/xstrings v1.4.0 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||||
|
|||||||
18
go.sum
18
go.sum
@@ -83,7 +83,6 @@ github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJ
|
|||||||
github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE=
|
github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE=
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||||
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
|
||||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||||
github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4=
|
github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4=
|
||||||
@@ -114,6 +113,8 @@ github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b/go.mod h1:fvzegU4
|
|||||||
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
||||||
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
||||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||||
|
github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ=
|
||||||
|
github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw=
|
||||||
github.com/antonmedv/expr v1.15.3 h1:q3hOJZNvLvhqE8OHBs1cFRdbXFNKuA+bHmRaI+AmRmI=
|
github.com/antonmedv/expr v1.15.3 h1:q3hOJZNvLvhqE8OHBs1cFRdbXFNKuA+bHmRaI+AmRmI=
|
||||||
github.com/antonmedv/expr v1.15.3/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE=
|
github.com/antonmedv/expr v1.15.3/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE=
|
||||||
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
|
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
|
||||||
@@ -232,8 +233,8 @@ github.com/envoyproxy/go-control-plane v0.13.1/go.mod h1:X45hY0mufo6Fd0KW3rqsGvQ
|
|||||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||||
github.com/envoyproxy/protoc-gen-validate v1.1.0 h1:tntQDh69XqOCOZsDz0lVJQez/2L6Uu2PdjCQwWCJ3bM=
|
github.com/envoyproxy/protoc-gen-validate v1.1.0 h1:tntQDh69XqOCOZsDz0lVJQez/2L6Uu2PdjCQwWCJ3bM=
|
||||||
github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4=
|
github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4=
|
||||||
github.com/expr-lang/expr v1.16.9 h1:WUAzmR0JNI9JCiF0/ewwHB1gmcGw5wW7nWt8gc6PpCI=
|
github.com/expr-lang/expr v1.17.0 h1:+vpszOyzKLQXC9VF+wA8cVA0tlA984/Wabc/1hF9Whg=
|
||||||
github.com/expr-lang/expr v1.16.9/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4=
|
github.com/expr-lang/expr v1.17.0/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4=
|
||||||
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb h1:IT4JYU7k4ikYg1SCxNI1/Tieq/NFvh6dzLdgi7eu0tM=
|
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb h1:IT4JYU7k4ikYg1SCxNI1/Tieq/NFvh6dzLdgi7eu0tM=
|
||||||
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb/go.mod h1:bH6Xx7IW64qjjJq8M2u4dxNaBiDfKK+z/3eGDpXEQhc=
|
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb/go.mod h1:bH6Xx7IW64qjjJq8M2u4dxNaBiDfKK+z/3eGDpXEQhc=
|
||||||
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
||||||
@@ -334,8 +335,8 @@ github.com/gojek/heimdall/v7 v7.0.3 h1:+5sAhl8S0m+qRRL8IVeHCJudFh/XkG3wyO++nvOg+
|
|||||||
github.com/gojek/heimdall/v7 v7.0.3/go.mod h1:Z43HtMid7ysSjmsedPTXAki6jcdcNVnjn5pmsTyiMic=
|
github.com/gojek/heimdall/v7 v7.0.3/go.mod h1:Z43HtMid7ysSjmsedPTXAki6jcdcNVnjn5pmsTyiMic=
|
||||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf h1:5xRGbUdOmZKoDXkGx5evVLehuCMpuO1hl701bEQqXOM=
|
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf h1:5xRGbUdOmZKoDXkGx5evVLehuCMpuO1hl701bEQqXOM=
|
||||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf/go.mod h1:QzhUKaYKJmcbTnCYCAVQrroCOY7vOOI8cSQ4NbuhYf0=
|
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf/go.mod h1:QzhUKaYKJmcbTnCYCAVQrroCOY7vOOI8cSQ4NbuhYf0=
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||||
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||||
@@ -538,6 +539,12 @@ github.com/hetznercloud/hcloud-go/v2 v2.13.1/go.mod h1:dhix40Br3fDiBhwaSG/zgaYOF
|
|||||||
github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs=
|
github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs=
|
||||||
github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E=
|
github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E=
|
||||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||||
|
github.com/huandu/go-assert v1.1.6 h1:oaAfYxq9KNDi9qswn/6aE0EydfxSa+tWZC1KabNitYs=
|
||||||
|
github.com/huandu/go-assert v1.1.6/go.mod h1:JuIfbmYG9ykwvuxoJ3V8TB5QP+3+ajIA54Y44TmkMxs=
|
||||||
|
github.com/huandu/go-sqlbuilder v1.35.0 h1:ESvxFHN8vxCTudY1Vq63zYpU5yJBESn19sf6k4v2T5Q=
|
||||||
|
github.com/huandu/go-sqlbuilder v1.35.0/go.mod h1:mS0GAtrtW+XL6nM2/gXHRJax2RwSW1TraavWDFAc1JA=
|
||||||
|
github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU=
|
||||||
|
github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
||||||
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||||
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||||
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
|
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
|
||||||
@@ -1653,7 +1660,6 @@ honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWh
|
|||||||
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||||
honnef.co/go/tools v0.0.1-2020.1.4 h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=
|
|
||||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||||
k8s.io/api v0.31.3 h1:umzm5o8lFbdN/hIXbrK9oRpOproJO62CV1zqxXrLgk8=
|
k8s.io/api v0.31.3 h1:umzm5o8lFbdN/hIXbrK9oRpOproJO62CV1zqxXrLgk8=
|
||||||
k8s.io/api v0.31.3/go.mod h1:UJrkIp9pnMOI9K2nlL6vwpxRzzEX5sWgn8kGQe92kCE=
|
k8s.io/api v0.31.3/go.mod h1:UJrkIp9pnMOI9K2nlL6vwpxRzzEX5sWgn8kGQe92kCE=
|
||||||
|
|||||||
221
grammar/FilterQuery.g4
Normal file
221
grammar/FilterQuery.g4
Normal file
@@ -0,0 +1,221 @@
|
|||||||
|
grammar FilterQuery;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Parser Rules
|
||||||
|
*/
|
||||||
|
|
||||||
|
query
|
||||||
|
: expression
|
||||||
|
EOF
|
||||||
|
;
|
||||||
|
|
||||||
|
// Expression with standard boolean precedence:
|
||||||
|
// - parentheses > NOT > AND > OR
|
||||||
|
// - consecutive expressions with no AND/OR => implicit AND
|
||||||
|
expression
|
||||||
|
: orExpression
|
||||||
|
;
|
||||||
|
|
||||||
|
// OR expressions
|
||||||
|
orExpression
|
||||||
|
: andExpression ( OR andExpression )*
|
||||||
|
;
|
||||||
|
|
||||||
|
// AND expressions + optional chaining with implicit AND if no OR is present
|
||||||
|
andExpression
|
||||||
|
: unaryExpression ( AND unaryExpression | unaryExpression )*
|
||||||
|
;
|
||||||
|
|
||||||
|
// A unary expression handles optional NOT
|
||||||
|
unaryExpression
|
||||||
|
: NOT? primary
|
||||||
|
;
|
||||||
|
|
||||||
|
// Primary constructs: grouped expressions, a comparison (key op value),
|
||||||
|
// a function call, or a full-text string
|
||||||
|
primary
|
||||||
|
: LPAREN orExpression RPAREN
|
||||||
|
| comparison
|
||||||
|
| functionCall
|
||||||
|
| fullText
|
||||||
|
| key
|
||||||
|
;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Comparison-like filters
|
||||||
|
*
|
||||||
|
* Includes all operators: =, !=, <>, <, <=, >, >=, [NOT] LIKE, [NOT] ILIKE,
|
||||||
|
* [NOT] BETWEEN, [NOT] IN, [NOT] EXISTS, [NOT] REGEXP, [NOT] CONTAINS, etc.
|
||||||
|
*/
|
||||||
|
comparison
|
||||||
|
: key EQUALS value
|
||||||
|
| key (NOT_EQUALS | NEQ) value
|
||||||
|
| key LT value
|
||||||
|
| key LE value
|
||||||
|
| key GT value
|
||||||
|
| key GE value
|
||||||
|
|
||||||
|
| key (LIKE | ILIKE) value
|
||||||
|
| key (NOT_LIKE | NOT_ILIKE) value
|
||||||
|
|
||||||
|
| key BETWEEN value AND value
|
||||||
|
| key NOT BETWEEN value AND value
|
||||||
|
|
||||||
|
| key inClause
|
||||||
|
| key notInClause
|
||||||
|
|
||||||
|
| key EXISTS
|
||||||
|
| key NOT EXISTS
|
||||||
|
|
||||||
|
| key REGEXP value
|
||||||
|
| key NOT REGEXP value
|
||||||
|
|
||||||
|
| key CONTAINS value
|
||||||
|
| key NOT CONTAINS value
|
||||||
|
;
|
||||||
|
|
||||||
|
// in(...) or in[...]
|
||||||
|
inClause
|
||||||
|
: IN LPAREN valueList RPAREN
|
||||||
|
| IN LBRACK valueList RBRACK
|
||||||
|
;
|
||||||
|
|
||||||
|
notInClause
|
||||||
|
: NOT IN LPAREN valueList RPAREN
|
||||||
|
| NOT IN LBRACK valueList RBRACK
|
||||||
|
;
|
||||||
|
|
||||||
|
// List of values for in(...) or in[...]
|
||||||
|
valueList
|
||||||
|
: value ( COMMA value )*
|
||||||
|
;
|
||||||
|
|
||||||
|
// Full-text search: a standalone quoted string is allowed as a "primary"
|
||||||
|
// e.g. `"Waiting for response" http.status_code=200`
|
||||||
|
fullText
|
||||||
|
: QUOTED_TEXT
|
||||||
|
| FREETEXT
|
||||||
|
;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Function calls like:
|
||||||
|
* has(payload.user_ids, 123)
|
||||||
|
* hasAny(payload.user_ids, [123, 456])
|
||||||
|
* ...
|
||||||
|
*/
|
||||||
|
functionCall
|
||||||
|
: (HAS | HASANY | HASALL | HASNONE) LPAREN functionParamList RPAREN
|
||||||
|
;
|
||||||
|
|
||||||
|
// Function parameters can be keys, single scalar values, or arrays
|
||||||
|
functionParamList
|
||||||
|
: functionParam ( COMMA functionParam )*
|
||||||
|
;
|
||||||
|
|
||||||
|
functionParam
|
||||||
|
: key
|
||||||
|
| value
|
||||||
|
| array
|
||||||
|
;
|
||||||
|
|
||||||
|
// An array: [ item1, item2, item3 ]
|
||||||
|
array
|
||||||
|
: LBRACK valueList RBRACK
|
||||||
|
;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* A 'value' can be a string literal (double or single-quoted),
|
||||||
|
// a numeric literal, boolean, or a "bare" token as needed.
|
||||||
|
*/
|
||||||
|
value
|
||||||
|
: QUOTED_TEXT
|
||||||
|
| NUMBER
|
||||||
|
| BOOL
|
||||||
|
| KEY
|
||||||
|
;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* A key can include letters, digits, underscores, dots, brackets
|
||||||
|
* E.g. service.name, query_log.query_duration_ms, proto.user_objects[].name
|
||||||
|
*/
|
||||||
|
key
|
||||||
|
: KEY
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Lexer Rules
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Common punctuation / symbols
|
||||||
|
LPAREN : '(' ;
|
||||||
|
RPAREN : ')' ;
|
||||||
|
LBRACK : '[' ;
|
||||||
|
RBRACK : ']' ;
|
||||||
|
COMMA : ',' ;
|
||||||
|
|
||||||
|
EQUALS : '=' | '==' ;
|
||||||
|
NOT_EQUALS : '!=' ;
|
||||||
|
NEQ : '<>' ; // alternate not-equals operator
|
||||||
|
LT : '<' ;
|
||||||
|
LE : '<=' ;
|
||||||
|
GT : '>' ;
|
||||||
|
GE : '>=' ;
|
||||||
|
|
||||||
|
// Operators that are made of multiple keywords
|
||||||
|
LIKE : [Ll][Ii][Kk][Ee] ;
|
||||||
|
NOT_LIKE : [Nn][Oo][Tt] [ \t]+ [Ll][Ii][Kk][Ee] ;
|
||||||
|
ILIKE : [Ii][Ll][Ii][Kk][Ee] ;
|
||||||
|
NOT_ILIKE : [Nn][Oo][Tt] [ \t]+ [Ii][Ll][Ii][Kk][Ee] ;
|
||||||
|
BETWEEN : [Bb][Ee][Tt][Ww][Ee][Ee][Nn] ;
|
||||||
|
EXISTS : [Ee][Xx][Ii][Ss][Tt][Ss]? ;
|
||||||
|
REGEXP : [Rr][Ee][Gg][Ee][Xx][Pp] ;
|
||||||
|
CONTAINS : [Cc][Oo][Nn][Tt][Aa][Ii][Nn][Ss]? ;
|
||||||
|
IN : [Ii][Nn] ;
|
||||||
|
|
||||||
|
// Boolean logic
|
||||||
|
NOT : [Nn][Oo][Tt] ;
|
||||||
|
AND : [Aa][Nn][Dd] ;
|
||||||
|
OR : [Oo][Rr] ;
|
||||||
|
|
||||||
|
// For easy referencing in function calls
|
||||||
|
HAS : [Hh][Aa][Ss] ;
|
||||||
|
HASANY : [Hh][Aa][Ss][Aa][Nn][Yy] ;
|
||||||
|
HASALL : [Hh][Aa][Ss][Aa][Ll][Ll] ;
|
||||||
|
HASNONE : [Hh][Aa][Ss][Nn][Oo][Nn][Ee] ;
|
||||||
|
|
||||||
|
// Potential boolean constants
|
||||||
|
BOOL
|
||||||
|
: [Tt][Rr][Uu][Ee]
|
||||||
|
| [Ff][Aa][Ll][Ss][Ee]
|
||||||
|
;
|
||||||
|
|
||||||
|
// Numbers (integer or float). Adjust as needed for your domain.
|
||||||
|
NUMBER
|
||||||
|
: DIGIT+ ( '.' DIGIT+ )?
|
||||||
|
;
|
||||||
|
|
||||||
|
// Double/single-quoted text, capturing full text search strings, values, etc.
|
||||||
|
QUOTED_TEXT
|
||||||
|
: ( '"' ( ~["\\] | '\\' . )* '"' // double-quoted
|
||||||
|
| '\'' ( ~['\\] | '\\' . )* '\'' // single-quoted
|
||||||
|
)
|
||||||
|
;
|
||||||
|
|
||||||
|
// Keys can have letters, digits, underscores, dots, and bracket pairs
|
||||||
|
// e.g. service.name, service.namespace, db.queries[].query_duration
|
||||||
|
KEY
|
||||||
|
: [a-zA-Z0-9_] [a-zA-Z0-9_.[\]]*
|
||||||
|
;
|
||||||
|
|
||||||
|
// Ignore whitespace
|
||||||
|
WS
|
||||||
|
: [ \t\r\n]+ -> skip
|
||||||
|
;
|
||||||
|
|
||||||
|
// Digits used by NUMBER
|
||||||
|
fragment DIGIT
|
||||||
|
: [0-9]
|
||||||
|
;
|
||||||
|
|
||||||
|
FREETEXT : (~[ \t\r\n=()'"<>![\]])+ ;
|
||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@@ -33,16 +34,16 @@ type Alertmanager interface {
|
|||||||
ListAllChannels(context.Context) ([]*alertmanagertypes.Channel, error)
|
ListAllChannels(context.Context) ([]*alertmanagertypes.Channel, error)
|
||||||
|
|
||||||
// GetChannelByID gets a channel for the organization.
|
// GetChannelByID gets a channel for the organization.
|
||||||
GetChannelByID(context.Context, string, int) (*alertmanagertypes.Channel, error)
|
GetChannelByID(context.Context, string, valuer.UUID) (*alertmanagertypes.Channel, error)
|
||||||
|
|
||||||
// UpdateChannel updates a channel for the organization.
|
// UpdateChannel updates a channel for the organization.
|
||||||
UpdateChannelByReceiverAndID(context.Context, string, alertmanagertypes.Receiver, int) error
|
UpdateChannelByReceiverAndID(context.Context, string, alertmanagertypes.Receiver, valuer.UUID) error
|
||||||
|
|
||||||
// CreateChannel creates a channel for the organization.
|
// CreateChannel creates a channel for the organization.
|
||||||
CreateChannel(context.Context, string, alertmanagertypes.Receiver) error
|
CreateChannel(context.Context, string, alertmanagertypes.Receiver) error
|
||||||
|
|
||||||
// DeleteChannelByID deletes a channel for the organization.
|
// DeleteChannelByID deletes a channel for the organization.
|
||||||
DeleteChannelByID(context.Context, string, int) error
|
DeleteChannelByID(context.Context, string, valuer.UUID) error
|
||||||
|
|
||||||
// SetConfig sets the config for the organization.
|
// SetConfig sets the config for the organization.
|
||||||
SetConfig(context.Context, *alertmanagertypes.Config) error
|
SetConfig(context.Context, *alertmanagertypes.Config) error
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/tidwall/gjson"
|
"github.com/tidwall/gjson"
|
||||||
"github.com/uptrace/bun"
|
"github.com/uptrace/bun"
|
||||||
)
|
)
|
||||||
@@ -99,7 +100,7 @@ func (store *config) CreateChannel(ctx context.Context, channel *alertmanagertyp
|
|||||||
}, opts...)
|
}, opts...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (store *config) GetChannelByID(ctx context.Context, orgID string, id int) (*alertmanagertypes.Channel, error) {
|
func (store *config) GetChannelByID(ctx context.Context, orgID string, id valuer.UUID) (*alertmanagertypes.Channel, error) {
|
||||||
channel := new(alertmanagertypes.Channel)
|
channel := new(alertmanagertypes.Channel)
|
||||||
|
|
||||||
err := store.
|
err := store.
|
||||||
@@ -108,11 +109,11 @@ func (store *config) GetChannelByID(ctx context.Context, orgID string, id int) (
|
|||||||
NewSelect().
|
NewSelect().
|
||||||
Model(channel).
|
Model(channel).
|
||||||
Where("org_id = ?", orgID).
|
Where("org_id = ?", orgID).
|
||||||
Where("id = ?", id).
|
Where("id = ?", id.StringValue()).
|
||||||
Scan(ctx)
|
Scan(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if err == sql.ErrNoRows {
|
if err == sql.ErrNoRows {
|
||||||
return nil, errors.Newf(errors.TypeNotFound, alertmanagertypes.ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %d", id)
|
return nil, errors.Newf(errors.TypeNotFound, alertmanagertypes.ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %s", id.StringValue())
|
||||||
}
|
}
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -136,7 +137,7 @@ func (store *config) UpdateChannel(ctx context.Context, orgID string, channel *a
|
|||||||
}, opts...)
|
}, opts...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id int, opts ...alertmanagertypes.StoreOption) error {
|
func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id valuer.UUID, opts ...alertmanagertypes.StoreOption) error {
|
||||||
return store.wrap(ctx, func(ctx context.Context) error {
|
return store.wrap(ctx, func(ctx context.Context) error {
|
||||||
channel := new(alertmanagertypes.Channel)
|
channel := new(alertmanagertypes.Channel)
|
||||||
|
|
||||||
@@ -146,7 +147,7 @@ func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id int
|
|||||||
NewDelete().
|
NewDelete().
|
||||||
Model(channel).
|
Model(channel).
|
||||||
Where("org_id = ?", orgID).
|
Where("org_id = ?", orgID).
|
||||||
Where("id = ?", id).
|
Where("id = ?", id.StringValue()).
|
||||||
Exec(ctx); err != nil {
|
Exec(ctx); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,13 +4,13 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/http/render"
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/gorilla/mux"
|
"github.com/gorilla/mux"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -140,9 +140,9 @@ func (api *API) GetChannelByID(rw http.ResponseWriter, req *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
id, err := strconv.Atoi(idString)
|
id, err := valuer.NewUUID(idString)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
|
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -177,9 +177,9 @@ func (api *API) UpdateChannelByID(rw http.ResponseWriter, req *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
id, err := strconv.Atoi(idString)
|
id, err := valuer.NewUUID(idString)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
|
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -227,9 +227,9 @@ func (api *API) DeleteChannelByID(rw http.ResponseWriter, req *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
id, err := strconv.Atoi(idString)
|
id, err := valuer.NewUUID(idString)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
|
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/tidwall/gjson"
|
"github.com/tidwall/gjson"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -269,11 +270,11 @@ func (provider *provider) ListAllChannels(ctx context.Context) ([]*alertmanagert
|
|||||||
return channels, nil
|
return channels, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID int) (*alertmanagertypes.Channel, error) {
|
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) (*alertmanagertypes.Channel, error) {
|
||||||
return provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
return provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id int) error {
|
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id valuer.UUID) error {
|
||||||
channel, err := provider.configStore.GetChannelByID(ctx, orgID, id)
|
channel, err := provider.configStore.GetChannelByID(ctx, orgID, id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -378,7 +379,7 @@ func (provider *provider) CreateChannel(ctx context.Context, orgID string, recei
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID int) error {
|
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) error {
|
||||||
channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
)
|
)
|
||||||
|
|
||||||
type provider struct {
|
type provider struct {
|
||||||
@@ -99,11 +100,11 @@ func (provider *provider) ListAllChannels(ctx context.Context) ([]*alertmanagert
|
|||||||
return nil, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "not supported by provider signoz")
|
return nil, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "not supported by provider signoz")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID int) (*alertmanagertypes.Channel, error) {
|
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) (*alertmanagertypes.Channel, error) {
|
||||||
return provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
return provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id int) error {
|
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id valuer.UUID) error {
|
||||||
channel, err := provider.configStore.GetChannelByID(ctx, orgID, id)
|
channel, err := provider.configStore.GetChannelByID(ctx, orgID, id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -127,7 +128,7 @@ func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgI
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID int) error {
|
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) error {
|
||||||
channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
|||||||
149
pkg/modules/preference/api.go
Normal file
149
pkg/modules/preference/api.go
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
package preference
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||||
|
"github.com/gorilla/mux"
|
||||||
|
)
|
||||||
|
|
||||||
|
type API interface {
|
||||||
|
GetOrgPreference(http.ResponseWriter, *http.Request)
|
||||||
|
UpdateOrgPreference(http.ResponseWriter, *http.Request)
|
||||||
|
GetAllOrgPreferences(http.ResponseWriter, *http.Request)
|
||||||
|
|
||||||
|
GetUserPreference(http.ResponseWriter, *http.Request)
|
||||||
|
UpdateUserPreference(http.ResponseWriter, *http.Request)
|
||||||
|
GetAllUserPreferences(http.ResponseWriter, *http.Request)
|
||||||
|
}
|
||||||
|
|
||||||
|
type preferenceAPI struct {
|
||||||
|
usecase Usecase
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewAPI(usecase Usecase) API {
|
||||||
|
return &preferenceAPI{usecase: usecase}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *preferenceAPI) GetOrgPreference(rw http.ResponseWriter, r *http.Request) {
|
||||||
|
preferenceId := mux.Vars(r)["preferenceId"]
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
preference, err := p.usecase.GetOrgPreference(
|
||||||
|
r.Context(), preferenceId, claims.OrgID,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(rw, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
render.Success(rw, http.StatusOK, preference)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *preferenceAPI) UpdateOrgPreference(rw http.ResponseWriter, r *http.Request) {
|
||||||
|
preferenceId := mux.Vars(r)["preferenceId"]
|
||||||
|
req := preferencetypes.UpdatablePreference{}
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := json.NewDecoder(r.Body).Decode(&req)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
render.Error(rw, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
err = p.usecase.UpdateOrgPreference(r.Context(), preferenceId, req.PreferenceValue, claims.OrgID)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(rw, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
render.Success(rw, http.StatusNoContent, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *preferenceAPI) GetAllOrgPreferences(rw http.ResponseWriter, r *http.Request) {
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
preferences, err := p.usecase.GetAllOrgPreferences(
|
||||||
|
r.Context(), claims.OrgID,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(rw, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
render.Success(rw, http.StatusOK, preferences)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *preferenceAPI) GetUserPreference(rw http.ResponseWriter, r *http.Request) {
|
||||||
|
preferenceId := mux.Vars(r)["preferenceId"]
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
preference, err := p.usecase.GetUserPreference(
|
||||||
|
r.Context(), preferenceId, claims.OrgID, claims.UserID,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(rw, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
render.Success(rw, http.StatusOK, preference)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *preferenceAPI) UpdateUserPreference(rw http.ResponseWriter, r *http.Request) {
|
||||||
|
preferenceId := mux.Vars(r)["preferenceId"]
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
req := preferencetypes.UpdatablePreference{}
|
||||||
|
|
||||||
|
err := json.NewDecoder(r.Body).Decode(&req)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
render.Error(rw, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
err = p.usecase.UpdateUserPreference(r.Context(), preferenceId, req.PreferenceValue, claims.UserID)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(rw, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
render.Success(rw, http.StatusNoContent, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *preferenceAPI) GetAllUserPreferences(rw http.ResponseWriter, r *http.Request) {
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
preferences, err := p.usecase.GetAllUserPreferences(
|
||||||
|
r.Context(), claims.OrgID, claims.UserID,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(rw, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
render.Success(rw, http.StatusOK, preferences)
|
||||||
|
}
|
||||||
278
pkg/modules/preference/core/preference.go
Normal file
278
pkg/modules/preference/core/preference.go
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
|
)
|
||||||
|
|
||||||
|
type usecase struct {
|
||||||
|
store preferencetypes.PreferenceStore
|
||||||
|
defaultMap map[string]preferencetypes.Preference
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewPreference(store preferencetypes.PreferenceStore, defaultMap map[string]preferencetypes.Preference) preference.Usecase {
|
||||||
|
return &usecase{store: store, defaultMap: defaultMap}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (usecase *usecase) GetOrgPreference(ctx context.Context, preferenceID string, orgID string) (*preferencetypes.GettablePreference, error) {
|
||||||
|
preference, seen := usecase.defaultMap[preferenceID]
|
||||||
|
if !seen {
|
||||||
|
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||||
|
}
|
||||||
|
|
||||||
|
isPreferenceEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||||
|
if !isPreferenceEnabled {
|
||||||
|
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at org scope: %s", preferenceID))
|
||||||
|
}
|
||||||
|
|
||||||
|
orgPreference, err := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return &preferencetypes.GettablePreference{
|
||||||
|
PreferenceID: preferenceID,
|
||||||
|
PreferenceValue: preference.DefaultValue,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the org preference: %s", preferenceID))
|
||||||
|
}
|
||||||
|
|
||||||
|
return &preferencetypes.GettablePreference{
|
||||||
|
PreferenceID: preferenceID,
|
||||||
|
PreferenceValue: preference.SanitizeValue(orgPreference.PreferenceValue),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (usecase *usecase) UpdateOrgPreference(ctx context.Context, preferenceID string, preferenceValue interface{}, orgID string) error {
|
||||||
|
preference, seen := usecase.defaultMap[preferenceID]
|
||||||
|
if !seen {
|
||||||
|
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||||
|
}
|
||||||
|
|
||||||
|
isPreferenceEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||||
|
if !isPreferenceEnabled {
|
||||||
|
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at org scope: %s", preferenceID))
|
||||||
|
}
|
||||||
|
|
||||||
|
err := preference.IsValidValue(preferenceValue)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
storablePreferenceValue, encodeErr := json.Marshal(preferenceValue)
|
||||||
|
if encodeErr != nil {
|
||||||
|
return errors.Wrapf(encodeErr, errors.TypeInvalidInput, errors.CodeInvalidInput, "error in encoding the preference value")
|
||||||
|
}
|
||||||
|
|
||||||
|
orgPreference, dberr := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
|
||||||
|
if dberr != nil && dberr != sql.ErrNoRows {
|
||||||
|
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in getting the preference value")
|
||||||
|
}
|
||||||
|
|
||||||
|
if dberr != nil {
|
||||||
|
orgPreference.ID = valuer.GenerateUUID()
|
||||||
|
orgPreference.PreferenceID = preferenceID
|
||||||
|
orgPreference.PreferenceValue = string(storablePreferenceValue)
|
||||||
|
orgPreference.OrgID = orgID
|
||||||
|
} else {
|
||||||
|
orgPreference.PreferenceValue = string(storablePreferenceValue)
|
||||||
|
}
|
||||||
|
|
||||||
|
dberr = usecase.store.UpsertOrgPreference(ctx, orgPreference)
|
||||||
|
if dberr != nil {
|
||||||
|
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in setting the preference value")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (usecase *usecase) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*preferencetypes.PreferenceWithValue, error) {
|
||||||
|
allOrgPreferences := []*preferencetypes.PreferenceWithValue{}
|
||||||
|
orgPreferences, err := usecase.store.GetAllOrgPreferences(ctx, orgID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all org preference values")
|
||||||
|
}
|
||||||
|
|
||||||
|
preferenceValueMap := map[string]interface{}{}
|
||||||
|
for _, preferenceValue := range orgPreferences {
|
||||||
|
preferenceValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, preference := range usecase.defaultMap {
|
||||||
|
isEnabledForOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||||
|
if isEnabledForOrgScope {
|
||||||
|
preferenceWithValue := &preferencetypes.PreferenceWithValue{}
|
||||||
|
preferenceWithValue.Key = preference.Key
|
||||||
|
preferenceWithValue.Name = preference.Name
|
||||||
|
preferenceWithValue.Description = preference.Description
|
||||||
|
preferenceWithValue.AllowedScopes = preference.AllowedScopes
|
||||||
|
preferenceWithValue.AllowedValues = preference.AllowedValues
|
||||||
|
preferenceWithValue.DefaultValue = preference.DefaultValue
|
||||||
|
preferenceWithValue.Range = preference.Range
|
||||||
|
preferenceWithValue.ValueType = preference.ValueType
|
||||||
|
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
|
||||||
|
value, seen := preferenceValueMap[preference.Key]
|
||||||
|
|
||||||
|
if seen {
|
||||||
|
preferenceWithValue.Value = value
|
||||||
|
} else {
|
||||||
|
preferenceWithValue.Value = preference.DefaultValue
|
||||||
|
}
|
||||||
|
|
||||||
|
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
||||||
|
allOrgPreferences = append(allOrgPreferences, preferenceWithValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return allOrgPreferences, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserPreference resolves the effective value of preferenceID for a user.
// Resolution order (later wins): the preference's default value, then the
// org-level stored value (only when the preference is org-scoped), then the
// user-level stored value. The final value is passed through SanitizeValue.
func (usecase *usecase) GetUserPreference(ctx context.Context, preferenceID string, orgID string, userID string) (*preferencetypes.GettablePreference, error) {
	preference, seen := usecase.defaultMap[preferenceID]
	if !seen {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
	}

	// Start from the default; overrides below replace it when found.
	preferenceValue := preferencetypes.GettablePreference{
		PreferenceID:    preferenceID,
		PreferenceValue: preference.DefaultValue,
	}

	// The preference must be usable at user scope at all.
	isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
	if !isPreferenceEnabledAtUserScope {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at user scope: %s", preferenceID))
	}

	// Org-level override, consulted only when the preference is org-scoped.
	// sql.ErrNoRows just means no override is stored and is not an error.
	isPreferenceEnabledAtOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
	if isPreferenceEnabledAtOrgScope {
		orgPreference, err := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
		if err != nil && err != sql.ErrNoRows {
			return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the org preference: %s", preferenceID))
		}
		if err == nil {
			preferenceValue.PreferenceValue = orgPreference.PreferenceValue
		}
	}

	// User-level override takes precedence over both default and org value.
	userPreference, err := usecase.store.GetUserPreference(ctx, userID, preferenceID)
	if err != nil && err != sql.ErrNoRows {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the user preference: %s", preferenceID))
	}

	if err == nil {
		preferenceValue.PreferenceValue = userPreference.PreferenceValue
	}

	return &preferencetypes.GettablePreference{
		PreferenceID:    preferenceValue.PreferenceID,
		PreferenceValue: preference.SanitizeValue(preferenceValue.PreferenceValue),
	}, nil
}
|
||||||
|
|
||||||
|
// UpdateUserPreference validates preferenceValue against the preference's
// definition and persists it for userID, creating a new row when none
// exists yet and replacing the stored value otherwise.
func (usecase *usecase) UpdateUserPreference(ctx context.Context, preferenceID string, preferenceValue interface{}, userID string) error {
	preference, seen := usecase.defaultMap[preferenceID]
	if !seen {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
	}

	isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
	if !isPreferenceEnabledAtUserScope {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at user scope: %s", preferenceID))
	}

	// Reject values that fall outside the preference's allowed set/range.
	err := preference.IsValidValue(preferenceValue)
	if err != nil {
		return err
	}

	// Values are stored as their JSON encoding.
	storablePreferenceValue, encodeErr := json.Marshal(preferenceValue)
	if encodeErr != nil {
		return errors.Wrapf(encodeErr, errors.TypeInvalidInput, errors.CodeInvalidInput, "error in encoding the preference value")
	}

	userPreference, dberr := usecase.store.GetUserPreference(ctx, userID, preferenceID)
	if dberr != nil && dberr != sql.ErrNoRows {
		return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in getting the preference value")
	}

	// Here dberr is either nil (row found) or sql.ErrNoRows (fresh insert).
	// NOTE(review): the ErrNoRows branch relies on the store returning a
	// non-nil zero-value struct alongside the error — keep in sync with the
	// store implementation before changing either side.
	if dberr != nil {
		userPreference.ID = valuer.GenerateUUID()
		userPreference.PreferenceID = preferenceID
		userPreference.PreferenceValue = string(storablePreferenceValue)
		userPreference.UserID = userID
	} else {
		userPreference.PreferenceValue = string(storablePreferenceValue)
	}

	dberr = usecase.store.UpsertUserPreference(ctx, userPreference)
	if dberr != nil {
		return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in setting the preference value")
	}

	return nil
}
|
||||||
|
|
||||||
|
func (usecase *usecase) GetAllUserPreferences(ctx context.Context, orgID string, userID string) ([]*preferencetypes.PreferenceWithValue, error) {
|
||||||
|
allUserPreferences := []*preferencetypes.PreferenceWithValue{}
|
||||||
|
|
||||||
|
orgPreferences, err := usecase.store.GetAllOrgPreferences(ctx, orgID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all org preference values")
|
||||||
|
}
|
||||||
|
|
||||||
|
preferenceOrgValueMap := map[string]interface{}{}
|
||||||
|
for _, preferenceValue := range orgPreferences {
|
||||||
|
preferenceOrgValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||||
|
}
|
||||||
|
|
||||||
|
userPreferences, err := usecase.store.GetAllUserPreferences(ctx, userID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all user preference values")
|
||||||
|
}
|
||||||
|
|
||||||
|
preferenceUserValueMap := map[string]interface{}{}
|
||||||
|
for _, preferenceValue := range userPreferences {
|
||||||
|
preferenceUserValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, preference := range usecase.defaultMap {
|
||||||
|
isEnabledForUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||||
|
|
||||||
|
if isEnabledForUserScope {
|
||||||
|
preferenceWithValue := &preferencetypes.PreferenceWithValue{}
|
||||||
|
preferenceWithValue.Key = preference.Key
|
||||||
|
preferenceWithValue.Name = preference.Name
|
||||||
|
preferenceWithValue.Description = preference.Description
|
||||||
|
preferenceWithValue.AllowedScopes = preference.AllowedScopes
|
||||||
|
preferenceWithValue.AllowedValues = preference.AllowedValues
|
||||||
|
preferenceWithValue.DefaultValue = preference.DefaultValue
|
||||||
|
preferenceWithValue.Range = preference.Range
|
||||||
|
preferenceWithValue.ValueType = preference.ValueType
|
||||||
|
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
|
||||||
|
preferenceWithValue.Value = preference.DefaultValue
|
||||||
|
|
||||||
|
isEnabledForOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||||
|
if isEnabledForOrgScope {
|
||||||
|
value, seen := preferenceOrgValueMap[preference.Key]
|
||||||
|
if seen {
|
||||||
|
preferenceWithValue.Value = value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
value, seen := preferenceUserValueMap[preference.Key]
|
||||||
|
|
||||||
|
if seen {
|
||||||
|
preferenceWithValue.Value = value
|
||||||
|
}
|
||||||
|
|
||||||
|
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
||||||
|
allUserPreferences = append(allUserPreferences, preferenceWithValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return allUserPreferences, nil
|
||||||
|
}
|
||||||
116
pkg/modules/preference/core/store.go
Normal file
116
pkg/modules/preference/core/store.go
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
// store is the sqlstore-backed implementation of
// preferencetypes.PreferenceStore.
type store struct {
	// store is the underlying SQL store whose BunDB handle runs the queries.
	store sqlstore.SQLStore
}

// NewStore wraps the given SQL store in a preference store.
func NewStore(db sqlstore.SQLStore) preferencetypes.PreferenceStore {
	return &store{store: db}
}
|
||||||
|
|
||||||
|
func (store *store) GetOrgPreference(ctx context.Context, orgID string, preferenceID string) (*preferencetypes.StorableOrgPreference, error) {
|
||||||
|
orgPreference := new(preferencetypes.StorableOrgPreference)
|
||||||
|
err := store.
|
||||||
|
store.
|
||||||
|
BunDB().
|
||||||
|
NewSelect().
|
||||||
|
Model(orgPreference).
|
||||||
|
Where("preference_id = ?", preferenceID).
|
||||||
|
Where("org_id = ?", orgID).
|
||||||
|
Scan(ctx)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return orgPreference, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return orgPreference, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (store *store) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*preferencetypes.StorableOrgPreference, error) {
|
||||||
|
orgPreferences := make([]*preferencetypes.StorableOrgPreference, 0)
|
||||||
|
err := store.
|
||||||
|
store.
|
||||||
|
BunDB().
|
||||||
|
NewSelect().
|
||||||
|
Model(&orgPreferences).
|
||||||
|
Where("org_id = ?", orgID).
|
||||||
|
Scan(ctx)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return orgPreferences, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return orgPreferences, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (store *store) UpsertOrgPreference(ctx context.Context, orgPreference *preferencetypes.StorableOrgPreference) error {
|
||||||
|
_, err := store.
|
||||||
|
store.
|
||||||
|
BunDB().
|
||||||
|
NewInsert().
|
||||||
|
Model(orgPreference).
|
||||||
|
On("CONFLICT (id) DO UPDATE").
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (store *store) GetUserPreference(ctx context.Context, userID string, preferenceID string) (*preferencetypes.StorableUserPreference, error) {
|
||||||
|
userPreference := new(preferencetypes.StorableUserPreference)
|
||||||
|
err := store.
|
||||||
|
store.
|
||||||
|
BunDB().
|
||||||
|
NewSelect().
|
||||||
|
Model(userPreference).
|
||||||
|
Where("preference_id = ?", preferenceID).
|
||||||
|
Where("user_id = ?", userID).
|
||||||
|
Scan(ctx)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return userPreference, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return userPreference, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (store *store) GetAllUserPreferences(ctx context.Context, userID string) ([]*preferencetypes.StorableUserPreference, error) {
|
||||||
|
userPreferences := make([]*preferencetypes.StorableUserPreference, 0)
|
||||||
|
err := store.
|
||||||
|
store.
|
||||||
|
BunDB().
|
||||||
|
NewSelect().
|
||||||
|
Model(&userPreferences).
|
||||||
|
Where("user_id = ?", userID).
|
||||||
|
Scan(ctx)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return userPreferences, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return userPreferences, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (store *store) UpsertUserPreference(ctx context.Context, userPreference *preferencetypes.StorableUserPreference) error {
|
||||||
|
_, err := store.
|
||||||
|
store.
|
||||||
|
BunDB().
|
||||||
|
NewInsert().
|
||||||
|
Model(userPreference).
|
||||||
|
On("CONFLICT (id) DO UPDATE").
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
17
pkg/modules/preference/usecase.go
Normal file
17
pkg/modules/preference/usecase.go
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package preference
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Usecase interface {
|
||||||
|
GetOrgPreference(ctx context.Context, preferenceId string, orgId string) (*preferencetypes.GettablePreference, error)
|
||||||
|
UpdateOrgPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, orgId string) error
|
||||||
|
GetAllOrgPreferences(ctx context.Context, orgId string) ([]*preferencetypes.PreferenceWithValue, error)
|
||||||
|
|
||||||
|
GetUserPreference(ctx context.Context, preferenceId string, orgId string, userId string) (*preferencetypes.GettablePreference, error)
|
||||||
|
UpdateUserPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, userId string) error
|
||||||
|
GetAllUserPreferences(ctx context.Context, orgId string, userId string) ([]*preferencetypes.PreferenceWithValue, error)
|
||||||
|
}
|
||||||
96
pkg/parser/grammar/FilterQuery.interp
Normal file
96
pkg/parser/grammar/FilterQuery.interp
Normal file
File diff suppressed because one or more lines are too long
45
pkg/parser/grammar/FilterQuery.tokens
Normal file
45
pkg/parser/grammar/FilterQuery.tokens
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
LPAREN=1
|
||||||
|
RPAREN=2
|
||||||
|
LBRACK=3
|
||||||
|
RBRACK=4
|
||||||
|
COMMA=5
|
||||||
|
EQUALS=6
|
||||||
|
NOT_EQUALS=7
|
||||||
|
NEQ=8
|
||||||
|
LT=9
|
||||||
|
LE=10
|
||||||
|
GT=11
|
||||||
|
GE=12
|
||||||
|
LIKE=13
|
||||||
|
NOT_LIKE=14
|
||||||
|
ILIKE=15
|
||||||
|
NOT_ILIKE=16
|
||||||
|
BETWEEN=17
|
||||||
|
EXISTS=18
|
||||||
|
REGEXP=19
|
||||||
|
CONTAINS=20
|
||||||
|
IN=21
|
||||||
|
NOT=22
|
||||||
|
AND=23
|
||||||
|
OR=24
|
||||||
|
HAS=25
|
||||||
|
HASANY=26
|
||||||
|
HASALL=27
|
||||||
|
HASNONE=28
|
||||||
|
BOOL=29
|
||||||
|
NUMBER=30
|
||||||
|
QUOTED_TEXT=31
|
||||||
|
KEY=32
|
||||||
|
WS=33
|
||||||
|
FREETEXT=34
|
||||||
|
'('=1
|
||||||
|
')'=2
|
||||||
|
'['=3
|
||||||
|
']'=4
|
||||||
|
','=5
|
||||||
|
'!='=7
|
||||||
|
'<>'=8
|
||||||
|
'<'=9
|
||||||
|
'<='=10
|
||||||
|
'>'=11
|
||||||
|
'>='=12
|
||||||
120
pkg/parser/grammar/FilterQueryLexer.interp
Normal file
120
pkg/parser/grammar/FilterQueryLexer.interp
Normal file
File diff suppressed because one or more lines are too long
45
pkg/parser/grammar/FilterQueryLexer.tokens
Normal file
45
pkg/parser/grammar/FilterQueryLexer.tokens
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
LPAREN=1
|
||||||
|
RPAREN=2
|
||||||
|
LBRACK=3
|
||||||
|
RBRACK=4
|
||||||
|
COMMA=5
|
||||||
|
EQUALS=6
|
||||||
|
NOT_EQUALS=7
|
||||||
|
NEQ=8
|
||||||
|
LT=9
|
||||||
|
LE=10
|
||||||
|
GT=11
|
||||||
|
GE=12
|
||||||
|
LIKE=13
|
||||||
|
NOT_LIKE=14
|
||||||
|
ILIKE=15
|
||||||
|
NOT_ILIKE=16
|
||||||
|
BETWEEN=17
|
||||||
|
EXISTS=18
|
||||||
|
REGEXP=19
|
||||||
|
CONTAINS=20
|
||||||
|
IN=21
|
||||||
|
NOT=22
|
||||||
|
AND=23
|
||||||
|
OR=24
|
||||||
|
HAS=25
|
||||||
|
HASANY=26
|
||||||
|
HASALL=27
|
||||||
|
HASNONE=28
|
||||||
|
BOOL=29
|
||||||
|
NUMBER=30
|
||||||
|
QUOTED_TEXT=31
|
||||||
|
KEY=32
|
||||||
|
WS=33
|
||||||
|
FREETEXT=34
|
||||||
|
'('=1
|
||||||
|
')'=2
|
||||||
|
'['=3
|
||||||
|
']'=4
|
||||||
|
','=5
|
||||||
|
'!='=7
|
||||||
|
'<>'=8
|
||||||
|
'<'=9
|
||||||
|
'<='=10
|
||||||
|
'>'=11
|
||||||
|
'>='=12
|
||||||
124
pkg/parser/grammar/filterquery_base_listener.go
Normal file
124
pkg/parser/grammar/filterquery_base_listener.go
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||||
|
|
||||||
|
package parser // FilterQuery
|
||||||
|
|
||||||
|
import "github.com/antlr4-go/antlr/v4"
|
||||||
|
|
||||||
|
// BaseFilterQueryListener is a complete listener for a parse tree produced by FilterQueryParser.
|
||||||
|
type BaseFilterQueryListener struct{}
|
||||||
|
|
||||||
|
var _ FilterQueryListener = &BaseFilterQueryListener{}
|
||||||
|
|
||||||
|
// VisitTerminal is called when a terminal node is visited.
|
||||||
|
func (s *BaseFilterQueryListener) VisitTerminal(node antlr.TerminalNode) {}
|
||||||
|
|
||||||
|
// VisitErrorNode is called when an error node is visited.
|
||||||
|
func (s *BaseFilterQueryListener) VisitErrorNode(node antlr.ErrorNode) {}
|
||||||
|
|
||||||
|
// EnterEveryRule is called when any rule is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}
|
||||||
|
|
||||||
|
// ExitEveryRule is called when any rule is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}
|
||||||
|
|
||||||
|
// EnterQuery is called when production query is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterQuery(ctx *QueryContext) {}
|
||||||
|
|
||||||
|
// ExitQuery is called when production query is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitQuery(ctx *QueryContext) {}
|
||||||
|
|
||||||
|
// EnterExpression is called when production expression is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterExpression(ctx *ExpressionContext) {}
|
||||||
|
|
||||||
|
// ExitExpression is called when production expression is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitExpression(ctx *ExpressionContext) {}
|
||||||
|
|
||||||
|
// EnterOrExpression is called when production orExpression is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterOrExpression(ctx *OrExpressionContext) {}
|
||||||
|
|
||||||
|
// ExitOrExpression is called when production orExpression is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitOrExpression(ctx *OrExpressionContext) {}
|
||||||
|
|
||||||
|
// EnterAndExpression is called when production andExpression is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterAndExpression(ctx *AndExpressionContext) {}
|
||||||
|
|
||||||
|
// ExitAndExpression is called when production andExpression is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitAndExpression(ctx *AndExpressionContext) {}
|
||||||
|
|
||||||
|
// EnterUnaryExpression is called when production unaryExpression is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterUnaryExpression(ctx *UnaryExpressionContext) {}
|
||||||
|
|
||||||
|
// ExitUnaryExpression is called when production unaryExpression is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitUnaryExpression(ctx *UnaryExpressionContext) {}
|
||||||
|
|
||||||
|
// EnterPrimary is called when production primary is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterPrimary(ctx *PrimaryContext) {}
|
||||||
|
|
||||||
|
// ExitPrimary is called when production primary is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitPrimary(ctx *PrimaryContext) {}
|
||||||
|
|
||||||
|
// EnterComparison is called when production comparison is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterComparison(ctx *ComparisonContext) {}
|
||||||
|
|
||||||
|
// ExitComparison is called when production comparison is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitComparison(ctx *ComparisonContext) {}
|
||||||
|
|
||||||
|
// EnterInClause is called when production inClause is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterInClause(ctx *InClauseContext) {}
|
||||||
|
|
||||||
|
// ExitInClause is called when production inClause is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitInClause(ctx *InClauseContext) {}
|
||||||
|
|
||||||
|
// EnterNotInClause is called when production notInClause is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterNotInClause(ctx *NotInClauseContext) {}
|
||||||
|
|
||||||
|
// ExitNotInClause is called when production notInClause is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitNotInClause(ctx *NotInClauseContext) {}
|
||||||
|
|
||||||
|
// EnterValueList is called when production valueList is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterValueList(ctx *ValueListContext) {}
|
||||||
|
|
||||||
|
// ExitValueList is called when production valueList is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitValueList(ctx *ValueListContext) {}
|
||||||
|
|
||||||
|
// EnterFullText is called when production fullText is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterFullText(ctx *FullTextContext) {}
|
||||||
|
|
||||||
|
// ExitFullText is called when production fullText is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitFullText(ctx *FullTextContext) {}
|
||||||
|
|
||||||
|
// EnterFunctionCall is called when production functionCall is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterFunctionCall(ctx *FunctionCallContext) {}
|
||||||
|
|
||||||
|
// ExitFunctionCall is called when production functionCall is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitFunctionCall(ctx *FunctionCallContext) {}
|
||||||
|
|
||||||
|
// EnterFunctionParamList is called when production functionParamList is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterFunctionParamList(ctx *FunctionParamListContext) {}
|
||||||
|
|
||||||
|
// ExitFunctionParamList is called when production functionParamList is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitFunctionParamList(ctx *FunctionParamListContext) {}
|
||||||
|
|
||||||
|
// EnterFunctionParam is called when production functionParam is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterFunctionParam(ctx *FunctionParamContext) {}
|
||||||
|
|
||||||
|
// ExitFunctionParam is called when production functionParam is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitFunctionParam(ctx *FunctionParamContext) {}
|
||||||
|
|
||||||
|
// EnterArray is called when production array is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterArray(ctx *ArrayContext) {}
|
||||||
|
|
||||||
|
// ExitArray is called when production array is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitArray(ctx *ArrayContext) {}
|
||||||
|
|
||||||
|
// EnterValue is called when production value is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterValue(ctx *ValueContext) {}
|
||||||
|
|
||||||
|
// ExitValue is called when production value is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitValue(ctx *ValueContext) {}
|
||||||
|
|
||||||
|
// EnterKey is called when production key is entered.
|
||||||
|
func (s *BaseFilterQueryListener) EnterKey(ctx *KeyContext) {}
|
||||||
|
|
||||||
|
// ExitKey is called when production key is exited.
|
||||||
|
func (s *BaseFilterQueryListener) ExitKey(ctx *KeyContext) {}
|
||||||
77
pkg/parser/grammar/filterquery_base_visitor.go
Normal file
77
pkg/parser/grammar/filterquery_base_visitor.go
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||||
|
|
||||||
|
package parser // FilterQuery
|
||||||
|
|
||||||
|
import "github.com/antlr4-go/antlr/v4"
|
||||||
|
|
||||||
|
type BaseFilterQueryVisitor struct {
|
||||||
|
*antlr.BaseParseTreeVisitor
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitQuery(ctx *QueryContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitExpression(ctx *ExpressionContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitOrExpression(ctx *OrExpressionContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitAndExpression(ctx *AndExpressionContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitUnaryExpression(ctx *UnaryExpressionContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitPrimary(ctx *PrimaryContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitComparison(ctx *ComparisonContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitInClause(ctx *InClauseContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitNotInClause(ctx *NotInClauseContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitValueList(ctx *ValueListContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitFullText(ctx *FullTextContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitFunctionCall(ctx *FunctionCallContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitFunctionParamList(ctx *FunctionParamListContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitFunctionParam(ctx *FunctionParamContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitArray(ctx *ArrayContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitValue(ctx *ValueContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *BaseFilterQueryVisitor) VisitKey(ctx *KeyContext) interface{} {
|
||||||
|
return v.VisitChildren(ctx)
|
||||||
|
}
|
||||||
271
pkg/parser/grammar/filterquery_lexer.go
Normal file
271
pkg/parser/grammar/filterquery_lexer.go
Normal file
@@ -0,0 +1,271 @@
|
|||||||
|
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||||
|
|
||||||
|
package parser
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/antlr4-go/antlr/v4"
|
||||||
|
"sync"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Suppress unused import error
|
||||||
|
var _ = fmt.Printf
|
||||||
|
var _ = sync.Once{}
|
||||||
|
var _ = unicode.IsLetter
|
||||||
|
|
||||||
|
type FilterQueryLexer struct {
|
||||||
|
*antlr.BaseLexer
|
||||||
|
channelNames []string
|
||||||
|
modeNames []string
|
||||||
|
// TODO: EOF string
|
||||||
|
}
|
||||||
|
|
||||||
|
var FilterQueryLexerLexerStaticData struct {
|
||||||
|
once sync.Once
|
||||||
|
serializedATN []int32
|
||||||
|
ChannelNames []string
|
||||||
|
ModeNames []string
|
||||||
|
LiteralNames []string
|
||||||
|
SymbolicNames []string
|
||||||
|
RuleNames []string
|
||||||
|
PredictionContextCache *antlr.PredictionContextCache
|
||||||
|
atn *antlr.ATN
|
||||||
|
decisionToDFA []*antlr.DFA
|
||||||
|
}
|
||||||
|
|
||||||
|
func filterquerylexerLexerInit() {
|
||||||
|
staticData := &FilterQueryLexerLexerStaticData
|
||||||
|
staticData.ChannelNames = []string{
|
||||||
|
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
|
||||||
|
}
|
||||||
|
staticData.ModeNames = []string{
|
||||||
|
"DEFAULT_MODE",
|
||||||
|
}
|
||||||
|
staticData.LiteralNames = []string{
|
||||||
|
"", "'('", "')'", "'['", "']'", "','", "", "'!='", "'<>'", "'<'", "'<='",
|
||||||
|
"'>'", "'>='",
|
||||||
|
}
|
||||||
|
staticData.SymbolicNames = []string{
|
||||||
|
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||||
|
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
|
||||||
|
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
|
||||||
|
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
|
||||||
|
"KEY", "WS", "FREETEXT",
|
||||||
|
}
|
||||||
|
staticData.RuleNames = []string{
|
||||||
|
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||||
|
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
|
||||||
|
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
|
||||||
|
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
|
||||||
|
"KEY", "WS", "DIGIT", "FREETEXT",
|
||||||
|
}
|
||||||
|
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||||
|
staticData.serializedATN = []int32{
|
||||||
|
4, 0, 34, 280, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||||
|
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
|
||||||
|
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
|
||||||
|
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
|
||||||
|
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
|
||||||
|
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
|
||||||
|
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 1, 0, 1, 0, 1, 1,
|
||||||
|
1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 85, 8,
|
||||||
|
5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1,
|
||||||
|
10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13,
|
||||||
|
1, 13, 1, 13, 1, 13, 4, 13, 112, 8, 13, 11, 13, 12, 13, 113, 1, 13, 1,
|
||||||
|
13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15,
|
||||||
|
1, 15, 1, 15, 1, 15, 4, 15, 131, 8, 15, 11, 15, 12, 15, 132, 1, 15, 1,
|
||||||
|
15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
|
||||||
|
1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 155, 8,
|
||||||
|
17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19,
|
||||||
|
1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 172, 8, 19, 1, 20, 1, 20, 1,
|
||||||
|
20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23,
|
||||||
|
1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1,
|
||||||
|
25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27,
|
||||||
|
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1,
|
||||||
|
28, 1, 28, 1, 28, 1, 28, 1, 28, 3, 28, 223, 8, 28, 1, 29, 4, 29, 226, 8,
|
||||||
|
29, 11, 29, 12, 29, 227, 1, 29, 1, 29, 4, 29, 232, 8, 29, 11, 29, 12, 29,
|
||||||
|
233, 3, 29, 236, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 242, 8, 30,
|
||||||
|
10, 30, 12, 30, 245, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 252,
|
||||||
|
8, 30, 10, 30, 12, 30, 255, 9, 30, 1, 30, 3, 30, 258, 8, 30, 1, 31, 1,
|
||||||
|
31, 5, 31, 262, 8, 31, 10, 31, 12, 31, 265, 9, 31, 1, 32, 4, 32, 268, 8,
|
||||||
|
32, 11, 32, 12, 32, 269, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 4, 34, 277,
|
||||||
|
8, 34, 11, 34, 12, 34, 278, 0, 0, 35, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11,
|
||||||
|
6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15,
|
||||||
|
31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24,
|
||||||
|
49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33,
|
||||||
|
67, 0, 69, 34, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105,
|
||||||
|
2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110,
|
||||||
|
2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2,
|
||||||
|
0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0,
|
||||||
|
83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0,
|
||||||
|
80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68,
|
||||||
|
68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85,
|
||||||
|
85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39,
|
||||||
|
92, 92, 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 6, 0, 46, 46, 48, 57, 65,
|
||||||
|
91, 93, 93, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57,
|
||||||
|
7, 0, 9, 10, 13, 13, 32, 34, 39, 41, 60, 62, 91, 91, 93, 93, 295, 0, 1,
|
||||||
|
1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9,
|
||||||
|
1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0,
|
||||||
|
17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0,
|
||||||
|
0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0,
|
||||||
|
0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0,
|
||||||
|
0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1,
|
||||||
|
0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55,
|
||||||
|
1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0,
|
||||||
|
63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 1, 71, 1, 0, 0, 0,
|
||||||
|
3, 73, 1, 0, 0, 0, 5, 75, 1, 0, 0, 0, 7, 77, 1, 0, 0, 0, 9, 79, 1, 0, 0,
|
||||||
|
0, 11, 84, 1, 0, 0, 0, 13, 86, 1, 0, 0, 0, 15, 89, 1, 0, 0, 0, 17, 92,
|
||||||
|
1, 0, 0, 0, 19, 94, 1, 0, 0, 0, 21, 97, 1, 0, 0, 0, 23, 99, 1, 0, 0, 0,
|
||||||
|
25, 102, 1, 0, 0, 0, 27, 107, 1, 0, 0, 0, 29, 120, 1, 0, 0, 0, 31, 126,
|
||||||
|
1, 0, 0, 0, 33, 140, 1, 0, 0, 0, 35, 148, 1, 0, 0, 0, 37, 156, 1, 0, 0,
|
||||||
|
0, 39, 163, 1, 0, 0, 0, 41, 173, 1, 0, 0, 0, 43, 176, 1, 0, 0, 0, 45, 180,
|
||||||
|
1, 0, 0, 0, 47, 184, 1, 0, 0, 0, 49, 187, 1, 0, 0, 0, 51, 191, 1, 0, 0,
|
||||||
|
0, 53, 198, 1, 0, 0, 0, 55, 205, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 225,
|
||||||
|
1, 0, 0, 0, 61, 257, 1, 0, 0, 0, 63, 259, 1, 0, 0, 0, 65, 267, 1, 0, 0,
|
||||||
|
0, 67, 273, 1, 0, 0, 0, 69, 276, 1, 0, 0, 0, 71, 72, 5, 40, 0, 0, 72, 2,
|
||||||
|
1, 0, 0, 0, 73, 74, 5, 41, 0, 0, 74, 4, 1, 0, 0, 0, 75, 76, 5, 91, 0, 0,
|
||||||
|
76, 6, 1, 0, 0, 0, 77, 78, 5, 93, 0, 0, 78, 8, 1, 0, 0, 0, 79, 80, 5, 44,
|
||||||
|
0, 0, 80, 10, 1, 0, 0, 0, 81, 85, 5, 61, 0, 0, 82, 83, 5, 61, 0, 0, 83,
|
||||||
|
85, 5, 61, 0, 0, 84, 81, 1, 0, 0, 0, 84, 82, 1, 0, 0, 0, 85, 12, 1, 0,
|
||||||
|
0, 0, 86, 87, 5, 33, 0, 0, 87, 88, 5, 61, 0, 0, 88, 14, 1, 0, 0, 0, 89,
|
||||||
|
90, 5, 60, 0, 0, 90, 91, 5, 62, 0, 0, 91, 16, 1, 0, 0, 0, 92, 93, 5, 60,
|
||||||
|
0, 0, 93, 18, 1, 0, 0, 0, 94, 95, 5, 60, 0, 0, 95, 96, 5, 61, 0, 0, 96,
|
||||||
|
20, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 22, 1, 0, 0, 0, 99, 100, 5, 62,
|
||||||
|
0, 0, 100, 101, 5, 61, 0, 0, 101, 24, 1, 0, 0, 0, 102, 103, 7, 0, 0, 0,
|
||||||
|
103, 104, 7, 1, 0, 0, 104, 105, 7, 2, 0, 0, 105, 106, 7, 3, 0, 0, 106,
|
||||||
|
26, 1, 0, 0, 0, 107, 108, 7, 4, 0, 0, 108, 109, 7, 5, 0, 0, 109, 111, 7,
|
||||||
|
6, 0, 0, 110, 112, 7, 7, 0, 0, 111, 110, 1, 0, 0, 0, 112, 113, 1, 0, 0,
|
||||||
|
0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115,
|
||||||
|
116, 7, 0, 0, 0, 116, 117, 7, 1, 0, 0, 117, 118, 7, 2, 0, 0, 118, 119,
|
||||||
|
7, 3, 0, 0, 119, 28, 1, 0, 0, 0, 120, 121, 7, 1, 0, 0, 121, 122, 7, 0,
|
||||||
|
0, 0, 122, 123, 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0,
|
||||||
|
125, 30, 1, 0, 0, 0, 126, 127, 7, 4, 0, 0, 127, 128, 7, 5, 0, 0, 128, 130,
|
||||||
|
7, 6, 0, 0, 129, 131, 7, 7, 0, 0, 130, 129, 1, 0, 0, 0, 131, 132, 1, 0,
|
||||||
|
0, 0, 132, 130, 1, 0, 0, 0, 132, 133, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0,
|
||||||
|
134, 135, 7, 1, 0, 0, 135, 136, 7, 0, 0, 0, 136, 137, 7, 1, 0, 0, 137,
|
||||||
|
138, 7, 2, 0, 0, 138, 139, 7, 3, 0, 0, 139, 32, 1, 0, 0, 0, 140, 141, 7,
|
||||||
|
8, 0, 0, 141, 142, 7, 3, 0, 0, 142, 143, 7, 6, 0, 0, 143, 144, 7, 9, 0,
|
||||||
|
0, 144, 145, 7, 3, 0, 0, 145, 146, 7, 3, 0, 0, 146, 147, 7, 4, 0, 0, 147,
|
||||||
|
34, 1, 0, 0, 0, 148, 149, 7, 3, 0, 0, 149, 150, 7, 10, 0, 0, 150, 151,
|
||||||
|
7, 1, 0, 0, 151, 152, 7, 11, 0, 0, 152, 154, 7, 6, 0, 0, 153, 155, 7, 11,
|
||||||
|
0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 36, 1, 0, 0, 0,
|
||||||
|
156, 157, 7, 12, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159, 7, 13, 0, 0, 159,
|
||||||
|
160, 7, 3, 0, 0, 160, 161, 7, 10, 0, 0, 161, 162, 7, 14, 0, 0, 162, 38,
|
||||||
|
1, 0, 0, 0, 163, 164, 7, 15, 0, 0, 164, 165, 7, 5, 0, 0, 165, 166, 7, 4,
|
||||||
|
0, 0, 166, 167, 7, 6, 0, 0, 167, 168, 7, 16, 0, 0, 168, 169, 7, 1, 0, 0,
|
||||||
|
169, 171, 7, 4, 0, 0, 170, 172, 7, 11, 0, 0, 171, 170, 1, 0, 0, 0, 171,
|
||||||
|
172, 1, 0, 0, 0, 172, 40, 1, 0, 0, 0, 173, 174, 7, 1, 0, 0, 174, 175, 7,
|
||||||
|
4, 0, 0, 175, 42, 1, 0, 0, 0, 176, 177, 7, 4, 0, 0, 177, 178, 7, 5, 0,
|
||||||
|
0, 178, 179, 7, 6, 0, 0, 179, 44, 1, 0, 0, 0, 180, 181, 7, 16, 0, 0, 181,
|
||||||
|
182, 7, 4, 0, 0, 182, 183, 7, 17, 0, 0, 183, 46, 1, 0, 0, 0, 184, 185,
|
||||||
|
7, 5, 0, 0, 185, 186, 7, 12, 0, 0, 186, 48, 1, 0, 0, 0, 187, 188, 7, 18,
|
||||||
|
0, 0, 188, 189, 7, 16, 0, 0, 189, 190, 7, 11, 0, 0, 190, 50, 1, 0, 0, 0,
|
||||||
|
191, 192, 7, 18, 0, 0, 192, 193, 7, 16, 0, 0, 193, 194, 7, 11, 0, 0, 194,
|
||||||
|
195, 7, 16, 0, 0, 195, 196, 7, 4, 0, 0, 196, 197, 7, 19, 0, 0, 197, 52,
|
||||||
|
1, 0, 0, 0, 198, 199, 7, 18, 0, 0, 199, 200, 7, 16, 0, 0, 200, 201, 7,
|
||||||
|
11, 0, 0, 201, 202, 7, 16, 0, 0, 202, 203, 7, 0, 0, 0, 203, 204, 7, 0,
|
||||||
|
0, 0, 204, 54, 1, 0, 0, 0, 205, 206, 7, 18, 0, 0, 206, 207, 7, 16, 0, 0,
|
||||||
|
207, 208, 7, 11, 0, 0, 208, 209, 7, 4, 0, 0, 209, 210, 7, 5, 0, 0, 210,
|
||||||
|
211, 7, 4, 0, 0, 211, 212, 7, 3, 0, 0, 212, 56, 1, 0, 0, 0, 213, 214, 7,
|
||||||
|
6, 0, 0, 214, 215, 7, 12, 0, 0, 215, 216, 7, 20, 0, 0, 216, 223, 7, 3,
|
||||||
|
0, 0, 217, 218, 7, 21, 0, 0, 218, 219, 7, 16, 0, 0, 219, 220, 7, 0, 0,
|
||||||
|
0, 220, 221, 7, 11, 0, 0, 221, 223, 7, 3, 0, 0, 222, 213, 1, 0, 0, 0, 222,
|
||||||
|
217, 1, 0, 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 67, 33, 0, 225, 224,
|
||||||
|
1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 227, 228, 1, 0,
|
||||||
|
0, 0, 228, 235, 1, 0, 0, 0, 229, 231, 5, 46, 0, 0, 230, 232, 3, 67, 33,
|
||||||
|
0, 231, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 233,
|
||||||
|
234, 1, 0, 0, 0, 234, 236, 1, 0, 0, 0, 235, 229, 1, 0, 0, 0, 235, 236,
|
||||||
|
1, 0, 0, 0, 236, 60, 1, 0, 0, 0, 237, 243, 5, 34, 0, 0, 238, 242, 8, 22,
|
||||||
|
0, 0, 239, 240, 5, 92, 0, 0, 240, 242, 9, 0, 0, 0, 241, 238, 1, 0, 0, 0,
|
||||||
|
241, 239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243,
|
||||||
|
244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 258,
|
||||||
|
5, 34, 0, 0, 247, 253, 5, 39, 0, 0, 248, 252, 8, 23, 0, 0, 249, 250, 5,
|
||||||
|
92, 0, 0, 250, 252, 9, 0, 0, 0, 251, 248, 1, 0, 0, 0, 251, 249, 1, 0, 0,
|
||||||
|
0, 252, 255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254,
|
||||||
|
256, 1, 0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 258, 5, 39, 0, 0, 257, 237,
|
||||||
|
1, 0, 0, 0, 257, 247, 1, 0, 0, 0, 258, 62, 1, 0, 0, 0, 259, 263, 7, 24,
|
||||||
|
0, 0, 260, 262, 7, 25, 0, 0, 261, 260, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0,
|
||||||
|
263, 261, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 64, 1, 0, 0, 0, 265, 263,
|
||||||
|
1, 0, 0, 0, 266, 268, 7, 26, 0, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0,
|
||||||
|
0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0,
|
||||||
|
271, 272, 6, 32, 0, 0, 272, 66, 1, 0, 0, 0, 273, 274, 7, 27, 0, 0, 274,
|
||||||
|
68, 1, 0, 0, 0, 275, 277, 8, 28, 0, 0, 276, 275, 1, 0, 0, 0, 277, 278,
|
||||||
|
1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 70, 1, 0,
|
||||||
|
0, 0, 18, 0, 84, 113, 132, 154, 171, 222, 227, 233, 235, 241, 243, 251,
|
||||||
|
253, 257, 263, 269, 278, 1, 6, 0, 0,
|
||||||
|
}
|
||||||
|
deserializer := antlr.NewATNDeserializer(nil)
|
||||||
|
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
||||||
|
atn := staticData.atn
|
||||||
|
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
|
||||||
|
decisionToDFA := staticData.decisionToDFA
|
||||||
|
for index, state := range atn.DecisionToState {
|
||||||
|
decisionToDFA[index] = antlr.NewDFA(state, index)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilterQueryLexerInit initializes any static state used to implement FilterQueryLexer. By default the
|
||||||
|
// static state used to implement the lexer is lazily initialized during the first call to
|
||||||
|
// NewFilterQueryLexer(). You can call this function if you wish to initialize the static state ahead
|
||||||
|
// of time.
|
||||||
|
func FilterQueryLexerInit() {
|
||||||
|
staticData := &FilterQueryLexerLexerStaticData
|
||||||
|
staticData.once.Do(filterquerylexerLexerInit)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewFilterQueryLexer produces a new lexer instance for the optional input antlr.CharStream.
|
||||||
|
func NewFilterQueryLexer(input antlr.CharStream) *FilterQueryLexer {
|
||||||
|
FilterQueryLexerInit()
|
||||||
|
l := new(FilterQueryLexer)
|
||||||
|
l.BaseLexer = antlr.NewBaseLexer(input)
|
||||||
|
staticData := &FilterQueryLexerLexerStaticData
|
||||||
|
l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
|
||||||
|
l.channelNames = staticData.ChannelNames
|
||||||
|
l.modeNames = staticData.ModeNames
|
||||||
|
l.RuleNames = staticData.RuleNames
|
||||||
|
l.LiteralNames = staticData.LiteralNames
|
||||||
|
l.SymbolicNames = staticData.SymbolicNames
|
||||||
|
l.GrammarFileName = "FilterQuery.g4"
|
||||||
|
// TODO: l.EOF = antlr.TokenEOF
|
||||||
|
|
||||||
|
return l
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilterQueryLexer tokens.
|
||||||
|
const (
|
||||||
|
FilterQueryLexerLPAREN = 1
|
||||||
|
FilterQueryLexerRPAREN = 2
|
||||||
|
FilterQueryLexerLBRACK = 3
|
||||||
|
FilterQueryLexerRBRACK = 4
|
||||||
|
FilterQueryLexerCOMMA = 5
|
||||||
|
FilterQueryLexerEQUALS = 6
|
||||||
|
FilterQueryLexerNOT_EQUALS = 7
|
||||||
|
FilterQueryLexerNEQ = 8
|
||||||
|
FilterQueryLexerLT = 9
|
||||||
|
FilterQueryLexerLE = 10
|
||||||
|
FilterQueryLexerGT = 11
|
||||||
|
FilterQueryLexerGE = 12
|
||||||
|
FilterQueryLexerLIKE = 13
|
||||||
|
FilterQueryLexerNOT_LIKE = 14
|
||||||
|
FilterQueryLexerILIKE = 15
|
||||||
|
FilterQueryLexerNOT_ILIKE = 16
|
||||||
|
FilterQueryLexerBETWEEN = 17
|
||||||
|
FilterQueryLexerEXISTS = 18
|
||||||
|
FilterQueryLexerREGEXP = 19
|
||||||
|
FilterQueryLexerCONTAINS = 20
|
||||||
|
FilterQueryLexerIN = 21
|
||||||
|
FilterQueryLexerNOT = 22
|
||||||
|
FilterQueryLexerAND = 23
|
||||||
|
FilterQueryLexerOR = 24
|
||||||
|
FilterQueryLexerHAS = 25
|
||||||
|
FilterQueryLexerHASANY = 26
|
||||||
|
FilterQueryLexerHASALL = 27
|
||||||
|
FilterQueryLexerHASNONE = 28
|
||||||
|
FilterQueryLexerBOOL = 29
|
||||||
|
FilterQueryLexerNUMBER = 30
|
||||||
|
FilterQueryLexerQUOTED_TEXT = 31
|
||||||
|
FilterQueryLexerKEY = 32
|
||||||
|
FilterQueryLexerWS = 33
|
||||||
|
FilterQueryLexerFREETEXT = 34
|
||||||
|
)
|
||||||
112
pkg/parser/grammar/filterquery_listener.go
Normal file
112
pkg/parser/grammar/filterquery_listener.go
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||||
|
|
||||||
|
package parser // FilterQuery
|
||||||
|
|
||||||
|
import "github.com/antlr4-go/antlr/v4"
|
||||||
|
|
||||||
|
// FilterQueryListener is a complete listener for a parse tree produced by FilterQueryParser.
|
||||||
|
type FilterQueryListener interface {
|
||||||
|
antlr.ParseTreeListener
|
||||||
|
|
||||||
|
// EnterQuery is called when entering the query production.
|
||||||
|
EnterQuery(c *QueryContext)
|
||||||
|
|
||||||
|
// EnterExpression is called when entering the expression production.
|
||||||
|
EnterExpression(c *ExpressionContext)
|
||||||
|
|
||||||
|
// EnterOrExpression is called when entering the orExpression production.
|
||||||
|
EnterOrExpression(c *OrExpressionContext)
|
||||||
|
|
||||||
|
// EnterAndExpression is called when entering the andExpression production.
|
||||||
|
EnterAndExpression(c *AndExpressionContext)
|
||||||
|
|
||||||
|
// EnterUnaryExpression is called when entering the unaryExpression production.
|
||||||
|
EnterUnaryExpression(c *UnaryExpressionContext)
|
||||||
|
|
||||||
|
// EnterPrimary is called when entering the primary production.
|
||||||
|
EnterPrimary(c *PrimaryContext)
|
||||||
|
|
||||||
|
// EnterComparison is called when entering the comparison production.
|
||||||
|
EnterComparison(c *ComparisonContext)
|
||||||
|
|
||||||
|
// EnterInClause is called when entering the inClause production.
|
||||||
|
EnterInClause(c *InClauseContext)
|
||||||
|
|
||||||
|
// EnterNotInClause is called when entering the notInClause production.
|
||||||
|
EnterNotInClause(c *NotInClauseContext)
|
||||||
|
|
||||||
|
// EnterValueList is called when entering the valueList production.
|
||||||
|
EnterValueList(c *ValueListContext)
|
||||||
|
|
||||||
|
// EnterFullText is called when entering the fullText production.
|
||||||
|
EnterFullText(c *FullTextContext)
|
||||||
|
|
||||||
|
// EnterFunctionCall is called when entering the functionCall production.
|
||||||
|
EnterFunctionCall(c *FunctionCallContext)
|
||||||
|
|
||||||
|
// EnterFunctionParamList is called when entering the functionParamList production.
|
||||||
|
EnterFunctionParamList(c *FunctionParamListContext)
|
||||||
|
|
||||||
|
// EnterFunctionParam is called when entering the functionParam production.
|
||||||
|
EnterFunctionParam(c *FunctionParamContext)
|
||||||
|
|
||||||
|
// EnterArray is called when entering the array production.
|
||||||
|
EnterArray(c *ArrayContext)
|
||||||
|
|
||||||
|
// EnterValue is called when entering the value production.
|
||||||
|
EnterValue(c *ValueContext)
|
||||||
|
|
||||||
|
// EnterKey is called when entering the key production.
|
||||||
|
EnterKey(c *KeyContext)
|
||||||
|
|
||||||
|
// ExitQuery is called when exiting the query production.
|
||||||
|
ExitQuery(c *QueryContext)
|
||||||
|
|
||||||
|
// ExitExpression is called when exiting the expression production.
|
||||||
|
ExitExpression(c *ExpressionContext)
|
||||||
|
|
||||||
|
// ExitOrExpression is called when exiting the orExpression production.
|
||||||
|
ExitOrExpression(c *OrExpressionContext)
|
||||||
|
|
||||||
|
// ExitAndExpression is called when exiting the andExpression production.
|
||||||
|
ExitAndExpression(c *AndExpressionContext)
|
||||||
|
|
||||||
|
// ExitUnaryExpression is called when exiting the unaryExpression production.
|
||||||
|
ExitUnaryExpression(c *UnaryExpressionContext)
|
||||||
|
|
||||||
|
// ExitPrimary is called when exiting the primary production.
|
||||||
|
ExitPrimary(c *PrimaryContext)
|
||||||
|
|
||||||
|
// ExitComparison is called when exiting the comparison production.
|
||||||
|
ExitComparison(c *ComparisonContext)
|
||||||
|
|
||||||
|
// ExitInClause is called when exiting the inClause production.
|
||||||
|
ExitInClause(c *InClauseContext)
|
||||||
|
|
||||||
|
// ExitNotInClause is called when exiting the notInClause production.
|
||||||
|
ExitNotInClause(c *NotInClauseContext)
|
||||||
|
|
||||||
|
// ExitValueList is called when exiting the valueList production.
|
||||||
|
ExitValueList(c *ValueListContext)
|
||||||
|
|
||||||
|
// ExitFullText is called when exiting the fullText production.
|
||||||
|
ExitFullText(c *FullTextContext)
|
||||||
|
|
||||||
|
// ExitFunctionCall is called when exiting the functionCall production.
|
||||||
|
ExitFunctionCall(c *FunctionCallContext)
|
||||||
|
|
||||||
|
// ExitFunctionParamList is called when exiting the functionParamList production.
|
||||||
|
ExitFunctionParamList(c *FunctionParamListContext)
|
||||||
|
|
||||||
|
// ExitFunctionParam is called when exiting the functionParam production.
|
||||||
|
ExitFunctionParam(c *FunctionParamContext)
|
||||||
|
|
||||||
|
// ExitArray is called when exiting the array production.
|
||||||
|
ExitArray(c *ArrayContext)
|
||||||
|
|
||||||
|
// ExitValue is called when exiting the value production.
|
||||||
|
ExitValue(c *ValueContext)
|
||||||
|
|
||||||
|
// ExitKey is called when exiting the key production.
|
||||||
|
ExitKey(c *KeyContext)
|
||||||
|
}
|
||||||
3539
pkg/parser/grammar/filterquery_parser.go
Normal file
3539
pkg/parser/grammar/filterquery_parser.go
Normal file
File diff suppressed because it is too large
Load Diff
61
pkg/parser/grammar/filterquery_visitor.go
Normal file
61
pkg/parser/grammar/filterquery_visitor.go
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||||
|
|
||||||
|
package parser // FilterQuery
|
||||||
|
|
||||||
|
import "github.com/antlr4-go/antlr/v4"
|
||||||
|
|
||||||
|
// A complete Visitor for a parse tree produced by FilterQueryParser.
|
||||||
|
type FilterQueryVisitor interface {
|
||||||
|
antlr.ParseTreeVisitor
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#query.
|
||||||
|
VisitQuery(ctx *QueryContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#expression.
|
||||||
|
VisitExpression(ctx *ExpressionContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#orExpression.
|
||||||
|
VisitOrExpression(ctx *OrExpressionContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#andExpression.
|
||||||
|
VisitAndExpression(ctx *AndExpressionContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#unaryExpression.
|
||||||
|
VisitUnaryExpression(ctx *UnaryExpressionContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#primary.
|
||||||
|
VisitPrimary(ctx *PrimaryContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#comparison.
|
||||||
|
VisitComparison(ctx *ComparisonContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#inClause.
|
||||||
|
VisitInClause(ctx *InClauseContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#notInClause.
|
||||||
|
VisitNotInClause(ctx *NotInClauseContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#valueList.
|
||||||
|
VisitValueList(ctx *ValueListContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#fullText.
|
||||||
|
VisitFullText(ctx *FullTextContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#functionCall.
|
||||||
|
VisitFunctionCall(ctx *FunctionCallContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#functionParamList.
|
||||||
|
VisitFunctionParamList(ctx *FunctionParamListContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#functionParam.
|
||||||
|
VisitFunctionParam(ctx *FunctionParamContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#array.
|
||||||
|
VisitArray(ctx *ArrayContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#value.
|
||||||
|
VisitValue(ctx *ValueContext) interface{}
|
||||||
|
|
||||||
|
// Visit a parse tree produced by FilterQueryParser#key.
|
||||||
|
VisitKey(ctx *KeyContext) interface{}
|
||||||
|
}
|
||||||
22
pkg/query-service/Dockerfile.multi-arch
Normal file
22
pkg/query-service/Dockerfile.multi-arch
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"
|
||||||
|
|
||||||
|
FROM alpine@sha256:${ALPINE_SHA}
|
||||||
|
LABEL maintainer="signoz"
|
||||||
|
WORKDIR /root
|
||||||
|
|
||||||
|
ARG OS="linux"
|
||||||
|
ARG ARCH
|
||||||
|
|
||||||
|
RUN apk update && \
|
||||||
|
apk add ca-certificates && \
|
||||||
|
rm -rf /var/cache/apk/*
|
||||||
|
|
||||||
|
COPY ./target/${OS}-${ARCH}/signoz-community /root/signoz-community
|
||||||
|
COPY ./conf/prometheus.yml /root/config/prometheus.yml
|
||||||
|
COPY ./templates/email /root/templates
|
||||||
|
COPY frontend/build/ /etc/signoz/web/
|
||||||
|
|
||||||
|
RUN chmod 755 /root /root/signoz-community
|
||||||
|
|
||||||
|
ENTRYPOINT ["./signoz-community"]
|
||||||
|
CMD ["-config", "/root/config/prometheus.yml"]
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -9,7 +9,6 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/types"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
@@ -41,7 +40,7 @@ func InitDB(sqlStore sqlstore.SQLStore) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// CreateDashboard creates a new dashboard
|
// CreateDashboard creates a new dashboard
|
||||||
func CreateDashboard(ctx context.Context, orgID string, email string, data map[string]interface{}, fm interfaces.FeatureLookup) (*types.Dashboard, *model.ApiError) {
|
func CreateDashboard(ctx context.Context, orgID string, email string, data map[string]interface{}) (*types.Dashboard, *model.ApiError) {
|
||||||
dash := &types.Dashboard{
|
dash := &types.Dashboard{
|
||||||
Data: data,
|
Data: data,
|
||||||
}
|
}
|
||||||
@@ -77,7 +76,7 @@ func GetDashboards(ctx context.Context, orgID string) ([]types.Dashboard, *model
|
|||||||
return dashboards, nil
|
return dashboards, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func DeleteDashboard(ctx context.Context, orgID, uuid string, fm interfaces.FeatureLookup) *model.ApiError {
|
func DeleteDashboard(ctx context.Context, orgID, uuid string) *model.ApiError {
|
||||||
|
|
||||||
dashboard, dErr := GetDashboard(ctx, orgID, uuid)
|
dashboard, dErr := GetDashboard(ctx, orgID, uuid)
|
||||||
if dErr != nil {
|
if dErr != nil {
|
||||||
@@ -116,7 +115,7 @@ func GetDashboard(ctx context.Context, orgID, uuid string) (*types.Dashboard, *m
|
|||||||
return &dashboard, nil
|
return &dashboard, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func UpdateDashboard(ctx context.Context, orgID, userEmail, uuid string, data map[string]interface{}, fm interfaces.FeatureLookup) (*types.Dashboard, *model.ApiError) {
|
func UpdateDashboard(ctx context.Context, orgID, userEmail, uuid string, data map[string]interface{}) (*types.Dashboard, *model.ApiError) {
|
||||||
|
|
||||||
mapData, err := json.Marshal(data)
|
mapData, err := json.Marshal(data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/http/render"
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
|
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
|
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
|
||||||
"github.com/SigNoz/signoz/pkg/signoz"
|
"github.com/SigNoz/signoz/pkg/signoz"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
@@ -44,7 +45,6 @@ import (
|
|||||||
logsv4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
logsv4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/metrics"
|
"github.com/SigNoz/signoz/pkg/query-service/app/metrics"
|
||||||
metricsv3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
metricsv3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
|
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
|
||||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||||
@@ -142,6 +142,8 @@ type APIHandler struct {
|
|||||||
AlertmanagerAPI *alertmanager.API
|
AlertmanagerAPI *alertmanager.API
|
||||||
|
|
||||||
Signoz *signoz.SigNoz
|
Signoz *signoz.SigNoz
|
||||||
|
|
||||||
|
Preference preference.API
|
||||||
}
|
}
|
||||||
|
|
||||||
type APIHandlerOpts struct {
|
type APIHandlerOpts struct {
|
||||||
@@ -187,6 +189,8 @@ type APIHandlerOpts struct {
|
|||||||
AlertmanagerAPI *alertmanager.API
|
AlertmanagerAPI *alertmanager.API
|
||||||
|
|
||||||
Signoz *signoz.SigNoz
|
Signoz *signoz.SigNoz
|
||||||
|
|
||||||
|
Preference preference.API
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewAPIHandler returns an APIHandler
|
// NewAPIHandler returns an APIHandler
|
||||||
@@ -196,7 +200,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
|||||||
Cache: opts.Cache,
|
Cache: opts.Cache,
|
||||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||||
FluxInterval: opts.FluxInterval,
|
FluxInterval: opts.FluxInterval,
|
||||||
FeatureLookup: opts.FeatureFlags,
|
|
||||||
UseLogsNewSchema: opts.UseLogsNewSchema,
|
UseLogsNewSchema: opts.UseLogsNewSchema,
|
||||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||||
}
|
}
|
||||||
@@ -206,7 +209,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
|||||||
Cache: opts.Cache,
|
Cache: opts.Cache,
|
||||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||||
FluxInterval: opts.FluxInterval,
|
FluxInterval: opts.FluxInterval,
|
||||||
FeatureLookup: opts.FeatureFlags,
|
|
||||||
UseLogsNewSchema: opts.UseLogsNewSchema,
|
UseLogsNewSchema: opts.UseLogsNewSchema,
|
||||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||||
}
|
}
|
||||||
@@ -257,6 +259,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
|||||||
SummaryService: summaryService,
|
SummaryService: summaryService,
|
||||||
AlertmanagerAPI: opts.AlertmanagerAPI,
|
AlertmanagerAPI: opts.AlertmanagerAPI,
|
||||||
Signoz: opts.Signoz,
|
Signoz: opts.Signoz,
|
||||||
|
Preference: opts.Preference,
|
||||||
}
|
}
|
||||||
|
|
||||||
logsQueryBuilder := logsv3.PrepareLogsQuery
|
logsQueryBuilder := logsv3.PrepareLogsQuery
|
||||||
@@ -274,7 +277,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
|||||||
BuildTraceQuery: tracesQueryBuilder,
|
BuildTraceQuery: tracesQueryBuilder,
|
||||||
BuildLogQuery: logsQueryBuilder,
|
BuildLogQuery: logsQueryBuilder,
|
||||||
}
|
}
|
||||||
aH.queryBuilder = queryBuilder.NewQueryBuilder(builderOpts, aH.featureFlags)
|
aH.queryBuilder = queryBuilder.NewQueryBuilder(builderOpts)
|
||||||
|
|
||||||
// check if at least one user is created
|
// check if at least one user is created
|
||||||
hasUsers, err := aH.appDao.GetUsersWithOpts(context.Background(), 1)
|
hasUsers, err := aH.appDao.GetUsersWithOpts(context.Background(), 1)
|
||||||
@@ -1143,7 +1146,7 @@ func (aH *APIHandler) deleteDashboard(w http.ResponseWriter, r *http.Request) {
|
|||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
err := dashboards.DeleteDashboard(r.Context(), claims.OrgID, uuid, aH.featureFlags)
|
err := dashboards.DeleteDashboard(r.Context(), claims.OrgID, uuid)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
RespondError(w, err, nil)
|
RespondError(w, err, nil)
|
||||||
@@ -1235,7 +1238,7 @@ func (aH *APIHandler) updateDashboard(w http.ResponseWriter, r *http.Request) {
|
|||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
dashboard, apiError := dashboards.UpdateDashboard(r.Context(), claims.OrgID, claims.Email, uuid, postData, aH.featureFlags)
|
dashboard, apiError := dashboards.UpdateDashboard(r.Context(), claims.OrgID, claims.Email, uuid, postData)
|
||||||
if apiError != nil {
|
if apiError != nil {
|
||||||
RespondError(w, apiError, nil)
|
RespondError(w, apiError, nil)
|
||||||
return
|
return
|
||||||
@@ -1308,7 +1311,7 @@ func (aH *APIHandler) createDashboards(w http.ResponseWriter, r *http.Request) {
|
|||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
dash, apiErr := dashboards.CreateDashboard(r.Context(), claims.OrgID, claims.Email, postData, aH.featureFlags)
|
dash, apiErr := dashboards.CreateDashboard(r.Context(), claims.OrgID, claims.Email, postData)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
RespondError(w, apiErr, nil)
|
RespondError(w, apiErr, nil)
|
||||||
@@ -1723,14 +1726,13 @@ func (aH *APIHandler) getServicesList(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (aH *APIHandler) SearchTraces(w http.ResponseWriter, r *http.Request) {
|
func (aH *APIHandler) SearchTraces(w http.ResponseWriter, r *http.Request) {
|
||||||
|
|
||||||
params, err := ParseSearchTracesParams(r)
|
params, err := ParseSearchTracesParams(r)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
|
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
result, err := aH.reader.SearchTraces(r.Context(), params, nil)
|
result, err := aH.reader.SearchTraces(r.Context(), params)
|
||||||
if aH.HandleError(w, err, http.StatusBadRequest) {
|
if aH.HandleError(w, err, http.StatusBadRequest) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -1865,8 +1867,15 @@ func (aH *APIHandler) setTTL(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ctx := r.Context()
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
RespondError(w, &model.ApiError{Err: errors.New("failed to get org id from context"), Typ: model.ErrorInternal}, nil)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Context is not used here as TTL is long duration DB operation
|
// Context is not used here as TTL is long duration DB operation
|
||||||
result, apiErr := aH.reader.SetTTL(context.Background(), ttlParams)
|
result, apiErr := aH.reader.SetTTL(context.Background(), claims.OrgID, ttlParams)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
if apiErr.Typ == model.ErrorConflict {
|
if apiErr.Typ == model.ErrorConflict {
|
||||||
aH.HandleError(w, apiErr.Err, http.StatusConflict)
|
aH.HandleError(w, apiErr.Err, http.StatusConflict)
|
||||||
@@ -1886,7 +1895,14 @@ func (aH *APIHandler) getTTL(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
result, apiErr := aH.reader.GetTTL(r.Context(), ttlParams)
|
ctx := r.Context()
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
RespondError(w, &model.ApiError{Err: errors.New("failed to get org id from context"), Typ: model.ErrorInternal}, nil)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
result, apiErr := aH.reader.GetTTL(r.Context(), claims.OrgID, ttlParams)
|
||||||
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
|
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -3415,132 +3431,37 @@ func (aH *APIHandler) getProducerConsumerEval(
|
|||||||
func (aH *APIHandler) getUserPreference(
|
func (aH *APIHandler) getUserPreference(
|
||||||
w http.ResponseWriter, r *http.Request,
|
w http.ResponseWriter, r *http.Request,
|
||||||
) {
|
) {
|
||||||
preferenceId := mux.Vars(r)["preferenceId"]
|
aH.Preference.GetUserPreference(w, r)
|
||||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
|
||||||
if !ok {
|
|
||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
preference, apiErr := preferences.GetUserPreference(
|
|
||||||
r.Context(), preferenceId, claims.OrgID, claims.UserID,
|
|
||||||
)
|
|
||||||
if apiErr != nil {
|
|
||||||
RespondError(w, apiErr, nil)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
aH.Respond(w, preference)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (aH *APIHandler) updateUserPreference(
|
func (aH *APIHandler) updateUserPreference(
|
||||||
w http.ResponseWriter, r *http.Request,
|
w http.ResponseWriter, r *http.Request,
|
||||||
) {
|
) {
|
||||||
preferenceId := mux.Vars(r)["preferenceId"]
|
aH.Preference.UpdateUserPreference(w, r)
|
||||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
|
||||||
if !ok {
|
|
||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
req := preferences.UpdatePreference{}
|
|
||||||
|
|
||||||
err := json.NewDecoder(r.Body).Decode(&req)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
RespondError(w, model.BadRequest(err), nil)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
preference, apiErr := preferences.UpdateUserPreference(r.Context(), preferenceId, req.PreferenceValue, claims.UserID)
|
|
||||||
if apiErr != nil {
|
|
||||||
RespondError(w, apiErr, nil)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
aH.Respond(w, preference)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (aH *APIHandler) getAllUserPreferences(
|
func (aH *APIHandler) getAllUserPreferences(
|
||||||
w http.ResponseWriter, r *http.Request,
|
w http.ResponseWriter, r *http.Request,
|
||||||
) {
|
) {
|
||||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
aH.Preference.GetAllUserPreferences(w, r)
|
||||||
if !ok {
|
|
||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
preference, apiErr := preferences.GetAllUserPreferences(
|
|
||||||
r.Context(), claims.OrgID, claims.UserID,
|
|
||||||
)
|
|
||||||
if apiErr != nil {
|
|
||||||
RespondError(w, apiErr, nil)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
aH.Respond(w, preference)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (aH *APIHandler) getOrgPreference(
|
func (aH *APIHandler) getOrgPreference(
|
||||||
w http.ResponseWriter, r *http.Request,
|
w http.ResponseWriter, r *http.Request,
|
||||||
) {
|
) {
|
||||||
preferenceId := mux.Vars(r)["preferenceId"]
|
aH.Preference.GetOrgPreference(w, r)
|
||||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
|
||||||
if !ok {
|
|
||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
preference, apiErr := preferences.GetOrgPreference(
|
|
||||||
r.Context(), preferenceId, claims.OrgID,
|
|
||||||
)
|
|
||||||
if apiErr != nil {
|
|
||||||
RespondError(w, apiErr, nil)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
aH.Respond(w, preference)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (aH *APIHandler) updateOrgPreference(
|
func (aH *APIHandler) updateOrgPreference(
|
||||||
w http.ResponseWriter, r *http.Request,
|
w http.ResponseWriter, r *http.Request,
|
||||||
) {
|
) {
|
||||||
preferenceId := mux.Vars(r)["preferenceId"]
|
aH.Preference.UpdateOrgPreference(w, r)
|
||||||
req := preferences.UpdatePreference{}
|
|
||||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
|
||||||
if !ok {
|
|
||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
err := json.NewDecoder(r.Body).Decode(&req)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
RespondError(w, model.BadRequest(err), nil)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
preference, apiErr := preferences.UpdateOrgPreference(r.Context(), preferenceId, req.PreferenceValue, claims.OrgID)
|
|
||||||
if apiErr != nil {
|
|
||||||
RespondError(w, apiErr, nil)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
aH.Respond(w, preference)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (aH *APIHandler) getAllOrgPreferences(
|
func (aH *APIHandler) getAllOrgPreferences(
|
||||||
w http.ResponseWriter, r *http.Request,
|
w http.ResponseWriter, r *http.Request,
|
||||||
) {
|
) {
|
||||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
aH.Preference.GetAllOrgPreferences(w, r)
|
||||||
if !ok {
|
|
||||||
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
preference, apiErr := preferences.GetAllOrgPreferences(
|
|
||||||
r.Context(), claims.OrgID,
|
|
||||||
)
|
|
||||||
if apiErr != nil {
|
|
||||||
RespondError(w, apiErr, nil)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
aH.Respond(w, preference)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// RegisterIntegrationRoutes Registers all Integrations
|
// RegisterIntegrationRoutes Registers all Integrations
|
||||||
|
|||||||
@@ -246,7 +246,7 @@ func buildLogsTimeSeriesFilterQuery(fs *v3.FilterSet, groupBy []v3.AttributeKey,
|
|||||||
return queryString, nil
|
return queryString, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.BuilderQuery, graphLimitQtype string, preferRPM bool) (string, error) {
|
func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.BuilderQuery, graphLimitQtype string) (string, error) {
|
||||||
|
|
||||||
filterSubQuery, err := buildLogsTimeSeriesFilterQuery(mq.Filters, mq.GroupBy, mq.AggregateAttribute)
|
filterSubQuery, err := buildLogsTimeSeriesFilterQuery(mq.Filters, mq.GroupBy, mq.AggregateAttribute)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -315,9 +315,6 @@ func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.Build
|
|||||||
switch mq.AggregateOperator {
|
switch mq.AggregateOperator {
|
||||||
case v3.AggregateOperatorRate:
|
case v3.AggregateOperatorRate:
|
||||||
rate := float64(step)
|
rate := float64(step)
|
||||||
if preferRPM {
|
|
||||||
rate = rate / 60.0
|
|
||||||
}
|
|
||||||
|
|
||||||
op := fmt.Sprintf("count(%s)/%f", aggregationKey, rate)
|
op := fmt.Sprintf("count(%s)/%f", aggregationKey, rate)
|
||||||
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
|
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
|
||||||
@@ -328,9 +325,6 @@ func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.Build
|
|||||||
v3.AggregateOperatorRateAvg,
|
v3.AggregateOperatorRateAvg,
|
||||||
v3.AggregateOperatorRateMin:
|
v3.AggregateOperatorRateMin:
|
||||||
rate := float64(step)
|
rate := float64(step)
|
||||||
if preferRPM {
|
|
||||||
rate = rate / 60.0
|
|
||||||
}
|
|
||||||
|
|
||||||
op := fmt.Sprintf("%s(%s)/%f", AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey, rate)
|
op := fmt.Sprintf("%s(%s)/%f", AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey, rate)
|
||||||
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
|
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
|
||||||
@@ -513,7 +507,7 @@ func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.Pan
|
|||||||
return query, nil
|
return query, nil
|
||||||
} else if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
|
} else if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
|
||||||
// give me just the groupby names
|
// give me just the groupby names
|
||||||
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype, options.PreferRPM)
|
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
@@ -521,14 +515,14 @@ func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.Pan
|
|||||||
|
|
||||||
return query, nil
|
return query, nil
|
||||||
} else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
|
} else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
|
||||||
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype, options.PreferRPM)
|
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
return query, nil
|
return query, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype, options.PreferRPM)
|
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -352,7 +352,6 @@ var testBuildLogsQueryData = []struct {
|
|||||||
AggregateOperator v3.AggregateOperator
|
AggregateOperator v3.AggregateOperator
|
||||||
ExpectedQuery string
|
ExpectedQuery string
|
||||||
Type int
|
Type int
|
||||||
PreferRPM bool
|
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
Name: "Test aggregate count on select field",
|
Name: "Test aggregate count on select field",
|
||||||
@@ -698,9 +697,8 @@ var testBuildLogsQueryData = []struct {
|
|||||||
OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}},
|
OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}},
|
||||||
},
|
},
|
||||||
TableName: "logs",
|
TableName: "logs",
|
||||||
PreferRPM: true,
|
|
||||||
ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`" +
|
ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`" +
|
||||||
", sum(`attribute_float64_bytes`)/1.000000 as value from signoz_logs.distributed_logs " +
|
", sum(`attribute_float64_bytes`)/60.000000 as value from signoz_logs.distributed_logs " +
|
||||||
"where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " +
|
"where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " +
|
||||||
"AND has(attributes_string_key, 'method') " +
|
"AND has(attributes_string_key, 'method') " +
|
||||||
"AND `attribute_float64_bytes_exists`=true " +
|
"AND `attribute_float64_bytes_exists`=true " +
|
||||||
@@ -722,7 +720,6 @@ var testBuildLogsQueryData = []struct {
|
|||||||
OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}},
|
OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}},
|
||||||
},
|
},
|
||||||
TableName: "logs",
|
TableName: "logs",
|
||||||
PreferRPM: false,
|
|
||||||
ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`" +
|
ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`" +
|
||||||
", count(attributes_float64_value[indexOf(attributes_float64_key, 'bytes')])/60.000000 as value " +
|
", count(attributes_float64_value[indexOf(attributes_float64_key, 'bytes')])/60.000000 as value " +
|
||||||
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " +
|
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " +
|
||||||
@@ -747,10 +744,9 @@ var testBuildLogsQueryData = []struct {
|
|||||||
OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}},
|
OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}},
|
||||||
},
|
},
|
||||||
TableName: "logs",
|
TableName: "logs",
|
||||||
PreferRPM: true,
|
|
||||||
ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, " +
|
ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, " +
|
||||||
"attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, " +
|
"attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, " +
|
||||||
"sum(attributes_float64_value[indexOf(attributes_float64_key, 'bytes')])/1.000000 as value " +
|
"sum(attributes_float64_value[indexOf(attributes_float64_key, 'bytes')])/60.000000 as value " +
|
||||||
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " +
|
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " +
|
||||||
"AND has(attributes_string_key, 'method') " +
|
"AND has(attributes_string_key, 'method') " +
|
||||||
"AND has(attributes_float64_key, 'bytes') " +
|
"AND has(attributes_float64_key, 'bytes') " +
|
||||||
@@ -1061,7 +1057,7 @@ var testBuildLogsQueryData = []struct {
|
|||||||
func TestBuildLogsQuery(t *testing.T) {
|
func TestBuildLogsQuery(t *testing.T) {
|
||||||
for _, tt := range testBuildLogsQueryData {
|
for _, tt := range testBuildLogsQueryData {
|
||||||
Convey("TestBuildLogsQuery", t, func() {
|
Convey("TestBuildLogsQuery", t, func() {
|
||||||
query, err := buildLogsQuery(tt.PanelType, tt.Start, tt.End, tt.BuilderQuery.StepInterval, tt.BuilderQuery, "", tt.PreferRPM)
|
query, err := buildLogsQuery(tt.PanelType, tt.Start, tt.End, tt.BuilderQuery.StepInterval, tt.BuilderQuery, "")
|
||||||
So(err, ShouldBeNil)
|
So(err, ShouldBeNil)
|
||||||
So(query, ShouldEqual, tt.ExpectedQuery)
|
So(query, ShouldEqual, tt.ExpectedQuery)
|
||||||
|
|
||||||
@@ -1238,7 +1234,7 @@ var testPrepLogsQueryData = []struct {
|
|||||||
},
|
},
|
||||||
TableName: "logs",
|
TableName: "logs",
|
||||||
ExpectedQuery: "SELECT `method` from (SELECT attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND has(attributes_string_key, 'method') AND has(attributes_string_key, 'name') group by `method` order by value DESC) LIMIT 10",
|
ExpectedQuery: "SELECT `method` from (SELECT attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND has(attributes_string_key, 'method') AND has(attributes_string_key, 'name') group by `method` order by value DESC) LIMIT 10",
|
||||||
Options: v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: true},
|
Options: v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
Name: "Test TS with limit- first - with order by value",
|
Name: "Test TS with limit- first - with order by value",
|
||||||
@@ -1261,7 +1257,7 @@ var testPrepLogsQueryData = []struct {
|
|||||||
},
|
},
|
||||||
TableName: "logs",
|
TableName: "logs",
|
||||||
ExpectedQuery: "SELECT `method` from (SELECT attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND has(attributes_string_key, 'method') AND has(attributes_string_key, 'name') group by `method` order by value ASC) LIMIT 10",
|
ExpectedQuery: "SELECT `method` from (SELECT attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND has(attributes_string_key, 'method') AND has(attributes_string_key, 'name') group by `method` order by value ASC) LIMIT 10",
|
||||||
Options: v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: true},
|
Options: v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
Name: "Test TS with limit- first - with order by attribute",
|
Name: "Test TS with limit- first - with order by attribute",
|
||||||
@@ -1284,7 +1280,7 @@ var testPrepLogsQueryData = []struct {
|
|||||||
},
|
},
|
||||||
TableName: "logs",
|
TableName: "logs",
|
||||||
ExpectedQuery: "SELECT `method` from (SELECT attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND has(attributes_string_key, 'method') AND has(attributes_string_key, 'name') group by `method` order by `method` ASC) LIMIT 10",
|
ExpectedQuery: "SELECT `method` from (SELECT attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND has(attributes_string_key, 'method') AND has(attributes_string_key, 'name') group by `method` order by `method` ASC) LIMIT 10",
|
||||||
Options: v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: true},
|
Options: v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
Name: "Test TS with limit- second",
|
Name: "Test TS with limit- second",
|
||||||
|
|||||||
@@ -285,7 +285,6 @@ func orderByAttributeKeyTags(panelType v3.PanelType, items []v3.OrderBy, tags []
|
|||||||
func generateAggregateClause(aggOp v3.AggregateOperator,
|
func generateAggregateClause(aggOp v3.AggregateOperator,
|
||||||
aggKey string,
|
aggKey string,
|
||||||
step int64,
|
step int64,
|
||||||
preferRPM bool,
|
|
||||||
timeFilter string,
|
timeFilter string,
|
||||||
whereClause string,
|
whereClause string,
|
||||||
groupBy string,
|
groupBy string,
|
||||||
@@ -299,9 +298,6 @@ func generateAggregateClause(aggOp v3.AggregateOperator,
|
|||||||
switch aggOp {
|
switch aggOp {
|
||||||
case v3.AggregateOperatorRate:
|
case v3.AggregateOperatorRate:
|
||||||
rate := float64(step)
|
rate := float64(step)
|
||||||
if preferRPM {
|
|
||||||
rate = rate / 60.0
|
|
||||||
}
|
|
||||||
|
|
||||||
op := fmt.Sprintf("count(%s)/%f", aggKey, rate)
|
op := fmt.Sprintf("count(%s)/%f", aggKey, rate)
|
||||||
query := fmt.Sprintf(queryTmpl, op, whereClause, groupBy, having, orderBy)
|
query := fmt.Sprintf(queryTmpl, op, whereClause, groupBy, having, orderBy)
|
||||||
@@ -312,9 +308,6 @@ func generateAggregateClause(aggOp v3.AggregateOperator,
|
|||||||
v3.AggregateOperatorRateAvg,
|
v3.AggregateOperatorRateAvg,
|
||||||
v3.AggregateOperatorRateMin:
|
v3.AggregateOperatorRateMin:
|
||||||
rate := float64(step)
|
rate := float64(step)
|
||||||
if preferRPM {
|
|
||||||
rate = rate / 60.0
|
|
||||||
}
|
|
||||||
|
|
||||||
op := fmt.Sprintf("%s(%s)/%f", logsV3.AggregateOperatorToSQLFunc[aggOp], aggKey, rate)
|
op := fmt.Sprintf("%s(%s)/%f", logsV3.AggregateOperatorToSQLFunc[aggOp], aggKey, rate)
|
||||||
query := fmt.Sprintf(queryTmpl, op, whereClause, groupBy, having, orderBy)
|
query := fmt.Sprintf(queryTmpl, op, whereClause, groupBy, having, orderBy)
|
||||||
@@ -349,7 +342,7 @@ func generateAggregateClause(aggOp v3.AggregateOperator,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.BuilderQuery, graphLimitQtype string, preferRPM bool) (string, error) {
|
func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.BuilderQuery, graphLimitQtype string) (string, error) {
|
||||||
// timerange will be sent in epoch millisecond
|
// timerange will be sent in epoch millisecond
|
||||||
logsStart := utils.GetEpochNanoSecs(start)
|
logsStart := utils.GetEpochNanoSecs(start)
|
||||||
logsEnd := utils.GetEpochNanoSecs(end)
|
logsEnd := utils.GetEpochNanoSecs(end)
|
||||||
@@ -425,7 +418,7 @@ func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.Build
|
|||||||
filterSubQuery = filterSubQuery + " AND " + fmt.Sprintf("(%s) GLOBAL IN (", logsV3.GetSelectKeys(mq.AggregateOperator, mq.GroupBy)) + "#LIMIT_PLACEHOLDER)"
|
filterSubQuery = filterSubQuery + " AND " + fmt.Sprintf("(%s) GLOBAL IN (", logsV3.GetSelectKeys(mq.AggregateOperator, mq.GroupBy)) + "#LIMIT_PLACEHOLDER)"
|
||||||
}
|
}
|
||||||
|
|
||||||
aggClause, err := generateAggregateClause(mq.AggregateOperator, aggregationKey, step, preferRPM, timeFilter, filterSubQuery, groupBy, having, orderBy)
|
aggClause, err := generateAggregateClause(mq.AggregateOperator, aggregationKey, step, timeFilter, filterSubQuery, groupBy, having, orderBy)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
@@ -505,7 +498,7 @@ func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.Pan
|
|||||||
return query, nil
|
return query, nil
|
||||||
} else if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
|
} else if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
|
||||||
// give me just the group_by names (no values)
|
// give me just the group_by names (no values)
|
||||||
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype, options.PreferRPM)
|
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
@@ -513,14 +506,14 @@ func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.Pan
|
|||||||
|
|
||||||
return query, nil
|
return query, nil
|
||||||
} else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
|
} else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
|
||||||
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype, options.PreferRPM)
|
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
return query, nil
|
return query, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype, options.PreferRPM)
|
query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq, options.GraphLimitQtype)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -574,7 +574,6 @@ func Test_generateAggregateClause(t *testing.T) {
|
|||||||
op v3.AggregateOperator
|
op v3.AggregateOperator
|
||||||
aggKey string
|
aggKey string
|
||||||
step int64
|
step int64
|
||||||
preferRPM bool
|
|
||||||
timeFilter string
|
timeFilter string
|
||||||
whereClause string
|
whereClause string
|
||||||
groupBy string
|
groupBy string
|
||||||
@@ -593,7 +592,6 @@ func Test_generateAggregateClause(t *testing.T) {
|
|||||||
op: v3.AggregateOperatorRate,
|
op: v3.AggregateOperatorRate,
|
||||||
aggKey: "test",
|
aggKey: "test",
|
||||||
step: 60,
|
step: 60,
|
||||||
preferRPM: false,
|
|
||||||
timeFilter: "(timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458)",
|
timeFilter: "(timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458)",
|
||||||
whereClause: " AND attributes_string['service.name'] = 'test'",
|
whereClause: " AND attributes_string['service.name'] = 'test'",
|
||||||
groupBy: " group by `user_name`",
|
groupBy: " group by `user_name`",
|
||||||
@@ -610,7 +608,6 @@ func Test_generateAggregateClause(t *testing.T) {
|
|||||||
op: v3.AggregateOperatorRate,
|
op: v3.AggregateOperatorRate,
|
||||||
aggKey: "test",
|
aggKey: "test",
|
||||||
step: 60,
|
step: 60,
|
||||||
preferRPM: false,
|
|
||||||
timeFilter: "(timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458)",
|
timeFilter: "(timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458)",
|
||||||
whereClause: " AND attributes_string['service.name'] = 'test'",
|
whereClause: " AND attributes_string['service.name'] = 'test'",
|
||||||
groupBy: " group by `user_name`",
|
groupBy: " group by `user_name`",
|
||||||
@@ -624,7 +621,7 @@ func Test_generateAggregateClause(t *testing.T) {
|
|||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
got, err := generateAggregateClause(tt.args.op, tt.args.aggKey, tt.args.step, tt.args.preferRPM, tt.args.timeFilter, tt.args.whereClause, tt.args.groupBy, tt.args.having, tt.args.orderBy)
|
got, err := generateAggregateClause(tt.args.op, tt.args.aggKey, tt.args.step, tt.args.timeFilter, tt.args.whereClause, tt.args.groupBy, tt.args.having, tt.args.orderBy)
|
||||||
if (err != nil) != tt.wantErr {
|
if (err != nil) != tt.wantErr {
|
||||||
t.Errorf("generateAggreagteClause() error = %v, wantErr %v", err, tt.wantErr)
|
t.Errorf("generateAggreagteClause() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
return
|
return
|
||||||
@@ -644,7 +641,6 @@ func Test_buildLogsQuery(t *testing.T) {
|
|||||||
step int64
|
step int64
|
||||||
mq *v3.BuilderQuery
|
mq *v3.BuilderQuery
|
||||||
graphLimitQtype string
|
graphLimitQtype string
|
||||||
preferRPM bool
|
|
||||||
}
|
}
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
@@ -789,7 +785,7 @@ func Test_buildLogsQuery(t *testing.T) {
|
|||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
got, err := buildLogsQuery(tt.args.panelType, tt.args.start, tt.args.end, tt.args.step, tt.args.mq, tt.args.graphLimitQtype, tt.args.preferRPM)
|
got, err := buildLogsQuery(tt.args.panelType, tt.args.start, tt.args.end, tt.args.step, tt.args.mq, tt.args.graphLimitQtype)
|
||||||
if (err != nil) != tt.wantErr {
|
if (err != nil) != tt.wantErr {
|
||||||
t.Errorf("buildLogsQuery() error = %v, wantErr %v", err, tt.wantErr)
|
t.Errorf("buildLogsQuery() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
return
|
return
|
||||||
@@ -877,7 +873,7 @@ func TestPrepareLogsQuery(t *testing.T) {
|
|||||||
Limit: 10,
|
Limit: 10,
|
||||||
GroupBy: []v3.AttributeKey{{Key: "user", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
|
GroupBy: []v3.AttributeKey{{Key: "user", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
|
||||||
},
|
},
|
||||||
options: v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: true},
|
options: v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit},
|
||||||
},
|
},
|
||||||
want: "SELECT `user` from (SELECT attributes_string['user'] as `user`, toFloat64(count(distinct(attributes_string['name']))) as value from signoz_logs.distributed_logs_v2 " +
|
want: "SELECT `user` from (SELECT attributes_string['user'] as `user`, toFloat64(count(distinct(attributes_string['name']))) as value from signoz_logs.distributed_logs_v2 " +
|
||||||
"where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND attributes_string['method'] = 'GET' " +
|
"where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND attributes_string['method'] = 'GET' " +
|
||||||
|
|||||||
@@ -14,9 +14,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Options struct {
|
type Options struct{}
|
||||||
PreferRPM bool
|
|
||||||
}
|
|
||||||
|
|
||||||
var aggregateOperatorToPercentile = map[v3.AggregateOperator]float64{
|
var aggregateOperatorToPercentile = map[v3.AggregateOperator]float64{
|
||||||
v3.AggregateOperatorP05: 0.05,
|
v3.AggregateOperatorP05: 0.05,
|
||||||
@@ -387,29 +385,10 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P
|
|||||||
query, err = buildMetricQuery(start, end, mq.StepInterval, mq)
|
query, err = buildMetricQuery(start, end, mq.StepInterval, mq)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
if options.PreferRPM && (mq.AggregateOperator == v3.AggregateOperatorRate ||
|
|
||||||
mq.AggregateOperator == v3.AggregateOperatorSumRate ||
|
|
||||||
mq.AggregateOperator == v3.AggregateOperatorAvgRate ||
|
|
||||||
mq.AggregateOperator == v3.AggregateOperatorMaxRate ||
|
|
||||||
mq.AggregateOperator == v3.AggregateOperatorMinRate ||
|
|
||||||
mq.AggregateOperator == v3.AggregateOperatorRateSum ||
|
|
||||||
mq.AggregateOperator == v3.AggregateOperatorRateAvg ||
|
|
||||||
mq.AggregateOperator == v3.AggregateOperatorRateMax ||
|
|
||||||
mq.AggregateOperator == v3.AggregateOperatorRateMin) {
|
|
||||||
var selectLabels string
|
|
||||||
if mq.AggregateOperator == v3.AggregateOperatorRate {
|
|
||||||
selectLabels = "fullLabels,"
|
|
||||||
} else {
|
|
||||||
selectLabels = groupSelectAttributeKeyTags(mq.GroupBy...)
|
|
||||||
}
|
|
||||||
query = `SELECT ` + selectLabels + ` ts, ceil(value * 60) as value FROM (` + query + `)`
|
|
||||||
}
|
|
||||||
|
|
||||||
if having(mq.Having) != "" {
|
if having(mq.Having) != "" {
|
||||||
query = fmt.Sprintf("SELECT * FROM (%s) HAVING %s", query, having(mq.Having))
|
query = fmt.Sprintf("SELECT * FROM (%s) HAVING %s", query, having(mq.Having))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ func TestBuildQuery(t *testing.T) {
|
|||||||
PanelType: v3.PanelTypeGraph,
|
PanelType: v3.PanelTypeGraph,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: false})
|
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{})
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Contains(t, query, "WHERE metric_name IN ['name']")
|
require.Contains(t, query, "WHERE metric_name IN ['name']")
|
||||||
})
|
})
|
||||||
@@ -55,7 +55,7 @@ func TestBuildQueryWithFilters(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: false})
|
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{})
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
require.Contains(t, query, "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'a') != 'b'")
|
require.Contains(t, query, "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'a') != 'b'")
|
||||||
@@ -94,7 +94,7 @@ func TestBuildQueryWithMultipleQueries(t *testing.T) {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: false})
|
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{})
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
require.Contains(t, query, "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'in') IN ['a','b','c']")
|
require.Contains(t, query, "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'in') IN ['a','b','c']")
|
||||||
@@ -148,60 +148,7 @@ func TestBuildQueryXRate(t *testing.T) {
|
|||||||
PanelType: v3.PanelTypeGraph,
|
PanelType: v3.PanelTypeGraph,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: false})
|
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{})
|
||||||
require.NoError(t, err)
|
|
||||||
require.Equal(t, query, c.expectedQuery)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestBuildQueryRPM(t *testing.T) {
|
|
||||||
t.Run("TestBuildQueryXRate", func(t *testing.T) {
|
|
||||||
|
|
||||||
tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['name'] AND temporality = '' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['name'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts)`
|
|
||||||
|
|
||||||
cases := []struct {
|
|
||||||
aggregateOperator v3.AggregateOperator
|
|
||||||
expectedQuery string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
aggregateOperator: v3.AggregateOperatorAvgRate,
|
|
||||||
expectedQuery: fmt.Sprintf(tmpl, aggregateOperatorToSQLFunc[v3.AggregateOperatorAvgRate]),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
aggregateOperator: v3.AggregateOperatorMaxRate,
|
|
||||||
expectedQuery: fmt.Sprintf(tmpl, aggregateOperatorToSQLFunc[v3.AggregateOperatorMaxRate]),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
aggregateOperator: v3.AggregateOperatorMinRate,
|
|
||||||
expectedQuery: fmt.Sprintf(tmpl, aggregateOperatorToSQLFunc[v3.AggregateOperatorMinRate]),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
aggregateOperator: v3.AggregateOperatorSumRate,
|
|
||||||
expectedQuery: fmt.Sprintf(tmpl, aggregateOperatorToSQLFunc[v3.AggregateOperatorSumRate]),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, c := range cases {
|
|
||||||
|
|
||||||
q := &v3.QueryRangeParamsV3{
|
|
||||||
Start: 1650991982000,
|
|
||||||
End: 1651078382000,
|
|
||||||
CompositeQuery: &v3.CompositeQuery{
|
|
||||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
|
||||||
"A": {
|
|
||||||
QueryName: "A",
|
|
||||||
StepInterval: 60,
|
|
||||||
AggregateAttribute: v3.AttributeKey{Key: "name"},
|
|
||||||
AggregateOperator: c.aggregateOperator,
|
|
||||||
Expression: "A",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
QueryType: v3.QueryTypeBuilder,
|
|
||||||
PanelType: v3.PanelTypeGraph,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: true})
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Equal(t, query, c.expectedQuery)
|
require.Equal(t, query, c.expectedQuery)
|
||||||
}
|
}
|
||||||
@@ -373,7 +320,7 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
|
|||||||
for _, testCase := range cases {
|
for _, testCase := range cases {
|
||||||
t.Run(testCase.name, func(t *testing.T) {
|
t.Run(testCase.name, func(t *testing.T) {
|
||||||
q := testCase.params
|
q := testCase.params
|
||||||
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: false})
|
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{})
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
require.Contains(t, query, testCase.expected)
|
require.Contains(t, query, testCase.expected)
|
||||||
@@ -533,7 +480,7 @@ func TestBuildQueryWithDotInMetricAndAttributes(t *testing.T) {
|
|||||||
for _, testCase := range cases {
|
for _, testCase := range cases {
|
||||||
t.Run(testCase.name, func(t *testing.T) {
|
t.Run(testCase.name, func(t *testing.T) {
|
||||||
q := testCase.params
|
q := testCase.params
|
||||||
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: false})
|
query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{})
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
require.Contains(t, query, testCase.expected)
|
require.Contains(t, query, testCase.expected)
|
||||||
|
|||||||
@@ -1,84 +0,0 @@
|
|||||||
package preferences
|
|
||||||
|
|
||||||
var preferenceMap = map[string]Preference{
|
|
||||||
"ORG_ONBOARDING": {
|
|
||||||
Key: "ORG_ONBOARDING",
|
|
||||||
Name: "Organisation Onboarding",
|
|
||||||
Description: "Organisation Onboarding",
|
|
||||||
ValueType: "boolean",
|
|
||||||
DefaultValue: false,
|
|
||||||
AllowedValues: []interface{}{true, false},
|
|
||||||
IsDiscreteValues: true,
|
|
||||||
AllowedScopes: []string{"org"},
|
|
||||||
},
|
|
||||||
"WELCOME_CHECKLIST_DO_LATER": {
|
|
||||||
Key: "WELCOME_CHECKLIST_DO_LATER",
|
|
||||||
Name: "Welcome Checklist Do Later",
|
|
||||||
Description: "Welcome Checklist Do Later",
|
|
||||||
ValueType: "boolean",
|
|
||||||
DefaultValue: false,
|
|
||||||
AllowedValues: []interface{}{true, false},
|
|
||||||
IsDiscreteValues: true,
|
|
||||||
AllowedScopes: []string{"user"},
|
|
||||||
},
|
|
||||||
"WELCOME_CHECKLIST_SEND_LOGS_SKIPPED": {
|
|
||||||
Key: "WELCOME_CHECKLIST_SEND_LOGS_SKIPPED",
|
|
||||||
Name: "Welcome Checklist Send Logs Skipped",
|
|
||||||
Description: "Welcome Checklist Send Logs Skipped",
|
|
||||||
ValueType: "boolean",
|
|
||||||
DefaultValue: false,
|
|
||||||
AllowedValues: []interface{}{true, false},
|
|
||||||
IsDiscreteValues: true,
|
|
||||||
AllowedScopes: []string{"user"},
|
|
||||||
},
|
|
||||||
"WELCOME_CHECKLIST_SEND_TRACES_SKIPPED": {
|
|
||||||
Key: "WELCOME_CHECKLIST_SEND_TRACES_SKIPPED",
|
|
||||||
Name: "Welcome Checklist Send Traces Skipped",
|
|
||||||
Description: "Welcome Checklist Send Traces Skipped",
|
|
||||||
ValueType: "boolean",
|
|
||||||
DefaultValue: false,
|
|
||||||
AllowedValues: []interface{}{true, false},
|
|
||||||
IsDiscreteValues: true,
|
|
||||||
AllowedScopes: []string{"user"},
|
|
||||||
},
|
|
||||||
"WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED": {
|
|
||||||
Key: "WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED",
|
|
||||||
Name: "Welcome Checklist Send Infra Metrics Skipped",
|
|
||||||
Description: "Welcome Checklist Send Infra Metrics Skipped",
|
|
||||||
ValueType: "boolean",
|
|
||||||
DefaultValue: false,
|
|
||||||
AllowedValues: []interface{}{true, false},
|
|
||||||
IsDiscreteValues: true,
|
|
||||||
AllowedScopes: []string{"user"},
|
|
||||||
},
|
|
||||||
"WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED": {
|
|
||||||
Key: "WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED",
|
|
||||||
Name: "Welcome Checklist Setup Dashboards Skipped",
|
|
||||||
Description: "Welcome Checklist Setup Dashboards Skipped",
|
|
||||||
ValueType: "boolean",
|
|
||||||
DefaultValue: false,
|
|
||||||
AllowedValues: []interface{}{true, false},
|
|
||||||
IsDiscreteValues: true,
|
|
||||||
AllowedScopes: []string{"user"},
|
|
||||||
},
|
|
||||||
"WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED": {
|
|
||||||
Key: "WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED",
|
|
||||||
Name: "Welcome Checklist Setup Alerts Skipped",
|
|
||||||
Description: "Welcome Checklist Setup Alerts Skipped",
|
|
||||||
ValueType: "boolean",
|
|
||||||
DefaultValue: false,
|
|
||||||
AllowedValues: []interface{}{true, false},
|
|
||||||
IsDiscreteValues: true,
|
|
||||||
AllowedScopes: []string{"user"},
|
|
||||||
},
|
|
||||||
"WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED": {
|
|
||||||
Key: "WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED",
|
|
||||||
Name: "Welcome Checklist Setup Saved View Skipped",
|
|
||||||
Description: "Welcome Checklist Setup Saved View Skipped",
|
|
||||||
ValueType: "boolean",
|
|
||||||
DefaultValue: false,
|
|
||||||
AllowedValues: []interface{}{true, false},
|
|
||||||
IsDiscreteValues: true,
|
|
||||||
AllowedScopes: []string{"user"},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,500 +0,0 @@
|
|||||||
package preferences
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"database/sql"
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
|
||||||
"github.com/jmoiron/sqlx"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Range struct {
|
|
||||||
Min int64 `json:"min"`
|
|
||||||
Max int64 `json:"max"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Preference struct {
|
|
||||||
Key string `json:"key"`
|
|
||||||
Name string `json:"name"`
|
|
||||||
Description string `json:"description"`
|
|
||||||
ValueType string `json:"valueType"`
|
|
||||||
DefaultValue interface{} `json:"defaultValue"`
|
|
||||||
AllowedValues []interface{} `json:"allowedValues"`
|
|
||||||
IsDiscreteValues bool `json:"isDiscreteValues"`
|
|
||||||
Range Range `json:"range"`
|
|
||||||
AllowedScopes []string `json:"allowedScopes"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Preference) ErrorValueTypeMismatch() *model.ApiError {
|
|
||||||
return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not of expected type: %s", p.ValueType)}
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
|
||||||
PreferenceValueTypeInteger string = "integer"
|
|
||||||
PreferenceValueTypeFloat string = "float"
|
|
||||||
PreferenceValueTypeString string = "string"
|
|
||||||
PreferenceValueTypeBoolean string = "boolean"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
OrgAllowedScope string = "org"
|
|
||||||
UserAllowedScope string = "user"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (p *Preference) checkIfInAllowedValues(preferenceValue interface{}) (bool, *model.ApiError) {
|
|
||||||
|
|
||||||
switch p.ValueType {
|
|
||||||
case PreferenceValueTypeInteger:
|
|
||||||
_, ok := preferenceValue.(int64)
|
|
||||||
if !ok {
|
|
||||||
return false, p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeFloat:
|
|
||||||
_, ok := preferenceValue.(float64)
|
|
||||||
if !ok {
|
|
||||||
return false, p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeString:
|
|
||||||
_, ok := preferenceValue.(string)
|
|
||||||
if !ok {
|
|
||||||
return false, p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeBoolean:
|
|
||||||
_, ok := preferenceValue.(bool)
|
|
||||||
if !ok {
|
|
||||||
return false, p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
isInAllowedValues := false
|
|
||||||
for _, value := range p.AllowedValues {
|
|
||||||
switch p.ValueType {
|
|
||||||
case PreferenceValueTypeInteger:
|
|
||||||
allowedValue, ok := value.(int64)
|
|
||||||
if !ok {
|
|
||||||
return false, p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
|
|
||||||
if allowedValue == preferenceValue {
|
|
||||||
isInAllowedValues = true
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeFloat:
|
|
||||||
allowedValue, ok := value.(float64)
|
|
||||||
if !ok {
|
|
||||||
return false, p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
|
|
||||||
if allowedValue == preferenceValue {
|
|
||||||
isInAllowedValues = true
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeString:
|
|
||||||
allowedValue, ok := value.(string)
|
|
||||||
if !ok {
|
|
||||||
return false, p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
|
|
||||||
if allowedValue == preferenceValue {
|
|
||||||
isInAllowedValues = true
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeBoolean:
|
|
||||||
allowedValue, ok := value.(bool)
|
|
||||||
if !ok {
|
|
||||||
return false, p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
|
|
||||||
if allowedValue == preferenceValue {
|
|
||||||
isInAllowedValues = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return isInAllowedValues, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Preference) IsValidValue(preferenceValue interface{}) *model.ApiError {
|
|
||||||
typeSafeValue := preferenceValue
|
|
||||||
switch p.ValueType {
|
|
||||||
case PreferenceValueTypeInteger:
|
|
||||||
val, ok := preferenceValue.(int64)
|
|
||||||
if !ok {
|
|
||||||
floatVal, ok := preferenceValue.(float64)
|
|
||||||
if !ok || floatVal != float64(int64(floatVal)) {
|
|
||||||
return p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
val = int64(floatVal)
|
|
||||||
typeSafeValue = val
|
|
||||||
}
|
|
||||||
if !p.IsDiscreteValues {
|
|
||||||
if val < p.Range.Min || val > p.Range.Max {
|
|
||||||
return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not in the range specified, min: %v , max:%v", p.Range.Min, p.Range.Max)}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeString:
|
|
||||||
_, ok := preferenceValue.(string)
|
|
||||||
if !ok {
|
|
||||||
return p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeFloat:
|
|
||||||
_, ok := preferenceValue.(float64)
|
|
||||||
if !ok {
|
|
||||||
return p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
case PreferenceValueTypeBoolean:
|
|
||||||
_, ok := preferenceValue.(bool)
|
|
||||||
if !ok {
|
|
||||||
return p.ErrorValueTypeMismatch()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// check the validity of the value being part of allowed values or the range specified if any
|
|
||||||
if p.IsDiscreteValues {
|
|
||||||
if p.AllowedValues != nil {
|
|
||||||
isInAllowedValues, valueMisMatchErr := p.checkIfInAllowedValues(typeSafeValue)
|
|
||||||
|
|
||||||
if valueMisMatchErr != nil {
|
|
||||||
return valueMisMatchErr
|
|
||||||
}
|
|
||||||
if !isInAllowedValues {
|
|
||||||
return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not in the list of allowedValues: %v", p.AllowedValues)}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Preference) IsEnabledForScope(scope string) bool {
|
|
||||||
isPreferenceEnabledForGivenScope := false
|
|
||||||
if p.AllowedScopes != nil {
|
|
||||||
for _, allowedScope := range p.AllowedScopes {
|
|
||||||
if allowedScope == strings.ToLower(scope) {
|
|
||||||
isPreferenceEnabledForGivenScope = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return isPreferenceEnabledForGivenScope
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Preference) SanitizeValue(preferenceValue interface{}) interface{} {
|
|
||||||
switch p.ValueType {
|
|
||||||
case PreferenceValueTypeBoolean:
|
|
||||||
if preferenceValue == "1" || preferenceValue == true {
|
|
||||||
return true
|
|
||||||
} else {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return preferenceValue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type AllPreferences struct {
|
|
||||||
Preference
|
|
||||||
Value interface{} `json:"value"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type PreferenceKV struct {
|
|
||||||
PreferenceId string `json:"preference_id" db:"preference_id"`
|
|
||||||
PreferenceValue interface{} `json:"preference_value" db:"preference_value"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type UpdatePreference struct {
|
|
||||||
PreferenceValue interface{} `json:"preference_value"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var db *sqlx.DB
|
|
||||||
|
|
||||||
func InitDB(inputDB *sqlx.DB) error {
|
|
||||||
db = inputDB
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// org preference functions
|
|
||||||
func GetOrgPreference(ctx context.Context, preferenceId string, orgId string) (*PreferenceKV, *model.ApiError) {
|
|
||||||
// check if the preference key exists or not
|
|
||||||
preference, seen := preferenceMap[preferenceId]
|
|
||||||
if !seen {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// check if the preference is enabled for org scope or not
|
|
||||||
isPreferenceEnabled := preference.IsEnabledForScope(OrgAllowedScope)
|
|
||||||
if !isPreferenceEnabled {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at org scope: %s", preferenceId)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// fetch the value from the database
|
|
||||||
var orgPreference PreferenceKV
|
|
||||||
query := `SELECT preference_id , preference_value FROM org_preference WHERE preference_id=$1 AND org_id=$2;`
|
|
||||||
err := db.Get(&orgPreference, query, preferenceId, orgId)
|
|
||||||
|
|
||||||
// if the value doesn't exist in db then return the default value
|
|
||||||
if err != nil {
|
|
||||||
if err == sql.ErrNoRows {
|
|
||||||
return &PreferenceKV{
|
|
||||||
PreferenceId: preferenceId,
|
|
||||||
PreferenceValue: preference.DefaultValue,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in fetching the org preference: %s", err.Error())}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
// else return the value fetched from the org_preference table
|
|
||||||
return &PreferenceKV{
|
|
||||||
PreferenceId: preferenceId,
|
|
||||||
PreferenceValue: preference.SanitizeValue(orgPreference.PreferenceValue),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func UpdateOrgPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, orgId string) (*PreferenceKV, *model.ApiError) {
|
|
||||||
// check if the preference key exists or not
|
|
||||||
preference, seen := preferenceMap[preferenceId]
|
|
||||||
if !seen {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// check if the preference is enabled at org scope or not
|
|
||||||
isPreferenceEnabled := preference.IsEnabledForScope(OrgAllowedScope)
|
|
||||||
if !isPreferenceEnabled {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at org scope: %s", preferenceId)}
|
|
||||||
}
|
|
||||||
|
|
||||||
err := preference.IsValidValue(preferenceValue)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// update the values in the org_preference table and return the key and the value
|
|
||||||
query := `INSERT INTO org_preference(preference_id,preference_value,org_id) VALUES($1,$2,$3)
|
|
||||||
ON CONFLICT(preference_id,org_id) DO
|
|
||||||
UPDATE SET preference_value= $2 WHERE preference_id=$1 AND org_id=$3;`
|
|
||||||
|
|
||||||
_, dberr := db.Exec(query, preferenceId, preferenceValue, orgId)
|
|
||||||
|
|
||||||
if dberr != nil {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in setting the preference value: %s", dberr.Error())}
|
|
||||||
}
|
|
||||||
|
|
||||||
return &PreferenceKV{
|
|
||||||
PreferenceId: preferenceId,
|
|
||||||
PreferenceValue: preferenceValue,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetAllOrgPreferences(ctx context.Context, orgId string) (*[]AllPreferences, *model.ApiError) {
|
|
||||||
// filter out all the org enabled preferences from the preference variable
|
|
||||||
allOrgPreferences := []AllPreferences{}
|
|
||||||
|
|
||||||
// fetch all the org preference values stored in org_preference table
|
|
||||||
orgPreferenceValues := []PreferenceKV{}
|
|
||||||
|
|
||||||
query := `SELECT preference_id,preference_value FROM org_preference WHERE org_id=$1;`
|
|
||||||
err := db.Select(&orgPreferenceValues, query, orgId)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all org preference values: %s", err)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// create a map of key vs values from the above response
|
|
||||||
preferenceValueMap := map[string]interface{}{}
|
|
||||||
|
|
||||||
for _, preferenceValue := range orgPreferenceValues {
|
|
||||||
preferenceValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
|
|
||||||
}
|
|
||||||
|
|
||||||
// update in the above filtered list wherver value present in the map
|
|
||||||
for _, preference := range preferenceMap {
|
|
||||||
isEnabledForOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
|
|
||||||
if isEnabledForOrgScope {
|
|
||||||
preferenceWithValue := AllPreferences{}
|
|
||||||
preferenceWithValue.Key = preference.Key
|
|
||||||
preferenceWithValue.Name = preference.Name
|
|
||||||
preferenceWithValue.Description = preference.Description
|
|
||||||
preferenceWithValue.AllowedScopes = preference.AllowedScopes
|
|
||||||
preferenceWithValue.AllowedValues = preference.AllowedValues
|
|
||||||
preferenceWithValue.DefaultValue = preference.DefaultValue
|
|
||||||
preferenceWithValue.Range = preference.Range
|
|
||||||
preferenceWithValue.ValueType = preference.ValueType
|
|
||||||
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
|
|
||||||
value, seen := preferenceValueMap[preference.Key]
|
|
||||||
|
|
||||||
if seen {
|
|
||||||
preferenceWithValue.Value = value
|
|
||||||
} else {
|
|
||||||
preferenceWithValue.Value = preference.DefaultValue
|
|
||||||
}
|
|
||||||
|
|
||||||
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
|
||||||
allOrgPreferences = append(allOrgPreferences, preferenceWithValue)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return &allOrgPreferences, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// user preference functions
|
|
||||||
func GetUserPreference(ctx context.Context, preferenceId string, orgId string, userId string) (*PreferenceKV, *model.ApiError) {
|
|
||||||
// check if the preference key exists
|
|
||||||
preference, seen := preferenceMap[preferenceId]
|
|
||||||
if !seen {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
|
|
||||||
}
|
|
||||||
|
|
||||||
preferenceValue := PreferenceKV{
|
|
||||||
PreferenceId: preferenceId,
|
|
||||||
PreferenceValue: preference.DefaultValue,
|
|
||||||
}
|
|
||||||
|
|
||||||
// check if the preference is enabled at user scope
|
|
||||||
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(UserAllowedScope)
|
|
||||||
if !isPreferenceEnabledAtUserScope {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at user scope: %s", preferenceId)}
|
|
||||||
}
|
|
||||||
|
|
||||||
isPreferenceEnabledAtOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
|
|
||||||
// get the value from the org scope if enabled at org scope
|
|
||||||
if isPreferenceEnabledAtOrgScope {
|
|
||||||
orgPreference := PreferenceKV{}
|
|
||||||
|
|
||||||
query := `SELECT preference_id , preference_value FROM org_preference WHERE preference_id=$1 AND org_id=$2;`
|
|
||||||
|
|
||||||
err := db.Get(&orgPreference, query, preferenceId, orgId)
|
|
||||||
|
|
||||||
// if there is error in getting values and its not an empty rows error return from here
|
|
||||||
if err != nil && err != sql.ErrNoRows {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting org preference values: %s", err.Error())}
|
|
||||||
}
|
|
||||||
|
|
||||||
// if there is no error update the preference value with value from org preference
|
|
||||||
if err == nil {
|
|
||||||
preferenceValue.PreferenceValue = orgPreference.PreferenceValue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// get the value from the user_preference table, if exists return this value else the one calculated in the above step
|
|
||||||
userPreference := PreferenceKV{}
|
|
||||||
|
|
||||||
query := `SELECT preference_id, preference_value FROM user_preference WHERE preference_id=$1 AND user_id=$2;`
|
|
||||||
err := db.Get(&userPreference, query, preferenceId, userId)
|
|
||||||
|
|
||||||
if err != nil && err != sql.ErrNoRows {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting user preference values: %s", err.Error())}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err == nil {
|
|
||||||
preferenceValue.PreferenceValue = userPreference.PreferenceValue
|
|
||||||
}
|
|
||||||
|
|
||||||
return &PreferenceKV{
|
|
||||||
PreferenceId: preferenceValue.PreferenceId,
|
|
||||||
PreferenceValue: preference.SanitizeValue(preferenceValue.PreferenceValue),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func UpdateUserPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, userId string) (*PreferenceKV, *model.ApiError) {
|
|
||||||
// check if the preference id is valid
|
|
||||||
preference, seen := preferenceMap[preferenceId]
|
|
||||||
if !seen {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// check if the preference is enabled at user scope
|
|
||||||
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(UserAllowedScope)
|
|
||||||
if !isPreferenceEnabledAtUserScope {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at user scope: %s", preferenceId)}
|
|
||||||
}
|
|
||||||
|
|
||||||
err := preference.IsValidValue(preferenceValue)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// update the user preference values
|
|
||||||
query := `INSERT INTO user_preference(preference_id,preference_value,user_id) VALUES($1,$2,$3)
|
|
||||||
ON CONFLICT(preference_id,user_id) DO
|
|
||||||
UPDATE SET preference_value= $2 WHERE preference_id=$1 AND user_id=$3;`
|
|
||||||
|
|
||||||
_, dberrr := db.Exec(query, preferenceId, preferenceValue, userId)
|
|
||||||
|
|
||||||
if dberrr != nil {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in setting the preference value: %s", dberrr.Error())}
|
|
||||||
}
|
|
||||||
|
|
||||||
return &PreferenceKV{
|
|
||||||
PreferenceId: preferenceId,
|
|
||||||
PreferenceValue: preferenceValue,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetAllUserPreferences(ctx context.Context, orgId string, userId string) (*[]AllPreferences, *model.ApiError) {
|
|
||||||
allUserPreferences := []AllPreferences{}
|
|
||||||
|
|
||||||
// fetch all the org preference values stored in org_preference table
|
|
||||||
orgPreferenceValues := []PreferenceKV{}
|
|
||||||
|
|
||||||
query := `SELECT preference_id,preference_value FROM org_preference WHERE org_id=$1;`
|
|
||||||
err := db.Select(&orgPreferenceValues, query, orgId)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all org preference values: %s", err)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// create a map of key vs values from the above response
|
|
||||||
preferenceOrgValueMap := map[string]interface{}{}
|
|
||||||
|
|
||||||
for _, preferenceValue := range orgPreferenceValues {
|
|
||||||
preferenceOrgValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
|
|
||||||
}
|
|
||||||
|
|
||||||
// fetch all the user preference values stored in user_preference table
|
|
||||||
userPreferenceValues := []PreferenceKV{}
|
|
||||||
|
|
||||||
query = `SELECT preference_id,preference_value FROM user_preference WHERE user_id=$1;`
|
|
||||||
err = db.Select(&userPreferenceValues, query, userId)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all user preference values: %s", err)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// create a map of key vs values from the above response
|
|
||||||
preferenceUserValueMap := map[string]interface{}{}
|
|
||||||
|
|
||||||
for _, preferenceValue := range userPreferenceValues {
|
|
||||||
preferenceUserValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
|
|
||||||
}
|
|
||||||
|
|
||||||
// update in the above filtered list wherver value present in the map
|
|
||||||
for _, preference := range preferenceMap {
|
|
||||||
isEnabledForUserScope := preference.IsEnabledForScope(UserAllowedScope)
|
|
||||||
|
|
||||||
if isEnabledForUserScope {
|
|
||||||
preferenceWithValue := AllPreferences{}
|
|
||||||
preferenceWithValue.Key = preference.Key
|
|
||||||
preferenceWithValue.Name = preference.Name
|
|
||||||
preferenceWithValue.Description = preference.Description
|
|
||||||
preferenceWithValue.AllowedScopes = preference.AllowedScopes
|
|
||||||
preferenceWithValue.AllowedValues = preference.AllowedValues
|
|
||||||
preferenceWithValue.DefaultValue = preference.DefaultValue
|
|
||||||
preferenceWithValue.Range = preference.Range
|
|
||||||
preferenceWithValue.ValueType = preference.ValueType
|
|
||||||
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
|
|
||||||
preferenceWithValue.Value = preference.DefaultValue
|
|
||||||
|
|
||||||
isEnabledForOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
|
|
||||||
if isEnabledForOrgScope {
|
|
||||||
value, seen := preferenceOrgValueMap[preference.Key]
|
|
||||||
if seen {
|
|
||||||
preferenceWithValue.Value = value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
value, seen := preferenceUserValueMap[preference.Key]
|
|
||||||
|
|
||||||
if seen {
|
|
||||||
preferenceWithValue.Value = value
|
|
||||||
}
|
|
||||||
|
|
||||||
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
|
||||||
allUserPreferences = append(allUserPreferences, preferenceWithValue)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return &allUserPreferences, nil
|
|
||||||
}
|
|
||||||
@@ -25,7 +25,6 @@ func prepareLogsQuery(_ context.Context,
|
|||||||
end int64,
|
end int64,
|
||||||
builderQuery *v3.BuilderQuery,
|
builderQuery *v3.BuilderQuery,
|
||||||
params *v3.QueryRangeParamsV3,
|
params *v3.QueryRangeParamsV3,
|
||||||
preferRPM bool,
|
|
||||||
) (string, error) {
|
) (string, error) {
|
||||||
query := ""
|
query := ""
|
||||||
|
|
||||||
@@ -46,7 +45,7 @@ func prepareLogsQuery(_ context.Context,
|
|||||||
params.CompositeQuery.QueryType,
|
params.CompositeQuery.QueryType,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
|
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return query, err
|
return query, err
|
||||||
@@ -57,7 +56,7 @@ func prepareLogsQuery(_ context.Context,
|
|||||||
params.CompositeQuery.QueryType,
|
params.CompositeQuery.QueryType,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
|
v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return query, err
|
return query, err
|
||||||
@@ -72,7 +71,7 @@ func prepareLogsQuery(_ context.Context,
|
|||||||
params.CompositeQuery.QueryType,
|
params.CompositeQuery.QueryType,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{PreferRPM: preferRPM},
|
v3.QBOptions{},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return query, err
|
return query, err
|
||||||
@@ -91,12 +90,6 @@ func (q *querier) runBuilderQuery(
|
|||||||
defer wg.Done()
|
defer wg.Done()
|
||||||
queryName := builderQuery.QueryName
|
queryName := builderQuery.QueryName
|
||||||
|
|
||||||
var preferRPM bool
|
|
||||||
|
|
||||||
if q.featureLookUp != nil {
|
|
||||||
preferRPM = q.featureLookUp.CheckFeature(constants.PreferRPM) == nil
|
|
||||||
}
|
|
||||||
|
|
||||||
start := params.Start
|
start := params.Start
|
||||||
end := params.End
|
end := params.End
|
||||||
if builderQuery.ShiftBy != 0 {
|
if builderQuery.ShiftBy != 0 {
|
||||||
@@ -109,7 +102,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
var err error
|
var err error
|
||||||
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
||||||
zap.L().Info("skipping cache for logs query", zap.String("queryName", queryName), zap.Int64("start", start), zap.Int64("end", end), zap.Int64("step", builderQuery.StepInterval), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
zap.L().Info("skipping cache for logs query", zap.String("queryName", queryName), zap.Int64("start", start), zap.Int64("end", end), zap.Int64("step", builderQuery.StepInterval), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
||||||
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, start, end, builderQuery, params, preferRPM)
|
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, start, end, builderQuery, params)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||||
return
|
return
|
||||||
@@ -124,7 +117,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||||
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
|
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
|
||||||
for _, miss := range misses {
|
for _, miss := range misses {
|
||||||
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, miss.Start, miss.End, builderQuery, params, preferRPM)
|
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, miss.Start, miss.End, builderQuery, params)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||||
return
|
return
|
||||||
@@ -191,7 +184,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
end,
|
end,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
|
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
|
||||||
@@ -202,7 +195,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
end,
|
end,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
|
v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
|
||||||
@@ -215,7 +208,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
end,
|
end,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{PreferRPM: preferRPM},
|
v3.QBOptions{},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||||
@@ -244,7 +237,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
// If the query is not cached, we execute the query and return the result without caching it.
|
// If the query is not cached, we execute the query and return the result without caching it.
|
||||||
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
||||||
zap.L().Info("skipping cache for metrics query", zap.String("queryName", queryName), zap.Int64("start", start), zap.Int64("end", end), zap.Int64("step", builderQuery.StepInterval), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
zap.L().Info("skipping cache for metrics query", zap.String("queryName", queryName), zap.Int64("start", start), zap.Int64("end", end), zap.Int64("step", builderQuery.StepInterval), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
||||||
query, err := metricsV3.PrepareMetricQuery(start, end, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{PreferRPM: preferRPM})
|
query, err := metricsV3.PrepareMetricQuery(start, end, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -42,8 +42,7 @@ type querier struct {
|
|||||||
|
|
||||||
fluxInterval time.Duration
|
fluxInterval time.Duration
|
||||||
|
|
||||||
builder *queryBuilder.QueryBuilder
|
builder *queryBuilder.QueryBuilder
|
||||||
featureLookUp interfaces.FeatureLookup
|
|
||||||
|
|
||||||
// used for testing
|
// used for testing
|
||||||
// TODO(srikanthccv): remove this once we have a proper mock
|
// TODO(srikanthccv): remove this once we have a proper mock
|
||||||
@@ -59,11 +58,10 @@ type querier struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type QuerierOptions struct {
|
type QuerierOptions struct {
|
||||||
Reader interfaces.Reader
|
Reader interfaces.Reader
|
||||||
Cache cache.Cache
|
Cache cache.Cache
|
||||||
KeyGenerator cache.KeyGenerator
|
KeyGenerator cache.KeyGenerator
|
||||||
FluxInterval time.Duration
|
FluxInterval time.Duration
|
||||||
FeatureLookup interfaces.FeatureLookup
|
|
||||||
|
|
||||||
// used for testing
|
// used for testing
|
||||||
TestingMode bool
|
TestingMode bool
|
||||||
@@ -96,8 +94,7 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
|
|||||||
BuildTraceQuery: tracesQueryBuilder,
|
BuildTraceQuery: tracesQueryBuilder,
|
||||||
BuildLogQuery: logsQueryBuilder,
|
BuildLogQuery: logsQueryBuilder,
|
||||||
BuildMetricQuery: metricsV3.PrepareMetricQuery,
|
BuildMetricQuery: metricsV3.PrepareMetricQuery,
|
||||||
}, opts.FeatureLookup),
|
}),
|
||||||
featureLookUp: opts.FeatureLookup,
|
|
||||||
|
|
||||||
testingMode: opts.TestingMode,
|
testingMode: opts.TestingMode,
|
||||||
returnedSeries: opts.ReturnedSeries,
|
returnedSeries: opts.ReturnedSeries,
|
||||||
|
|||||||
@@ -17,7 +17,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
|
"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/featureManager"
|
|
||||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||||
@@ -1370,7 +1369,6 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
|
|||||||
nil,
|
nil,
|
||||||
telemetryStore,
|
telemetryStore,
|
||||||
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
||||||
featureManager.StartManager(),
|
|
||||||
"",
|
"",
|
||||||
true,
|
true,
|
||||||
true,
|
true,
|
||||||
@@ -1384,7 +1382,6 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
|
|||||||
queryBuilder.QueryBuilderOptions{
|
queryBuilder.QueryBuilderOptions{
|
||||||
BuildTraceQuery: tracesV3.PrepareTracesQuery,
|
BuildTraceQuery: tracesV3.PrepareTracesQuery,
|
||||||
},
|
},
|
||||||
featureManager.StartManager(),
|
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
// Update query parameters
|
// Update query parameters
|
||||||
|
|||||||
@@ -25,7 +25,6 @@ func prepareLogsQuery(_ context.Context,
|
|||||||
end int64,
|
end int64,
|
||||||
builderQuery *v3.BuilderQuery,
|
builderQuery *v3.BuilderQuery,
|
||||||
params *v3.QueryRangeParamsV3,
|
params *v3.QueryRangeParamsV3,
|
||||||
preferRPM bool,
|
|
||||||
) (string, error) {
|
) (string, error) {
|
||||||
logsQueryBuilder := logsV3.PrepareLogsQuery
|
logsQueryBuilder := logsV3.PrepareLogsQuery
|
||||||
if useLogsNewSchema {
|
if useLogsNewSchema {
|
||||||
@@ -45,7 +44,7 @@ func prepareLogsQuery(_ context.Context,
|
|||||||
params.CompositeQuery.QueryType,
|
params.CompositeQuery.QueryType,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
|
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return query, err
|
return query, err
|
||||||
@@ -56,7 +55,7 @@ func prepareLogsQuery(_ context.Context,
|
|||||||
params.CompositeQuery.QueryType,
|
params.CompositeQuery.QueryType,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
|
v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return query, err
|
return query, err
|
||||||
@@ -71,7 +70,7 @@ func prepareLogsQuery(_ context.Context,
|
|||||||
params.CompositeQuery.QueryType,
|
params.CompositeQuery.QueryType,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{PreferRPM: preferRPM},
|
v3.QBOptions{},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return query, err
|
return query, err
|
||||||
@@ -89,13 +88,6 @@ func (q *querier) runBuilderQuery(
|
|||||||
) {
|
) {
|
||||||
defer wg.Done()
|
defer wg.Done()
|
||||||
queryName := builderQuery.QueryName
|
queryName := builderQuery.QueryName
|
||||||
|
|
||||||
var preferRPM bool
|
|
||||||
|
|
||||||
if q.featureLookUp != nil {
|
|
||||||
preferRPM = q.featureLookUp.CheckFeature(constants.PreferRPM) == nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// making a local clone since we should not update the global params if there is sift by
|
// making a local clone since we should not update the global params if there is sift by
|
||||||
start := params.Start
|
start := params.Start
|
||||||
end := params.End
|
end := params.End
|
||||||
@@ -110,7 +102,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
var err error
|
var err error
|
||||||
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
||||||
zap.L().Info("skipping cache for logs query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
zap.L().Info("skipping cache for logs query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
||||||
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, start, end, builderQuery, params, preferRPM)
|
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, start, end, builderQuery, params)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||||
return
|
return
|
||||||
@@ -124,7 +116,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||||
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
|
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
|
||||||
for _, miss := range misses {
|
for _, miss := range misses {
|
||||||
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, miss.Start, miss.End, builderQuery, params, preferRPM)
|
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, miss.Start, miss.End, builderQuery, params)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||||
return
|
return
|
||||||
@@ -192,7 +184,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
end,
|
end,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
|
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
|
||||||
@@ -203,7 +195,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
end,
|
end,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
|
v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
|
||||||
@@ -216,7 +208,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
end,
|
end,
|
||||||
params.CompositeQuery.PanelType,
|
params.CompositeQuery.PanelType,
|
||||||
builderQuery,
|
builderQuery,
|
||||||
v3.QBOptions{PreferRPM: preferRPM},
|
v3.QBOptions{},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||||
@@ -245,7 +237,7 @@ func (q *querier) runBuilderQuery(
|
|||||||
// If the query is not cached, we execute the query and return the result without caching it.
|
// If the query is not cached, we execute the query and return the result without caching it.
|
||||||
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
||||||
zap.L().Info("skipping cache for metrics query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
zap.L().Info("skipping cache for metrics query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
||||||
query, err := metricsV4.PrepareMetricQuery(start, end, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{PreferRPM: preferRPM})
|
query, err := metricsV4.PrepareMetricQuery(start, end, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -42,8 +42,7 @@ type querier struct {
|
|||||||
|
|
||||||
fluxInterval time.Duration
|
fluxInterval time.Duration
|
||||||
|
|
||||||
builder *queryBuilder.QueryBuilder
|
builder *queryBuilder.QueryBuilder
|
||||||
featureLookUp interfaces.FeatureLookup
|
|
||||||
|
|
||||||
// used for testing
|
// used for testing
|
||||||
// TODO(srikanthccv): remove this once we have a proper mock
|
// TODO(srikanthccv): remove this once we have a proper mock
|
||||||
@@ -58,11 +57,10 @@ type querier struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type QuerierOptions struct {
|
type QuerierOptions struct {
|
||||||
Reader interfaces.Reader
|
Reader interfaces.Reader
|
||||||
Cache cache.Cache
|
Cache cache.Cache
|
||||||
KeyGenerator cache.KeyGenerator
|
KeyGenerator cache.KeyGenerator
|
||||||
FluxInterval time.Duration
|
FluxInterval time.Duration
|
||||||
FeatureLookup interfaces.FeatureLookup
|
|
||||||
|
|
||||||
// used for testing
|
// used for testing
|
||||||
TestingMode bool
|
TestingMode bool
|
||||||
@@ -96,8 +94,7 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
|
|||||||
BuildTraceQuery: tracesQueryBuilder,
|
BuildTraceQuery: tracesQueryBuilder,
|
||||||
BuildLogQuery: logsQueryBuilder,
|
BuildLogQuery: logsQueryBuilder,
|
||||||
BuildMetricQuery: metricsV4.PrepareMetricQuery,
|
BuildMetricQuery: metricsV4.PrepareMetricQuery,
|
||||||
}, opts.FeatureLookup),
|
}),
|
||||||
featureLookUp: opts.FeatureLookup,
|
|
||||||
|
|
||||||
testingMode: opts.TestingMode,
|
testingMode: opts.TestingMode,
|
||||||
returnedSeries: opts.ReturnedSeries,
|
returnedSeries: opts.ReturnedSeries,
|
||||||
|
|||||||
@@ -17,7 +17,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
|
"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/featureManager"
|
|
||||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||||
@@ -1424,7 +1423,6 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
|
|||||||
nil,
|
nil,
|
||||||
telemetryStore,
|
telemetryStore,
|
||||||
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
||||||
featureManager.StartManager(),
|
|
||||||
"",
|
"",
|
||||||
true,
|
true,
|
||||||
true,
|
true,
|
||||||
@@ -1438,7 +1436,6 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
|
|||||||
queryBuilder.QueryBuilderOptions{
|
queryBuilder.QueryBuilderOptions{
|
||||||
BuildTraceQuery: tracesV3.PrepareTracesQuery,
|
BuildTraceQuery: tracesV3.PrepareTracesQuery,
|
||||||
},
|
},
|
||||||
featureManager.StartManager(),
|
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
// Update query parameters
|
// Update query parameters
|
||||||
|
|||||||
@@ -56,10 +56,9 @@ type QueryBuilderOptions struct {
|
|||||||
BuildMetricQuery prepareMetricQueryFunc
|
BuildMetricQuery prepareMetricQueryFunc
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewQueryBuilder(options QueryBuilderOptions, featureFlags interfaces.FeatureLookup) *QueryBuilder {
|
func NewQueryBuilder(options QueryBuilderOptions) *QueryBuilder {
|
||||||
return &QueryBuilder{
|
return &QueryBuilder{
|
||||||
options: options,
|
options: options,
|
||||||
featureFlags: featureFlags,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -179,8 +178,6 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3) (map[strin
|
|||||||
compositeQuery := params.CompositeQuery
|
compositeQuery := params.CompositeQuery
|
||||||
|
|
||||||
if compositeQuery != nil {
|
if compositeQuery != nil {
|
||||||
err := qb.featureFlags.CheckFeature(constants.PreferRPM)
|
|
||||||
PreferRPMFeatureEnabled := err == nil
|
|
||||||
// Build queries for each builder query
|
// Build queries for each builder query
|
||||||
for queryName, query := range compositeQuery.BuilderQueries {
|
for queryName, query := range compositeQuery.BuilderQueries {
|
||||||
// making a local clone since we should not update the global params if there is sift by
|
// making a local clone since we should not update the global params if there is sift by
|
||||||
@@ -196,12 +193,12 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3) (map[strin
|
|||||||
// for ts query with group by and limit form two queries
|
// for ts query with group by and limit form two queries
|
||||||
if compositeQuery.PanelType == v3.PanelTypeGraph && query.Limit > 0 && len(query.GroupBy) > 0 {
|
if compositeQuery.PanelType == v3.PanelTypeGraph && query.Limit > 0 && len(query.GroupBy) > 0 {
|
||||||
limitQuery, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType, query,
|
limitQuery, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType, query,
|
||||||
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
|
v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
placeholderQuery, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType,
|
placeholderQuery, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType,
|
||||||
query, v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
|
query, v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -209,7 +206,7 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3) (map[strin
|
|||||||
queries[queryName] = query
|
queries[queryName] = query
|
||||||
} else {
|
} else {
|
||||||
queryString, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType,
|
queryString, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType,
|
||||||
query, v3.QBOptions{PreferRPM: PreferRPMFeatureEnabled, GraphLimitQtype: ""})
|
query, v3.QBOptions{GraphLimitQtype: ""})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -218,25 +215,25 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3) (map[strin
|
|||||||
case v3.DataSourceLogs:
|
case v3.DataSourceLogs:
|
||||||
// for ts query with limit replace it as it is already formed
|
// for ts query with limit replace it as it is already formed
|
||||||
if compositeQuery.PanelType == v3.PanelTypeGraph && query.Limit > 0 && len(query.GroupBy) > 0 {
|
if compositeQuery.PanelType == v3.PanelTypeGraph && query.Limit > 0 && len(query.GroupBy) > 0 {
|
||||||
limitQuery, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
|
limitQuery, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, v3.QBOptions{GraphLimitQtype: constants.FirstQueryGraphLimit})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
placeholderQuery, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
|
placeholderQuery, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, v3.QBOptions{GraphLimitQtype: constants.SecondQueryGraphLimit})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
query := fmt.Sprintf(placeholderQuery, limitQuery)
|
query := fmt.Sprintf(placeholderQuery, limitQuery)
|
||||||
queries[queryName] = query
|
queries[queryName] = query
|
||||||
} else {
|
} else {
|
||||||
queryString, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, v3.QBOptions{PreferRPM: PreferRPMFeatureEnabled, GraphLimitQtype: ""})
|
queryString, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, v3.QBOptions{GraphLimitQtype: ""})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
queries[queryName] = queryString
|
queries[queryName] = queryString
|
||||||
}
|
}
|
||||||
case v3.DataSourceMetrics:
|
case v3.DataSourceMetrics:
|
||||||
queryString, err := qb.options.BuildMetricQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, metricsV3.Options{PreferRPM: PreferRPMFeatureEnabled})
|
queryString, err := qb.options.BuildMetricQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, metricsV3.Options{})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,7 +8,6 @@ import (
|
|||||||
logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
||||||
metricsv3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
metricsv3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/featureManager"
|
|
||||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
@@ -51,8 +50,7 @@ func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) {
|
|||||||
qbOptions := QueryBuilderOptions{
|
qbOptions := QueryBuilderOptions{
|
||||||
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
||||||
}
|
}
|
||||||
fm := featureManager.StartManager()
|
qb := NewQueryBuilder(qbOptions)
|
||||||
qb := NewQueryBuilder(qbOptions, fm)
|
|
||||||
|
|
||||||
queries, err := qb.PrepareQueries(q)
|
queries, err := qb.PrepareQueries(q)
|
||||||
|
|
||||||
@@ -93,8 +91,7 @@ func TestBuildQueryWithIncorrectQueryRef(t *testing.T) {
|
|||||||
qbOptions := QueryBuilderOptions{
|
qbOptions := QueryBuilderOptions{
|
||||||
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
||||||
}
|
}
|
||||||
fm := featureManager.StartManager()
|
qb := NewQueryBuilder(qbOptions)
|
||||||
qb := NewQueryBuilder(qbOptions, fm)
|
|
||||||
|
|
||||||
_, err := qb.PrepareQueries(q)
|
_, err := qb.PrepareQueries(q)
|
||||||
|
|
||||||
@@ -168,8 +165,7 @@ func TestBuildQueryWithThreeOrMoreQueriesRefAndFormula(t *testing.T) {
|
|||||||
qbOptions := QueryBuilderOptions{
|
qbOptions := QueryBuilderOptions{
|
||||||
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
||||||
}
|
}
|
||||||
fm := featureManager.StartManager()
|
qb := NewQueryBuilder(qbOptions)
|
||||||
qb := NewQueryBuilder(qbOptions, fm)
|
|
||||||
|
|
||||||
queries, err := qb.PrepareQueries(q)
|
queries, err := qb.PrepareQueries(q)
|
||||||
|
|
||||||
@@ -338,8 +334,7 @@ func TestBuildQueryWithThreeOrMoreQueriesRefAndFormula(t *testing.T) {
|
|||||||
qbOptions := QueryBuilderOptions{
|
qbOptions := QueryBuilderOptions{
|
||||||
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
||||||
}
|
}
|
||||||
fm := featureManager.StartManager()
|
qb := NewQueryBuilder(qbOptions)
|
||||||
qb := NewQueryBuilder(qbOptions, fm)
|
|
||||||
|
|
||||||
queries, err := qb.PrepareQueries(q)
|
queries, err := qb.PrepareQueries(q)
|
||||||
require.Contains(t, queries["F1"], "SELECT A.`os.type` as `os.type`, A.`ts` as `ts`, A.value + B.value as value FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036080000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) as A INNER JOIN (SELECT * FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.network.io'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system.network.io'] AND unix_milli >= 1735036020000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) HAVING value > 4) as B ON A.`os.type` = B.`os.type` AND A.`ts` = B.`ts`")
|
require.Contains(t, queries["F1"], "SELECT A.`os.type` as `os.type`, A.`ts` as `ts`, A.value + B.value as value FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036080000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) as A INNER JOIN (SELECT * FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.network.io'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system.network.io'] AND unix_milli >= 1735036020000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) HAVING value > 4) as B ON A.`os.type` = B.`os.type` AND A.`ts` = B.`ts`")
|
||||||
@@ -498,8 +493,7 @@ func TestDeltaQueryBuilder(t *testing.T) {
|
|||||||
qbOptions := QueryBuilderOptions{
|
qbOptions := QueryBuilderOptions{
|
||||||
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
||||||
}
|
}
|
||||||
fm := featureManager.StartManager()
|
qb := NewQueryBuilder(qbOptions)
|
||||||
qb := NewQueryBuilder(qbOptions, fm)
|
|
||||||
queries, err := qb.PrepareQueries(c.query)
|
queries, err := qb.PrepareQueries(c.query)
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
@@ -703,8 +697,7 @@ func TestLogsQueryWithFormula(t *testing.T) {
|
|||||||
qbOptions := QueryBuilderOptions{
|
qbOptions := QueryBuilderOptions{
|
||||||
BuildLogQuery: logsV3.PrepareLogsQuery,
|
BuildLogQuery: logsV3.PrepareLogsQuery,
|
||||||
}
|
}
|
||||||
fm := featureManager.StartManager()
|
qb := NewQueryBuilder(qbOptions)
|
||||||
qb := NewQueryBuilder(qbOptions, fm)
|
|
||||||
|
|
||||||
for _, test := range testLogsWithFormula {
|
for _, test := range testLogsWithFormula {
|
||||||
t.Run(test.Name, func(t *testing.T) {
|
t.Run(test.Name, func(t *testing.T) {
|
||||||
@@ -914,8 +907,7 @@ func TestLogsQueryWithFormulaV2(t *testing.T) {
|
|||||||
qbOptions := QueryBuilderOptions{
|
qbOptions := QueryBuilderOptions{
|
||||||
BuildLogQuery: logsV4.PrepareLogsQuery,
|
BuildLogQuery: logsV4.PrepareLogsQuery,
|
||||||
}
|
}
|
||||||
fm := featureManager.StartManager()
|
qb := NewQueryBuilder(qbOptions)
|
||||||
qb := NewQueryBuilder(qbOptions, fm)
|
|
||||||
|
|
||||||
for _, test := range testLogsWithFormulaV2 {
|
for _, test := range testLogsWithFormulaV2 {
|
||||||
t.Run(test.Name, func(t *testing.T) {
|
t.Run(test.Name, func(t *testing.T) {
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user