Compare commits
39 Commits
v0.78.0-rc...v0.80.0-cl
| SHA1 |
|---|
| 19372c8194 |
| eb74adad44 |
| d5c04e1342 |
| 2b9632c8fd |
| 24920ae903 |
| 6f096632a2 |
| a42eacec4b |
| e723399f7f |
| 48936bed9b |
| ee70474cc7 |
| c3fa7144ee |
| 5dd02a5b8e |
| c0f01e4cb9 |
| fed84cb50a |
| 80545c4d07 |
| 0b1faec092 |
| ba6f31b1c3 |
| eed92978a4 |
| 41cbd316b5 |
| 8d7d33393d |
| 8d143b44b1 |
| 423aebd6eb |
| 8d630707af |
| a5b52431b7 |
| 0138d757c8 |
| 844195b84f |
| 8ff05b2e8f |
| c8c56c544e |
| 1c43655336 |
| c269c8c6b8 |
| 3142b6cc6d |
| 58e141685a |
| e17f63a50c |
| 838ef5dcc5 |
| e53d3d1269 |
| 2330420c0d |
| 65ac277074 |
| b7982ca348 |
| 2748b49a44 |
@@ -1,6 +1,7 @@
 .git
 .github
 .vscode
+.devenv
 README.md
 deploy
 sample-apps
81 .github/workflows/build-community.yaml (vendored, new file)
@@ -0,0 +1,81 @@
name: build-community

on:
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'

defaults:
  run:
    shell: bash

env:
  PRIMUS_HOME: .primus
  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk

jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.build-info.outputs.version }}
      hash: ${{ steps.build-info.outputs.hash }}
      time: ${{ steps.build-info.outputs.time }}
      branch: ${{ steps.build-info.outputs.branch }}
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - id: token
        name: github-token-gen
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PRIMUS_APP_ID }}
          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - name: primus-checkout
        uses: actions/checkout@v4
        with:
          repository: signoz/primus
          ref: main
          path: .primus
          token: ${{ steps.token.outputs.token }}
      - name: build-info
        id: build-info
        run: |
          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
  js-build:
    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
    needs: prepare
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
      JS_OUTPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
    needs: [prepare, js-build]
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_NAME: signoz-community
      GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./pkg/query-service
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=community
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
      DOCKER_PROVIDERS: dockerhub
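
Each `-X` pair in the linker flags above stamps a package-level string in `github.com/SigNoz/signoz/pkg/version` at link time; the community and enterprise workflows differ mainly in which values get stamped. A minimal sketch of what such a package could look like, assuming unexported variables with these exact names and an illustrative accessor (the real package's API may differ):

```go
// Hypothetical sketch of pkg/version: -X can only set package-level
// string variables, so each ldflags target must exist with that name.
package version

import "fmt"

// Defaults apply when the binary is built without the ldflags.
var (
	version = "dev"
	variant = "community"
	hash    = "unknown"
	time    = "unknown"
	branch  = "unknown"
)

// Info returns a printable summary of the stamped build metadata.
func Info() string {
	return fmt.Sprintf("%s (%s) commit=%s built=%s branch=%s",
		version, variant, hash, time, branch)
}
```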
113 .github/workflows/build-enterprise.yaml (vendored, new file)
@@ -0,0 +1,113 @@
name: build-enterprise

on:
  push:
    tags:
      - v*

defaults:
  run:
    shell: bash

env:
  PRIMUS_HOME: .primus
  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk

jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
      version: ${{ steps.build-info.outputs.version }}
      hash: ${{ steps.build-info.outputs.hash }}
      time: ${{ steps.build-info.outputs.time }}
      branch: ${{ steps.build-info.outputs.branch }}
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - id: token
        name: github-token-gen
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PRIMUS_APP_ID }}
          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - name: primus-checkout
        uses: actions/checkout@v4
        with:
          repository: signoz/primus
          ref: main
          path: .primus
          token: ${{ steps.token.outputs.token }}
      - name: build-info
        id: build-info
        run: |
          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
      - name: set-docker-providers
        id: set-docker-providers
        run: |
          if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
            echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
          else
            echo "providers=gcp" >> $GITHUB_OUTPUT
          fi
      - name: create-dotenv
        run: |
          mkdir -p frontend
          echo 'CI=1' > frontend/.env
          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
          path: frontend/.env
          key: enterprise-dotenv-${{ github.sha }}
  js-build:
    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
    needs: prepare
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
      JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
      JS_INPUT_ARTIFACT_PATH: frontend/.env
      JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
    needs: [prepare, js-build]
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./ee/query-service
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
      DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}
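
In the `set-docker-providers` step above, Docker Hub is added only for stable `vX.Y.Z` or release-candidate `vX.Y.Z-rc.N` tags; every other ref stays GCP-only. A hedged Go equivalent of that branch, handy for checking a tag locally (the function name is illustrative):

```go
package main

import (
	"fmt"
	"regexp"
)

// Stable (vX.Y.Z) and release-candidate (vX.Y.Z-rc.N) tag patterns,
// mirroring the two bash regexes in the set-docker-providers step.
var releaseTag = regexp.MustCompile(`^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+(-rc\.[0-9]+)?$`)

// providersFor returns the Docker providers for a given git ref.
func providersFor(ref string) string {
	if releaseTag.MatchString(ref) {
		return "dockerhub gcp"
	}
	return "gcp"
}

func main() {
	fmt.Println(providersFor("refs/tags/v0.80.0"))      // dockerhub gcp
	fmt.Println(providersFor("refs/tags/v0.80.0-rc.1")) // dockerhub gcp
	fmt.Println(providersFor("refs/heads/main"))        // gcp
}
```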
122 .github/workflows/build-staging.yaml (vendored, new file)
@@ -0,0 +1,122 @@
name: build-staging

on:
  push:
    branches:
      - main
  pull_request:
    types: [labeled]

defaults:
  run:
    shell: bash

env:
  PRIMUS_HOME: .primus
  MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk

jobs:
  prepare:
    runs-on: ubuntu-latest
    if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
    outputs:
      version: ${{ steps.build-info.outputs.version }}
      hash: ${{ steps.build-info.outputs.hash }}
      time: ${{ steps.build-info.outputs.time }}
      branch: ${{ steps.build-info.outputs.branch }}
      deployment: ${{ steps.build-info.outputs.deployment }}
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - id: token
        name: github-token-gen
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PRIMUS_APP_ID }}
          private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - name: primus-checkout
        uses: actions/checkout@v4
        with:
          repository: signoz/primus
          ref: main
          path: .primus
          token: ${{ steps.token.outputs.token }}
      - name: build-info
        id: build-info
        run: |
          echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
          echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
          echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
          echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT

          staging_label="${{ github.event.label.name }}"
          if [[ "${staging_label}" == "staging:"* ]]; then
            deployment=${staging_label#"staging:"}
          elif [[ "${{ github.event.ref }}" == "refs/heads/main" ]]; then
            deployment="staging"
          else
            echo "error: not able to determine deployment - please verify the PR label or the branch"
            exit 1
          fi
          echo "deployment=${deployment}" >> $GITHUB_OUTPUT
      - name: create-dotenv
        run: |
          mkdir -p frontend
          echo 'CI=1' > frontend/.env
          echo 'TUNNEL_URL=https://telemetry.staging.signoz.cloud/tunnel' >> frontend/.env
          echo 'TUNNEL_DOMAIN=https://telemetry.staging.signoz.cloud' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
          path: frontend/.env
          key: staging-dotenv-${{ github.sha }}
  js-build:
    uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
    needs: prepare
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
      JS_INPUT_ARTIFACT_CACHE_KEY: staging-dotenv-${{ github.sha }}
      JS_INPUT_ARTIFACT_PATH: frontend/.env
      JS_OUTPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
    uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
    needs: [prepare, js-build]
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./ee/query-service
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
      DOCKER_PROVIDERS: gcp
  staging:
    if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
    uses: signoz/primus.workflows/.github/workflows/github-trigger.yaml@main
    secrets: inherit
    needs: [prepare, go-build]
    with:
      PRIMUS_REF: main
      GITHUB_ENVIRONMENT: staging
      GITHUB_SILENT: true
      GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
      GITHUB_EVENT_NAME: releaser
      GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"
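
The `build-info` step above derives the deployment target either from a `staging:<name>` PR label or, on pushes to `main`, falls back to `staging`; anything else fails the job. The same decision, sketched in Go (function and error text are illustrative):

```go
package main

import (
	"errors"
	"fmt"
	"strings"
)

// deploymentFor mirrors the shell logic in build-info: a "staging:<name>"
// label wins, a push to main falls back to "staging", anything else fails.
func deploymentFor(label, ref string) (string, error) {
	if strings.HasPrefix(label, "staging:") {
		return strings.TrimPrefix(label, "staging:"), nil
	}
	if ref == "refs/heads/main" {
		return "staging", nil
	}
	return "", errors.New("not able to determine deployment")
}

func main() {
	d, _ := deploymentFor("staging:eu-test", "")
	fmt.Println(d) // eu-test
}
```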
122 .github/workflows/build.yaml (vendored, deleted file)
@@ -1,122 +0,0 @@
name: build

on:
  push:
    branches:
      - main
    tags:
      - v*

jobs:
  enterprise:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: setup
        uses: actions/setup-go@v5
        with:
          go-version: "1.22"
      - name: setup-qemu
        uses: docker/setup-qemu-action@v3
      - name: setup-buildx
        uses: docker/setup-buildx-action@v3
        with:
          version: latest
      - name: docker-login
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: create-env-file
        run: |
          echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
          echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
          echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
          echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
          echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
          echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
          echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
          echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
          echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
          echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
      - name: github-ref-info
        shell: bash
        run: |
          GH_REF=${{ github.ref }}
          if [[ "${{ github.ref_type }}" == "tag" ]]; then
            PREFIX="refs/tags/"
            echo "GH_IS_TAG=true" >> $GITHUB_ENV
            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          else
            PREFIX="refs/heads/"
            echo "GH_IS_TAG=false" >> $GITHUB_ENV
            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          fi
      - name: set-version
        run: |
          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
            echo "VERSION=latest" >> $GITHUB_ENV
          else
            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
          fi
      - name: cross-compilation-tools
        run: |
          set -ex
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
      - name: publish
        run: make docker-buildx-enterprise

  community:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: setup-go
        uses: actions/setup-go@v5
        with:
          go-version: "1.22"
      - name: setup-qemu
        uses: docker/setup-qemu-action@v3
      - name: setup-buildx
        uses: docker/setup-buildx-action@v3
        with:
          version: latest
      - name: docker-login
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: github-ref-info
        shell: bash
        run: |
          GH_REF=${{ github.ref }}
          if [[ "${{ github.ref_type }}" == "tag" ]]; then
            PREFIX="refs/tags/"
            echo "GH_IS_TAG=true" >> $GITHUB_ENV
            echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          else
            PREFIX="refs/heads/"
            echo "GH_IS_TAG=false" >> $GITHUB_ENV
            echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
          fi
      - name: set-version
        run: |
          if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
            echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
          elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
            echo "VERSION=latest" >> $GITHUB_ENV
          else
            echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
          fi
      - name: cross-compilation-tools
        run: |
          set -ex
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
      - name: publish
        run: make docker-buildx-community
55 .github/workflows/integrationci.yaml (vendored, new file)
@@ -0,0 +1,55 @@
name: integrationci

on:
  pull_request:
    types:
      - labeled
  pull_request_target:
    types:
      - labeled

jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        src:
          - bootstrap
        sqlstore-provider:
          - postgres
          - sqlite
        clickhouse-version:
          - 24.1.2-alpine
          - 24.12-alpine
        schema-migrator-version:
          - v0.111.38
        postgres-version:
          - 15
    if: |
      ((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: python
        uses: actions/setup-python@v5
        with:
          python-version: 3.13
      - name: poetry
        run: |
          python -m pip install poetry==2.1.2
          python -m poetry config virtualenvs.in-project true
          cd tests/integration && poetry install --no-root
      - name: run
        run: |
          cd tests/integration && \
          poetry run pytest -ra \
            --basetemp=./tmp/ \
            -vv \
            --capture=no \
            src/${{matrix.src}} \
            --sqlstore-provider ${{matrix.sqlstore-provider}} \
            --postgres-version ${{matrix.postgres-version}} \
            --clickhouse-version ${{matrix.clickhouse-version}} \
            --schema-migrator-version ${{matrix.schema-migrator-version}}
4 .github/workflows/prereleaser.yaml (vendored)
@@ -1,9 +1,9 @@
 name: prereleaser
 
 on:
-  # schedule every wednesday 9:30 AM UTC (3pm IST)
+  # schedule every wednesday 6:30 AM UTC (12:00 PM IST)
   schedule:
-    - cron: '30 9 * * 3'
+    - cron: '30 6 * * 3'
 
   # allow manual triggering of the workflow by a maintainer
   workflow_dispatch:
13 .github/workflows/staging-deployment.yaml (vendored)
@@ -36,12 +36,17 @@ jobs:
           echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
           echo "GITHUB_SHA: ${GITHUB_SHA}"
           export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
-          export OTELCOL_TAG="main"
           export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
           export KAFKA_SPAN_EVAL="true"
-          docker system prune --force
-          docker pull signoz/signoz-otel-collector:main
-          docker pull signoz/signoz-schema-migrator:main
+          docker system prune --force --all
+          OTELCOL_TAG=$(curl -s https://api.github.com/repos/SigNoz/signoz-otel-collector/releases/latest | jq -r '.tag_name // "not-found"')
+          if [[ "${OTELCOL_TAG}" == "not-found" ]]; then
+            echo "warning: unable to determine latest SigNoz OtelCollector release tag, skipping latest otelcol deployment"
+          else
+            export OTELCOL_TAG=${OTELCOL_TAG}
+            docker pull signoz/signoz-otel-collector:${OTELCOL_TAG}
+            docker pull signoz/signoz-schema-migrator:${OTELCOL_TAG}
+          fi
           cd ~/signoz
           git status
           git add .
2 .github/workflows/testing-deployment.yaml (vendored)
@@ -38,7 +38,7 @@ jobs:
           export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
           export DEV_BUILD="1"
           export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
-          docker system prune --force
+          docker system prune --force --all
           cd ~/signoz
           git status
           git add .
147 .gitignore (vendored)
@@ -80,6 +80,153 @@ deploy/common/clickhouse/user_scripts/
 
 queries.active
 
 # tmp
 **/tmp/**
 
+# .devenv tmp files
+.devenv/**/tmp/**
+.qodo
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+poetry.toml
+
+# ruff
+.ruff_cache/
+
+# LSP config files
+pyrightconfig.json
+
+# End of https://www.toptal.com/developers/gitignore/api/python
17 .versions/alpine (new file)
@@ -0,0 +1,17 @@
#### Auto generated by make docker-version-alpine. DO NOT EDIT! ####
amd64=029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85
unknown=5fea95373b9ec85974843f31446fa6a9df4492dddae4e1cb056193c34a20a5be
arm=b4aef1a899e0271f06d948c9a8fa626ecdb2202d3a178bc14775dd559e23df8e
unknown=a4d1e27e63a9d6353046eb25a2f0ec02945012b217f4364cd83a73fe6dfb0b15
arm=4fdafe217d0922f3c3e2b4f64cf043f8403a4636685cd9c51fea2cbd1f419740
unknown=7f21ac2018d95b2c51a5779c1d5ca6c327504adc3b0fdc747a6725d30b3f13c2
arm64=ea3c5a9671f7b3f7eb47eab06f73bc6591df978b0d5955689a9e6f943aa368c0
unknown=a8ba68c1a9e6eea8041b4b8f996c235163440808b9654a865976fdcbede0f433
386=dea9f02e103e837849f984d5679305c758aba7fea1b95b7766218597f61a05ab
unknown=3c6629bec05c8273a927d46b77428bf4a378dad911a0ae284887becdc149b734
ppc64le=0880443bffa028dfbbc4094a32dd6b7ac25684e4c0a3d50da9e0acae355c5eaf
unknown=bb48308f976b266e3ab39bbf9af84521959bd9c295d3c763690cf41f8df2a626
riscv64=d76e6fbe348ff20c2931bb7f101e49379648e026de95dd37f96e00ce1909dcf7
unknown=dd807544365f6dc187cbe6de0806adce2ea9de3e7124717d1d8e8b7a18b77b64
s390x=b815fadf80495594eb6296a6af0bc647ae5f193e0044e07acec7e5b378c9ce2d
unknown=74681be74a280a88abb53ff1e048eb1fb624b30d0066730df6d8afd02ba82e01
32 Makefile
@@ -10,7 +10,7 @@ COMMIT_SHORT_SHA ?= $(shell git rev-parse --short HEAD)
 BRANCH_NAME ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
 VERSION ?= $(BRANCH_NAME)-$(COMMIT_SHORT_SHA)
 TIMESTAMP ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
-ARCHS = amd64 arm64
+ARCHS ?= amd64 arm64
 TARGET_DIR ?= $(shell pwd)/target
 
 ZEUS_URL ?= https://api.signoz.cloud
@@ -23,6 +23,7 @@ GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
 GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service
 GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community
 GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS))
+GO_BUILD_ARCHS_ENTERPRISE_RACE = $(addprefix go-build-enterprise-race-,$(ARCHS))
 GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service
 GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO)
@@ -119,6 +120,18 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
 		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
 	fi
 
+.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
+go-build-enterprise-race: ## Builds the go backend server for enterprise with race
+go-build-enterprise-race: $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
+$(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
+	@mkdir -p $(TARGET_DIR)/$(OS)-$*
+	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
+	@if [ $* = "arm64" ]; then \
+		CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
+	else \
+		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
+	fi
+
 ##############################################################
 # js commands
 ##############################################################
@@ -167,3 +180,20 @@ docker-buildx-enterprise: go-build-enterprise js-build
 		--platform linux/arm64,linux/amd64 \
 		--push \
 		--tag $(DOCKER_REGISTRY_ENTERPRISE):$(VERSION) $(SRC)
+
+##############################################################
+# python commands
+##############################################################
+.PHONY: py-fmt
+py-fmt: ## Run black for integration tests
+	@cd tests/integration && poetry run black .
+
+.PHONY: py-lint
+py-lint: ## Run lint for integration tests
+	@cd tests/integration && poetry run isort .
+	@cd tests/integration && poetry run autoflake .
+	@cd tests/integration && poetry run pylint .
+
+.PHONY: py-test
+py-test: ## Runs integration tests
+	@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@@ -174,7 +174,7 @@ services:
 #      - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.77.0
+    image: signoz/signoz:v0.79.0
     command:
       - --config=/root/config/prometheus.yml
       - --use-logs-new-schema=true
@@ -208,7 +208,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.37
+    image: signoz/signoz-otel-collector:v0.111.39
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -232,7 +232,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.37
+    image: signoz/signoz-schema-migrator:v0.111.39
    deploy:
       restart_policy:
         condition: on-failure

@@ -110,7 +110,7 @@ services:
 #      - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.77.0
+    image: signoz/signoz:v0.79.0
     command:
       - --config=/root/config/prometheus.yml
       - --use-logs-new-schema=true
@@ -143,7 +143,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.37
+    image: signoz/signoz-otel-collector:v0.111.39
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -167,7 +167,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.37
+    image: signoz/signoz-schema-migrator:v0.111.39
     deploy:
       restart_policy:
         condition: on-failure

@@ -177,7 +177,7 @@ services:
 #      - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.77.0}
+    image: signoz/signoz:${VERSION:-v0.79.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -212,7 +212,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -238,7 +238,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -249,7 +249,7 @@ services:
         condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
     container_name: schema-migrator-async
     command:
       - async

@@ -110,7 +110,7 @@ services:
 #      - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.77.0}
+    image: signoz/signoz:${VERSION:-v0.79.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -146,7 +146,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -168,7 +168,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -180,7 +180,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
     container_name: schema-migrator-async
     command:
       - async

@@ -110,7 +110,7 @@ services:
 #      - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.77.0}
+    image: signoz/signoz:${VERSION:-v0.79.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
        condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -178,7 +178,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
     container_name: schema-migrator-async
     command:
       - async
@@ -18,4 +18,4 @@ COPY frontend/build/ /etc/signoz/web/
 RUN chmod 755 /root /root/signoz
 
 ENTRYPOINT ["./signoz"]
-CMD ["-config", "/root/config/prometheus.yml"]
+CMD ["-config", "/root/config/prometheus.yml"]
36 ee/query-service/Dockerfile.integration (new file)
@@ -0,0 +1,36 @@
FROM golang:1.22-bullseye

ARG OS="linux"
ARG TARGETARCH
ARG ZEUSURL

# This path is important for stacktraces
WORKDIR $GOPATH/src/github.com/signoz/signoz
WORKDIR /root

RUN set -eux; \
	apt-get update; \
	apt-get install -y --no-install-recommends \
		g++ \
		gcc \
		libc6-dev \
		make \
		pkg-config \
	; \
	rm -rf /var/lib/apt/lists/*

COPY go.mod go.sum ./

RUN go mod download

COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates

COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["/root/signoz"]
22 ee/query-service/Dockerfile.multi-arch (new file)
@@ -0,0 +1,22 @@
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"

FROM alpine@sha256:${ALPINE_SHA}
LABEL maintainer="signoz"
WORKDIR /root

ARG OS="linux"
ARG ARCH

RUN apk update && \
    apk add ca-certificates && \
    rm -rf /var/cache/apk/*

COPY ./target/${OS}-${ARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]
@@ -28,11 +28,10 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
 	}
 
 	dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:        dp.reader,
-		Cache:         dp.cache,
-		KeyGenerator:  queryBuilder.NewKeyGenerator(),
-		FluxInterval:  dp.fluxInterval,
-		FeatureLookup: dp.ff,
+		Reader:       dp.reader,
+		Cache:        dp.cache,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+		FluxInterval: dp.fluxInterval,
 	})
 
 	return dp
@@ -28,11 +28,10 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
 	}
 
 	hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:        hp.reader,
-		Cache:         hp.cache,
-		KeyGenerator:  queryBuilder.NewKeyGenerator(),
-		FluxInterval:  hp.fluxInterval,
-		FeatureLookup: hp.ff,
+		Reader:       hp.reader,
+		Cache:        hp.cache,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+		FluxInterval: hp.fluxInterval,
 	})
 
 	return hp
@@ -38,12 +38,6 @@ func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericPr
 	}
 }
 
-func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
-	return func(p T) {
-		p.GetBaseSeasonalProvider().ff = ff
-	}
-}
-
 func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
 	return func(p T) {
 		p.GetBaseSeasonalProvider().reader = reader
@@ -56,7 +50,6 @@ type BaseSeasonalProvider struct {
 	fluxInterval time.Duration
 	cache        cache.Cache
 	keyGenerator cache.KeyGenerator
-	ff           interfaces.FeatureLookup
 }
 
 func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
@@ -27,11 +27,10 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
 	}
 
 	wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-		Reader:        wp.reader,
-		Cache:         wp.cache,
-		KeyGenerator:  queryBuilder.NewKeyGenerator(),
-		FluxInterval:  wp.fluxInterval,
-		FeatureLookup: wp.ff,
+		Reader:       wp.reader,
+		Cache:        wp.cache,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+		FluxInterval: wp.fluxInterval,
 	})
 
 	return wp
@@ -153,9 +153,11 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
 func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 	ctx context.Context, orgId string, cloudProvider string,
 ) (*types.User, *basemodel.ApiError) {
-	cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
+	cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
+	email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
 
-	integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
+	// TODO(nitya): there should be orgId here
+	integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
 	if apiErr != nil {
 		return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
 	}
@@ -170,9 +172,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 	)
 
 	newUser := &types.User{
-		ID:    cloudIntegrationUserId,
-		Name:  fmt.Sprintf("%s integration", cloudProvider),
-		Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
+		ID:    uuid.New().String(),
+		Name:  cloudIntegrationUser,
+		Email: email,
 		TimeAuditable: types.TimeAuditable{
 			CreatedAt: time.Now(),
 		},
@@ -5,16 +5,18 @@ import (
 	"encoding/json"
 	"fmt"
 	"net/http"
+	"slices"
 	"time"
 
 	"github.com/SigNoz/signoz/ee/query-service/model"
-	"github.com/SigNoz/signoz/ee/types"
+	eeTypes "github.com/SigNoz/signoz/ee/types"
 	"github.com/SigNoz/signoz/pkg/errors"
+	errorsV2 "github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/http/render"
 	"github.com/SigNoz/signoz/pkg/query-service/auth"
 	baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
 	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
+	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/gorilla/mux"
 	"go.uber.org/zap"
@@ -58,7 +60,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
 	ah.Respond(w, &pat)
 }
 
-func validatePATRequest(req types.GettablePAT) error {
+func validatePATRequest(req eeTypes.GettablePAT) error {
 	if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
 		return fmt.Errorf("valid role is required")
 	}
@@ -74,12 +76,19 @@ func validatePATRequest(req types.GettablePAT) error {
 func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
 	ctx := context.Background()
 
-	req := types.GettablePAT{}
+	req := eeTypes.GettablePAT{}
 	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
 		RespondError(w, model.BadRequest(err), nil)
 		return
 	}
 
+	idStr := mux.Vars(r)["id"]
+	id, err := valuer.NewUUID(idStr)
+	if err != nil {
+		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
+		return
+	}
+
 	user, err := auth.GetUserFromReqContext(r.Context())
 	if err != nil {
 		RespondError(w, &model.ApiError{
@@ -89,6 +98,25 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	//get the pat
+	existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
+	if paterr != nil {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
+		return
+	}
+
+	// get the user
+	createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
+	if usererr != nil {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
+		return
+	}
+
+	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
+		return
+	}
+
 	err = validatePATRequest(req)
 	if err != nil {
 		RespondError(w, model.BadRequest(err), nil)
@@ -96,12 +124,6 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
 	}
 
 	req.UpdatedByUserID = user.ID
-	idStr := mux.Vars(r)["id"]
-	id, err := valuer.NewUUID(idStr)
-	if err != nil {
-		render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
-		return
-	}
 	req.UpdatedAt = time.Now()
 	zap.L().Info("Got Update PAT request", zap.Any("pat", req))
 	var apierr basemodel.BaseApiError
@@ -149,6 +171,25 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	//get the pat
+	existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
+	if paterr != nil {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
+		return
+	}
+
+	// get the user
+	createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
+	if usererr != nil {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
+		return
+	}
+
+	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
+		return
+	}
+
 	zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
 	if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
 		RespondError(w, apierr, nil)
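
The lookup-and-guard sequence above now appears verbatim in both `updatePAT` and `revokePAT`, so it could be factored into a single helper. A hypothetical consolidation, reusing only identifiers that appear in this diff and the file's existing imports (the helper itself is not part of the change):

```go
// Hypothetical helper consolidating the duplicated guard: reject the
// operation when the PAT was created by an integration user.
func (ah *APIHandler) ensureNotIntegrationPAT(ctx context.Context, orgID string, id valuer.UUID) error {
	existingPAT, paterr := ah.AppDao().GetPATByID(ctx, orgID, id)
	if paterr != nil {
		return errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error())
	}
	createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
	if usererr != nil {
		return errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error())
	}
	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
		return errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated")
	}
	return nil
}
```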
@@ -88,28 +88,24 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
 				anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
 				anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
 				anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
-				anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
 			)
 		case anomaly.SeasonalityDaily:
 			provider = anomaly.NewDailyProvider(
 				anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
 				anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
 				anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
-				anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
 			)
 		case anomaly.SeasonalityHourly:
 			provider = anomaly.NewHourlyProvider(
 				anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
 				anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
 				anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
-				anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
 			)
 		default:
 			provider = anomaly.NewDailyProvider(
 				anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
 				anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
 				anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
-				anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
 			)
 		}
 		anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
@@ -172,7 +172,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 		reader,
 		c,
 		serverOptions.DisableRules,
-		lm,
 		serverOptions.UseLogsNewSchema,
 		serverOptions.UseTraceNewSchema,
 		serverOptions.SigNoz.Alertmanager,
@@ -532,7 +531,6 @@ func makeRulesManager(
 	ch baseint.Reader,
 	cache cache.Cache,
 	disableRules bool,
-	fm baseint.FeatureLookup,
 	useLogsNewSchema bool,
 	useTraceNewSchema bool,
 	alertmanager alertmanager.Alertmanager,
@@ -549,7 +547,6 @@ func makeRulesManager(
 		Context:      context.Background(),
 		Logger:       zap.L(),
 		DisableRules: disableRules,
-		FeatureFlags: fm,
 		Reader:       ch,
 		Cache:        cache,
 		EvalDelay:    baseconst.GetEvalDelay(),
@@ -8,7 +8,6 @@ import (
 	basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
 	baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
 	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
-	ossTypes "github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/google/uuid"
@@ -40,7 +39,6 @@ type ModelDao interface {
 	UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
 	GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
 	GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
-	GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
 	ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
 	RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
 }
@@ -4,7 +4,6 @@ import (
 	"context"
 	"fmt"
 	"net/url"
-	"strings"
 	"time"
 
 	"github.com/SigNoz/signoz/ee/query-service/constants"
@@ -44,7 +43,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
 	}
 
 	user := &types.User{
-		ID:       uuid.NewString(),
+		ID:       uuid.New().String(),
 		Name:     "",
 		Email:    email,
 		Password: hash,
@@ -162,12 +161,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
 	// find domain from email
 	orgDomain, apierr := m.GetDomainByEmail(ctx, email)
 	if apierr != nil {
-		var emailDomain string
-		emailComponents := strings.Split(email, "@")
-		if len(emailComponents) > 0 {
-			emailDomain = emailComponents[1]
-		}
-		zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
+		zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
 		return resp, apierr
 	}
@@ -196,27 +196,3 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID)
 
 	return &patWithUser, nil
 }
-
-// deprecated
-func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError) {
-	users := []ossTypes.GettableUser{}
-
-	if err := m.DB().NewSelect().
-		Model(&users).
-		Column("u.id", "u.name", "u.email", "u.password", "u.created_at", "u.profile_picture_url", "u.org_id", "u.group_id").
-		Join("JOIN personal_access_tokens p ON u.id = p.user_id").
-		Where("p.token = ?", token).
-		Where("p.expires_at >= strftime('%s', 'now')").
-		Where("p.org_id = ?", orgID).
-		Scan(ctx); err != nil {
-		return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
-	}
-
-	if len(users) != 1 {
-		return nil, &model.ApiError{
-			Typ: model.ErrorInternal,
-			Err: fmt.Errorf("found zero or multiple users with same PAT token"),
-		}
-	}
-	return &users[0], nil
-}
@@ -157,8 +157,6 @@ func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
 	}
 
 	switch planName {
-	case PlanNameTeams:
-		features = append(features, ProPlan...)
 	case PlanNameEnterprise:
 		features = append(features, EnterprisePlan...)
 	case PlanNameBasic:
@@ -74,21 +74,21 @@ func TestNewLicenseV3(t *testing.T) {
 		},
 		{
 			name: "Parse the entire license properly",
-			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
+			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
 			pass: true,
 			expected: &LicenseV3{
 				ID:  "does-not-matter",
 				Key: "does-not-matter-key",
 				Data: map[string]interface{}{
 					"plan": map[string]interface{}{
-						"name": "TEAMS",
+						"name": "ENTERPRISE",
 					},
 					"category":    "FREE",
 					"status":      "ACTIVE",
 					"valid_from":  float64(1730899309),
 					"valid_until": float64(-1),
 				},
-				PlanName:   PlanNameTeams,
+				PlanName:   PlanNameEnterprise,
 				ValidFrom:  1730899309,
 				ValidUntil: -1,
 				Status:     "ACTIVE",
@@ -98,14 +98,14 @@ func TestNewLicenseV3(t *testing.T) {
 		},
 		{
 			name: "Fallback to basic plan if license status is invalid",
-			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
+			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
 			pass: true,
 			expected: &LicenseV3{
 				ID:  "does-not-matter",
 				Key: "does-not-matter-key",
 				Data: map[string]interface{}{
 					"plan": map[string]interface{}{
-						"name": "TEAMS",
+						"name": "ENTERPRISE",
 					},
 					"category": "FREE",
 					"status":   "INVALID",
@@ -122,21 +122,21 @@ func TestNewLicenseV3(t *testing.T) {
 		},
 		{
 			name: "fallback states for validFrom and validUntil",
-			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
+			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
 			pass: true,
 			expected: &LicenseV3{
 				ID:  "does-not-matter",
 				Key: "does-not-matter-key",
 				Data: map[string]interface{}{
 					"plan": map[string]interface{}{
-						"name": "TEAMS",
+						"name": "ENTERPRISE",
 					},
 					"valid_from":  1234.456,
 					"valid_until": 5678.567,
 					"category":    "FREE",
 					"status":      "ACTIVE",
 				},
-				PlanName:   PlanNameTeams,
+				PlanName:   PlanNameEnterprise,
 				ValidFrom:  1234,
 				ValidUntil: 5678,
 				Status:     "ACTIVE",
@@ -1,30 +1,26 @@
package model

import (
	"github.com/SigNoz/signoz/pkg/query-service/constants"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)

const SSO = "SSO"
const Basic = "BASIC_PLAN"
const Pro = "PRO_PLAN"
const Enterprise = "ENTERPRISE_PLAN"

var (
	PlanNameEnterprise = "ENTERPRISE"
	PlanNameTeams      = "TEAMS"
	PlanNameBasic      = "BASIC"
)

var (
-	MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameTeams: Pro, PlanNameEnterprise: Enterprise}
+	MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
)

var (
	LicenseStatusInvalid = "INVALID"
)

const DisableUpsell = "DISABLE_UPSELL"
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
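As context for the hunk above: a minimal, self-contained sketch (not code from this PR) of how a stored plan name might resolve to a feature-set key once the TEAMS entry is dropped from the map. The `resolveFeatureSetKey` helper and its fallback behaviour are illustrative assumptions, not the repository's actual migration logic.

package main

import "fmt"

// Mirrors the constants in the hunk above.
const (
	basicPlanKey      = "BASIC_PLAN"
	enterprisePlanKey = "ENTERPRISE_PLAN"
)

// Same shape as MapOldPlanKeyToNewPlanName after this change:
// the TEAMS entry is gone, so TEAMS licenses no longer resolve here.
var mapOldPlanKeyToNewPlanName = map[string]string{
	"BASIC":      basicPlanKey,
	"ENTERPRISE": enterprisePlanKey,
}

// resolveFeatureSetKey is a hypothetical helper: unknown or removed
// plan names (e.g. "TEAMS") fall back to the basic plan key.
func resolveFeatureSetKey(storedPlanName string) string {
	if key, ok := mapOldPlanKeyToNewPlanName[storedPlanName]; ok {
		return key
	}
	return basicPlanKey
}

func main() {
	fmt.Println(resolveFeatureSetKey("ENTERPRISE")) // ENTERPRISE_PLAN
	fmt.Println(resolveFeatureSetKey("TEAMS"))      // BASIC_PLAN (fallback)
}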
@@ -38,83 +34,6 @@ var BasicPlan = basemodel.FeatureSet{
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.OSS,
		Active:     false,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       DisableUpsell,
		Active:     false,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.CustomMetricsFunction,
		Active:     false,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.QueryBuilderPanels,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.QueryBuilderAlerts,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelSlack,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelWebhook,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelPagerduty,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelOpsgenie,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelEmail,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelMsTeams,
		Active:     false,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.UseSpanMetrics,
		Active:     false,
@@ -143,135 +62,6 @@ var BasicPlan = basemodel.FeatureSet{
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.HostsInfraMonitoring,
		Active:     constants.EnableHostsInfraMonitoring(),
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.TraceFunnels,
		Active:     false,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
}

var ProPlan = basemodel.FeatureSet{
	basemodel.Feature{
		Name:       SSO,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.OSS,
		Active:     false,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.CustomMetricsFunction,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.QueryBuilderPanels,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.QueryBuilderAlerts,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelSlack,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelWebhook,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelPagerduty,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelOpsgenie,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelEmail,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelMsTeams,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.UseSpanMetrics,
		Active:     false,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       Gateway,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       PremiumSupport,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AnomalyDetection,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.HostsInfraMonitoring,
		Active:     constants.EnableHostsInfraMonitoring(),
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.TraceFunnels,
		Active:     false,
@@ -289,76 +79,6 @@ var EnterprisePlan = basemodel.FeatureSet{
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.OSS,
		Active:     false,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.CustomMetricsFunction,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.QueryBuilderPanels,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.QueryBuilderAlerts,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelSlack,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelWebhook,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelPagerduty,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelOpsgenie,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelEmail,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.AlertChannelMsTeams,
		Active:     true,
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.UseSpanMetrics,
		Active:     false,
@@ -401,13 +121,6 @@ var EnterprisePlan = basemodel.FeatureSet{
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.HostsInfraMonitoring,
		Active:     constants.EnableHostsInfraMonitoring(),
		Usage:      0,
		UsageLimit: -1,
		Route:      "",
	},
	basemodel.Feature{
		Name:       basemodel.TraceFunnels,
		Active:     false,
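The three feature sets above are plain slices of `basemodel.Feature`. A hedged, self-contained sketch of the lookup pattern that consumers of these sets use (local stand-in types; the real `basemodel` package is not reproduced here):

package main

import "fmt"

// Local stand-ins for basemodel.Feature / basemodel.FeatureSet,
// which per the hunks above are a struct and a slice of it.
type Feature struct {
	Name       string
	Active     bool
	Usage      int64
	UsageLimit int64
	Route      string
}

type FeatureSet []Feature

// isActive is a hypothetical lookup: scan the set for a feature
// by name and report whether it is enabled.
func (fs FeatureSet) isActive(name string) bool {
	for _, f := range fs {
		if f.Name == name {
			return f.Active
		}
	}
	return false
}

func main() {
	pro := FeatureSet{
		{Name: "GATEWAY", Active: true, UsageLimit: -1},
		{Name: "USE_SPAN_METRICS", Active: false, UsageLimit: -1},
	}
	fmt.Println(pro.isActive("GATEWAY"))          // true
	fmt.Println(pro.isActive("USE_SPAN_METRICS")) // false
}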
@@ -53,7 +53,6 @@ type AnomalyRule struct {
func NewAnomalyRule(
	id string,
	p *baserules.PostableRule,
-	featureFlags interfaces.FeatureLookup,
	reader interfaces.Reader,
	cache cache.Cache,
	opts ...baserules.RuleOption,
@@ -89,10 +88,9 @@ func NewAnomalyRule(
	zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))

	querierOptsV2 := querierV2.QuerierOptions{
-		Reader:        reader,
-		Cache:         cache,
-		KeyGenerator:  queryBuilder.NewKeyGenerator(),
-		FeatureLookup: featureFlags,
+		Reader:       reader,
+		Cache:        cache,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
	}

	t.querierV2 = querierV2.NewQuerier(querierOptsV2)
@@ -102,21 +100,18 @@ func NewAnomalyRule(
			anomaly.WithCache[*anomaly.HourlyProvider](cache),
			anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
			anomaly.WithReader[*anomaly.HourlyProvider](reader),
-			anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
		)
	} else if t.seasonality == anomaly.SeasonalityDaily {
		t.provider = anomaly.NewDailyProvider(
			anomaly.WithCache[*anomaly.DailyProvider](cache),
			anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
			anomaly.WithReader[*anomaly.DailyProvider](reader),
-			anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
		)
	} else if t.seasonality == anomaly.SeasonalityWeekly {
		t.provider = anomaly.NewWeeklyProvider(
			anomaly.WithCache[*anomaly.WeeklyProvider](cache),
			anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
			anomaly.WithReader[*anomaly.WeeklyProvider](reader),
-			anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
		)
	}
	return &t, nil
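The provider construction above uses a generic functional-options pattern, selecting a provider by seasonality. A compact, self-contained sketch of that pattern under assumed names (this is not the `anomaly` package API, just the shape of it):

package main

import "fmt"

type Seasonality int

const (
	SeasonalityHourly Seasonality = iota
	SeasonalityDaily
)

// Provider is a stand-in for the anomaly providers above.
type Provider interface{ Window() string }

type HourlyProvider struct{ cacheTTL int }
type DailyProvider struct{ cacheTTL int }

func (h *HourlyProvider) Window() string { return "hourly" }
func (d *DailyProvider) Window() string  { return "daily" }

func (h *HourlyProvider) setTTL(ttl int) { h.cacheTTL = ttl }
func (d *DailyProvider) setTTL(ttl int)  { d.cacheTTL = ttl }

// Option is a generic functional option, mirroring the
// anomaly.WithCache[*T](...) style seen in the hunk above.
type Option[T any] func(T)

func WithCacheTTL[T interface{ setTTL(int) }](ttl int) Option[T] {
	return func(p T) { p.setTTL(ttl) }
}

func NewHourlyProvider(opts ...Option[*HourlyProvider]) *HourlyProvider {
	p := &HourlyProvider{}
	for _, o := range opts {
		o(p)
	}
	return p
}

func main() {
	var p Provider
	s := SeasonalityHourly
	if s == SeasonalityHourly {
		p = NewHourlyProvider(WithCacheTTL[*HourlyProvider](60))
	} else {
		p = &DailyProvider{}
	}
	fmt.Println(p.Window()) // hourly
}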
@@ -23,7 +23,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
		tr, err := baserules.NewThresholdRule(
			ruleId,
			opts.Rule,
-			opts.FF,
			opts.Reader,
			opts.UseLogsNewSchema,
			opts.UseTraceNewSchema,
@@ -66,7 +65,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
		ar, err := NewAnomalyRule(
			ruleId,
			opts.Rule,
-			opts.FF,
			opts.Reader,
			opts.Cache,
			baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
@@ -123,7 +121,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
		rule, err = baserules.NewThresholdRule(
			alertname,
			parsedRule,
-			opts.FF,
			opts.Reader,
			opts.UseLogsNewSchema,
			opts.UseTraceNewSchema,
@@ -160,7 +157,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
		rule, err = NewAnomalyRule(
			alertname,
			parsedRule,
-			opts.FF,
			opts.Reader,
			opts.Cache,
			baserules.WithSendAlways(),
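For orientation, a minimal sketch of the dispatch shape `PrepareTaskFunc` and `TestNotification` share after this change: pick a rule constructor by rule type, with no feature-flag argument at either call site. All types and names below are illustrative, not the `baserules` API.

package main

import (
	"errors"
	"fmt"
)

type RuleType string

const (
	RuleTypeThreshold RuleType = "threshold_rule"
	RuleTypeAnomaly   RuleType = "anomaly_rule"
)

type Rule interface{ Kind() RuleType }

type thresholdRule struct{}
type anomalyRule struct{}

func (thresholdRule) Kind() RuleType { return RuleTypeThreshold }
func (anomalyRule) Kind() RuleType   { return RuleTypeAnomaly }

// prepareRule mirrors PrepareTaskFunc's shape: dispatch on rule type.
// Note there is no feature-flag parameter anymore.
func prepareRule(ruleType RuleType) (Rule, error) {
	switch ruleType {
	case RuleTypeThreshold:
		return thresholdRule{}, nil
	case RuleTypeAnomaly:
		return anomalyRule{}, nil
	default:
		return nil, errors.New("unsupported rule type")
	}
}

func main() {
	r, err := prepareRule(RuleTypeAnomaly)
	if err != nil {
		panic(err)
	}
	fmt.Println(r.Kind()) // anomaly_rule
}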
@@ -17,13 +17,15 @@ var (
)

var (
-	Org  = "org"
-	User = "user"
+	Org              = "org"
+	User             = "user"
+	CloudIntegration = "cloud_integration"
)

var (
-	OrgReference  = `("org_id") REFERENCES "organizations" ("id")`
-	UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
+	OrgReference              = `("org_id") REFERENCES "organizations" ("id")`
+	UserReference             = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
+	CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
)

type dialect struct {
@@ -211,6 +213,8 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
			fkReferences = append(fkReferences, OrgReference)
		} else if reference == User && !slices.Contains(fkReferences, UserReference) {
			fkReferences = append(fkReferences, UserReference)
+		} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
+			fkReferences = append(fkReferences, CloudIntegrationReference)
		}
	}
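A self-contained sketch of the reference-to-clause mapping this dialect change extends. The `buildFKClauses` helper is hypothetical; the real code runs inside `RenameTableAndModifyModel` against bun's schema tooling.

package main

import (
	"fmt"
	"slices"
)

// Each known reference name maps to its FOREIGN KEY clause,
// matching the three constants in the hunk above.
var fkClauseByReference = map[string]string{
	"org":               `("org_id") REFERENCES "organizations" ("id")`,
	"user":              `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`,
	"cloud_integration": `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`,
}

// buildFKClauses deduplicates references and maps each known one to
// its clause, like the chained else-ifs in the hunk above.
func buildFKClauses(references []string) []string {
	var clauses []string
	for _, ref := range references {
		clause, ok := fkClauseByReference[ref]
		if ok && !slices.Contains(clauses, clause) {
			clauses = append(clauses, clause)
		}
	}
	return clauses
}

func main() {
	fmt.Println(buildFKClauses([]string{"org", "cloud_integration", "org"}))
}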
@@ -11,9 +11,12 @@ const logEvent = async (
	rateLimited?: boolean,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
	try {
+		// add tenant_url to attributes
+		const { hostname } = window.location;
+		const updatedAttributes = { ...attributes, tenant_url: hostname };
		const response = await axios.post('/event', {
			eventName,
-			attributes,
+			attributes: updatedAttributes,
			eventType: eventType || 'track',
			rateLimited: rateLimited || false, // TODO: Update this once we have a proper way to handle rate limiting
		});
@@ -1,29 +1,12 @@
// keep this consistent with backend constants.go
export enum FeatureKeys {
	SSO = 'SSO',
	ENTERPRISE_PLAN = 'ENTERPRISE_PLAN',
	BASIC_PLAN = 'BASIC_PLAN',
	ALERT_CHANNEL_SLACK = 'ALERT_CHANNEL_SLACK',
	ALERT_CHANNEL_WEBHOOK = 'ALERT_CHANNEL_WEBHOOK',
	ALERT_CHANNEL_PAGERDUTY = 'ALERT_CHANNEL_PAGERDUTY',
	ALERT_CHANNEL_OPSGENIE = 'ALERT_CHANNEL_OPSGENIE',
	ALERT_CHANNEL_MSTEAMS = 'ALERT_CHANNEL_MSTEAMS',
	DurationSort = 'DurationSort',
	TimestampSort = 'TimestampSort',
	CUSTOM_METRICS_FUNCTION = 'CUSTOM_METRICS_FUNCTION',
	QUERY_BUILDER_PANELS = 'QUERY_BUILDER_PANELS',
	QUERY_BUILDER_ALERTS = 'QUERY_BUILDER_ALERTS',
	DISABLE_UPSELL = 'DISABLE_UPSELL',
	USE_SPAN_METRICS = 'USE_SPAN_METRICS',
	OSS = 'OSS',
	ONBOARDING = 'ONBOARDING',
	CHAT_SUPPORT = 'CHAT_SUPPORT',
	GATEWAY = 'GATEWAY',
	PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
	QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
	ANOMALY_DETECTION = 'ANOMALY_DETECTION',
	AWS_INTEGRATION = 'AWS_INTEGRATION',
	ONBOARDING_V3 = 'ONBOARDING_V3',
	THIRD_PARTY_API = 'THIRD_PARTY_API',
	TRACE_FUNNELS = 'TRACE_FUNNELS',
}
@@ -1,6 +1,5 @@
import { Form, FormInstance, Input, Select, Switch, Typography } from 'antd';
import { Store } from 'antd/lib/form/interface';
-import { FeatureKeys } from 'constants/features';
import ROUTES from 'constants/routes';
import {
	ChannelType,
@@ -11,11 +10,8 @@ import {
	WebhookChannel,
} from 'container/CreateAlertChannels/config';
import history from 'lib/history';
-import { useAppContext } from 'providers/App/App';
import { Dispatch, ReactElement, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
-import { FeatureFlagProps } from 'types/api/features/getFeaturesFlags';
-import { isFeatureKeys } from 'utils/app';

import EmailSettings from './Settings/Email';
import MsTeamsSettings from './Settings/MsTeams';
@@ -39,17 +35,6 @@ function FormAlertChannels({
	editing = false,
}: FormAlertChannelsProps): JSX.Element {
	const { t } = useTranslation('channels');
-	const { featureFlags } = useAppContext();
-
-	const feature = `ALERT_CHANNEL_${type.toUpperCase()}`;
-
-	const featureKey = isFeatureKeys(feature)
-		? feature
-		: FeatureKeys.ALERT_CHANNEL_SLACK;
-
-	const hasFeature = featureFlags?.find(
-		(flag: FeatureFlagProps) => flag.name === featureKey,
-	);

	const renderSettings = (): ReactElement | null => {
		switch (type) {
@@ -146,7 +131,7 @@ function FormAlertChannels({

			<Form.Item>
				<Button
-					disabled={savingState || !hasFeature}
+					disabled={savingState}
					loading={savingState}
					type="primary"
					onClick={(): void => onSaveHandler(type)}
@@ -154,7 +139,7 @@ function FormAlertChannels({
					{t('button_save_channel')}
				</Button>
				<Button
-					disabled={testingState || !hasFeature}
+					disabled={testingState}
					loading={testingState}
					onClick={(): void => onTestHandler(type)}
				>
@@ -467,10 +467,6 @@ function FormAlertRules({
		panelType,
	]);

-	const isAlertAvailable =
-		!featureFlags?.find((flag) => flag.name === FeatureKeys.QUERY_BUILDER_ALERTS)
-			?.active || false;
-
	const saveRule = useCallback(async () => {
		if (!isFormValid()) {
			return;
@@ -688,11 +684,6 @@ function FormAlertRules({

	const isAlertNameMissing = !formInstance.getFieldValue('alert');

-	const isAlertAvailableToSave =
-		isAlertAvailable &&
-		currentQuery.queryType === EQueryType.QUERY_BUILDER &&
-		alertType !== AlertTypes.METRICS_BASED_ALERT;
-
	const onUnitChangeHandler = (value: string): void => {
		setYAxisUnit(value);
		// reset target unit
@@ -865,7 +856,6 @@ function FormAlertRules({
					icon={<SaveOutlined />}
					disabled={
						isAlertNameMissing ||
-						isAlertAvailableToSave ||
						!isChannelConfigurationValid ||
						queryStatus === 'error'
					}
@@ -5,7 +5,6 @@ import { WarningOutlined } from '@ant-design/icons';
import { Button, Flex, Modal, Space, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
-import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import {
	initialQueriesMap,
@@ -27,7 +26,6 @@ import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';
-import { useAppContext } from 'providers/App/App';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import {
	getNextWidgets,
@@ -79,8 +77,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {

	const { t } = useTranslation(['dashboard']);

-	const { featureFlags } = useAppContext();
-
	const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();

	const {
@@ -566,12 +562,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, []);

-	const isQueryBuilderActive =
-		!featureFlags?.find((flag) => flag.name === FeatureKeys.QUERY_BUILDER_PANELS)
-			?.active || false;
-
	const isNewTraceLogsAvailable =
-		isQueryBuilderActive &&
		currentQuery.queryType === EQueryType.QUERY_BUILDER &&
		currentQuery.builder.queryData.find(
			(query) => query.dataSource !== DataSource.METRICS,
@@ -13,11 +13,7 @@ function OrganizationSettings(): JSX.Element {
	const isNotSSO =
		!featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false;

-	const isNoUpSell =
-		!featureFlags?.find((flag) => flag.name === FeatureKeys.DISABLE_UPSELL)
-			?.active || false;
-
-	const isAuthDomain = !isNoUpSell || (isNoUpSell && !isNotSSO);
+	const isAuthDomain = !isNotSSO;

	if (!org) {
		return <div />;
@@ -284,16 +284,6 @@ function SideNav(): JSX.Element {
		manageLicenseMenuItem,
	];

-	const isApiMonitoringEnabled = featureFlags?.find(
-		(flag) => flag.name === FeatureKeys.THIRD_PARTY_API,
-	)?.active;
-
-	if (!isApiMonitoringEnabled) {
-		updatedMenuItems = updatedMenuItems.filter(
-			(item) => item.key !== ROUTES.API_MONITORING,
-		);
-	}
-
	if (isCloudUser || isEnterpriseSelfHostedUser) {
		const isOnboardingEnabled =
			featureFlags?.find((feature) => feature.name === FeatureKeys.ONBOARDING)
@@ -186,76 +186,6 @@ export function getAppContextMock(
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.OSS,
			active: false,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.DISABLE_UPSELL,
			active: false,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.CUSTOM_METRICS_FUNCTION,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.QUERY_BUILDER_PANELS,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.QUERY_BUILDER_ALERTS,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.ALERT_CHANNEL_SLACK,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.ALERT_CHANNEL_WEBHOOK,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.ALERT_CHANNEL_PAGERDUTY,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.ALERT_CHANNEL_OPSGENIE,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.ALERT_CHANNEL_MSTEAMS,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.USE_SPAN_METRICS,
			active: false,
@@ -284,20 +214,6 @@ export function getAppContextMock(
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.DurationSort,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.TimestampSort,
			active: true,
			usage: 0,
			usage_limit: -1,
			route: '',
		},
		{
			name: FeatureKeys.ONBOARDING,
			active: true,
9
go.mod
@@ -10,7 +10,8 @@ require (
	github.com/ClickHouse/clickhouse-go/v2 v2.30.0
	github.com/DATA-DOG/go-sqlmock v1.5.2
	github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
-	github.com/SigNoz/signoz-otel-collector v0.111.16
+	github.com/SigNoz/signoz-otel-collector v0.111.39
+	github.com/antlr4-go/antlr/v4 v4.13.1
	github.com/antonmedv/expr v1.15.3
	github.com/cespare/xxhash/v2 v2.3.0
	github.com/coreos/go-oidc/v3 v3.11.0
@@ -22,12 +23,13 @@ require (
	github.com/go-redis/redismock/v8 v8.11.5
	github.com/go-viper/mapstructure/v2 v2.1.0
	github.com/gojek/heimdall/v7 v7.0.3
-	github.com/golang-jwt/jwt/v5 v5.2.1
+	github.com/golang-jwt/jwt/v5 v5.2.2
	github.com/google/uuid v1.6.0
	github.com/gorilla/handlers v1.5.1
	github.com/gorilla/mux v1.8.1
	github.com/gorilla/websocket v1.5.0
	github.com/gosimple/slug v1.10.0
+	github.com/huandu/go-sqlbuilder v1.35.0
	github.com/jackc/pgx/v5 v5.7.2
	github.com/jmoiron/sqlx v1.3.4
	github.com/json-iterator/go v1.1.12
@@ -88,7 +90,7 @@ require (
	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
	github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
	github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
-	github.com/ClickHouse/ch-go v0.61.5 // indirect
+	github.com/ClickHouse/ch-go v0.63.1 // indirect
	github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
	github.com/andybalholm/brotli v1.1.1 // indirect
	github.com/armon/go-metrics v0.4.1 // indirect
@@ -150,6 +152,7 @@ require (
	github.com/hashicorp/golang-lru v1.0.2 // indirect
	github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
	github.com/hashicorp/memberlist v0.5.1 // indirect
+	github.com/huandu/xstrings v1.4.0 // indirect
	github.com/inconshreveable/mousetrap v1.1.0 // indirect
	github.com/jackc/pgpassfile v1.0.0 // indirect
	github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
26
go.sum
@@ -85,8 +85,8 @@ github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mx
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4=
-github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg=
+github.com/ClickHouse/ch-go v0.63.1 h1:s2JyZvWLTCSAGdtjMBBmAgQQHMco6pawLJMOXi0FODM=
+github.com/ClickHouse/ch-go v0.63.1/go.mod h1:I1kJJCL3WJcBMGe1m+HVK0+nREaG+JOYYBWjrDrF3R0=
github.com/ClickHouse/clickhouse-go/v2 v2.30.0 h1:AG4D/hW39qa58+JHQIFOSnxyL46H6h2lrmGGk17dhFo=
github.com/ClickHouse/clickhouse-go/v2 v2.30.0/go.mod h1:i9ZQAojcayW3RsdCb3YR+n+wC2h65eJsZCscZ1Z1wyo=
github.com/Code-Hex/go-generics-cache v1.5.1 h1:6vhZGc5M7Y/YD8cIUcY8kcuQLB4cHR7U+0KMqAA0KcU=
@@ -100,8 +100,10 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
-github.com/SigNoz/signoz-otel-collector v0.111.16 h1:535uKH5Oux+35EsI+L3C6pnAP/Ye0PTCbVizXoL+VqE=
-github.com/SigNoz/signoz-otel-collector v0.111.16/go.mod h1:HJ4m0LY1MPsuZmuRF7Ixb+bY8rxgRzI0VXzOedESsjg=
+github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1 h1:ZpSNrOZBOH2iCJIPeER5X0mfxOe64yP3JRX7FzBNfwY=
+github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
+github.com/SigNoz/signoz-otel-collector v0.111.39 h1:Dl8QqZNAsj2atxP572OzsszPK0XPpd3LLPNPRAUJ5wo=
+github.com/SigNoz/signoz-otel-collector v0.111.39/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@@ -113,6 +115,8 @@ github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b/go.mod h1:fvzegU4
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
+github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ=
+github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw=
github.com/antonmedv/expr v1.15.3 h1:q3hOJZNvLvhqE8OHBs1cFRdbXFNKuA+bHmRaI+AmRmI=
github.com/antonmedv/expr v1.15.3/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
@@ -333,8 +337,8 @@ github.com/gojek/heimdall/v7 v7.0.3 h1:+5sAhl8S0m+qRRL8IVeHCJudFh/XkG3wyO++nvOg+
github.com/gojek/heimdall/v7 v7.0.3/go.mod h1:Z43HtMid7ysSjmsedPTXAki6jcdcNVnjn5pmsTyiMic=
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf h1:5xRGbUdOmZKoDXkGx5evVLehuCMpuO1hl701bEQqXOM=
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf/go.mod h1:QzhUKaYKJmcbTnCYCAVQrroCOY7vOOI8cSQ4NbuhYf0=
-github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
-github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
+github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@@ -537,6 +541,12 @@ github.com/hetznercloud/hcloud-go/v2 v2.13.1/go.mod h1:dhix40Br3fDiBhwaSG/zgaYOF
github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs=
github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
+github.com/huandu/go-assert v1.1.6 h1:oaAfYxq9KNDi9qswn/6aE0EydfxSa+tWZC1KabNitYs=
+github.com/huandu/go-assert v1.1.6/go.mod h1:JuIfbmYG9ykwvuxoJ3V8TB5QP+3+ajIA54Y44TmkMxs=
+github.com/huandu/go-sqlbuilder v1.35.0 h1:ESvxFHN8vxCTudY1Vq63zYpU5yJBESn19sf6k4v2T5Q=
+github.com/huandu/go-sqlbuilder v1.35.0/go.mod h1:mS0GAtrtW+XL6nM2/gXHRJax2RwSW1TraavWDFAc1JA=
+github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU=
+github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
@@ -812,8 +822,8 @@ github.com/prometheus/prometheus v0.300.1/go.mod h1:gtTPY/XVyCdqqnjA3NzDMb0/nc5H
github.com/puzpuzpuz/xsync/v3 v3.5.0 h1:i+cMcpEDY1BkNm7lPDkCtE4oElsYLn+EKF8kAu2vXT4=
github.com/puzpuzpuz/xsync/v3 v3.5.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4=
-github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA=
+github.com/redis/go-redis/v9 v9.6.3 h1:8Dr5ygF1QFXRxIH/m3Xg9MMG1rS8YCtAgosrsewT6i0=
+github.com/redis/go-redis/v9 v9.6.3/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA=
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
221
grammar/FilterQuery.g4
Normal file
@@ -0,0 +1,221 @@
grammar FilterQuery;

/*
 * Parser Rules
 */

query
   : expression
     EOF
   ;

// Expression with standard boolean precedence:
// - parentheses > NOT > AND > OR
// - consecutive expressions with no AND/OR => implicit AND
expression
   : orExpression
   ;

// OR expressions
orExpression
   : andExpression ( OR andExpression )*
   ;

// AND expressions + optional chaining with implicit AND if no OR is present
andExpression
   : unaryExpression ( AND unaryExpression | unaryExpression )*
   ;

// A unary expression handles optional NOT
unaryExpression
   : NOT? primary
   ;

// Primary constructs: grouped expressions, a comparison (key op value),
// a function call, or a full-text string
primary
   : LPAREN orExpression RPAREN
   | comparison
   | functionCall
   | fullText
   | key
   ;

/*
 * Comparison-like filters
 *
 * Includes all operators: =, !=, <>, <, <=, >, >=, [NOT] LIKE, [NOT] ILIKE,
 * [NOT] BETWEEN, [NOT] IN, [NOT] EXISTS, [NOT] REGEXP, [NOT] CONTAINS, etc.
 */
comparison
   : key EQUALS value
   | key (NOT_EQUALS | NEQ) value
   | key LT value
   | key LE value
   | key GT value
   | key GE value

   | key (LIKE | ILIKE) value
   | key (NOT_LIKE | NOT_ILIKE) value

   | key BETWEEN value AND value
   | key NOT BETWEEN value AND value

   | key inClause
   | key notInClause

   | key EXISTS
   | key NOT EXISTS

   | key REGEXP value
   | key NOT REGEXP value

   | key CONTAINS value
   | key NOT CONTAINS value
   ;

// in(...) or in[...]
inClause
   : IN LPAREN valueList RPAREN
   | IN LBRACK valueList RBRACK
   ;

notInClause
   : NOT IN LPAREN valueList RPAREN
   | NOT IN LBRACK valueList RBRACK
   ;

// List of values for in(...) or in[...]
valueList
   : value ( COMMA value )*
   ;

// Full-text search: a standalone quoted string is allowed as a "primary"
// e.g. `"Waiting for response" http.status_code=200`
fullText
   : QUOTED_TEXT
   | FREETEXT
   ;

/*
 * Function calls like:
 *   has(payload.user_ids, 123)
 *   hasAny(payload.user_ids, [123, 456])
 *   ...
 */
functionCall
   : (HAS | HASANY | HASALL | HASNONE) LPAREN functionParamList RPAREN
   ;

// Function parameters can be keys, single scalar values, or arrays
functionParamList
   : functionParam ( COMMA functionParam )*
   ;

functionParam
   : key
   | value
   | array
   ;

// An array: [ item1, item2, item3 ]
array
   : LBRACK valueList RBRACK
   ;

/*
 * A 'value' can be a string literal (double or single-quoted),
 * a numeric literal, boolean, or a "bare" token as needed.
 */
value
   : QUOTED_TEXT
   | NUMBER
   | BOOL
   | KEY
   ;

/*
 * A key can include letters, digits, underscores, dots, brackets
 * E.g. service.name, query_log.query_duration_ms, proto.user_objects[].name
 */
key
   : KEY
   ;


/*
 * Lexer Rules
 */

// Common punctuation / symbols
LPAREN : '(' ;
RPAREN : ')' ;
LBRACK : '[' ;
RBRACK : ']' ;
COMMA  : ',' ;

EQUALS     : '=' | '==' ;
NOT_EQUALS : '!=' ;
NEQ        : '<>' ; // alternate not-equals operator
LT         : '<' ;
LE         : '<=' ;
GT         : '>' ;
GE         : '>=' ;

// Operators that are made of multiple keywords
LIKE      : [Ll][Ii][Kk][Ee] ;
NOT_LIKE  : [Nn][Oo][Tt] [ \t]+ [Ll][Ii][Kk][Ee] ;
ILIKE     : [Ii][Ll][Ii][Kk][Ee] ;
NOT_ILIKE : [Nn][Oo][Tt] [ \t]+ [Ii][Ll][Ii][Kk][Ee] ;
BETWEEN   : [Bb][Ee][Tt][Ww][Ee][Ee][Nn] ;
EXISTS    : [Ee][Xx][Ii][Ss][Tt][Ss]? ;
REGEXP    : [Rr][Ee][Gg][Ee][Xx][Pp] ;
CONTAINS  : [Cc][Oo][Nn][Tt][Aa][Ii][Nn][Ss]? ;
IN        : [Ii][Nn] ;

// Boolean logic
NOT : [Nn][Oo][Tt] ;
AND : [Aa][Nn][Dd] ;
OR  : [Oo][Rr] ;

// For easy referencing in function calls
HAS     : [Hh][Aa][Ss] ;
HASANY  : [Hh][Aa][Ss][Aa][Nn][Yy] ;
HASALL  : [Hh][Aa][Ss][Aa][Ll][Ll] ;
HASNONE : [Hh][Aa][Ss][Nn][Oo][Nn][Ee] ;

// Potential boolean constants
BOOL
   : [Tt][Rr][Uu][Ee]
   | [Ff][Aa][Ll][Ss][Ee]
   ;

// Numbers (integer or float). Adjust as needed for your domain.
NUMBER
   : DIGIT+ ( '.' DIGIT+ )?
   ;

// Double/single-quoted text, capturing full text search strings, values, etc.
QUOTED_TEXT
   : ( '"' ( ~["\\] | '\\' . )* '"'   // double-quoted
     | '\'' ( ~['\\] | '\\' . )* '\'' // single-quoted
     )
   ;

// Keys can have letters, digits, underscores, dots, and bracket pairs
// e.g. service.name, service.namespace, db.queries[].query_duration
KEY
   : [a-zA-Z0-9_] [a-zA-Z0-9_.[\]]*
   ;

// Ignore whitespace
WS
   : [ \t\r\n]+ -> skip
   ;

// Digits used by NUMBER
fragment DIGIT
   : [0-9]
   ;

FREETEXT : (~[ \t\r\n=()'"<>![\]])+ ;
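To make the new grammar concrete, a hedged usage sketch of the generated code under pkg/parser/grammar (Go package `parser`). The lexer constructor appears in this diff; `NewFilterQueryParser` does not, so its name and the import path are assumed from ANTLR's usual Go output rather than confirmed by this PR.

package main

import (
	"fmt"

	"github.com/antlr4-go/antlr/v4"

	parser "github.com/SigNoz/signoz/pkg/parser/grammar"
)

func main() {
	// Tokenize and parse a filter expression with the generated code.
	input := antlr.NewInputStream(`service.name = "api" AND duration > 100`)
	lexer := parser.NewFilterQueryLexer(input)
	stream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)

	p := parser.NewFilterQueryParser(stream) // assumed constructor name
	tree := p.Query()                        // 'query' is the grammar's start rule

	// Print the parse tree in LISP-style form for inspection.
	fmt.Println(tree.ToStringTree(nil, p))
}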
@@ -25,6 +25,25 @@ type postableAlert struct {
	Receivers []string `json:"receivers"`
}

+func (pa *postableAlert) MarshalJSON() ([]byte, error) {
+	// Marshal the embedded PostableAlert to get its JSON representation.
+	alertJSON, err := json.Marshal(pa.PostableAlert)
+	if err != nil {
+		return nil, err
+	}
+
+	// Unmarshal that JSON into a map so we can add extra fields.
+	var m map[string]interface{}
+	if err := json.Unmarshal(alertJSON, &m); err != nil {
+		return nil, err
+	}
+
+	// Add the Receivers field.
+	m["receivers"] = pa.Receivers
+
+	return json.Marshal(m)
+}
+
const (
	alertsPath string = "/v1/alerts"
	routesPath string = "/v1/routes"
35
pkg/alertmanager/legacyalertmanager/provider_test.go
Normal file
@@ -0,0 +1,35 @@
package legacyalertmanager

import (
	"encoding/json"
	"testing"

	"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
	"github.com/prometheus/alertmanager/api/v2/models"
	"github.com/stretchr/testify/assert"
)

func TestProvider_TestAlert(t *testing.T) {
	pa := &postableAlert{
		PostableAlert: &alertmanagertypes.PostableAlert{
			Alert: models.Alert{
				Labels: models.LabelSet{
					"alertname": "test",
				},
				GeneratorURL: "http://localhost:9090/graph?g0.expr=up&g0.tab=1",
			},
			Annotations: models.LabelSet{
				"summary": "test",
			},
		},
		Receivers: []string{"receiver1", "receiver2"},
	}

	body, err := json.Marshal(pa)
	if err != nil {
		t.Fatalf("failed to marshal postable alert: %v", err)
	}

	assert.Contains(t, string(body), "receiver1")
	assert.Contains(t, string(body), "receiver2")
}
96
pkg/parser/grammar/FilterQuery.interp
Normal file
File diff suppressed because one or more lines are too long
45
pkg/parser/grammar/FilterQuery.tokens
Normal file
@@ -0,0 +1,45 @@
LPAREN=1
RPAREN=2
LBRACK=3
RBRACK=4
COMMA=5
EQUALS=6
NOT_EQUALS=7
NEQ=8
LT=9
LE=10
GT=11
GE=12
LIKE=13
NOT_LIKE=14
ILIKE=15
NOT_ILIKE=16
BETWEEN=17
EXISTS=18
REGEXP=19
CONTAINS=20
IN=21
NOT=22
AND=23
OR=24
HAS=25
HASANY=26
HASALL=27
HASNONE=28
BOOL=29
NUMBER=30
QUOTED_TEXT=31
KEY=32
WS=33
FREETEXT=34
'('=1
')'=2
'['=3
']'=4
','=5
'!='=7
'<>'=8
'<'=9
'<='=10
'>'=11
'>='=12
120
pkg/parser/grammar/FilterQueryLexer.interp
Normal file
File diff suppressed because one or more lines are too long
45
pkg/parser/grammar/FilterQueryLexer.tokens
Normal file
@@ -0,0 +1,45 @@
LPAREN=1
RPAREN=2
LBRACK=3
RBRACK=4
COMMA=5
EQUALS=6
NOT_EQUALS=7
NEQ=8
LT=9
LE=10
GT=11
GE=12
LIKE=13
NOT_LIKE=14
ILIKE=15
NOT_ILIKE=16
BETWEEN=17
EXISTS=18
REGEXP=19
CONTAINS=20
IN=21
NOT=22
AND=23
OR=24
HAS=25
HASANY=26
HASALL=27
HASNONE=28
BOOL=29
NUMBER=30
QUOTED_TEXT=31
KEY=32
WS=33
FREETEXT=34
'('=1
')'=2
'['=3
']'=4
','=5
'!='=7
'<>'=8
'<'=9
'<='=10
'>'=11
'>='=12
124
pkg/parser/grammar/filterquery_base_listener.go
Normal file
@@ -0,0 +1,124 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // FilterQuery

import "github.com/antlr4-go/antlr/v4"

// BaseFilterQueryListener is a complete listener for a parse tree produced by FilterQueryParser.
type BaseFilterQueryListener struct{}

var _ FilterQueryListener = &BaseFilterQueryListener{}

// VisitTerminal is called when a terminal node is visited.
func (s *BaseFilterQueryListener) VisitTerminal(node antlr.TerminalNode) {}

// VisitErrorNode is called when an error node is visited.
func (s *BaseFilterQueryListener) VisitErrorNode(node antlr.ErrorNode) {}

// EnterEveryRule is called when any rule is entered.
func (s *BaseFilterQueryListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}

// ExitEveryRule is called when any rule is exited.
func (s *BaseFilterQueryListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}

// EnterQuery is called when production query is entered.
func (s *BaseFilterQueryListener) EnterQuery(ctx *QueryContext) {}

// ExitQuery is called when production query is exited.
func (s *BaseFilterQueryListener) ExitQuery(ctx *QueryContext) {}

// EnterExpression is called when production expression is entered.
func (s *BaseFilterQueryListener) EnterExpression(ctx *ExpressionContext) {}

// ExitExpression is called when production expression is exited.
func (s *BaseFilterQueryListener) ExitExpression(ctx *ExpressionContext) {}

// EnterOrExpression is called when production orExpression is entered.
func (s *BaseFilterQueryListener) EnterOrExpression(ctx *OrExpressionContext) {}

// ExitOrExpression is called when production orExpression is exited.
func (s *BaseFilterQueryListener) ExitOrExpression(ctx *OrExpressionContext) {}

// EnterAndExpression is called when production andExpression is entered.
func (s *BaseFilterQueryListener) EnterAndExpression(ctx *AndExpressionContext) {}

// ExitAndExpression is called when production andExpression is exited.
func (s *BaseFilterQueryListener) ExitAndExpression(ctx *AndExpressionContext) {}

// EnterUnaryExpression is called when production unaryExpression is entered.
func (s *BaseFilterQueryListener) EnterUnaryExpression(ctx *UnaryExpressionContext) {}

// ExitUnaryExpression is called when production unaryExpression is exited.
func (s *BaseFilterQueryListener) ExitUnaryExpression(ctx *UnaryExpressionContext) {}

// EnterPrimary is called when production primary is entered.
func (s *BaseFilterQueryListener) EnterPrimary(ctx *PrimaryContext) {}

// ExitPrimary is called when production primary is exited.
func (s *BaseFilterQueryListener) ExitPrimary(ctx *PrimaryContext) {}

// EnterComparison is called when production comparison is entered.
func (s *BaseFilterQueryListener) EnterComparison(ctx *ComparisonContext) {}

// ExitComparison is called when production comparison is exited.
func (s *BaseFilterQueryListener) ExitComparison(ctx *ComparisonContext) {}

// EnterInClause is called when production inClause is entered.
func (s *BaseFilterQueryListener) EnterInClause(ctx *InClauseContext) {}

// ExitInClause is called when production inClause is exited.
func (s *BaseFilterQueryListener) ExitInClause(ctx *InClauseContext) {}

// EnterNotInClause is called when production notInClause is entered.
func (s *BaseFilterQueryListener) EnterNotInClause(ctx *NotInClauseContext) {}

// ExitNotInClause is called when production notInClause is exited.
func (s *BaseFilterQueryListener) ExitNotInClause(ctx *NotInClauseContext) {}

// EnterValueList is called when production valueList is entered.
func (s *BaseFilterQueryListener) EnterValueList(ctx *ValueListContext) {}

// ExitValueList is called when production valueList is exited.
func (s *BaseFilterQueryListener) ExitValueList(ctx *ValueListContext) {}

// EnterFullText is called when production fullText is entered.
func (s *BaseFilterQueryListener) EnterFullText(ctx *FullTextContext) {}

// ExitFullText is called when production fullText is exited.
func (s *BaseFilterQueryListener) ExitFullText(ctx *FullTextContext) {}

// EnterFunctionCall is called when production functionCall is entered.
func (s *BaseFilterQueryListener) EnterFunctionCall(ctx *FunctionCallContext) {}

// ExitFunctionCall is called when production functionCall is exited.
func (s *BaseFilterQueryListener) ExitFunctionCall(ctx *FunctionCallContext) {}

// EnterFunctionParamList is called when production functionParamList is entered.
func (s *BaseFilterQueryListener) EnterFunctionParamList(ctx *FunctionParamListContext) {}

// ExitFunctionParamList is called when production functionParamList is exited.
func (s *BaseFilterQueryListener) ExitFunctionParamList(ctx *FunctionParamListContext) {}

// EnterFunctionParam is called when production functionParam is entered.
func (s *BaseFilterQueryListener) EnterFunctionParam(ctx *FunctionParamContext) {}

// ExitFunctionParam is called when production functionParam is exited.
func (s *BaseFilterQueryListener) ExitFunctionParam(ctx *FunctionParamContext) {}

// EnterArray is called when production array is entered.
func (s *BaseFilterQueryListener) EnterArray(ctx *ArrayContext) {}

// ExitArray is called when production array is exited.
func (s *BaseFilterQueryListener) ExitArray(ctx *ArrayContext) {}

// EnterValue is called when production value is entered.
func (s *BaseFilterQueryListener) EnterValue(ctx *ValueContext) {}

// ExitValue is called when production value is exited.
func (s *BaseFilterQueryListener) ExitValue(ctx *ValueContext) {}

// EnterKey is called when production key is entered.
func (s *BaseFilterQueryListener) EnterKey(ctx *KeyContext) {}

// ExitKey is called when production key is exited.
func (s *BaseFilterQueryListener) ExitKey(ctx *KeyContext) {}
77
pkg/parser/grammar/filterquery_base_visitor.go
Normal file
@@ -0,0 +1,77 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // FilterQuery

import "github.com/antlr4-go/antlr/v4"

type BaseFilterQueryVisitor struct {
	*antlr.BaseParseTreeVisitor
}

func (v *BaseFilterQueryVisitor) VisitQuery(ctx *QueryContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitExpression(ctx *ExpressionContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitOrExpression(ctx *OrExpressionContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitAndExpression(ctx *AndExpressionContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitUnaryExpression(ctx *UnaryExpressionContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitPrimary(ctx *PrimaryContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitComparison(ctx *ComparisonContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitInClause(ctx *InClauseContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitNotInClause(ctx *NotInClauseContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitValueList(ctx *ValueListContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitFullText(ctx *FullTextContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitFunctionCall(ctx *FunctionCallContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitFunctionParamList(ctx *FunctionParamListContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitFunctionParam(ctx *FunctionParamContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitArray(ctx *ArrayContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitValue(ctx *ValueContext) interface{} {
	return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitKey(ctx *KeyContext) interface{} {
	return v.VisitChildren(ctx)
}
271
pkg/parser/grammar/filterquery_lexer.go
Normal file
271
pkg/parser/grammar/filterquery_lexer.go
Normal file
@@ -0,0 +1,271 @@
|
||||
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/antlr4-go/antlr/v4"
|
||||
"sync"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
// Suppress unused import error
|
||||
var _ = fmt.Printf
|
||||
var _ = sync.Once{}
|
||||
var _ = unicode.IsLetter
|
||||
|
||||
type FilterQueryLexer struct {
|
||||
*antlr.BaseLexer
|
||||
channelNames []string
|
||||
modeNames []string
|
||||
// TODO: EOF string
|
||||
}
|
||||
|
||||
var FilterQueryLexerLexerStaticData struct {
|
||||
once sync.Once
|
||||
serializedATN []int32
|
||||
ChannelNames []string
|
||||
ModeNames []string
|
||||
LiteralNames []string
|
||||
SymbolicNames []string
|
||||
RuleNames []string
|
||||
PredictionContextCache *antlr.PredictionContextCache
|
||||
atn *antlr.ATN
|
||||
decisionToDFA []*antlr.DFA
|
||||
}
|
||||
|
||||
func filterquerylexerLexerInit() {
|
||||
staticData := &FilterQueryLexerLexerStaticData
|
||||
staticData.ChannelNames = []string{
|
||||
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
|
||||
}
|
||||
staticData.ModeNames = []string{
|
||||
"DEFAULT_MODE",
|
||||
}
|
||||
staticData.LiteralNames = []string{
|
||||
"", "'('", "')'", "'['", "']'", "','", "", "'!='", "'<>'", "'<'", "'<='",
|
||||
"'>'", "'>='",
|
||||
}
|
||||
staticData.SymbolicNames = []string{
|
||||
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
|
||||
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
|
||||
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
|
||||
"KEY", "WS", "FREETEXT",
|
||||
}
|
||||
staticData.RuleNames = []string{
|
||||
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
|
||||
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
|
||||
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
|
||||
"KEY", "WS", "DIGIT", "FREETEXT",
|
||||
}
|
||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||
staticData.serializedATN = []int32{
|
||||
4, 0, 34, 280, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 1, 0, 1, 0, 1, 1,
1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 85, 8,
5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1,
10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13,
1, 13, 1, 13, 1, 13, 4, 13, 112, 8, 13, 11, 13, 12, 13, 113, 1, 13, 1,
13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15,
1, 15, 1, 15, 1, 15, 4, 15, 131, 8, 15, 11, 15, 12, 15, 132, 1, 15, 1,
15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 155, 8,
17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19,
1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 172, 8, 19, 1, 20, 1, 20, 1,
20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23,
1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1,
25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27,
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1,
28, 1, 28, 1, 28, 1, 28, 1, 28, 3, 28, 223, 8, 28, 1, 29, 4, 29, 226, 8,
29, 11, 29, 12, 29, 227, 1, 29, 1, 29, 4, 29, 232, 8, 29, 11, 29, 12, 29,
233, 3, 29, 236, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 242, 8, 30,
10, 30, 12, 30, 245, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 252,
8, 30, 10, 30, 12, 30, 255, 9, 30, 1, 30, 3, 30, 258, 8, 30, 1, 31, 1,
31, 5, 31, 262, 8, 31, 10, 31, 12, 31, 265, 9, 31, 1, 32, 4, 32, 268, 8,
32, 11, 32, 12, 32, 269, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 4, 34, 277,
8, 34, 11, 34, 12, 34, 278, 0, 0, 35, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11,
6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15,
31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24,
49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33,
67, 0, 69, 34, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105,
2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110,
2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2,
0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0,
83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0,
80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68,
68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85,
85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39,
92, 92, 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 6, 0, 46, 46, 48, 57, 65,
91, 93, 93, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57,
7, 0, 9, 10, 13, 13, 32, 34, 39, 41, 60, 62, 91, 91, 93, 93, 295, 0, 1,
1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9,
1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0,
17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0,
0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0,
0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0,
0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1,
0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55,
1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0,
63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 1, 71, 1, 0, 0, 0,
3, 73, 1, 0, 0, 0, 5, 75, 1, 0, 0, 0, 7, 77, 1, 0, 0, 0, 9, 79, 1, 0, 0,
0, 11, 84, 1, 0, 0, 0, 13, 86, 1, 0, 0, 0, 15, 89, 1, 0, 0, 0, 17, 92,
1, 0, 0, 0, 19, 94, 1, 0, 0, 0, 21, 97, 1, 0, 0, 0, 23, 99, 1, 0, 0, 0,
25, 102, 1, 0, 0, 0, 27, 107, 1, 0, 0, 0, 29, 120, 1, 0, 0, 0, 31, 126,
1, 0, 0, 0, 33, 140, 1, 0, 0, 0, 35, 148, 1, 0, 0, 0, 37, 156, 1, 0, 0,
0, 39, 163, 1, 0, 0, 0, 41, 173, 1, 0, 0, 0, 43, 176, 1, 0, 0, 0, 45, 180,
1, 0, 0, 0, 47, 184, 1, 0, 0, 0, 49, 187, 1, 0, 0, 0, 51, 191, 1, 0, 0,
0, 53, 198, 1, 0, 0, 0, 55, 205, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 225,
1, 0, 0, 0, 61, 257, 1, 0, 0, 0, 63, 259, 1, 0, 0, 0, 65, 267, 1, 0, 0,
0, 67, 273, 1, 0, 0, 0, 69, 276, 1, 0, 0, 0, 71, 72, 5, 40, 0, 0, 72, 2,
1, 0, 0, 0, 73, 74, 5, 41, 0, 0, 74, 4, 1, 0, 0, 0, 75, 76, 5, 91, 0, 0,
76, 6, 1, 0, 0, 0, 77, 78, 5, 93, 0, 0, 78, 8, 1, 0, 0, 0, 79, 80, 5, 44,
0, 0, 80, 10, 1, 0, 0, 0, 81, 85, 5, 61, 0, 0, 82, 83, 5, 61, 0, 0, 83,
85, 5, 61, 0, 0, 84, 81, 1, 0, 0, 0, 84, 82, 1, 0, 0, 0, 85, 12, 1, 0,
0, 0, 86, 87, 5, 33, 0, 0, 87, 88, 5, 61, 0, 0, 88, 14, 1, 0, 0, 0, 89,
90, 5, 60, 0, 0, 90, 91, 5, 62, 0, 0, 91, 16, 1, 0, 0, 0, 92, 93, 5, 60,
0, 0, 93, 18, 1, 0, 0, 0, 94, 95, 5, 60, 0, 0, 95, 96, 5, 61, 0, 0, 96,
20, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 22, 1, 0, 0, 0, 99, 100, 5, 62,
0, 0, 100, 101, 5, 61, 0, 0, 101, 24, 1, 0, 0, 0, 102, 103, 7, 0, 0, 0,
103, 104, 7, 1, 0, 0, 104, 105, 7, 2, 0, 0, 105, 106, 7, 3, 0, 0, 106,
26, 1, 0, 0, 0, 107, 108, 7, 4, 0, 0, 108, 109, 7, 5, 0, 0, 109, 111, 7,
6, 0, 0, 110, 112, 7, 7, 0, 0, 111, 110, 1, 0, 0, 0, 112, 113, 1, 0, 0,
0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115,
116, 7, 0, 0, 0, 116, 117, 7, 1, 0, 0, 117, 118, 7, 2, 0, 0, 118, 119,
7, 3, 0, 0, 119, 28, 1, 0, 0, 0, 120, 121, 7, 1, 0, 0, 121, 122, 7, 0,
0, 0, 122, 123, 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0,
125, 30, 1, 0, 0, 0, 126, 127, 7, 4, 0, 0, 127, 128, 7, 5, 0, 0, 128, 130,
7, 6, 0, 0, 129, 131, 7, 7, 0, 0, 130, 129, 1, 0, 0, 0, 131, 132, 1, 0,
0, 0, 132, 130, 1, 0, 0, 0, 132, 133, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0,
134, 135, 7, 1, 0, 0, 135, 136, 7, 0, 0, 0, 136, 137, 7, 1, 0, 0, 137,
138, 7, 2, 0, 0, 138, 139, 7, 3, 0, 0, 139, 32, 1, 0, 0, 0, 140, 141, 7,
8, 0, 0, 141, 142, 7, 3, 0, 0, 142, 143, 7, 6, 0, 0, 143, 144, 7, 9, 0,
0, 144, 145, 7, 3, 0, 0, 145, 146, 7, 3, 0, 0, 146, 147, 7, 4, 0, 0, 147,
34, 1, 0, 0, 0, 148, 149, 7, 3, 0, 0, 149, 150, 7, 10, 0, 0, 150, 151,
7, 1, 0, 0, 151, 152, 7, 11, 0, 0, 152, 154, 7, 6, 0, 0, 153, 155, 7, 11,
0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 36, 1, 0, 0, 0,
156, 157, 7, 12, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159, 7, 13, 0, 0, 159,
160, 7, 3, 0, 0, 160, 161, 7, 10, 0, 0, 161, 162, 7, 14, 0, 0, 162, 38,
1, 0, 0, 0, 163, 164, 7, 15, 0, 0, 164, 165, 7, 5, 0, 0, 165, 166, 7, 4,
0, 0, 166, 167, 7, 6, 0, 0, 167, 168, 7, 16, 0, 0, 168, 169, 7, 1, 0, 0,
169, 171, 7, 4, 0, 0, 170, 172, 7, 11, 0, 0, 171, 170, 1, 0, 0, 0, 171,
172, 1, 0, 0, 0, 172, 40, 1, 0, 0, 0, 173, 174, 7, 1, 0, 0, 174, 175, 7,
4, 0, 0, 175, 42, 1, 0, 0, 0, 176, 177, 7, 4, 0, 0, 177, 178, 7, 5, 0,
0, 178, 179, 7, 6, 0, 0, 179, 44, 1, 0, 0, 0, 180, 181, 7, 16, 0, 0, 181,
182, 7, 4, 0, 0, 182, 183, 7, 17, 0, 0, 183, 46, 1, 0, 0, 0, 184, 185,
7, 5, 0, 0, 185, 186, 7, 12, 0, 0, 186, 48, 1, 0, 0, 0, 187, 188, 7, 18,
0, 0, 188, 189, 7, 16, 0, 0, 189, 190, 7, 11, 0, 0, 190, 50, 1, 0, 0, 0,
191, 192, 7, 18, 0, 0, 192, 193, 7, 16, 0, 0, 193, 194, 7, 11, 0, 0, 194,
195, 7, 16, 0, 0, 195, 196, 7, 4, 0, 0, 196, 197, 7, 19, 0, 0, 197, 52,
1, 0, 0, 0, 198, 199, 7, 18, 0, 0, 199, 200, 7, 16, 0, 0, 200, 201, 7,
11, 0, 0, 201, 202, 7, 16, 0, 0, 202, 203, 7, 0, 0, 0, 203, 204, 7, 0,
0, 0, 204, 54, 1, 0, 0, 0, 205, 206, 7, 18, 0, 0, 206, 207, 7, 16, 0, 0,
207, 208, 7, 11, 0, 0, 208, 209, 7, 4, 0, 0, 209, 210, 7, 5, 0, 0, 210,
211, 7, 4, 0, 0, 211, 212, 7, 3, 0, 0, 212, 56, 1, 0, 0, 0, 213, 214, 7,
6, 0, 0, 214, 215, 7, 12, 0, 0, 215, 216, 7, 20, 0, 0, 216, 223, 7, 3,
0, 0, 217, 218, 7, 21, 0, 0, 218, 219, 7, 16, 0, 0, 219, 220, 7, 0, 0,
0, 220, 221, 7, 11, 0, 0, 221, 223, 7, 3, 0, 0, 222, 213, 1, 0, 0, 0, 222,
217, 1, 0, 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 67, 33, 0, 225, 224,
1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 227, 228, 1, 0,
0, 0, 228, 235, 1, 0, 0, 0, 229, 231, 5, 46, 0, 0, 230, 232, 3, 67, 33,
0, 231, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 233,
234, 1, 0, 0, 0, 234, 236, 1, 0, 0, 0, 235, 229, 1, 0, 0, 0, 235, 236,
1, 0, 0, 0, 236, 60, 1, 0, 0, 0, 237, 243, 5, 34, 0, 0, 238, 242, 8, 22,
0, 0, 239, 240, 5, 92, 0, 0, 240, 242, 9, 0, 0, 0, 241, 238, 1, 0, 0, 0,
241, 239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243,
244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 258,
5, 34, 0, 0, 247, 253, 5, 39, 0, 0, 248, 252, 8, 23, 0, 0, 249, 250, 5,
92, 0, 0, 250, 252, 9, 0, 0, 0, 251, 248, 1, 0, 0, 0, 251, 249, 1, 0, 0,
0, 252, 255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254,
256, 1, 0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 258, 5, 39, 0, 0, 257, 237,
1, 0, 0, 0, 257, 247, 1, 0, 0, 0, 258, 62, 1, 0, 0, 0, 259, 263, 7, 24,
0, 0, 260, 262, 7, 25, 0, 0, 261, 260, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0,
263, 261, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 64, 1, 0, 0, 0, 265, 263,
1, 0, 0, 0, 266, 268, 7, 26, 0, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0,
0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0,
271, 272, 6, 32, 0, 0, 272, 66, 1, 0, 0, 0, 273, 274, 7, 27, 0, 0, 274,
68, 1, 0, 0, 0, 275, 277, 8, 28, 0, 0, 276, 275, 1, 0, 0, 0, 277, 278,
1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 70, 1, 0,
0, 0, 18, 0, 84, 113, 132, 154, 171, 222, 227, 233, 235, 241, 243, 251,
253, 257, 263, 269, 278, 1, 6, 0, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
    decisionToDFA[index] = antlr.NewDFA(state, index)
}
}

// FilterQueryLexerInit initializes any static state used to implement FilterQueryLexer. By default the
// static state used to implement the lexer is lazily initialized during the first call to
// NewFilterQueryLexer(). You can call this function if you wish to initialize the static state ahead
// of time.
func FilterQueryLexerInit() {
    staticData := &FilterQueryLexerLexerStaticData
    staticData.once.Do(filterquerylexerLexerInit)
}

// NewFilterQueryLexer produces a new lexer instance for the optional input antlr.CharStream.
func NewFilterQueryLexer(input antlr.CharStream) *FilterQueryLexer {
    FilterQueryLexerInit()
    l := new(FilterQueryLexer)
    l.BaseLexer = antlr.NewBaseLexer(input)
    staticData := &FilterQueryLexerLexerStaticData
    l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
    l.channelNames = staticData.ChannelNames
    l.modeNames = staticData.ModeNames
    l.RuleNames = staticData.RuleNames
    l.LiteralNames = staticData.LiteralNames
    l.SymbolicNames = staticData.SymbolicNames
    l.GrammarFileName = "FilterQuery.g4"
    // TODO: l.EOF = antlr.TokenEOF

    return l
}

// FilterQueryLexer tokens.
const (
    FilterQueryLexerLPAREN      = 1
    FilterQueryLexerRPAREN      = 2
    FilterQueryLexerLBRACK      = 3
    FilterQueryLexerRBRACK      = 4
    FilterQueryLexerCOMMA       = 5
    FilterQueryLexerEQUALS      = 6
    FilterQueryLexerNOT_EQUALS  = 7
    FilterQueryLexerNEQ         = 8
    FilterQueryLexerLT          = 9
    FilterQueryLexerLE          = 10
    FilterQueryLexerGT          = 11
    FilterQueryLexerGE          = 12
    FilterQueryLexerLIKE        = 13
    FilterQueryLexerNOT_LIKE    = 14
    FilterQueryLexerILIKE       = 15
    FilterQueryLexerNOT_ILIKE   = 16
    FilterQueryLexerBETWEEN     = 17
    FilterQueryLexerEXISTS      = 18
    FilterQueryLexerREGEXP      = 19
    FilterQueryLexerCONTAINS    = 20
    FilterQueryLexerIN          = 21
    FilterQueryLexerNOT         = 22
    FilterQueryLexerAND         = 23
    FilterQueryLexerOR          = 24
    FilterQueryLexerHAS         = 25
    FilterQueryLexerHASANY      = 26
    FilterQueryLexerHASALL      = 27
    FilterQueryLexerHASNONE     = 28
    FilterQueryLexerBOOL        = 29
    FilterQueryLexerNUMBER      = 30
    FilterQueryLexerQUOTED_TEXT = 31
    FilterQueryLexerKEY         = 32
    FilterQueryLexerWS          = 33
    FilterQueryLexerFREETEXT    = 34
)
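For orientation, here is a minimal sketch (not part of the diff) of driving the generated lexer by hand. The import path is an assumption inferred from the pkg/parser/grammar file paths and the `package parser` declaration in these generated files.

package main

import (
    "fmt"

    "github.com/antlr4-go/antlr/v4"
    parser "github.com/SigNoz/signoz/pkg/parser/grammar"
)

func main() {
    // Tokenize a filter expression and print each token until EOF.
    // Whitespace never shows up here: the WS rule is on the skip command.
    input := antlr.NewInputStream(`service.name = "api" AND duration > 100`)
    lexer := parser.NewFilterQueryLexer(input)
    for tok := lexer.NextToken(); tok.GetTokenType() != antlr.TokenEOF; tok = lexer.NextToken() {
        fmt.Printf("type=%d text=%q\n", tok.GetTokenType(), tok.GetText())
    }
}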
112
pkg/parser/grammar/filterquery_listener.go
Normal file
@@ -0,0 +1,112 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // FilterQuery

import "github.com/antlr4-go/antlr/v4"

// FilterQueryListener is a complete listener for a parse tree produced by FilterQueryParser.
type FilterQueryListener interface {
    antlr.ParseTreeListener

    // EnterQuery is called when entering the query production.
    EnterQuery(c *QueryContext)

    // EnterExpression is called when entering the expression production.
    EnterExpression(c *ExpressionContext)

    // EnterOrExpression is called when entering the orExpression production.
    EnterOrExpression(c *OrExpressionContext)

    // EnterAndExpression is called when entering the andExpression production.
    EnterAndExpression(c *AndExpressionContext)

    // EnterUnaryExpression is called when entering the unaryExpression production.
    EnterUnaryExpression(c *UnaryExpressionContext)

    // EnterPrimary is called when entering the primary production.
    EnterPrimary(c *PrimaryContext)

    // EnterComparison is called when entering the comparison production.
    EnterComparison(c *ComparisonContext)

    // EnterInClause is called when entering the inClause production.
    EnterInClause(c *InClauseContext)

    // EnterNotInClause is called when entering the notInClause production.
    EnterNotInClause(c *NotInClauseContext)

    // EnterValueList is called when entering the valueList production.
    EnterValueList(c *ValueListContext)

    // EnterFullText is called when entering the fullText production.
    EnterFullText(c *FullTextContext)

    // EnterFunctionCall is called when entering the functionCall production.
    EnterFunctionCall(c *FunctionCallContext)

    // EnterFunctionParamList is called when entering the functionParamList production.
    EnterFunctionParamList(c *FunctionParamListContext)

    // EnterFunctionParam is called when entering the functionParam production.
    EnterFunctionParam(c *FunctionParamContext)

    // EnterArray is called when entering the array production.
    EnterArray(c *ArrayContext)

    // EnterValue is called when entering the value production.
    EnterValue(c *ValueContext)

    // EnterKey is called when entering the key production.
    EnterKey(c *KeyContext)

    // ExitQuery is called when exiting the query production.
    ExitQuery(c *QueryContext)

    // ExitExpression is called when exiting the expression production.
    ExitExpression(c *ExpressionContext)

    // ExitOrExpression is called when exiting the orExpression production.
    ExitOrExpression(c *OrExpressionContext)

    // ExitAndExpression is called when exiting the andExpression production.
    ExitAndExpression(c *AndExpressionContext)

    // ExitUnaryExpression is called when exiting the unaryExpression production.
    ExitUnaryExpression(c *UnaryExpressionContext)

    // ExitPrimary is called when exiting the primary production.
    ExitPrimary(c *PrimaryContext)

    // ExitComparison is called when exiting the comparison production.
    ExitComparison(c *ComparisonContext)

    // ExitInClause is called when exiting the inClause production.
    ExitInClause(c *InClauseContext)

    // ExitNotInClause is called when exiting the notInClause production.
    ExitNotInClause(c *NotInClauseContext)

    // ExitValueList is called when exiting the valueList production.
    ExitValueList(c *ValueListContext)

    // ExitFullText is called when exiting the fullText production.
    ExitFullText(c *FullTextContext)

    // ExitFunctionCall is called when exiting the functionCall production.
    ExitFunctionCall(c *FunctionCallContext)

    // ExitFunctionParamList is called when exiting the functionParamList production.
    ExitFunctionParamList(c *FunctionParamListContext)

    // ExitFunctionParam is called when exiting the functionParam production.
    ExitFunctionParam(c *FunctionParamContext)

    // ExitArray is called when exiting the array production.
    ExitArray(c *ArrayContext)

    // ExitValue is called when exiting the value production.
    ExitValue(c *ValueContext)

    // ExitKey is called when exiting the key production.
    ExitKey(c *KeyContext)
}
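As a usage sketch (not part of the diff): ANTLR's Go target normally also emits a BaseFilterQueryListener with no-op defaults, which a concrete listener can embed and override selectively. That base file is assumed here; it is not shown in this compare view.

// comparisonCounter counts comparison productions seen during a walk.
type comparisonCounter struct {
    parser.BaseFilterQueryListener // assumed generated no-op base
    count int
}

func (l *comparisonCounter) EnterComparison(c *parser.ComparisonContext) {
    l.count++
}

// A tree produced by FilterQueryParser would then be walked with:
//   antlr.ParseTreeWalkerDefault.Walk(&comparisonCounter{}, tree)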
3539
pkg/parser/grammar/filterquery_parser.go
Normal file
File diff suppressed because it is too large
61
pkg/parser/grammar/filterquery_visitor.go
Normal file
@@ -0,0 +1,61 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // FilterQuery

import "github.com/antlr4-go/antlr/v4"

// A complete Visitor for a parse tree produced by FilterQueryParser.
type FilterQueryVisitor interface {
    antlr.ParseTreeVisitor

    // Visit a parse tree produced by FilterQueryParser#query.
    VisitQuery(ctx *QueryContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#expression.
    VisitExpression(ctx *ExpressionContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#orExpression.
    VisitOrExpression(ctx *OrExpressionContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#andExpression.
    VisitAndExpression(ctx *AndExpressionContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#unaryExpression.
    VisitUnaryExpression(ctx *UnaryExpressionContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#primary.
    VisitPrimary(ctx *PrimaryContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#comparison.
    VisitComparison(ctx *ComparisonContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#inClause.
    VisitInClause(ctx *InClauseContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#notInClause.
    VisitNotInClause(ctx *NotInClauseContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#valueList.
    VisitValueList(ctx *ValueListContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#fullText.
    VisitFullText(ctx *FullTextContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#functionCall.
    VisitFunctionCall(ctx *FunctionCallContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#functionParamList.
    VisitFunctionParamList(ctx *FunctionParamListContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#functionParam.
    VisitFunctionParam(ctx *FunctionParamContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#array.
    VisitArray(ctx *ArrayContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#value.
    VisitValue(ctx *ValueContext) interface{}

    // Visit a parse tree produced by FilterQueryParser#key.
    VisitKey(ctx *KeyContext) interface{}
}
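A corresponding visitor sketch, under the same assumption that ANTLR also emitted a BaseFilterQueryVisitor (standard for -visitor output, not shown in this view). Note that the Go base visitor does not traverse children automatically; dispatch reaches a Visit method when Accept is called on the matching context.

type keyCollector struct {
    parser.BaseFilterQueryVisitor // assumed generated base
    keys []string
}

func (v *keyCollector) VisitKey(ctx *parser.KeyContext) interface{} {
    // Record the text of every key production routed here via ctx.Accept(v).
    v.keys = append(v.keys, ctx.GetText())
    return nil
}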
22
pkg/query-service/Dockerfile.multi-arch
Normal file
@@ -0,0 +1,22 @@
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"

FROM alpine@sha256:${ALPINE_SHA}
LABEL maintainer="signoz"
WORKDIR /root

ARG OS="linux"
ARG ARCH

RUN apk update && \
    apk add ca-certificates && \
    rm -rf /var/cache/apk/*

COPY ./target/${OS}-${ARCH}/signoz-community /root/signoz-community
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz-community

ENTRYPOINT ["./signoz-community"]
CMD ["-config", "/root/config/prometheus.yml"]
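The two build arguments drive everything here: ALPINE_SHA pins the base image by digest (the placeholder default fails fast if it is not overridden) and ARCH selects which pre-built binary to copy out of ./target. A typical invocation would look like `docker build -f pkg/query-service/Dockerfile.multi-arch --build-arg ALPINE_SHA=<digest> --build-arg ARCH=amd64 .`, with the digest supplied by the release pipeline.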
@@ -21,6 +21,7 @@ import (
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/mailru/easyjson"
    "github.com/uptrace/bun"

    "github.com/google/uuid"
@@ -40,6 +41,7 @@ import (
    "github.com/SigNoz/signoz/pkg/query-service/app/logs"
    "github.com/SigNoz/signoz/pkg/query-service/app/resource"
    "github.com/SigNoz/signoz/pkg/query-service/app/services"
    "github.com/SigNoz/signoz/pkg/query-service/app/traces/smart"
    "github.com/SigNoz/signoz/pkg/query-service/app/traces/tracedetail"
    "github.com/SigNoz/signoz/pkg/query-service/common"
    "github.com/SigNoz/signoz/pkg/query-service/constants"
@@ -3926,11 +3928,16 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
    var rows driver.Rows
    var response v3.FilterAttributeKeyResponse

    tagTypeFilter := `tag_type != 'logfield'`
    if req.TagType != "" {
        tagTypeFilter = fmt.Sprintf(`tag_type != 'logfield' and tag_type = '%s'`, req.TagType)
    }

    if len(req.SearchText) != 0 {
        query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2)
        query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where %s and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2, tagTypeFilter)
        rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
    } else {
        query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' limit $1", r.logsDB, r.logsTagAttributeTableV2)
        query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where %s limit $1", r.logsDB, r.logsTagAttributeTableV2, tagTypeFilter)
        rows, err = r.db.Query(ctx, query, req.Limit)
    }

@@ -3965,13 +3972,16 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
        response.AttributeKeys = append(response.AttributeKeys, key)
    }

    // add other attributes
    for _, f := range constants.StaticFieldsLogsV3 {
        if (v3.AttributeKey{} == f) {
            continue
        }
        if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
            response.AttributeKeys = append(response.AttributeKeys, f)
    // add other attributes only when the tagType is not specified,
    // i.e. retrieve all attributes
    if req.TagType == "" {
        for _, f := range constants.StaticFieldsLogsV3 {
            if (v3.AttributeKey{} == f) {
                continue
            }
            if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
                response.AttributeKeys = append(response.AttributeKeys, f)
            }
        }
    }

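One caveat worth noting (an observation, not part of the diff): req.TagType is interpolated into the SQL string with fmt.Sprintf. If the tag type can ever carry unvalidated user input, binding it like the other arguments would be safer. A minimal sketch against the same query shape, assuming the driver accepts an extra positional parameter the same way it accepts $1 and $2 above:

query = fmt.Sprintf(
    "select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' and tag_type = $1 and tag_key ILIKE $2 limit $3",
    r.logsDB, r.logsTagAttributeTableV2,
)
rows, err = r.db.Query(ctx, query, req.TagType, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)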
@@ -4713,7 +4723,12 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
    var rows driver.Rows
    var response v3.FilterAttributeKeyResponse

    query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and tag_type != 'spanfield' LIMIT $2", r.TraceDB, r.spanAttributeTableV2)
    tagTypeFilter := `tag_type != 'spanfield'`
    if req.TagType != "" {
        tagTypeFilter = fmt.Sprintf(`tag_type != 'spanfield' and tag_type = '%s'`, req.TagType)
    }

    query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and %s LIMIT $2", r.TraceDB, r.spanAttributeTableV2, tagTypeFilter)

    rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)

@@ -4758,13 +4773,16 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
        fields = constants.DeprecatedStaticFieldsTraces
    }

    // add the new static fields
    for _, f := range fields {
        if (v3.AttributeKey{} == f) {
            continue
        }
        if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
            response.AttributeKeys = append(response.AttributeKeys, f)
    // add the new static fields only when the tagType is not specified,
    // i.e. retrieve all attributes
    if req.TagType == "" {
        for _, f := range fields {
            if (v3.AttributeKey{} == f) {
                continue
            }
            if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
                response.AttributeKeys = append(response.AttributeKeys, f)
            }
        }
    }

@@ -6791,3 +6809,262 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, metric

    return cachedMetadata, nil
}

func (r *ClickHouseReader) SearchTracesV2(ctx context.Context, params *model.SearchTracesParams) (*[]model.SearchSpansResult, error) {
    searchSpansResult := []model.SearchSpansResult{
        {
            Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError", "StatusMessage", "StatusCodeString", "SpanKind"},
            IsSubTree: false,
            Events: make([][]interface{}, 0),
        },
    }

    var traceSummary model.TraceSummary
    summaryQuery := fmt.Sprintf("SELECT * from %s.%s WHERE trace_id=$1", r.TraceDB, r.traceSummaryTable)
    err := r.db.QueryRow(ctx, summaryQuery, params.TraceID).Scan(&traceSummary.TraceID, &traceSummary.Start, &traceSummary.End, &traceSummary.NumSpans)
    if err != nil {
        if err == sql.ErrNoRows {
            return &searchSpansResult, nil
        }
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return nil, fmt.Errorf("error in processing sql query")
    }

    if traceSummary.NumSpans > uint64(params.MaxSpansInTrace) {
        zap.L().Error("Max spans allowed in a trace limit reached", zap.Int("MaxSpansInTrace", params.MaxSpansInTrace),
            zap.Uint64("Count", traceSummary.NumSpans))
        claims, ok := authtypes.ClaimsFromContext(ctx)
        if ok {
            data := map[string]interface{}{
                "traceSize":            traceSummary.NumSpans,
                "maxSpansInTraceLimit": params.MaxSpansInTrace,
                "algo":                 "smart",
            }
            telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_MAX_SPANS_ALLOWED_LIMIT_REACHED, data, claims.Email, true, false)
        }
        return nil, fmt.Errorf("max spans allowed in trace limit reached, please contact support for more details")
    }

    claims, ok := authtypes.ClaimsFromContext(ctx)
    if ok {
        data := map[string]interface{}{
            "traceSize": traceSummary.NumSpans,
            "algo":      "smart",
        }
        telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_TRACE_DETAIL_API, data, claims.Email, true, false)
    }

    var startTime, endTime, durationNano uint64
    var searchScanResponses []model.SpanItemV2

    query := fmt.Sprintf("SELECT timestamp, duration_nano, span_id, trace_id, has_error, kind, resource_string_service$$name, name, references, attributes_string, attributes_number, attributes_bool, resources_string, events, status_message, status_code_string, kind_string FROM %s.%s WHERE trace_id=$1 and ts_bucket_start>=$2 and ts_bucket_start<=$3", r.TraceDB, r.traceTableName)

    start := time.Now()

    err = r.db.Select(ctx, &searchScanResponses, query, params.TraceID, strconv.FormatInt(traceSummary.Start.Unix()-1800, 10), strconv.FormatInt(traceSummary.End.Unix(), 10))

    zap.L().Info(query)

    if err != nil {
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return nil, fmt.Errorf("error in processing sql query")
    }
    end := time.Now()
    zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start)))

    searchSpansResult[0].Events = make([][]interface{}, len(searchScanResponses))

    searchSpanResponses := []model.SearchSpanResponseItem{}
    start = time.Now()
    for _, item := range searchScanResponses {
        ref := []model.OtelSpanRef{}
        err := json.Unmarshal([]byte(item.References), &ref)
        if err != nil {
            zap.L().Error("Error unmarshalling references", zap.Error(err))
            return nil, err
        }

        // merge attributes_number and attributes_bool to attributes_string
        for k, v := range item.Attributes_bool {
            item.Attributes_string[k] = fmt.Sprintf("%v", v)
        }
        for k, v := range item.Attributes_number {
            item.Attributes_string[k] = fmt.Sprintf("%v", v)
        }
        for k, v := range item.Resources_string {
            item.Attributes_string[k] = v
        }

        jsonItem := model.SearchSpanResponseItem{
            SpanID:           item.SpanID,
            TraceID:          item.TraceID,
            ServiceName:      item.ServiceName,
            Name:             item.Name,
            Kind:             int32(item.Kind),
            DurationNano:     int64(item.DurationNano),
            HasError:         item.HasError,
            StatusMessage:    item.StatusMessage,
            StatusCodeString: item.StatusCodeString,
            SpanKind:         item.SpanKind,
            References:       ref,
            Events:           item.Events,
            TagMap:           item.Attributes_string,
        }

        jsonItem.TimeUnixNano = uint64(item.TimeUnixNano.UnixNano() / 1000000)

        searchSpanResponses = append(searchSpanResponses, jsonItem)
        if startTime == 0 || jsonItem.TimeUnixNano < startTime {
            startTime = jsonItem.TimeUnixNano
        }
        if endTime == 0 || jsonItem.TimeUnixNano > endTime {
            endTime = jsonItem.TimeUnixNano
        }
        if durationNano == 0 || uint64(jsonItem.DurationNano) > durationNano {
            durationNano = uint64(jsonItem.DurationNano)
        }
    }
    end = time.Now()
    zap.L().Debug("getTraceSQLQuery unmarshal took: ", zap.Duration("duration", end.Sub(start)))

    if len(searchScanResponses) > params.SpansRenderLimit {
        start = time.Now()
        searchSpansResult, err = smart.SmartTraceAlgorithm(searchSpanResponses, params.SpanID, params.LevelUp, params.LevelDown, params.SpansRenderLimit)
        if err != nil {
            return nil, err
        }
        end = time.Now()
        zap.L().Debug("smartTraceAlgo took: ", zap.Duration("duration", end.Sub(start)))
        claims, ok := authtypes.ClaimsFromContext(ctx)
        if ok {
            data := map[string]interface{}{
                "traceSize":        len(searchScanResponses),
                "spansRenderLimit": params.SpansRenderLimit,
                "algo":             "smart",
            }
            telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LARGE_TRACE_OPENED, data, claims.Email, true, false)
        }
    } else {
        for i, item := range searchSpanResponses {
            spanEvents := item.GetValues()
            searchSpansResult[0].Events[i] = spanEvents
        }
    }

    searchSpansResult[0].StartTimestampMillis = startTime - (durationNano / 1000000)
    searchSpansResult[0].EndTimestampMillis = endTime + (durationNano / 1000000)

    return &searchSpansResult, nil
}

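To make the window arithmetic above concrete, a worked example with hypothetical numbers: the rendered range is padded on both sides by the longest span's duration, converted from nanoseconds to milliseconds. (Similarly, the span query widens ts_bucket_start by 1800 seconds at the lower end, presumably so spans whose bucket begins before the trace's first span are still matched.)

startTime := uint64(1_700_000_000_000)            // earliest span start, ms
endTime := uint64(1_700_000_000_750)              // latest span start, ms
durationNano := uint64(2_000_000_000)             // longest span: 2s
windowStart := startTime - durationNano/1_000_000 // 1_699_999_998_000
windowEnd := endTime + durationNano/1_000_000     // 1_700_000_002_750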
func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.SearchTracesParams) (*[]model.SearchSpansResult, error) {

    if r.useTraceNewSchema {
        return r.SearchTracesV2(ctx, params)
    }

    var countSpans uint64
    countQuery := fmt.Sprintf("SELECT count() as count from %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
    err := r.db.QueryRow(ctx, countQuery, params.TraceID).Scan(&countSpans)
    if err != nil {
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return nil, fmt.Errorf("error in processing sql query")
    }

    if countSpans > uint64(params.MaxSpansInTrace) {
        zap.L().Error("Max spans allowed in a trace limit reached", zap.Int("MaxSpansInTrace", params.MaxSpansInTrace),
            zap.Uint64("Count", countSpans))
        claims, ok := authtypes.ClaimsFromContext(ctx)
        if ok {
            data := map[string]interface{}{
                "traceSize":            countSpans,
                "maxSpansInTraceLimit": params.MaxSpansInTrace,
                "algo":                 "smart",
            }
            telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_MAX_SPANS_ALLOWED_LIMIT_REACHED, data, claims.Email, true, false)
        }
        return nil, fmt.Errorf("max spans allowed in trace limit reached, please contact support for more details")
    }

    claims, ok := authtypes.ClaimsFromContext(ctx)
    if ok {
        data := map[string]interface{}{
            "traceSize": countSpans,
            "algo":      "smart",
        }
        telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_TRACE_DETAIL_API, data, claims.Email, true, false)
    }

    var startTime, endTime, durationNano uint64
    var searchScanResponses []model.SearchSpanDBResponseItem

    query := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)

    start := time.Now()

    err = r.db.Select(ctx, &searchScanResponses, query, params.TraceID)

    zap.L().Info(query)

    if err != nil {
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return nil, fmt.Errorf("error in processing sql query")
    }
    end := time.Now()
    zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start)))
    searchSpansResult := []model.SearchSpansResult{{
        Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError", "StatusMessage", "StatusCodeString", "SpanKind"},
        Events: make([][]interface{}, len(searchScanResponses)),
        IsSubTree: false,
        },
    }

    searchSpanResponses := []model.SearchSpanResponseItem{}
    start = time.Now()
    for _, item := range searchScanResponses {
        var jsonItem model.SearchSpanResponseItem
        easyjson.Unmarshal([]byte(item.Model), &jsonItem)
        jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano() / 1000000)
        searchSpanResponses = append(searchSpanResponses, jsonItem)
        if startTime == 0 || jsonItem.TimeUnixNano < startTime {
            startTime = jsonItem.TimeUnixNano
        }
        if endTime == 0 || jsonItem.TimeUnixNano > endTime {
            endTime = jsonItem.TimeUnixNano
        }
        if durationNano == 0 || uint64(jsonItem.DurationNano) > durationNano {
            durationNano = uint64(jsonItem.DurationNano)
        }
    }
    end = time.Now()
    zap.L().Debug("getTraceSQLQuery unmarshal took: ", zap.Duration("duration", end.Sub(start)))

    if len(searchScanResponses) > params.SpansRenderLimit {
        start = time.Now()
        searchSpansResult, err = smart.SmartTraceAlgorithm(searchSpanResponses, params.SpanID, params.LevelUp, params.LevelDown, params.SpansRenderLimit)
        if err != nil {
            return nil, err
        }
        end = time.Now()
        zap.L().Debug("smartTraceAlgo took: ", zap.Duration("duration", end.Sub(start)))
        claims, ok := authtypes.ClaimsFromContext(ctx)
        if ok {
            data := map[string]interface{}{
                "traceSize":        len(searchScanResponses),
                "spansRenderLimit": params.SpansRenderLimit,
                "algo":             "smart",
            }
            telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LARGE_TRACE_OPENED, data, claims.Email, true, false)
        }
    } else {
        for i, item := range searchSpanResponses {
            spanEvents := item.GetValues()
            searchSpansResult[0].Events[i] = spanEvents
        }
    }

    searchSpansResult[0].StartTimestampMillis = startTime - (durationNano / 1000000)
    searchSpansResult[0].EndTimestampMillis = endTime + (durationNano / 1000000)

    return &searchSpansResult, nil
}

@@ -8,68 +8,59 @@ import (
    "time"

    "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/google/uuid"
    "github.com/jmoiron/sqlx"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type cloudProviderAccountsRepository interface {
    listConnected(ctx context.Context, cloudProvider string) ([]AccountRecord, *model.ApiError)
    listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)

    get(ctx context.Context, cloudProvider string, id string) (*AccountRecord, *model.ApiError)
    get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)

    getConnectedCloudAccount(
        ctx context.Context, cloudProvider string, cloudAccountId string,
    ) (*AccountRecord, *model.ApiError)
    getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)

    // Insert an account or update it by (cloudProvider, id)
    // for specified non-empty fields
    upsert(
        ctx context.Context,
        cloudProvider string,
        orgId string,
        provider string,
        id *string,
        config *AccountConfig,
        cloudAccountId *string,
        agentReport *AgentReport,
        config *types.AccountConfig,
        accountId *string,
        agentReport *types.AgentReport,
        removedAt *time.Time,
    ) (*AccountRecord, *model.ApiError)
    ) (*types.CloudIntegration, *model.ApiError)
}

func newCloudProviderAccountsRepository(db *sqlx.DB) (
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
    *cloudProviderAccountsSQLRepository, error,
) {
    return &cloudProviderAccountsSQLRepository{
        db: db,
        store: store,
    }, nil
}

type cloudProviderAccountsSQLRepository struct {
    db *sqlx.DB
    store sqlstore.SQLStore
}

func (r *cloudProviderAccountsSQLRepository) listConnected(
    ctx context.Context, cloudProvider string,
) ([]AccountRecord, *model.ApiError) {
    accounts := []AccountRecord{}
    ctx context.Context, orgId string, cloudProvider string,
) ([]types.CloudIntegration, *model.ApiError) {
    accounts := []types.CloudIntegration{}

    err := r.store.BunDB().NewSelect().
        Model(&accounts).
        Where("org_id = ?", orgId).
        Where("provider = ?", cloudProvider).
        Where("removed_at is NULL").
        Where("account_id is not NULL").
        Where("last_agent_report is not NULL").
        Order("created_at").
        Scan(ctx)

    err := r.db.SelectContext(
        ctx, &accounts, `
            select
                cloud_provider,
                id,
                config_json,
                cloud_account_id,
                last_agent_report_json,
                created_at,
                removed_at
            from cloud_integrations_accounts
            where
                cloud_provider=$1
                and removed_at is NULL
                and cloud_account_id is not NULL
                and last_agent_report_json is not NULL
            order by created_at
        `, cloudProvider,
    )
    if err != nil {
        return nil, model.InternalError(fmt.Errorf(
            "could not query connected cloud accounts: %w", err,
@@ -80,27 +71,16 @@ func (r *cloudProviderAccountsSQLRepository) listConnected(
}

func (r *cloudProviderAccountsSQLRepository) get(
    ctx context.Context, cloudProvider string, id string,
) (*AccountRecord, *model.ApiError) {
    var result AccountRecord
    ctx context.Context, orgId string, provider string, id string,
) (*types.CloudIntegration, *model.ApiError) {
    var result types.CloudIntegration

    err := r.db.GetContext(
        ctx, &result, `
            select
                cloud_provider,
                id,
                config_json,
                cloud_account_id,
                last_agent_report_json,
                created_at,
                removed_at
            from cloud_integrations_accounts
            where
                cloud_provider=$1
                and id=$2
        `,
        cloudProvider, id,
    )
    err := r.store.BunDB().NewSelect().
        Model(&result).
        Where("org_id = ?", orgId).
        Where("provider = ?", provider).
        Where("id = ?", id).
        Scan(ctx)

    if err == sql.ErrNoRows {
        return nil, model.NotFoundError(fmt.Errorf(
@@ -116,33 +96,22 @@ func (r *cloudProviderAccountsSQLRepository) get(
}

func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
    ctx context.Context, cloudProvider string, cloudAccountId string,
) (*AccountRecord, *model.ApiError) {
    var result AccountRecord
    ctx context.Context, orgId string, provider string, accountId string,
) (*types.CloudIntegration, *model.ApiError) {
    var result types.CloudIntegration

    err := r.db.GetContext(
        ctx, &result, `
            select
                cloud_provider,
                id,
                config_json,
                cloud_account_id,
                last_agent_report_json,
                created_at,
                removed_at
            from cloud_integrations_accounts
            where
                cloud_provider=$1
                and cloud_account_id=$2
                and last_agent_report_json is not NULL
                and removed_at is NULL
        `,
        cloudProvider, cloudAccountId,
    )
    err := r.store.BunDB().NewSelect().
        Model(&result).
        Where("org_id = ?", orgId).
        Where("provider = ?", provider).
        Where("account_id = ?", accountId).
        Where("last_agent_report is not NULL").
        Where("removed_at is NULL").
        Scan(ctx)

    if err == sql.ErrNoRows {
        return nil, model.NotFoundError(fmt.Errorf(
            "couldn't find connected cloud account %s", cloudAccountId,
            "couldn't find connected cloud account %s", accountId,
        ))
    } else if err != nil {
        return nil, model.InternalError(fmt.Errorf(
@@ -155,17 +124,18 @@ func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(

func (r *cloudProviderAccountsSQLRepository) upsert(
    ctx context.Context,
    cloudProvider string,
    orgId string,
    provider string,
    id *string,
    config *AccountConfig,
    cloudAccountId *string,
    agentReport *AgentReport,
    config *types.AccountConfig,
    accountId *string,
    agentReport *types.AgentReport,
    removedAt *time.Time,
) (*AccountRecord, *model.ApiError) {
) (*types.CloudIntegration, *model.ApiError) {
    // Insert
    if id == nil {
        newId := uuid.NewString()
        id = &newId
        temp := valuer.GenerateUUID().StringValue()
        id = &temp
    }

    // Prepare clause for setting values in `on conflict do update`
@@ -176,19 +146,19 @@ func (r *cloudProviderAccountsSQLRepository) upsert(

    if config != nil {
        onConflictSetStmts = append(
            onConflictSetStmts, setColStatement("config_json"),
            onConflictSetStmts, setColStatement("config"),
        )
    }

    if cloudAccountId != nil {
    if accountId != nil {
        onConflictSetStmts = append(
            onConflictSetStmts, setColStatement("cloud_account_id"),
            onConflictSetStmts, setColStatement("account_id"),
        )
    }

    if agentReport != nil {
        onConflictSetStmts = append(
            onConflictSetStmts, setColStatement("last_agent_report_json"),
            onConflictSetStmts, setColStatement("last_agent_report"),
        )
    }

@@ -198,37 +168,45 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
        )
    }

    // set updated_at to current timestamp if it's an upsert
    onConflictSetStmts = append(
        onConflictSetStmts, setColStatement("updated_at"),
    )

    onConflictClause := ""
    if len(onConflictSetStmts) > 0 {
        onConflictClause = fmt.Sprintf(
            "on conflict(cloud_provider, id) do update SET\n%s",
            "conflict(id, provider, org_id) do update SET\n%s",
            strings.Join(onConflictSetStmts, ",\n"),
        )
    }

    insertQuery := fmt.Sprintf(`
        INSERT INTO cloud_integrations_accounts (
            cloud_provider,
            id,
            config_json,
            cloud_account_id,
            last_agent_report_json,
            removed_at
        ) values ($1, $2, $3, $4, $5, $6)
        %s`, onConflictClause,
    )
    integration := types.CloudIntegration{
        OrgID:        orgId,
        Provider:     provider,
        Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
        TimeAuditable: types.TimeAuditable{
            CreatedAt: time.Now(),
            UpdatedAt: time.Now(),
        },
        Config:          config,
        AccountID:       accountId,
        LastAgentReport: agentReport,
        RemovedAt:       removedAt,
    }

    _, dbErr := r.store.BunDB().NewInsert().
        Model(&integration).
        On(onConflictClause).
        Exec(ctx)

    _, dbErr := r.db.ExecContext(
        ctx, insertQuery,
        cloudProvider, id, config, cloudAccountId, agentReport, removedAt,
    )
    if dbErr != nil {
        return nil, model.InternalError(fmt.Errorf(
            "could not upsert cloud account record: %w", dbErr,
        ))
    }

    upsertedAccount, apiErr := r.get(ctx, cloudProvider, *id)
    upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
    if apiErr != nil {
        return nil, model.InternalError(fmt.Errorf(
            "couldn't fetch upserted account by id: %w", apiErr.ToError(),

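A side note on the bun upsert above: bun's On() supplies the ON keyword itself, which is why the new format string begins with "conflict(...)" where the old raw SQL said "on conflict(...)". A minimal standalone shape of the same pattern (a sketch, not repo code):

_, err := db.NewInsert().
    Model(&integration).
    On("CONFLICT (id, provider, org_id) DO UPDATE SET config = EXCLUDED.config").
    Exec(ctx)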
@@ -33,12 +33,12 @@ type Controller struct {
func NewController(sqlStore sqlstore.SQLStore) (
    *Controller, error,
) {
    accountsRepo, err := newCloudProviderAccountsRepository(sqlStore.SQLxDB())
    accountsRepo, err := newCloudProviderAccountsRepository(sqlStore)
    if err != nil {
        return nil, fmt.Errorf("couldn't create cloud provider accounts repo: %w", err)
    }

    serviceConfigRepo, err := newServiceConfigRepository(sqlStore.SQLxDB())
    serviceConfigRepo, err := newServiceConfigRepository(sqlStore)
    if err != nil {
        return nil, fmt.Errorf("couldn't create cloud provider service config repo: %w", err)
    }
@@ -49,19 +49,12 @@ func NewController(sqlStore sqlstore.SQLStore) (
    }, nil
}

type Account struct {
    Id string `json:"id"`
    CloudAccountId string `json:"cloud_account_id"`
    Config AccountConfig `json:"config"`
    Status AccountStatus `json:"status"`
}

type ConnectedAccountsListResponse struct {
    Accounts []Account `json:"accounts"`
    Accounts []types.Account `json:"accounts"`
}

func (c *Controller) ListConnectedAccounts(
    ctx context.Context, cloudProvider string,
    ctx context.Context, orgId string, cloudProvider string,
) (
    *ConnectedAccountsListResponse, *model.ApiError,
) {
@@ -69,14 +62,14 @@ func (c *Controller) ListConnectedAccounts(
        return nil, apiErr
    }

    accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
    accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgId, cloudProvider)
    if apiErr != nil {
        return nil, model.WrapApiError(apiErr, "couldn't list cloud accounts")
    }

    connectedAccounts := []Account{}
    connectedAccounts := []types.Account{}
    for _, a := range accountRecords {
        connectedAccounts = append(connectedAccounts, a.account())
        connectedAccounts = append(connectedAccounts, a.Account())
    }

    return &ConnectedAccountsListResponse{
@@ -88,7 +81,7 @@ type GenerateConnectionUrlRequest struct {
    // Optional. To be specified for updates.
    AccountId *string `json:"account_id,omitempty"`

    AccountConfig AccountConfig `json:"account_config"`
    AccountConfig types.AccountConfig `json:"account_config"`

    AgentConfig SigNozAgentConfig `json:"agent_config"`
}
@@ -109,7 +102,7 @@ type GenerateConnectionUrlResponse struct {
}

func (c *Controller) GenerateConnectionUrl(
    ctx context.Context, cloudProvider string, req GenerateConnectionUrlRequest,
    ctx context.Context, orgId string, cloudProvider string, req GenerateConnectionUrlRequest,
) (*GenerateConnectionUrlResponse, *model.ApiError) {
    // Account connection with a simple connection URL may not be available for all providers.
    if cloudProvider != "aws" {
@@ -117,7 +110,7 @@ func (c *Controller) GenerateConnectionUrl(
    }

    account, apiErr := c.accountsRepo.upsert(
        ctx, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
        ctx, orgId, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
    )
    if apiErr != nil {
        return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
@@ -135,7 +128,7 @@ func (c *Controller) GenerateConnectionUrl(
        "param_SigNozIntegrationAgentVersion": agentVersion,
        "param_SigNozApiUrl": req.AgentConfig.SigNozAPIUrl,
        "param_SigNozApiKey": req.AgentConfig.SigNozAPIKey,
        "param_SigNozAccountId": account.Id,
        "param_SigNozAccountId": account.ID.StringValue(),
        "param_IngestionUrl": req.AgentConfig.IngestionUrl,
        "param_IngestionKey": req.AgentConfig.IngestionKey,
        "stackName": "signoz-integration",
@@ -148,19 +141,19 @@ func (c *Controller) GenerateConnectionUrl(
    }

    return &GenerateConnectionUrlResponse{
        AccountId: account.Id,
        AccountId: account.ID.StringValue(),
        ConnectionUrl: connectionUrl,
    }, nil
}

type AccountStatusResponse struct {
    Id string `json:"id"`
    CloudAccountId *string `json:"cloud_account_id,omitempty"`
    Status AccountStatus `json:"status"`
    Id string `json:"id"`
    CloudAccountId *string `json:"cloud_account_id,omitempty"`
    Status types.AccountStatus `json:"status"`
}

func (c *Controller) GetAccountStatus(
    ctx context.Context, cloudProvider string, accountId string,
    ctx context.Context, orgId string, cloudProvider string, accountId string,
) (
    *AccountStatusResponse, *model.ApiError,
) {
@@ -168,23 +161,23 @@ func (c *Controller) GetAccountStatus(
        return nil, apiErr
    }

    account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
    account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
    if apiErr != nil {
        return nil, apiErr
    }

    resp := AccountStatusResponse{
        Id: account.Id,
        CloudAccountId: account.CloudAccountId,
        Status: account.status(),
        Id: account.ID.StringValue(),
        CloudAccountId: account.AccountID,
        Status: account.Status(),
    }

    return &resp, nil
}

type AgentCheckInRequest struct {
    AccountId string `json:"account_id"`
    CloudAccountId string `json:"cloud_account_id"`
    ID string `json:"account_id"`
    AccountID string `json:"cloud_account_id"`
    // Arbitrary cloud specific Agent data
    Data map[string]any `json:"data,omitempty"`
}
@@ -204,35 +197,35 @@ type IntegrationConfigForAgent struct {
}

func (c *Controller) CheckInAsAgent(
    ctx context.Context, cloudProvider string, req AgentCheckInRequest,
    ctx context.Context, orgId string, cloudProvider string, req AgentCheckInRequest,
) (*AgentCheckInResponse, *model.ApiError) {
    if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
        return nil, apiErr
    }

    existingAccount, apiErr := c.accountsRepo.get(ctx, cloudProvider, req.AccountId)
    if existingAccount != nil && existingAccount.CloudAccountId != nil && *existingAccount.CloudAccountId != req.CloudAccountId {
    existingAccount, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, req.ID)
    if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
        return nil, model.BadRequest(fmt.Errorf(
            "can't check in with new %s account id %s for account %s with existing %s id %s",
            cloudProvider, req.CloudAccountId, existingAccount.Id, cloudProvider, *existingAccount.CloudAccountId,
            cloudProvider, req.AccountID, existingAccount.ID.StringValue(), cloudProvider, *existingAccount.AccountID,
        ))
    }

    existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, cloudProvider, req.CloudAccountId)
    if existingAccount != nil && existingAccount.Id != req.AccountId {
    existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, orgId, cloudProvider, req.AccountID)
    if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
        return nil, model.BadRequest(fmt.Errorf(
            "can't check in to %s account %s with id %s. already connected with id %s",
            cloudProvider, req.CloudAccountId, req.AccountId, existingAccount.Id,
            cloudProvider, req.AccountID, req.ID, existingAccount.ID.StringValue(),
        ))
    }

    agentReport := AgentReport{
    agentReport := types.AgentReport{
        TimestampMillis: time.Now().UnixMilli(),
        Data: req.Data,
    }

    account, apiErr := c.accountsRepo.upsert(
        ctx, cloudProvider, &req.AccountId, nil, &req.CloudAccountId, &agentReport, nil,
        ctx, orgId, cloudProvider, &req.ID, nil, &req.AccountID, &agentReport, nil,
    )
    if apiErr != nil {
        return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
@@ -265,7 +258,7 @@ func (c *Controller) CheckInAsAgent(
    }

    svcConfigs, apiErr := c.serviceConfigRepo.getAllForAccount(
        ctx, cloudProvider, *account.CloudAccountId,
        ctx, orgId, account.ID.StringValue(),
    )
    if apiErr != nil {
        return nil, model.WrapApiError(
@@ -298,54 +291,55 @@ func (c *Controller) CheckInAsAgent(
    }

    return &AgentCheckInResponse{
        AccountId: account.Id,
        CloudAccountId: *account.CloudAccountId,
        AccountId: account.ID.StringValue(),
        CloudAccountId: *account.AccountID,
        RemovedAt: account.RemovedAt,
        IntegrationConfig: agentConfig,
    }, nil
}

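For reference, a hypothetical check-in payload following the JSON tags above: account_id carries SigNoz's own account UUID, while cloud_account_id carries the provider-side identifier (for AWS, the 12-digit account number). All values below are placeholders.

req := AgentCheckInRequest{
    ID:        "<signoz-account-uuid>", // "account_id" in JSON
    AccountID: "123456789012",          // "cloud_account_id" in JSON
    Data:      map[string]any{"agent_version": "v0.0.1"},
}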
type UpdateAccountConfigRequest struct {
|
||||
Config AccountConfig `json:"config"`
|
||||
Config types.AccountConfig `json:"config"`
|
||||
}
|
||||
|
||||
func (c *Controller) UpdateAccountConfig(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
cloudProvider string,
|
||||
accountId string,
|
||||
req UpdateAccountConfigRequest,
|
||||
) (*Account, *model.ApiError) {
|
||||
) (*types.Account, *model.ApiError) {
|
||||
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
accountRecord, apiErr := c.accountsRepo.upsert(
|
||||
ctx, cloudProvider, &accountId, &req.Config, nil, nil, nil,
|
||||
ctx, orgId, cloudProvider, &accountId, &req.Config, nil, nil, nil,
|
||||
)
|
||||
if apiErr != nil {
|
||||
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
|
||||
}
|
||||
|
||||
account := accountRecord.account()
|
||||
account := accountRecord.Account()
|
||||
|
||||
return &account, nil
|
||||
}
|
||||
|
||||
func (c *Controller) DisconnectAccount(
|
||||
ctx context.Context, cloudProvider string, accountId string,
|
||||
) (*AccountRecord, *model.ApiError) {
|
||||
ctx context.Context, orgId string, cloudProvider string, accountId string,
|
||||
) (*types.CloudIntegration, *model.ApiError) {
|
||||
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
|
||||
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
|
||||
if apiErr != nil {
|
||||
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
|
||||
}
|
||||
|
||||
tsNow := time.Now()
|
||||
account, apiErr = c.accountsRepo.upsert(
|
||||
ctx, cloudProvider, &accountId, nil, nil, nil, &tsNow,
|
||||
ctx, orgId, cloudProvider, &accountId, nil, nil, nil, &tsNow,
|
||||
)
|
||||
if apiErr != nil {
|
||||
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
|
||||
@@ -360,6 +354,7 @@ type ListServicesResponse struct {

func (c *Controller) ListServices(
	ctx context.Context,
+	orgID string,
	cloudProvider string,
	cloudAccountId *string,
) (*ListServicesResponse, *model.ApiError) {
@@ -373,10 +368,16 @@ func (c *Controller) ListServices(
		return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
	}

-	svcConfigs := map[string]*CloudServiceConfig{}
+	svcConfigs := map[string]*types.CloudServiceConfig{}
	if cloudAccountId != nil {
+		activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
+			ctx, orgID, cloudProvider, *cloudAccountId,
+		)
+		if apiErr != nil {
+			return nil, model.WrapApiError(apiErr, "couldn't get active account")
+		}
		svcConfigs, apiErr = c.serviceConfigRepo.getAllForAccount(
-			ctx, cloudProvider, *cloudAccountId,
+			ctx, orgID, activeAccount.ID.StringValue(),
		)
		if apiErr != nil {
			return nil, model.WrapApiError(
@@ -400,6 +401,7 @@ func (c *Controller) ListServices(

func (c *Controller) GetServiceDetails(
	ctx context.Context,
+	orgID string,
	cloudProvider string,
	serviceId string,
	cloudAccountId *string,
@@ -415,8 +417,16 @@ func (c *Controller) GetServiceDetails(
	}

	if cloudAccountId != nil {
+
+		activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
+			ctx, orgID, cloudProvider, *cloudAccountId,
+		)
+		if apiErr != nil {
+			return nil, model.WrapApiError(apiErr, "couldn't get active account")
+		}
+
		config, apiErr := c.serviceConfigRepo.get(
-			ctx, cloudProvider, *cloudAccountId, serviceId,
+			ctx, orgID, activeAccount.ID.StringValue(), serviceId,
		)
		if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
			return nil, model.WrapApiError(apiErr, "couldn't fetch service config")
@@ -425,15 +435,22 @@ func (c *Controller) GetServiceDetails(
		if config != nil {
			service.Config = config

+			enabled := false
			if config.Metrics != nil && config.Metrics.Enabled {
-				// add links to service dashboards, making them clickable.
-				for i, d := range service.Assets.Dashboards {
-					dashboardUuid := c.dashboardUuid(
-						cloudProvider, serviceId, d.Id,
-					)
+				enabled = true
			}

+			// add links to service dashboards, making them clickable.
+			for i, d := range service.Assets.Dashboards {
+				dashboardUuid := c.dashboardUuid(
+					cloudProvider, serviceId, d.Id,
+				)
+				if enabled {
					service.Assets.Dashboards[i].Url = fmt.Sprintf(
						"/dashboard/%s", dashboardUuid,
					)
+				} else {
+					service.Assets.Dashboards[i].Url = ""
+				}
			}
		}
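The dashboard-link hunk above is easier to follow in its final form. A reconstructed sketch of the resulting logic, using the same names as the diff (not verbatim source):

```go
// Links are now built for every service dashboard, but only point at a
// real dashboard when metric collection is enabled; otherwise the URL
// is cleared instead of being omitted entirely.
enabled := config.Metrics != nil && config.Metrics.Enabled

for i, d := range service.Assets.Dashboards {
	uuid := c.dashboardUuid(cloudProvider, serviceId, d.Id)
	if enabled {
		service.Assets.Dashboards[i].Url = fmt.Sprintf("/dashboard/%s", uuid)
	} else {
		service.Assets.Dashboards[i].Url = ""
	}
}
```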
@@ -443,17 +460,18 @@ func (c *Controller) GetServiceDetails(
}

type UpdateServiceConfigRequest struct {
-	CloudAccountId string             `json:"cloud_account_id"`
-	Config         CloudServiceConfig `json:"config"`
+	CloudAccountId string                   `json:"cloud_account_id"`
+	Config         types.CloudServiceConfig `json:"config"`
}

type UpdateServiceConfigResponse struct {
-	Id     string             `json:"id"`
-	Config CloudServiceConfig `json:"config"`
+	Id     string                   `json:"id"`
+	Config types.CloudServiceConfig `json:"config"`
}

func (c *Controller) UpdateServiceConfig(
	ctx context.Context,
+	orgID string,
	cloudProvider string,
	serviceId string,
	req UpdateServiceConfigRequest,
@@ -465,7 +483,7 @@ func (c *Controller) UpdateServiceConfig(

	// can only update config for a connected cloud account id
	_, apiErr := c.accountsRepo.getConnectedCloudAccount(
-		ctx, cloudProvider, req.CloudAccountId,
+		ctx, orgID, cloudProvider, req.CloudAccountId,
	)
	if apiErr != nil {
		return nil, model.WrapApiError(apiErr, "couldn't find connected cloud account")
@@ -478,7 +496,7 @@ func (c *Controller) UpdateServiceConfig(
	}

	updatedConfig, apiErr := c.serviceConfigRepo.upsert(
-		ctx, cloudProvider, req.CloudAccountId, serviceId, req.Config,
+		ctx, orgID, cloudProvider, req.CloudAccountId, serviceId, req.Config,
	)
	if apiErr != nil {
		return nil, model.WrapApiError(apiErr, "couldn't update service config")
@@ -492,13 +510,13 @@ func (c *Controller) UpdateServiceConfig(

// All dashboards that are available based on cloud integrations configuration
// across all cloud providers
-func (c *Controller) AvailableDashboards(ctx context.Context) (
+func (c *Controller) AvailableDashboards(ctx context.Context, orgId string) (
	[]types.Dashboard, *model.ApiError,
) {
	allDashboards := []types.Dashboard{}

	for _, provider := range []string{"aws"} {
-		providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, provider)
+		providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, provider)
		if apiErr != nil {
			return nil, model.WrapApiError(
				apiErr, fmt.Sprintf("couldn't get available dashboards for %s", provider),
@@ -512,10 +530,10 @@ func (c *Controller) AvailableDashboards(ctx context.Context) (
}

func (c *Controller) AvailableDashboardsForCloudProvider(
-	ctx context.Context, cloudProvider string,
+	ctx context.Context, orgID string, cloudProvider string,
) ([]types.Dashboard, *model.ApiError) {

-	accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
+	accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgID, cloudProvider)
	if apiErr != nil {
		return nil, model.WrapApiError(apiErr, "couldn't list connected cloud accounts")
	}
@@ -524,9 +542,9 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
	servicesWithAvailableMetrics := map[string]*time.Time{}

	for _, ar := range accountRecords {
-		if ar.CloudAccountId != nil {
+		if ar.AccountID != nil {
			configsBySvcId, apiErr := c.serviceConfigRepo.getAllForAccount(
-				ctx, cloudProvider, *ar.CloudAccountId,
+				ctx, orgID, ar.ID.StringValue(),
			)
			if apiErr != nil {
				return nil, apiErr
@@ -574,6 +592,7 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
}
func (c *Controller) GetDashboardById(
	ctx context.Context,
+	orgId string,
	dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
	cloudProvider, _, _, apiErr := c.parseDashboardUuid(dashboardUuid)
@@ -581,7 +600,7 @@ func (c *Controller) GetDashboardById(
		return nil, apiErr
	}

-	allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, cloudProvider)
+	allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, cloudProvider)
	if apiErr != nil {
		return nil, model.WrapApiError(
			apiErr, fmt.Sprintf("couldn't list available dashboards"),
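Nearly every hunk in this controller follows the same mechanical recipe: an org scope parameter lands immediately after `ctx` and is forwarded verbatim to the repository layer. A self-contained sketch of why the parameter is threaded rather than derived inside the repository (hypothetical `Account` and `repo` types, not the SigNoz ones):

```go
package main

import (
	"context"
	"fmt"
)

type Account struct{ OrgID, ID string }

type repo struct{ accounts []Account }

// get is org-scoped: the org id is always part of the lookup filter,
// mirroring the orgId/orgID parameter threaded through these hunks.
func (r *repo) get(ctx context.Context, orgID, id string) (*Account, error) {
	for _, a := range r.accounts {
		if a.OrgID == orgID && a.ID == id {
			return &a, nil
		}
	}
	return nil, fmt.Errorf("account %s not found in org %s", id, orgID)
}

func main() {
	r := &repo{accounts: []Account{{OrgID: "org-1", ID: "acc-1"}}}
	// A lookup with the wrong org fails even though the id exists,
	// so a handler bug cannot silently widen the query scope.
	_, err := r.get(context.Background(), "org-2", "acc-1")
	fmt.Println(err)
}
```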
@@ -4,23 +4,30 @@ import (
	"context"
	"testing"

+	"github.com/SigNoz/signoz/pkg/query-service/auth"
+	"github.com/SigNoz/signoz/pkg/query-service/constants"
+	"github.com/SigNoz/signoz/pkg/query-service/dao"
	"github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/query-service/utils"
+	"github.com/SigNoz/signoz/pkg/types"
	"github.com/google/uuid"
	"github.com/stretchr/testify/require"
)

func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
	require := require.New(t)
-	sqlStore, _ := utils.NewTestSqliteDB(t)
+	sqlStore := utils.NewQueryServiceDBForTests(t)
	controller, err := NewController(sqlStore)
	require.NoError(err)

+	user, apiErr := createTestUser()
+	require.Nil(apiErr)
+
	// should be able to generate connection url for
	// same account id again with updated config
-	testAccountConfig1 := AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
+	testAccountConfig1 := types.AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
	resp1, apiErr := controller.GenerateConnectionUrl(
-		context.TODO(), "aws", GenerateConnectionUrlRequest{
+		context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
			AccountConfig: testAccountConfig1,
			AgentConfig:   SigNozAgentConfig{Region: "us-east-2"},
		},
@@ -31,14 +38,14 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {

	testAccountId := resp1.AccountId
	account, apiErr := controller.accountsRepo.get(
-		context.TODO(), "aws", testAccountId,
+		context.TODO(), user.OrgID, "aws", testAccountId,
	)
	require.Nil(apiErr)
	require.Equal(testAccountConfig1, *account.Config)

-	testAccountConfig2 := AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
+	testAccountConfig2 := types.AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
	resp2, apiErr := controller.GenerateConnectionUrl(
-		context.TODO(), "aws", GenerateConnectionUrlRequest{
+		context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
			AccountId:     &testAccountId,
			AccountConfig: testAccountConfig2,
			AgentConfig:   SigNozAgentConfig{Region: "us-east-2"},
@@ -48,7 +55,7 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
	require.Equal(testAccountId, resp2.AccountId)

	account, apiErr = controller.accountsRepo.get(
-		context.TODO(), "aws", testAccountId,
+		context.TODO(), user.OrgID, "aws", testAccountId,
	)
	require.Nil(apiErr)
	require.Equal(testAccountConfig2, *account.Config)
@@ -56,18 +63,21 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {

func TestAgentCheckIns(t *testing.T) {
	require := require.New(t)
-	sqlStore, _ := utils.NewTestSqliteDB(t)
+	sqlStore := utils.NewQueryServiceDBForTests(t)
	controller, err := NewController(sqlStore)
	require.NoError(err)

+	user, apiErr := createTestUser()
+	require.Nil(apiErr)
+
	// An agent should be able to check in from a cloud account even
	// if no connection url was requested (no account with agent's account id exists)
	testAccountId1 := uuid.NewString()
	testCloudAccountId1 := "546311234"
	resp1, apiErr := controller.CheckInAsAgent(
-		context.TODO(), "aws", AgentCheckInRequest{
-			AccountId:      testAccountId1,
-			CloudAccountId: testCloudAccountId1,
+		context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
+			ID:        testAccountId1,
+			AccountID: testCloudAccountId1,
		},
	)
	require.Nil(apiErr)
@@ -78,9 +88,9 @@ func TestAgentCheckIns(t *testing.T) {
	// cloud account id for the same account.
	testCloudAccountId2 := "99999999"
	_, apiErr = controller.CheckInAsAgent(
-		context.TODO(), "aws", AgentCheckInRequest{
-			AccountId:      testAccountId1,
-			CloudAccountId: testCloudAccountId2,
+		context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
+			ID:        testAccountId1,
+			AccountID: testCloudAccountId2,
		},
	)
	require.NotNil(apiErr)
@@ -90,18 +100,18 @@ func TestAgentCheckIns(t *testing.T) {
	// i.e. there can't be 2 connected account records for the same cloud account id
	// at any point in time.
	existingConnected, apiErr := controller.accountsRepo.getConnectedCloudAccount(
-		context.TODO(), "aws", testCloudAccountId1,
+		context.TODO(), user.OrgID, "aws", testCloudAccountId1,
	)
	require.Nil(apiErr)
	require.NotNil(existingConnected)
-	require.Equal(testCloudAccountId1, *existingConnected.CloudAccountId)
+	require.Equal(testCloudAccountId1, *existingConnected.AccountID)
	require.Nil(existingConnected.RemovedAt)

	testAccountId2 := uuid.NewString()
	_, apiErr = controller.CheckInAsAgent(
-		context.TODO(), "aws", AgentCheckInRequest{
-			AccountId:      testAccountId2,
-			CloudAccountId: testCloudAccountId1,
+		context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
+			ID:        testAccountId2,
+			AccountID: testCloudAccountId1,
		},
	)
	require.NotNil(apiErr)
@@ -109,29 +119,29 @@ func TestAgentCheckIns(t *testing.T) {
	// After disconnecting existing account record, the agent should be able to
	// connected for a particular cloud account id
	_, apiErr = controller.DisconnectAccount(
-		context.TODO(), "aws", testAccountId1,
+		context.TODO(), user.OrgID, "aws", testAccountId1,
	)

	existingConnected, apiErr = controller.accountsRepo.getConnectedCloudAccount(
-		context.TODO(), "aws", testCloudAccountId1,
+		context.TODO(), user.OrgID, "aws", testCloudAccountId1,
	)
	require.Nil(existingConnected)
	require.NotNil(apiErr)
	require.Equal(model.ErrorNotFound, apiErr.Type())

	_, apiErr = controller.CheckInAsAgent(
-		context.TODO(), "aws", AgentCheckInRequest{
-			AccountId:      testAccountId2,
-			CloudAccountId: testCloudAccountId1,
+		context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
+			ID:        testAccountId2,
+			AccountID: testCloudAccountId1,
		},
	)
	require.Nil(apiErr)

	// should be able to keep checking in
	_, apiErr = controller.CheckInAsAgent(
-		context.TODO(), "aws", AgentCheckInRequest{
-			AccountId:      testAccountId2,
-			CloudAccountId: testCloudAccountId1,
+		context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
+			ID:        testAccountId2,
+			AccountID: testCloudAccountId1,
		},
	)
	require.Nil(apiErr)
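Alongside the org threading, these tests reflect a field rename on the check-in request: `AccountId` becomes `ID` and `CloudAccountId` becomes `AccountID`. A hypothetical before/after of the struct as implied by the test usage (the real definition lives elsewhere in the package and is not shown in this diff):

```go
// Hypothetical reconstruction for orientation only.
type AgentCheckInRequestOld struct {
	AccountId      string // SigNoz-side record id (a uuid in the tests)
	CloudAccountId string // the cloud provider's account id, e.g. "546311234"
}

type AgentCheckInRequestNew struct {
	ID        string // was AccountId
	AccountID string // was CloudAccountId
}
```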
@@ -139,13 +149,16 @@ func TestAgentCheckIns(t *testing.T) {

func TestCantDisconnectNonExistentAccount(t *testing.T) {
	require := require.New(t)
-	sqlStore, _ := utils.NewTestSqliteDB(t)
+	sqlStore := utils.NewQueryServiceDBForTests(t)
	controller, err := NewController(sqlStore)
	require.NoError(err)

+	user, apiErr := createTestUser()
+	require.Nil(apiErr)
+
	// Attempting to disconnect a non-existent account should return error
	account, apiErr := controller.DisconnectAccount(
-		context.TODO(), "aws", uuid.NewString(),
+		context.TODO(), user.OrgID, "aws", uuid.NewString(),
	)
	require.NotNil(apiErr)
	require.Equal(model.ErrorNotFound, apiErr.Type())
@@ -154,15 +167,23 @@ func TestCantDisconnectNonExistentAccount(t *testing.T) {

func TestConfigureService(t *testing.T) {
	require := require.New(t)
-	sqlStore, _ := utils.NewTestSqliteDB(t)
+	sqlStore := utils.NewQueryServiceDBForTests(t)
	controller, err := NewController(sqlStore)
	require.NoError(err)

+	user, apiErr := createTestUser()
+	require.Nil(apiErr)
+
+	// create a connected account
	testCloudAccountId := "546311234"
+	testConnectedAccount := makeTestConnectedAccount(t, user.OrgID, controller, testCloudAccountId)
+	require.Nil(testConnectedAccount.RemovedAt)
+	require.NotEmpty(testConnectedAccount.AccountID)
+	require.Equal(testCloudAccountId, *testConnectedAccount.AccountID)
+
	// should start out without any service config
	svcListResp, apiErr := controller.ListServices(
-		context.TODO(), "aws", &testCloudAccountId,
+		context.TODO(), user.OrgID, "aws", &testCloudAccountId,
	)
	require.Nil(apiErr)

@@ -170,25 +191,20 @@ func TestConfigureService(t *testing.T) {
	require.Nil(svcListResp.Services[0].Config)

	svcDetails, apiErr := controller.GetServiceDetails(
-		context.TODO(), "aws", testSvcId, &testCloudAccountId,
+		context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
	)
	require.Nil(apiErr)
	require.Equal(testSvcId, svcDetails.Id)
	require.Nil(svcDetails.Config)

	// should be able to configure a service for a connected account
-	testConnectedAccount := makeTestConnectedAccount(t, controller, testCloudAccountId)
-	require.Nil(testConnectedAccount.RemovedAt)
-	require.NotNil(testConnectedAccount.CloudAccountId)
-	require.Equal(testCloudAccountId, *testConnectedAccount.CloudAccountId)
-
-	testSvcConfig := CloudServiceConfig{
-		Metrics: &CloudServiceMetricsConfig{
+	testSvcConfig := types.CloudServiceConfig{
+		Metrics: &types.CloudServiceMetricsConfig{
			Enabled: true,
		},
	}
	updateSvcConfigResp, apiErr := controller.UpdateServiceConfig(
-		context.TODO(), "aws", testSvcId, UpdateServiceConfigRequest{
+		context.TODO(), user.OrgID, "aws", testSvcId, UpdateServiceConfigRequest{
			CloudAccountId: testCloudAccountId,
			Config:         testSvcConfig,
		},
@@ -198,14 +214,14 @@ func TestConfigureService(t *testing.T) {
	require.Equal(testSvcConfig, updateSvcConfigResp.Config)

	svcDetails, apiErr = controller.GetServiceDetails(
-		context.TODO(), "aws", testSvcId, &testCloudAccountId,
+		context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
	)
	require.Nil(apiErr)
	require.Equal(testSvcId, svcDetails.Id)
	require.Equal(testSvcConfig, *svcDetails.Config)

	svcListResp, apiErr = controller.ListServices(
-		context.TODO(), "aws", &testCloudAccountId,
+		context.TODO(), user.OrgID, "aws", &testCloudAccountId,
	)
	require.Nil(apiErr)
	for _, svc := range svcListResp.Services {
@@ -216,12 +232,12 @@ func TestConfigureService(t *testing.T) {

	// should not be able to configure service after cloud account has been disconnected
	_, apiErr = controller.DisconnectAccount(
-		context.TODO(), "aws", testConnectedAccount.Id,
+		context.TODO(), user.OrgID, "aws", testConnectedAccount.ID.StringValue(),
	)
	require.Nil(apiErr)

	_, apiErr = controller.UpdateServiceConfig(
-		context.TODO(), "aws", testSvcId,
+		context.TODO(), user.OrgID, "aws", testSvcId,
		UpdateServiceConfigRequest{
			CloudAccountId: testCloudAccountId,
			Config:         testSvcConfig,
@@ -231,7 +247,7 @@ func TestConfigureService(t *testing.T) {

	// should not be able to configure a service for a cloud account id that is not connected yet
	_, apiErr = controller.UpdateServiceConfig(
-		context.TODO(), "aws", testSvcId,
+		context.TODO(), user.OrgID, "aws", testSvcId,
		UpdateServiceConfigRequest{
			CloudAccountId: "9999999999",
			Config:         testSvcConfig,
@@ -241,7 +257,7 @@ func TestConfigureService(t *testing.T) {

	// should not be able to set config for an unsupported service
	_, apiErr = controller.UpdateServiceConfig(
-		context.TODO(), "aws", "bad-service", UpdateServiceConfigRequest{
+		context.TODO(), user.OrgID, "aws", "bad-service", UpdateServiceConfigRequest{
			CloudAccountId: testCloudAccountId,
			Config:         testSvcConfig,
		},
@@ -250,22 +266,54 @@ func TestConfigureService(t *testing.T) {

}

-func makeTestConnectedAccount(t *testing.T, controller *Controller, cloudAccountId string) *AccountRecord {
+func makeTestConnectedAccount(t *testing.T, orgId string, controller *Controller, cloudAccountId string) *types.CloudIntegration {
	require := require.New(t)

	// a check in from SigNoz agent creates or updates a connected account.
	testAccountId := uuid.NewString()
	resp, apiErr := controller.CheckInAsAgent(
-		context.TODO(), "aws", AgentCheckInRequest{
-			AccountId:      testAccountId,
-			CloudAccountId: cloudAccountId,
+		context.TODO(), orgId, "aws", AgentCheckInRequest{
+			ID:        testAccountId,
+			AccountID: cloudAccountId,
		},
	)
	require.Nil(apiErr)
	require.Equal(testAccountId, resp.AccountId)
	require.Equal(cloudAccountId, resp.CloudAccountId)

-	acc, err := controller.accountsRepo.get(context.TODO(), "aws", resp.AccountId)
+	acc, err := controller.accountsRepo.get(context.TODO(), orgId, "aws", resp.AccountId)
	require.Nil(err)
	return acc
}

+func createTestUser() (*types.User, *model.ApiError) {
+	// Create a test user for auth
+	ctx := context.Background()
+	org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
+		Name: "test",
+	})
+	if apiErr != nil {
+		return nil, apiErr
+	}
+
+	group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
+	if apiErr != nil {
+		return nil, apiErr
+	}
+
+	auth.InitAuthCache(ctx)
+
+	userId := uuid.NewString()
+	return dao.DB().CreateUser(
+		ctx,
+		&types.User{
+			ID:       userId,
+			Name:     "test",
+			Email:    userId[:8] + "test@test.com",
+			Password: "test",
+			OrgID:    org.ID,
+			GroupID:  group.ID,
+		},
+		true,
+	)
+}
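The new `createTestUser` helper gives every test an organization to scope against, and `makeTestConnectedAccount` now takes that org id. A sketch of how a new test in this package would combine them (assuming the same helpers and signatures shown above):

```go
func TestDisconnectIsOrgScoped(t *testing.T) {
	require := require.New(t)
	sqlStore := utils.NewQueryServiceDBForTests(t)
	controller, err := NewController(sqlStore)
	require.NoError(err)

	user, apiErr := createTestUser()
	require.Nil(apiErr)

	// connect an account for this org, then disconnect it within the same org
	account := makeTestConnectedAccount(t, user.OrgID, controller, "123456789012")
	_, apiErr = controller.DisconnectAccount(
		context.TODO(), user.OrgID, "aws", account.ID.StringValue(),
	)
	require.Nil(apiErr)
}
```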
@@ -1,123 +1,11 @@
package cloudintegrations

import (
-	"database/sql/driver"
-	"encoding/json"
-	"fmt"
-	"time"

	"github.com/SigNoz/signoz/pkg/types"
)

-// Represents a cloud provider account for cloud integrations
-type AccountRecord struct {
-	CloudProvider   string         `json:"cloud_provider" db:"cloud_provider"`
-	Id              string         `json:"id" db:"id"`
-	Config          *AccountConfig `json:"config" db:"config_json"`
-	CloudAccountId  *string        `json:"cloud_account_id" db:"cloud_account_id"`
-	LastAgentReport *AgentReport   `json:"last_agent_report" db:"last_agent_report_json"`
-	CreatedAt       time.Time      `json:"created_at" db:"created_at"`
-	RemovedAt       *time.Time     `json:"removed_at" db:"removed_at"`
-}
-
-type AccountConfig struct {
-	EnabledRegions []string `json:"regions"`
-}
-
-func DefaultAccountConfig() AccountConfig {
-	return AccountConfig{
-		EnabledRegions: []string{},
-	}
-}
-
-// For serializing from db
-func (c *AccountConfig) Scan(src any) error {
-	data, ok := src.([]byte)
-	if !ok {
-		return fmt.Errorf("tried to scan from %T instead of bytes", src)
-	}
-
-	return json.Unmarshal(data, &c)
-}
-
-// For serializing to db
-func (c *AccountConfig) Value() (driver.Value, error) {
-	if c == nil {
-		return nil, nil
-	}
-
-	serialized, err := json.Marshal(c)
-	if err != nil {
-		return nil, fmt.Errorf(
-			"couldn't serialize cloud account config to JSON: %w", err,
-		)
-	}
-	return serialized, nil
-}
-
-type AgentReport struct {
-	TimestampMillis int64          `json:"timestamp_millis"`
-	Data            map[string]any `json:"data"`
-}
-
-// For serializing from db
-func (r *AgentReport) Scan(src any) error {
-	data, ok := src.([]byte)
-	if !ok {
-		return fmt.Errorf("tried to scan from %T instead of bytes", src)
-	}
-
-	return json.Unmarshal(data, &r)
-}
-
-// For serializing to db
-func (r *AgentReport) Value() (driver.Value, error) {
-	if r == nil {
-		return nil, nil
-	}
-
-	serialized, err := json.Marshal(r)
-	if err != nil {
-		return nil, fmt.Errorf(
-			"couldn't serialize agent report to JSON: %w", err,
-		)
-	}
-	return serialized, nil
-}
-
-type AccountStatus struct {
-	Integration AccountIntegrationStatus `json:"integration"`
-}
-
-type AccountIntegrationStatus struct {
-	LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
-}
-
-func (a *AccountRecord) status() AccountStatus {
-	status := AccountStatus{}
-	if a.LastAgentReport != nil {
-		lastHeartbeat := a.LastAgentReport.TimestampMillis
-		status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
-	}
-	return status
-}
-
-func (a *AccountRecord) account() Account {
-	ca := Account{Id: a.Id, Status: a.status()}
-
-	if a.CloudAccountId != nil {
-		ca.CloudAccountId = *a.CloudAccountId
-	}
-
-	if a.Config != nil {
-		ca.Config = *a.Config
-	} else {
-		ca.Config = DefaultAccountConfig()
-	}
-
-	return ca
-}
-
type CloudServiceSummary struct {
	Id    string `json:"id"`
	Title string `json:"title"`
@@ -125,7 +13,7 @@ type CloudServiceSummary struct {

	// Present only if the service has been configured in the
	// context of a cloud provider account.
-	Config *CloudServiceConfig `json:"config,omitempty"`
+	Config *types.CloudServiceConfig `json:"config,omitempty"`
}

type CloudServiceDetails struct {
@@ -144,44 +32,6 @@ type CloudServiceDetails struct {
	TelemetryCollectionStrategy *CloudTelemetryCollectionStrategy `json:"telemetry_collection_strategy"`
}

-type CloudServiceConfig struct {
-	Logs    *CloudServiceLogsConfig    `json:"logs,omitempty"`
-	Metrics *CloudServiceMetricsConfig `json:"metrics,omitempty"`
-}
-
-// For serializing from db
-func (c *CloudServiceConfig) Scan(src any) error {
-	data, ok := src.([]byte)
-	if !ok {
-		return fmt.Errorf("tried to scan from %T instead of bytes", src)
-	}
-
-	return json.Unmarshal(data, &c)
-}
-
-// For serializing to db
-func (c *CloudServiceConfig) Value() (driver.Value, error) {
-	if c == nil {
-		return nil, nil
-	}
-
-	serialized, err := json.Marshal(c)
-	if err != nil {
-		return nil, fmt.Errorf(
-			"couldn't serialize cloud service config to JSON: %w", err,
-		)
-	}
-	return serialized, nil
-}
-
-type CloudServiceLogsConfig struct {
-	Enabled bool `json:"enabled"`
-}
-
-type CloudServiceMetricsConfig struct {
-	Enabled bool `json:"enabled"`
-}
-
type CloudServiceAssets struct {
	Dashboards []CloudServiceDashboard `json:"dashboards"`
}
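The deleted blocks above are a textbook `sql.Scanner`/`driver.Valuer` pair for persisting a struct as a JSON column; the replacement types in `pkg/types` presumably carry the same behavior. For reference, the pattern in isolation as a runnable sketch (generic names, not SigNoz code):

```go
package main

import (
	"database/sql/driver"
	"encoding/json"
	"fmt"
)

type Config struct {
	EnabledRegions []string `json:"regions"`
}

// Scan implements sql.Scanner: decode the JSON bytes read from the db.
func (c *Config) Scan(src any) error {
	data, ok := src.([]byte)
	if !ok {
		return fmt.Errorf("tried to scan from %T instead of bytes", src)
	}
	return json.Unmarshal(data, c)
}

// Value implements driver.Valuer: encode the struct to JSON for storage.
// []byte satisfies driver.Value, so json.Marshal's result can be returned directly.
func (c *Config) Value() (driver.Value, error) {
	if c == nil {
		return nil, nil
	}
	return json.Marshal(c)
}

func main() {
	v, _ := (&Config{EnabledRegions: []string{"us-east-1"}}).Value()
	fmt.Printf("%s\n", v) // {"regions":["us-east-1"]}
}
```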
@@ -4,161 +4,161 @@ import (
	"context"
	"database/sql"
	"fmt"
+	"time"

	"github.com/SigNoz/signoz/pkg/query-service/model"
-	"github.com/jmoiron/sqlx"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/valuer"
)

type serviceConfigRepository interface {
	get(
		ctx context.Context,
-		cloudProvider string,
+		orgID string,
		cloudAccountId string,
-		serviceId string,
-	) (*CloudServiceConfig, *model.ApiError)
+		serviceType string,
+	) (*types.CloudServiceConfig, *model.ApiError)

	upsert(
		ctx context.Context,
+		orgID string,
		cloudProvider string,
		cloudAccountId string,
		serviceId string,
-		config CloudServiceConfig,
-	) (*CloudServiceConfig, *model.ApiError)
+		config types.CloudServiceConfig,
+	) (*types.CloudServiceConfig, *model.ApiError)

	getAllForAccount(
		ctx context.Context,
-		cloudProvider string,
+		orgID string,
		cloudAccountId string,
	) (
-		configsBySvcId map[string]*CloudServiceConfig,
+		configsBySvcId map[string]*types.CloudServiceConfig,
		apiErr *model.ApiError,
	)
}

-func newServiceConfigRepository(db *sqlx.DB) (
+func newServiceConfigRepository(store sqlstore.SQLStore) (
	*serviceConfigSQLRepository, error,
) {
	return &serviceConfigSQLRepository{
-		db: db,
+		store: store,
	}, nil
}

type serviceConfigSQLRepository struct {
-	db *sqlx.DB
+	store sqlstore.SQLStore
}

func (r *serviceConfigSQLRepository) get(
	ctx context.Context,
-	cloudProvider string,
+	orgID string,
	cloudAccountId string,
-	serviceId string,
-) (*CloudServiceConfig, *model.ApiError) {
+	serviceType string,
+) (*types.CloudServiceConfig, *model.ApiError) {

-	var result CloudServiceConfig
+	var result types.CloudIntegrationService

-	err := r.db.GetContext(
-		ctx, &result, `
-			select
-				config_json
-			from cloud_integrations_service_configs
-			where
-				cloud_provider=$1
-				and cloud_account_id=$2
-				and service_id=$3
-		`,
-		cloudProvider, cloudAccountId, serviceId,
-	)
+	err := r.store.BunDB().NewSelect().
+		Model(&result).
+		Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
+		Where("ci.org_id = ?", orgID).
+		Where("ci.id = ?", cloudAccountId).
+		Where("cis.type = ?", serviceType).
+		Scan(ctx)

	if err == sql.ErrNoRows {
		return nil, model.NotFoundError(fmt.Errorf(
-			"couldn't find %s %s config for %s",
-			cloudProvider, serviceId, cloudAccountId,
+			"couldn't find config for cloud account %s",
+			cloudAccountId,
		))
	} else if err != nil {
		return nil, model.InternalError(fmt.Errorf(
			"couldn't query cloud service config: %w", err,
		))
	}

-	return &result, nil
+	return &result.Config, nil
}

func (r *serviceConfigSQLRepository) upsert(
	ctx context.Context,
+	orgID string,
	cloudProvider string,
	cloudAccountId string,
	serviceId string,
-	config CloudServiceConfig,
-) (*CloudServiceConfig, *model.ApiError) {
+	config types.CloudServiceConfig,
+) (*types.CloudServiceConfig, *model.ApiError) {

-	query := `
-		INSERT INTO cloud_integrations_service_configs (
-			cloud_provider,
-			cloud_account_id,
-			service_id,
-			config_json
-		) values ($1, $2, $3, $4)
-		on conflict(cloud_provider, cloud_account_id, service_id)
-			do update set config_json=excluded.config_json
-	`
-	_, dbErr := r.db.ExecContext(
-		ctx, query,
-		cloudProvider, cloudAccountId, serviceId, &config,
-	)
-	if dbErr != nil {
+	// get cloud integration id from account id
+	// if the account is not connected, we don't need to upsert the config
+	var cloudIntegrationId string
+	err := r.store.BunDB().NewSelect().
+		Model((*types.CloudIntegration)(nil)).
+		Column("id").
+		Where("provider = ?", cloudProvider).
+		Where("account_id = ?", cloudAccountId).
+		Where("org_id = ?", orgID).
+		Where("removed_at is NULL").
+		Where("last_agent_report is not NULL").
+		Scan(ctx, &cloudIntegrationId)
+
+	if err != nil {
		return nil, model.InternalError(fmt.Errorf(
-			"could not upsert cloud service config: %w", dbErr,
+			"couldn't query cloud integration id: %w", err,
		))
	}

-	upsertedConfig, apiErr := r.get(ctx, cloudProvider, cloudAccountId, serviceId)
-	if apiErr != nil {
+	serviceConfig := types.CloudIntegrationService{
+		Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
+		TimeAuditable: types.TimeAuditable{
+			CreatedAt: time.Now(),
+			UpdatedAt: time.Now(),
+		},
+		Config:             config,
+		Type:               serviceId,
+		CloudIntegrationID: cloudIntegrationId,
+	}
+	_, err = r.store.BunDB().NewInsert().
+		Model(&serviceConfig).
+		On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
+		Exec(ctx)
+	if err != nil {
		return nil, model.InternalError(fmt.Errorf(
-			"couldn't fetch upserted service config: %w", apiErr.ToError(),
+			"could not upsert cloud service config: %w", err,
		))
	}

-	return upsertedConfig, nil
+	return &serviceConfig.Config, nil
}

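The rewritten upsert swaps the hand-written `INSERT ... ON CONFLICT` statement for bun's query builder. A minimal, self-contained sketch of the same idiom against an in-memory SQLite database (hypothetical `KV` model; assumes the uptrace/bun modules are available):

```go
package main

import (
	"context"
	"database/sql"
	"fmt"

	"github.com/uptrace/bun"
	"github.com/uptrace/bun/dialect/sqlitedialect"
	"github.com/uptrace/bun/driver/sqliteshim"
)

type KV struct {
	bun.BaseModel `bun:"table:kv"`
	Key           string `bun:"key,pk"`
	Value         string `bun:"value"`
}

func main() {
	sqldb, err := sql.Open(sqliteshim.ShimName, "file::memory:?cache=shared")
	if err != nil {
		panic(err)
	}
	db := bun.NewDB(sqldb, sqlitedialect.New())
	ctx := context.Background()

	if _, err := db.NewCreateTable().Model((*KV)(nil)).Exec(ctx); err != nil {
		panic(err)
	}

	// First insert creates the row; repeating it with a new value updates
	// it in place, just like the service-config upsert above.
	row := &KV{Key: "svc"}
	for _, v := range []string{"v1", "v2"} {
		row.Value = v
		if _, err := db.NewInsert().
			Model(row).
			On("conflict(key) do update set value=excluded.value").
			Exec(ctx); err != nil {
			panic(err)
		}
	}

	out := new(KV)
	_ = db.NewSelect().Model(out).Where("key = ?", "svc").Scan(ctx)
	fmt.Println(out.Value) // v2
}
```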
func (r *serviceConfigSQLRepository) getAllForAccount(
	ctx context.Context,
-	cloudProvider string,
+	orgID string,
	cloudAccountId string,
-) (map[string]*CloudServiceConfig, *model.ApiError) {
+) (map[string]*types.CloudServiceConfig, *model.ApiError) {

-	type ScannedServiceConfigRecord struct {
-		ServiceId string             `db:"service_id"`
-		Config    CloudServiceConfig `db:"config_json"`
-	}
+	serviceConfigs := []types.CloudIntegrationService{}

-	records := []ScannedServiceConfigRecord{}
-
-	err := r.db.SelectContext(
-		ctx, &records, `
-			select
-				service_id,
-				config_json
-			from cloud_integrations_service_configs
-			where
-				cloud_provider=$1
-				and cloud_account_id=$2
-		`,
-		cloudProvider, cloudAccountId,
-	)
+	err := r.store.BunDB().NewSelect().
+		Model(&serviceConfigs).
+		Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
+		Where("ci.id = ?", cloudAccountId).
+		Where("ci.org_id = ?", orgID).
+		Scan(ctx)
	if err != nil {
		return nil, model.InternalError(fmt.Errorf(
			"could not query service configs from db: %w", err,
		))
	}

-	result := map[string]*CloudServiceConfig{}
+	result := map[string]*types.CloudServiceConfig{}

-	for _, r := range records {
-		result[r.ServiceId] = &r.Config
+	for _, r := range serviceConfigs {
+		result[r.Type] = &r.Config
	}

	return result, nil

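Both `get` and `getAllForAccount` now express the org check as a JOIN against the parent `cloud_integration` row instead of extra columns on one table. A compile-oriented sketch of that shape; `CloudIntegrationService`, the `cis` table alias, and `CloudServiceConfig` are assumptions read off the diff, not verified definitions:

```go
// Sketch: bun joins the child rows to their parent integration and
// filters by org there, so tenant scoping lives in exactly one place.
// Note that bun's Scan still surfaces sql.ErrNoRows on an empty result,
// which is why the err == sql.ErrNoRows branch above survives the rewrite.
func getServiceConfig(
	ctx context.Context, db *bun.DB, orgID, accountID, serviceType string,
) (*CloudServiceConfig, error) {
	var svc CloudIntegrationService
	err := db.NewSelect().
		Model(&svc).
		Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
		Where("ci.org_id = ?", orgID).
		Where("ci.id = ?", accountID).
		Where("cis.type = ?", serviceType).
		Scan(ctx)
	if errors.Is(err, sql.ErrNoRows) {
		return nil, fmt.Errorf("no config for cloud account %s", accountID)
	}
	if err != nil {
		return nil, err
	}
	return &svc.Config, nil
}
```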
@@ -22,6 +22,7 @@ import (
	errorsV2 "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/modules/preference"
+	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
	"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/valuer"
@@ -37,7 +38,6 @@ import (
	"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
	"github.com/SigNoz/signoz/pkg/query-service/app/explorer"
	"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
-	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
	queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
	"github.com/SigNoz/signoz/pkg/query-service/app/integrations/thirdPartyApi"
	"github.com/SigNoz/signoz/pkg/query-service/app/logs"
@@ -200,7 +200,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
		Cache:             opts.Cache,
		KeyGenerator:      queryBuilder.NewKeyGenerator(),
		FluxInterval:      opts.FluxInterval,
-		FeatureLookup:     opts.FeatureFlags,
		UseLogsNewSchema:  opts.UseLogsNewSchema,
		UseTraceNewSchema: opts.UseTraceNewSchema,
	}
@@ -210,7 +209,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
		Cache:             opts.Cache,
		KeyGenerator:      queryBuilder.NewKeyGenerator(),
		FluxInterval:      opts.FluxInterval,
-		FeatureLookup:     opts.FeatureFlags,
		UseLogsNewSchema:  opts.UseLogsNewSchema,
		UseTraceNewSchema: opts.UseTraceNewSchema,
	}
@@ -279,7 +277,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
		BuildTraceQuery: tracesQueryBuilder,
		BuildLogQuery:   logsQueryBuilder,
	}
-	aH.queryBuilder = queryBuilder.NewQueryBuilder(builderOpts, aH.featureFlags)
+	aH.queryBuilder = queryBuilder.NewQueryBuilder(builderOpts)

	// check if at least one user is created
	hasUsers, err := aH.appDao.GetUsersWithOpts(context.Background(), 1)
@@ -551,6 +549,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *AuthMiddleware) {
	router.HandleFunc("/api/v1/services/list", am.ViewAccess(aH.getServicesList)).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/service/top_operations", am.ViewAccess(aH.getTopOperations)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/service/top_level_operations", am.ViewAccess(aH.getServicesTopLevelOps)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(aH.SearchTraces)).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/usage", am.ViewAccess(aH.getUsage)).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/dependency_graph", am.ViewAccess(aH.dependencyGraph)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/settings/ttl", am.AdminAccess(aH.setTTL)).Methods(http.MethodPost)
@@ -1083,14 +1082,14 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
	}

	ic := aH.IntegrationsController
-	installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context())
+	installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context(), claims.OrgID)
	if err != nil {
		zap.L().Error("failed to get dashboards for installed integrations", zap.Error(err))
	} else {
		allDashboards = append(allDashboards, installedIntegrationDashboards...)
	}

-	cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context())
+	cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context(), claims.OrgID)
	if err != nil {
		zap.L().Error("failed to get cloud dashboards", zap.Error(err))
	} else {
@@ -1268,7 +1267,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {

	if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(uuid) {
		dashboard, apiError = aH.CloudIntegrationsController.GetDashboardById(
-			r.Context(), uuid,
+			r.Context(), claims.OrgID, uuid,
		)
		if apiError != nil {
			RespondError(w, apiError, nil)
@@ -1277,7 +1276,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {

	} else {
		dashboard, apiError = aH.IntegrationsController.GetInstalledIntegrationDashboardById(
-			r.Context(), uuid,
+			r.Context(), claims.OrgID, uuid,
		)
		if apiError != nil {
			RespondError(w, apiError, nil)
@@ -1726,6 +1725,22 @@ func (aH *APIHandler) getServicesList(w http.ResponseWriter, r *http.Request) {

}

+func (aH *APIHandler) SearchTraces(w http.ResponseWriter, r *http.Request) {
+	params, err := ParseSearchTracesParams(r)
+	if err != nil {
+		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
+		return
+	}
+
+	result, err := aH.reader.SearchTraces(r.Context(), params)
+	if aH.HandleError(w, err, http.StatusBadRequest) {
+		return
+	}
+
+	aH.WriteJSON(w, r, result)
+
+}
+
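The added `SearchTraces` handler, together with the `/api/v1/traces/{traceId}` route registered above, follows the file's standard read-path shape: parse params, call the reader, write JSON. A standalone miniature of that shape using gorilla/mux (illustrative only; `traceHandler` and the `fetch` callback are hypothetical):

```go
package main

import (
	"context"
	"encoding/json"
	"net/http"

	"github.com/gorilla/mux"
)

// traceHandler mirrors the parse -> fetch -> respond structure of the
// handlers in this file, without any SigNoz-specific dependencies.
func traceHandler(fetch func(ctx context.Context, id string) (any, error)) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		id := mux.Vars(r)["traceId"]
		if id == "" {
			http.Error(w, "traceId is required", http.StatusBadRequest)
			return
		}
		result, err := fetch(r.Context(), id)
		if err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		_ = json.NewEncoder(w).Encode(result)
	}
}

func main() {
	r := mux.NewRouter()
	r.HandleFunc("/api/v1/traces/{traceId}", traceHandler(
		func(ctx context.Context, id string) (any, error) {
			return map[string]string{"traceId": id}, nil
		},
	)).Methods(http.MethodGet)
	_ = http.ListenAndServe(":8080", r)
}
```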
func (aH *APIHandler) GetWaterfallSpansForTraceWithMetadata(w http.ResponseWriter, r *http.Request) {
	traceID := mux.Vars(r)["traceId"]
	if traceID == "" {
@@ -2192,6 +2207,11 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
		old.ProfilePictureURL = update.ProfilePictureURL
	}

+	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(old.Email)) {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be updated"))
+		return
+	}
+
	_, apiErr = dao.DB().EditUser(ctx, &types.User{
		ID:   old.ID,
		Name: old.Name,
@@ -2223,6 +2243,11 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
		return
	}

+	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email)) {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be updated"))
+		return
+	}
+
	if user == nil {
		RespondError(w, &model.ApiError{
			Typ: model.ErrorNotFound,
@@ -3482,9 +3507,14 @@ func (aH *APIHandler) ListIntegrations(
	for k, values := range r.URL.Query() {
		params[k] = values[0]
	}
+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}

	resp, apiErr := aH.IntegrationsController.ListIntegrations(
-		r.Context(), params,
+		r.Context(), claims.OrgID, params,
	)
	if apiErr != nil {
		RespondError(w, apiErr, "Failed to fetch integrations")
@@ -3497,8 +3527,13 @@ func (aH *APIHandler) GetIntegration(
	w http.ResponseWriter, r *http.Request,
) {
	integrationId := mux.Vars(r)["integrationId"]
+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
	integration, apiErr := aH.IntegrationsController.GetIntegration(
-		r.Context(), integrationId,
+		r.Context(), claims.OrgID, integrationId,
	)
	if apiErr != nil {
		RespondError(w, apiErr, "Failed to fetch integration details")
@@ -3512,8 +3547,13 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
	w http.ResponseWriter, r *http.Request,
) {
	integrationId := mux.Vars(r)["integrationId"]
+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
	isInstalled, apiErr := aH.IntegrationsController.IsIntegrationInstalled(
-		r.Context(), integrationId,
+		r.Context(), claims.OrgID, integrationId,
	)
	if apiErr != nil {
		RespondError(w, apiErr, "failed to check if integration is installed")
@@ -3527,7 +3567,7 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
	}

	connectionTests, apiErr := aH.IntegrationsController.GetIntegrationConnectionTests(
-		r.Context(), integrationId,
+		r.Context(), claims.OrgID, integrationId,
	)
	if apiErr != nil {
		RespondError(w, apiErr, "failed to fetch integration connection tests")
@@ -3726,8 +3766,14 @@ func (aH *APIHandler) InstallIntegration(
		return
	}

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	integration, apiErr := aH.IntegrationsController.Install(
-		r.Context(), &req,
+		r.Context(), claims.OrgID, &req,
	)
	if apiErr != nil {
		RespondError(w, apiErr, nil)
@@ -3748,7 +3794,13 @@ func (aH *APIHandler) UninstallIntegration(
		return
	}

-	apiErr := aH.IntegrationsController.Uninstall(r.Context(), &req)
+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
+	apiErr := aH.IntegrationsController.Uninstall(r.Context(), claims.OrgID, &req)
	if apiErr != nil {
		RespondError(w, apiErr, nil)
		return
@@ -3804,8 +3856,14 @@ func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(
) {
	cloudProvider := mux.Vars(r)["cloudProvider"]

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
-		r.Context(), cloudProvider,
+		r.Context(), claims.OrgID, cloudProvider,
	)

	if apiErr != nil {
@@ -3826,8 +3884,14 @@ func (aH *APIHandler) CloudIntegrationsGenerateConnectionUrl(
		return
	}

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
-		r.Context(), cloudProvider, req,
+		r.Context(), claims.OrgID, cloudProvider, req,
	)

	if apiErr != nil {
@@ -3844,8 +3908,14 @@ func (aH *APIHandler) CloudIntegrationsGetAccountStatus(
	cloudProvider := mux.Vars(r)["cloudProvider"]
	accountId := mux.Vars(r)["accountId"]

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
-		r.Context(), cloudProvider, accountId,
+		r.Context(), claims.OrgID, cloudProvider, accountId,
	)

	if apiErr != nil {
@@ -3866,8 +3936,14 @@ func (aH *APIHandler) CloudIntegrationsAgentCheckIn(
		return
	}

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	result, apiErr := aH.CloudIntegrationsController.CheckInAsAgent(
-		r.Context(), cloudProvider, req,
+		r.Context(), claims.OrgID, cloudProvider, req,
	)

	if apiErr != nil {
@@ -3890,8 +3966,14 @@ func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(
		return
	}

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
-		r.Context(), cloudProvider, accountId, req,
+		r.Context(), claims.OrgID, cloudProvider, accountId, req,
	)

	if apiErr != nil {
@@ -3908,8 +3990,14 @@ func (aH *APIHandler) CloudIntegrationsDisconnectAccount(
	cloudProvider := mux.Vars(r)["cloudProvider"]
	accountId := mux.Vars(r)["accountId"]

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	result, apiErr := aH.CloudIntegrationsController.DisconnectAccount(
-		r.Context(), cloudProvider, accountId,
+		r.Context(), claims.OrgID, cloudProvider, accountId,
	)

	if apiErr != nil {
@@ -3932,8 +4020,14 @@ func (aH *APIHandler) CloudIntegrationsListServices(
		cloudAccountId = &cloudAccountIdQP
	}

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	resp, apiErr := aH.CloudIntegrationsController.ListServices(
-		r.Context(), cloudProvider, cloudAccountId,
+		r.Context(), claims.OrgID, cloudProvider, cloudAccountId,
	)

	if apiErr != nil {
@@ -3956,8 +4050,14 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
		cloudAccountId = &cloudAccountIdQP
	}

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	resp, apiErr := aH.CloudIntegrationsController.GetServiceDetails(
-		r.Context(), cloudProvider, serviceId, cloudAccountId,
+		r.Context(), claims.OrgID, cloudProvider, serviceId, cloudAccountId,
	)
	if apiErr != nil {
		RespondError(w, apiErr, nil)
@@ -4196,8 +4296,14 @@ func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
		return
	}

+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
	result, apiErr := aH.CloudIntegrationsController.UpdateServiceConfig(
-		r.Context(), cloudProvider, serviceId, req,
+		r.Context(), claims.OrgID, cloudProvider, serviceId, req,
	)

	if apiErr != nil {

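The same five-line claims guard is now pasted into every handler this diff touches. If it keeps spreading, it is a natural candidate for a small helper; a sketch (`orgIDFromRequest` is hypothetical, while `authtypes`, `render`, and `errorsV2` are the packages already imported in this file):

```go
// Hypothetical consolidation of the repeated guard above; the handlers
// would then read: orgID, ok := orgIDFromRequest(w, r); if !ok { return }.
func orgIDFromRequest(w http.ResponseWriter, r *http.Request) (string, bool) {
	claims, ok := authtypes.ClaimsFromContext(r.Context())
	if !ok {
		render.Error(w, errorsV2.Newf(
			errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated",
		))
		return "", false
	}
	return claims.OrgID, true
}
```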
@@ -18,7 +18,7 @@ type Controller struct {
func NewController(sqlStore sqlstore.SQLStore) (
	*Controller, error,
) {
-	mgr, err := NewManager(sqlStore.SQLxDB())
+	mgr, err := NewManager(sqlStore)
	if err != nil {
		return nil, fmt.Errorf("couldn't create integrations manager: %w", err)
	}
@@ -35,7 +35,7 @@ type IntegrationsListResponse struct {
}

func (c *Controller) ListIntegrations(
-	ctx context.Context, params map[string]string,
+	ctx context.Context, orgId string, params map[string]string,
) (
	*IntegrationsListResponse, *model.ApiError,
) {
@@ -47,7 +47,7 @@ func (c *Controller) ListIntegrations(
		}
	}

-	integrations, apiErr := c.mgr.ListIntegrations(ctx, filters)
+	integrations, apiErr := c.mgr.ListIntegrations(ctx, orgId, filters)
	if apiErr != nil {
		return nil, apiErr
	}
@@ -58,16 +58,15 @@ func (c *Controller) ListIntegrations(
}

func (c *Controller) GetIntegration(
-	ctx context.Context, integrationId string,
+	ctx context.Context, orgId string, integrationId string,
) (*Integration, *model.ApiError) {
-	return c.mgr.GetIntegration(ctx, integrationId)
+	return c.mgr.GetIntegration(ctx, orgId, integrationId)
}

func (c *Controller) IsIntegrationInstalled(
-	ctx context.Context,
-	integrationId string,
+	ctx context.Context, orgId string, integrationId string,
) (bool, *model.ApiError) {
-	installation, apiErr := c.mgr.getInstalledIntegration(ctx, integrationId)
+	installation, apiErr := c.mgr.getInstalledIntegration(ctx, orgId, integrationId)
	if apiErr != nil {
		return false, apiErr
	}
@@ -76,9 +75,9 @@ func (c *Controller) IsIntegrationInstalled(
}

func (c *Controller) GetIntegrationConnectionTests(
-	ctx context.Context, integrationId string,
+	ctx context.Context, orgId string, integrationId string,
) (*IntegrationConnectionTests, *model.ApiError) {
-	return c.mgr.GetIntegrationConnectionTests(ctx, integrationId)
+	return c.mgr.GetIntegrationConnectionTests(ctx, orgId, integrationId)
}

type InstallIntegrationRequest struct {
@@ -87,10 +86,10 @@ type InstallIntegrationRequest struct {
}

func (c *Controller) Install(
-	ctx context.Context, req *InstallIntegrationRequest,
+	ctx context.Context, orgId string, req *InstallIntegrationRequest,
) (*IntegrationsListItem, *model.ApiError) {
	res, apiErr := c.mgr.InstallIntegration(
-		ctx, req.IntegrationId, req.Config,
+		ctx, orgId, req.IntegrationId, req.Config,
	)
	if apiErr != nil {
		return nil, apiErr
@@ -104,7 +103,7 @@ type UninstallIntegrationRequest struct {
}

func (c *Controller) Uninstall(
-	ctx context.Context, req *UninstallIntegrationRequest,
+	ctx context.Context, orgId string, req *UninstallIntegrationRequest,
) *model.ApiError {
	if len(req.IntegrationId) < 1 {
		return model.BadRequest(fmt.Errorf(
@@ -113,7 +112,7 @@ func (c *Controller) Uninstall(
	}

	apiErr := c.mgr.UninstallIntegration(
-		ctx, req.IntegrationId,
+		ctx, orgId, req.IntegrationId,
	)
	if apiErr != nil {
		return apiErr
@@ -123,19 +122,19 @@ func (c *Controller) Uninstall(
}

func (c *Controller) GetPipelinesForInstalledIntegrations(
-	ctx context.Context,
+	ctx context.Context, orgId string,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
-	return c.mgr.GetPipelinesForInstalledIntegrations(ctx)
+	return c.mgr.GetPipelinesForInstalledIntegrations(ctx, orgId)
}

func (c *Controller) GetDashboardsForInstalledIntegrations(
-	ctx context.Context,
+	ctx context.Context, orgId string,
) ([]types.Dashboard, *model.ApiError) {
-	return c.mgr.GetDashboardsForInstalledIntegrations(ctx)
+	return c.mgr.GetDashboardsForInstalledIntegrations(ctx, orgId)
}

func (c *Controller) GetInstalledIntegrationDashboardById(
-	ctx context.Context, dashboardUuid string,
+	ctx context.Context, orgId string, dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
-	return c.mgr.GetInstalledIntegrationDashboardById(ctx, dashboardUuid)
+	return c.mgr.GetInstalledIntegrationDashboardById(ctx, orgId, dashboardUuid)
}

@@ -5,15 +5,14 @@ import (
	"fmt"
	"slices"
	"strings"
-	"time"

	"github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/query-service/utils"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
+	"github.com/SigNoz/signoz/pkg/valuer"
-	"github.com/jmoiron/sqlx"
)

type IntegrationAuthor struct {
@@ -105,16 +104,9 @@ type IntegrationsListItem struct {
	IsInstalled bool `json:"is_installed"`
}

-type InstalledIntegration struct {
-	IntegrationId string                     `json:"integration_id" db:"integration_id"`
-	Config        InstalledIntegrationConfig `json:"config_json" db:"config_json"`
-	InstalledAt   time.Time                  `json:"installed_at" db:"installed_at"`
-}
-type InstalledIntegrationConfig map[string]interface{}
-
type Integration struct {
	IntegrationDetails
-	Installation *InstalledIntegration `json:"installation"`
+	Installation *types.InstalledIntegration `json:"installation"`
}

type Manager struct {
@@ -122,8 +114,8 @@ type Manager struct {
	installedIntegrationsRepo InstalledIntegrationsRepo
}

-func NewManager(db *sqlx.DB) (*Manager, error) {
-	iiRepo, err := NewInstalledIntegrationsSqliteRepo(db)
+func NewManager(store sqlstore.SQLStore) (*Manager, error) {
+	iiRepo, err := NewInstalledIntegrationsSqliteRepo(store)
	if err != nil {
		return nil, fmt.Errorf(
			"could not init sqlite DB for installed integrations: %w", err,
@@ -142,6 +134,7 @@ type IntegrationsFilter struct {

func (m *Manager) ListIntegrations(
	ctx context.Context,
+	orgId string,
	filter *IntegrationsFilter,
	// Expected to have pagination over time.
) ([]IntegrationsListItem, *model.ApiError) {
@@ -152,22 +145,22 @@ func (m *Manager) ListIntegrations(
		)
	}

-	installed, apiErr := m.installedIntegrationsRepo.list(ctx)
+	installed, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
	if apiErr != nil {
		return nil, model.WrapApiError(
			apiErr, "could not fetch installed integrations",
		)
	}
-	installedIds := []string{}
+	installedTypes := []string{}
	for _, ii := range installed {
-		installedIds = append(installedIds, ii.IntegrationId)
+		installedTypes = append(installedTypes, ii.Type)
	}

	result := []IntegrationsListItem{}
	for _, ai := range available {
		result = append(result, IntegrationsListItem{
			IntegrationSummary: ai.IntegrationSummary,
-			IsInstalled:        slices.Contains(installedIds, ai.Id),
+			IsInstalled:        slices.Contains(installedTypes, ai.Id),
		})
	}

@@ -188,6 +181,7 @@ func (m *Manager) ListIntegrations(

func (m *Manager) GetIntegration(
	ctx context.Context,
+	orgId string,
	integrationId string,
) (*Integration, *model.ApiError) {
	integrationDetails, apiErr := m.getIntegrationDetails(
@@ -198,7 +192,7 @@ func (m *Manager) GetIntegration(
	}

	installation, apiErr := m.getInstalledIntegration(
-		ctx, integrationId,
+		ctx, orgId, integrationId,
	)
	if apiErr != nil {
		return nil, apiErr
@@ -212,6 +206,7 @@ func (m *Manager) GetIntegration(

func (m *Manager) GetIntegrationConnectionTests(
	ctx context.Context,
+	orgId string,
	integrationId string,
) (*IntegrationConnectionTests, *model.ApiError) {
	integrationDetails, apiErr := m.getIntegrationDetails(
@@ -225,8 +220,9 @@ func (m *Manager) GetIntegrationConnectionTests(

func (m *Manager) InstallIntegration(
	ctx context.Context,
+	orgId string,
	integrationId string,
-	config InstalledIntegrationConfig,
+	config types.InstalledIntegrationConfig,
) (*IntegrationsListItem, *model.ApiError) {
	integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
	if apiErr != nil {
@@ -234,7 +230,7 @@ func (m *Manager) InstallIntegration(
	}

	_, apiErr = m.installedIntegrationsRepo.upsert(
-		ctx, integrationId, config,
+		ctx, orgId, integrationId, config,
	)
	if apiErr != nil {
		return nil, model.WrapApiError(
@@ -250,15 +246,17 @@ func (m *Manager) InstallIntegration(

func (m *Manager) UninstallIntegration(
	ctx context.Context,
+	orgId string,
	integrationId string,
) *model.ApiError {
-	return m.installedIntegrationsRepo.delete(ctx, integrationId)
+	return m.installedIntegrationsRepo.delete(ctx, orgId, integrationId)
}

func (m *Manager) GetPipelinesForInstalledIntegrations(
	ctx context.Context,
+	orgId string,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
-	installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
+	installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
	if apiErr != nil {
		return nil, apiErr
	}
@@ -308,6 +306,7 @@ func (m *Manager) parseDashboardUuid(dashboardUuid string) (

func (m *Manager) GetInstalledIntegrationDashboardById(
	ctx context.Context,
+	orgId string,
	dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
	integrationId, dashboardId, apiErr := m.parseDashboardUuid(dashboardUuid)
@@ -315,7 +314,7 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
		return nil, apiErr
	}

-	integration, apiErr := m.GetIntegration(ctx, integrationId)
+	integration, apiErr := m.GetIntegration(ctx, orgId, integrationId)
	if apiErr != nil {
		return nil, apiErr
	}
@@ -355,8 +354,9 @@ func (m *Manager) GetInstalledIntegrationDashboardById(

func (m *Manager) GetDashboardsForInstalledIntegrations(
	ctx context.Context,
+	orgId string,
) ([]types.Dashboard, *model.ApiError) {
-	installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
+	installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
	if apiErr != nil {
		return nil, apiErr
	}
@@ -421,10 +421,11 @@ func (m *Manager) getIntegrationDetails(

func (m *Manager) getInstalledIntegration(
	ctx context.Context,
+	orgId string,
	integrationId string,
-) (*InstalledIntegration, *model.ApiError) {
+) (*types.InstalledIntegration, *model.ApiError) {
	iis, apiErr := m.installedIntegrationsRepo.get(
-		ctx, []string{integrationId},
+		ctx, orgId, []string{integrationId},
	)
	if apiErr != nil {
		return nil, model.WrapApiError(apiErr, fmt.Sprintf(
@@ -441,32 +442,33 @@ func (m *Manager) getInstalledIntegration(

func (m *Manager) getInstalledIntegrations(
	ctx context.Context,
+	orgId string,
) (
	map[string]Integration, *model.ApiError,
) {
-	installations, apiErr := m.installedIntegrationsRepo.list(ctx)
+	installations, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
	if apiErr != nil {
		return nil, apiErr
	}

-	installedIds := utils.MapSlice(installations, func(i InstalledIntegration) string {
-		return i.IntegrationId
+	installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
+		return i.Type
	})
-	integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedIds)
+	integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)
|
||||
if apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
result := map[string]Integration{}
|
||||
for _, ii := range installations {
|
||||
iDetails, exists := integrationDetails[ii.IntegrationId]
|
||||
iDetails, exists := integrationDetails[ii.Type]
|
||||
if !exists {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"couldn't find integration details for %s", ii.IntegrationId,
|
||||
"couldn't find integration details for %s", ii.Type,
|
||||
))
|
||||
}
|
||||
|
||||
result[ii.IntegrationId] = Integration{
|
||||
result[ii.Type] = Integration{
|
||||
Installation: &ii,
|
||||
IntegrationDetails: iDetails,
|
||||
}
|
||||
|
||||
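Note: every Manager entry point above now takes the owning organization's ID right after the context, so installed integrations are scoped per org. A minimal sketch of the new call pattern from a hypothetical caller (the `mgr` and `orgId` values here are illustrative, not from this diff):

func listInstalledForOrg(ctx context.Context, mgr *Manager, orgId string) {
	// Filter down to integrations installed for this org only.
	isInstalled := true
	items, apiErr := mgr.ListIntegrations(ctx, orgId, &IntegrationsFilter{
		IsInstalled: &isInstalled,
	})
	if apiErr != nil {
		log.Printf("could not list integrations: %v", apiErr)
		return
	}
	for _, item := range items {
		log.Printf("installed: %s", item.Id)
	}
}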
@@ -14,18 +14,23 @@ func TestIntegrationLifecycle(t *testing.T) {
 	mgr := NewTestIntegrationsManager(t)
 	ctx := context.Background()

+	user, apiErr := createTestUser()
+	if apiErr != nil {
+		t.Fatalf("could not create test user: %v", apiErr)
+	}
+
 	ii := true
 	installedIntegrationsFilter := &IntegrationsFilter{
 		IsInstalled: &ii,
 	}

 	installedIntegrations, apiErr := mgr.ListIntegrations(
-		ctx, installedIntegrationsFilter,
+		ctx, user.OrgID, installedIntegrationsFilter,
 	)
 	require.Nil(apiErr)
 	require.Equal([]IntegrationsListItem{}, installedIntegrations)

-	availableIntegrations, apiErr := mgr.ListIntegrations(ctx, nil)
+	availableIntegrations, apiErr := mgr.ListIntegrations(ctx, user.OrgID, nil)
 	require.Nil(apiErr)
 	require.Equal(2, len(availableIntegrations))
 	require.False(availableIntegrations[0].IsInstalled)
@@ -33,44 +38,44 @@ func TestIntegrationLifecycle(t *testing.T) {

 	testIntegrationConfig := map[string]interface{}{}
 	installed, apiErr := mgr.InstallIntegration(
-		ctx, availableIntegrations[1].Id, testIntegrationConfig,
+		ctx, user.OrgID, availableIntegrations[1].Id, testIntegrationConfig,
 	)
 	require.Nil(apiErr)
 	require.Equal(installed.Id, availableIntegrations[1].Id)

-	integration, apiErr := mgr.GetIntegration(ctx, availableIntegrations[1].Id)
+	integration, apiErr := mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id)
 	require.Nil(apiErr)
 	require.Equal(integration.Id, availableIntegrations[1].Id)
 	require.NotNil(integration.Installation)

 	installedIntegrations, apiErr = mgr.ListIntegrations(
-		ctx, installedIntegrationsFilter,
+		ctx, user.OrgID, installedIntegrationsFilter,
 	)
 	require.Nil(apiErr)
 	require.Equal(1, len(installedIntegrations))
 	require.Equal(availableIntegrations[1].Id, installedIntegrations[0].Id)

-	availableIntegrations, apiErr = mgr.ListIntegrations(ctx, nil)
+	availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil)
 	require.Nil(apiErr)
 	require.Equal(2, len(availableIntegrations))
 	require.False(availableIntegrations[0].IsInstalled)
 	require.True(availableIntegrations[1].IsInstalled)

-	apiErr = mgr.UninstallIntegration(ctx, installed.Id)
+	apiErr = mgr.UninstallIntegration(ctx, user.OrgID, installed.Id)
 	require.Nil(apiErr)

-	integration, apiErr = mgr.GetIntegration(ctx, availableIntegrations[1].Id)
+	integration, apiErr = mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id)
 	require.Nil(apiErr)
 	require.Equal(integration.Id, availableIntegrations[1].Id)
 	require.Nil(integration.Installation)

 	installedIntegrations, apiErr = mgr.ListIntegrations(
-		ctx, installedIntegrationsFilter,
+		ctx, user.OrgID, installedIntegrationsFilter,
 	)
 	require.Nil(apiErr)
 	require.Equal(0, len(installedIntegrations))

-	availableIntegrations, apiErr = mgr.ListIntegrations(ctx, nil)
+	availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil)
 	require.Nil(apiErr)
 	require.Equal(2, len(availableIntegrations))
 	require.False(availableIntegrations[0].IsInstalled)
@@ -2,51 +2,33 @@ package integrations

 import (
 	"context"
-	"database/sql/driver"
-	"encoding/json"

 	"github.com/SigNoz/signoz/pkg/query-service/model"
-	"github.com/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/types"
 )

-// For serializing from db
-func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
-	if data, ok := src.([]byte); ok {
-		return json.Unmarshal(data, &c)
-	}
-	return nil
-}
-
-// For serializing to db
-func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
-	filterSetJson, err := json.Marshal(c)
-	if err != nil {
-		return nil, errors.Wrap(err, "could not serialize integration config to JSON")
-	}
-	return filterSetJson, nil
-}
-
 type InstalledIntegrationsRepo interface {
-	list(context.Context) ([]InstalledIntegration, *model.ApiError)
+	list(ctx context.Context, orgId string) ([]types.InstalledIntegration, *model.ApiError)

 	get(
-		ctx context.Context, integrationIds []string,
-	) (map[string]InstalledIntegration, *model.ApiError)
+		ctx context.Context, orgId string, integrationTypes []string,
+	) (map[string]types.InstalledIntegration, *model.ApiError)

 	upsert(
 		ctx context.Context,
-		integrationId string,
-		config InstalledIntegrationConfig,
-	) (*InstalledIntegration, *model.ApiError)
+		orgId string,
+		integrationType string,
+		config types.InstalledIntegrationConfig,
+	) (*types.InstalledIntegration, *model.ApiError)

-	delete(ctx context.Context, integrationId string) *model.ApiError
+	delete(ctx context.Context, orgId string, integrationType string) *model.ApiError
 }

 type AvailableIntegrationsRepo interface {
 	list(context.Context) ([]IntegrationDetails, *model.ApiError)

 	get(
-		ctx context.Context, integrationIds []string,
+		ctx context.Context, integrationTypes []string,
 	) (map[string]IntegrationDetails, *model.ApiError)

 	// AvailableIntegrationsRepo implementations are expected to cache
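The sqlx-era Scan/Value serializers are deleted here along with InstalledIntegrationConfig itself; presumably equivalent driver hooks now live next to the type in pkg/types. A hedged sketch of what such hooks look like, reconstructed from the removed code above (illustrative, not the actual pkg/types source):

package types

import (
	"database/sql/driver"
	"encoding/json"
	"fmt"
)

type InstalledIntegrationConfig map[string]interface{}

// Scan deserializes the JSON config column when reading from the DB.
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
	data, ok := src.([]byte)
	if !ok {
		return nil // mirrors the old behaviour: ignore non-[]byte sources
	}
	return json.Unmarshal(data, c)
}

// Value serializes the config to JSON when writing to the DB.
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
	data, err := json.Marshal(c)
	if err != nil {
		return nil, fmt.Errorf("could not serialize integration config to JSON: %w", err)
	}
	return data, nil
}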
@@ -3,39 +3,37 @@ package integrations

 import (
 	"context"
 	"fmt"
-	"strings"

 	"github.com/SigNoz/signoz/pkg/query-service/model"
-	"github.com/jmoiron/sqlx"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/valuer"
+	"github.com/uptrace/bun"
 )

 type InstalledIntegrationsSqliteRepo struct {
-	db *sqlx.DB
+	store sqlstore.SQLStore
 }

-func NewInstalledIntegrationsSqliteRepo(db *sqlx.DB) (
+func NewInstalledIntegrationsSqliteRepo(store sqlstore.SQLStore) (
 	*InstalledIntegrationsSqliteRepo, error,
 ) {
 	return &InstalledIntegrationsSqliteRepo{
-		db: db,
+		store: store,
 	}, nil
 }

 func (r *InstalledIntegrationsSqliteRepo) list(
 	ctx context.Context,
-) ([]InstalledIntegration, *model.ApiError) {
-	integrations := []InstalledIntegration{}
+	orgId string,
+) ([]types.InstalledIntegration, *model.ApiError) {
+	integrations := []types.InstalledIntegration{}

-	err := r.db.SelectContext(
-		ctx, &integrations, `
-			select
-				integration_id,
-				config_json,
-				installed_at
-			from integrations_installed
-			order by installed_at
-		`,
-	)
+	err := r.store.BunDB().NewSelect().
+		Model(&integrations).
+		Where("org_id = ?", orgId).
+		Order("installed_at").
+		Scan(ctx)
 	if err != nil {
 		return nil, model.InternalError(fmt.Errorf(
 			"could not query installed integrations: %w", err,
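With bun, the table and column names come from the model struct rather than hand-written SQL, which is why the SELECT disappears. For the Where("org_id = ?") / Where("type IN (?)") / Order("installed_at") calls in this repo to resolve, types.InstalledIntegration presumably looks roughly like the following sketch (table name and tags are assumptions inferred from the queries, not copied from pkg/types):

package types

import (
	"time"

	"github.com/uptrace/bun"
)

// Assumed shape only: bun needs org_id, type, config and installed_at
// columns for the repo's query-builder calls to work.
type InstalledIntegration struct {
	bun.BaseModel `bun:"table:installed_integration"` // table name is a guess

	Identifiable // provides the generated UUID primary key used in upsert
	Type        string                     `bun:"type" json:"type"`
	OrgID       string                     `bun:"org_id" json:"org_id"`
	Config      InstalledIntegrationConfig `bun:"config" json:"config"`
	InstalledAt time.Time                  `bun:"installed_at" json:"installed_at"`
}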
@@ -45,38 +43,28 @@ func (r *InstalledIntegrationsSqliteRepo) list(
 }

 func (r *InstalledIntegrationsSqliteRepo) get(
-	ctx context.Context, integrationIds []string,
-) (map[string]InstalledIntegration, *model.ApiError) {
-	integrations := []InstalledIntegration{}
+	ctx context.Context, orgId string, integrationTypes []string,
+) (map[string]types.InstalledIntegration, *model.ApiError) {
+	integrations := []types.InstalledIntegration{}

-	idPlaceholders := []string{}
-	idValues := []interface{}{}
-	for _, id := range integrationIds {
-		idPlaceholders = append(idPlaceholders, "?")
-		idValues = append(idValues, id)
+	typeValues := []interface{}{}
+	for _, integrationType := range integrationTypes {
+		typeValues = append(typeValues, integrationType)
 	}

-	err := r.db.SelectContext(
-		ctx, &integrations, fmt.Sprintf(`
-			select
-				integration_id,
-				config_json,
-				installed_at
-			from integrations_installed
-			where integration_id in (%s)`,
-			strings.Join(idPlaceholders, ", "),
-		),
-		idValues...,
-	)
+	err := r.store.BunDB().NewSelect().Model(&integrations).
+		Where("org_id = ?", orgId).
+		Where("type IN (?)", bun.In(typeValues)).
+		Scan(ctx)
 	if err != nil {
 		return nil, model.InternalError(fmt.Errorf(
 			"could not query installed integrations: %w", err,
 		))
 	}

-	result := map[string]InstalledIntegration{}
+	result := map[string]types.InstalledIntegration{}
 	for _, ii := range integrations {
-		result[ii.IntegrationId] = ii
+		result[ii.Type] = ii
 	}

 	return result, nil
@@ -84,55 +72,57 @@ func (r *InstalledIntegrationsSqliteRepo) get(

 func (r *InstalledIntegrationsSqliteRepo) upsert(
 	ctx context.Context,
-	integrationId string,
-	config InstalledIntegrationConfig,
-) (*InstalledIntegration, *model.ApiError) {
-	serializedConfig, err := config.Value()
-	if err != nil {
-		return nil, model.BadRequest(fmt.Errorf(
-			"could not serialize integration config: %w", err,
-		))
+	orgId string,
+	integrationType string,
+	config types.InstalledIntegrationConfig,
+) (*types.InstalledIntegration, *model.ApiError) {
+
+	integration := types.InstalledIntegration{
+		Identifiable: types.Identifiable{
+			ID: valuer.GenerateUUID(),
+		},
+		OrgID:  orgId,
+		Type:   integrationType,
+		Config: config,
 	}

-	_, dbErr := r.db.ExecContext(
-		ctx, `
-			INSERT INTO integrations_installed (
-				integration_id,
-				config_json
-			) values ($1, $2)
-			on conflict(integration_id) do update
-			set config_json=excluded.config_json
-		`, integrationId, serializedConfig,
-	)
+	_, dbErr := r.store.BunDB().NewInsert().
+		Model(&integration).
+		On("conflict (type, org_id) DO UPDATE").
+		Set("config = EXCLUDED.config").
+		Exec(ctx)

 	if dbErr != nil {
 		return nil, model.InternalError(fmt.Errorf(
 			"could not insert record for integration installation: %w", dbErr,
 		))
 	}

-	res, apiErr := r.get(ctx, []string{integrationId})
+	res, apiErr := r.get(ctx, orgId, []string{integrationType})
 	if apiErr != nil || len(res) < 1 {
 		return nil, model.WrapApiError(
 			apiErr, "could not fetch installed integration",
 		)
 	}

-	installed := res[integrationId]
+	installed := res[integrationType]

 	return &installed, nil
 }
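The upsert now goes through bun's INSERT ... ON CONFLICT builder instead of raw SQL, and conflict detection moves from integration_id to the (type, org_id) pair. The same pattern in isolation, as a minimal sketch (assumes a *bun.DB and a unique index on (type, org_id)):

// Sketch mirroring the repo code above; db and row are placeholders.
func upsertIntegration(ctx context.Context, db *bun.DB, row *types.InstalledIntegration) error {
	_, err := db.NewInsert().
		Model(row).
		On("CONFLICT (type, org_id) DO UPDATE"). // needs a unique index on (type, org_id)
		Set("config = EXCLUDED.config").         // keep the existing row, refresh config only
		Exec(ctx)
	return err
}

One consequence of this design: a re-install keeps the original row (and its generated UUID) and only replaces the stored config, which is what the follow-up r.get call above relies on.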

 func (r *InstalledIntegrationsSqliteRepo) delete(
-	ctx context.Context, integrationId string,
+	ctx context.Context, orgId string, integrationType string,
 ) *model.ApiError {
-	_, dbErr := r.db.ExecContext(ctx, `
-		DELETE FROM integrations_installed where integration_id = ?
-	`, integrationId)
+	_, dbErr := r.store.BunDB().NewDelete().
+		Model(&types.InstalledIntegration{}).
+		Where("type = ?", integrationType).
+		Where("org_id = ?", orgId).
+		Exec(ctx)

 	if dbErr != nil {
 		return model.InternalError(fmt.Errorf(
 			"could not delete installed integration record for %s: %w",
-			integrationId, dbErr,
+			integrationType, dbErr,
 		))
 	}

@@ -5,18 +5,22 @@ import (
 	"slices"
 	"testing"

+	"github.com/SigNoz/signoz/pkg/query-service/auth"
+	"github.com/SigNoz/signoz/pkg/query-service/constants"
+	"github.com/SigNoz/signoz/pkg/query-service/dao"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/rules"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
+	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
+	"github.com/google/uuid"
 )

 func NewTestIntegrationsManager(t *testing.T) *Manager {
 	testDB := utils.NewQueryServiceDBForTests(t)

-	installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB.SQLxDB())
+	installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB)
 	if err != nil {
 		t.Fatalf("could not init sqlite DB for installed integrations: %v", err)
 	}
@@ -27,6 +31,38 @@ func NewTestIntegrationsManager(t *testing.T) *Manager {
 	}
 }

+func createTestUser() (*types.User, *model.ApiError) {
+	// Create a test user for auth
+	ctx := context.Background()
+	org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
+		Name: "test",
+	})
+	if apiErr != nil {
+		return nil, apiErr
+	}
+
+	group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
+	if apiErr != nil {
+		return nil, apiErr
+	}
+
+	auth.InitAuthCache(ctx)
+
+	userId := uuid.NewString()
+	return dao.DB().CreateUser(
+		ctx,
+		&types.User{
+			ID:       userId,
+			Name:     "test",
+			Email:    userId[:8] + "test@test.com",
+			Password: "test",
+			OrgID:    org.ID,
+			GroupID:  group.ID,
+		},
+		true,
+	)
+}

 type TestAvailableIntegrationsRepo struct{}

 func (t *TestAvailableIntegrationsRepo) list(
@@ -25,12 +25,12 @@ import (
 type LogParsingPipelineController struct {
 	Repo

-	GetIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError)
+	GetIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError)
 }

 func NewLogParsingPipelinesController(
 	sqlStore sqlstore.SQLStore,
-	getIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError),
+	getIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError),
 ) (*LogParsingPipelineController, error) {
 	repo := NewRepo(sqlStore)
 	return &LogParsingPipelineController{
@@ -164,7 +164,7 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
 		result = savedPipelines
 	}

-	integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx)
+	integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx, defaultOrgID)
 	if apiErr != nil {
 		return nil, model.WrapApiError(
 			apiErr, "could not get pipelines for installed integrations",
@@ -131,9 +131,11 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin
 			)
 		}
 		operator.If = fmt.Sprintf(
-			`%s && %s matches "^\\s*{.*}\\s*$"`, parseFromNotNilCheck, operator.ParseFrom,
+			`%s && (
+				(typeOf(%s) == "string" && %s matches "^\\s*{.*}\\s*$" ) ||
+				typeOf(%s) == "map[string]any"
+			)`, parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom,
 		)

 	} else if operator.Type == "add" {
 		if strings.HasPrefix(operator.Value, "EXPR(") && strings.HasSuffix(operator.Value, ")") {
 			expression := strings.TrimSuffix(strings.TrimPrefix(operator.Value, "EXPR("), ")")
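The widened condition lets the JSON parser fire when the source field is already a parsed map, not just a JSON-looking string. For illustration, with ParseFrom set to a hypothetical attributes.payload, and assuming parseFromNotNilCheck renders as a nil check on the same field, the generated guard would end up roughly as:

// Illustrative only: the value operator.If takes for ParseFrom = "attributes.payload".
operator.If = `attributes.payload != nil && (
	(typeOf(attributes.payload) == "string" && attributes.payload matches "^\\s*{.*}\\s*$" ) ||
	typeOf(attributes.payload) == "map[string]any"
)`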
@@ -646,7 +646,7 @@ func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
 	require := require.New(t)

 	testLogs := []model.SignozLog{
-		makeTestSignozLog("test log", map[string]interface{}{
+		makeTestSignozLog("test log", map[string]any{
 			"http.method":    "GET",
 			"order.products": `{"ids": ["pid0", "pid1"]}`,
 		}),
@@ -719,6 +719,21 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
 	aggregateOperator := v3.AggregateOperator(r.URL.Query().Get("aggregateOperator"))
 	aggregateAttribute := r.URL.Query().Get("aggregateAttribute")
 	limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
+	tagType := v3.TagType(r.URL.Query().Get("tagType"))
+
+	// empty string is a valid tagType
+	// i.e retrieve all attributes
+	if tagType != "" {
+		// what is happening here?
+		// if tagType is undefined(uh oh javascript) or any invalid value, set it to empty string
+		// instead of failing the request. Ideally, we should fail the request.
+		// but we are not doing that to maintain backward compatibility.
+		if err := tagType.Validate(); err != nil {
+			// if the tagType is invalid, set it to empty string
+			tagType = ""
+		}
+	}

 	if err != nil {
 		limit = 50
 	}
@@ -739,6 +754,7 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
 		AggregateAttribute: aggregateAttribute,
 		Limit:              limit,
 		SearchText:         r.URL.Query().Get("searchText"),
+		TagType:            tagType,
 	}
 	return &req, nil
 }
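A compact sketch of the fallback this adds, for the values the new test cases further down exercise (assumes the same v3 package; the helper name is hypothetical):

func tagTypeOrEmpty(raw string) v3.TagType {
	tagType := v3.TagType(raw)
	if tagType == "" {
		return tagType // empty means "all attributes" and is valid
	}
	if err := tagType.Validate(); err != nil {
		return "" // invalid values degrade to "all attributes" instead of failing
	}
	return tagType
}

// tagTypeOrEmpty("resource") == v3.TagTypeResource
// tagTypeOrEmpty("invalid")  == ""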
@@ -861,7 +877,7 @@ func chTransformQuery(query string, variables map[string]interface{}) {
 	transformer := chVariables.NewQueryTransformer(query, varsForTransform)
 	transformedQuery, err := transformer.Transform()
 	if err != nil {
-		zap.L().Warn("failed to transform clickhouse query", zap.Error(err))
+		zap.L().Warn("failed to transform clickhouse query", zap.String("query", query), zap.Error(err))
 	}
 	zap.L().Info("transformed clickhouse query", zap.String("transformedQuery", transformedQuery), zap.String("originalQuery", query))
 }
@@ -112,6 +112,7 @@ func TestParseFilterAttributeKeyRequest(t *testing.T) {
 		expectedSearchText string
 		expectErr          bool
 		errMsg             string
+		expectedTagType    v3.TagType
 	}{
 		{
 			desc: "valid operator and data source",
@@ -168,6 +169,38 @@ func TestParseFilterAttributeKeyRequest(t *testing.T) {
 			expectedDataSource: v3.DataSourceTraces,
 			expectedLimit:      50,
 		},
+		{
+			desc:               "invalid tag type",
+			queryString:        "aggregateOperator=avg&dataSource=traces&tagType=invalid",
+			expectedOperator:   v3.AggregateOperatorAvg,
+			expectedDataSource: v3.DataSourceTraces,
+			expectedTagType:    "",
+			expectedLimit:      50,
+		},
+		{
+			desc:               "valid tag type",
+			queryString:        "aggregateOperator=avg&dataSource=traces&tagType=resource",
+			expectedOperator:   v3.AggregateOperatorAvg,
+			expectedDataSource: v3.DataSourceTraces,
+			expectedTagType:    v3.TagTypeResource,
+			expectedLimit:      50,
+		},
+		{
+			desc:               "valid tag type",
+			queryString:        "aggregateOperator=avg&dataSource=traces&tagType=scope",
+			expectedOperator:   v3.AggregateOperatorAvg,
+			expectedDataSource: v3.DataSourceTraces,
+			expectedTagType:    v3.TagTypeInstrumentationScope,
+			expectedLimit:      50,
+		},
+		{
+			desc:               "valid tag type",
+			queryString:        "aggregateOperator=avg&dataSource=traces&tagType=tag",
+			expectedOperator:   v3.AggregateOperatorAvg,
+			expectedDataSource: v3.DataSourceTraces,
+			expectedTagType:    v3.TagTypeTag,
+			expectedLimit:      50,
+		},
 	}

 	for _, reqCase := range reqCases {
@@ -42,8 +42,7 @@ type querier struct {

 	fluxInterval time.Duration

-	builder       *queryBuilder.QueryBuilder
-	featureLookUp interfaces.FeatureLookup
+	builder *queryBuilder.QueryBuilder

 	// used for testing
 	// TODO(srikanthccv): remove this once we have a proper mock
@@ -59,11 +58,10 @@ type querier struct {
 }

 type QuerierOptions struct {
-	Reader        interfaces.Reader
-	Cache         cache.Cache
-	KeyGenerator  cache.KeyGenerator
-	FluxInterval  time.Duration
-	FeatureLookup interfaces.FeatureLookup
+	Reader       interfaces.Reader
+	Cache        cache.Cache
+	KeyGenerator cache.KeyGenerator
+	FluxInterval time.Duration

 	// used for testing
 	TestingMode bool
@@ -96,8 +94,7 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
 			BuildTraceQuery:  tracesQueryBuilder,
 			BuildLogQuery:    logsQueryBuilder,
 			BuildMetricQuery: metricsV3.PrepareMetricQuery,
-		}, opts.FeatureLookup),
-		featureLookUp: opts.FeatureLookup,
+		}),

 		testingMode:    opts.TestingMode,
 		returnedSeries: opts.ReturnedSeries,

@@ -17,7 +17,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
 	tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
-	"github.com/SigNoz/signoz/pkg/query-service/featureManager"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/querycache"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
@@ -1383,7 +1382,6 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
 			queryBuilder.QueryBuilderOptions{
 				BuildTraceQuery: tracesV3.PrepareTracesQuery,
 			},
-			featureManager.StartManager(),
 		),
 	}
 	// Update query parameters

@@ -42,8 +42,7 @@ type querier struct {

 	fluxInterval time.Duration

-	builder       *queryBuilder.QueryBuilder
-	featureLookUp interfaces.FeatureLookup
+	builder *queryBuilder.QueryBuilder

 	// used for testing
 	// TODO(srikanthccv): remove this once we have a proper mock
@@ -58,11 +57,10 @@ type querier struct {
 }

 type QuerierOptions struct {
-	Reader        interfaces.Reader
-	Cache         cache.Cache
-	KeyGenerator  cache.KeyGenerator
-	FluxInterval  time.Duration
-	FeatureLookup interfaces.FeatureLookup
+	Reader       interfaces.Reader
+	Cache        cache.Cache
+	KeyGenerator cache.KeyGenerator
+	FluxInterval time.Duration

 	// used for testing
 	TestingMode bool
@@ -96,8 +94,7 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
 			BuildTraceQuery:  tracesQueryBuilder,
 			BuildLogQuery:    logsQueryBuilder,
 			BuildMetricQuery: metricsV4.PrepareMetricQuery,
-		}, opts.FeatureLookup),
-		featureLookUp: opts.FeatureLookup,
+		}),

 		testingMode:    opts.TestingMode,
 		returnedSeries: opts.ReturnedSeries,

@@ -17,7 +17,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
 	tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
-	"github.com/SigNoz/signoz/pkg/query-service/featureManager"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/querycache"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
@@ -1437,7 +1436,6 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
 			queryBuilder.QueryBuilderOptions{
 				BuildTraceQuery: tracesV3.PrepareTracesQuery,
 			},
-			featureManager.StartManager(),
 		),
 	}
 	// Update query parameters
@@ -56,10 +56,9 @@ type QueryBuilderOptions struct {
 	BuildMetricQuery prepareMetricQueryFunc
 }

-func NewQueryBuilder(options QueryBuilderOptions, featureFlags interfaces.FeatureLookup) *QueryBuilder {
+func NewQueryBuilder(options QueryBuilderOptions) *QueryBuilder {
 	return &QueryBuilder{
-		options:      options,
-		featureFlags: featureFlags,
+		options: options,
 	}
 }
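After this change a builder is constructed from its options alone; feature flags no longer gate query building. A minimal sketch of the post-change construction, using prepare functions that appear elsewhere in this changeset (the params argument is whatever query-range payload the caller already has, assumed here to be a v3 query-range struct):

// Sketch of the new constructor usage; all referenced prepare funcs exist
// in the query-service packages touched by this diff.
qb := queryBuilder.NewQueryBuilder(queryBuilder.QueryBuilderOptions{
	BuildTraceQuery:  tracesV3.PrepareTracesQuery,
	BuildLogQuery:    logsV3.PrepareLogsQuery,
	BuildMetricQuery: metricsV3.PrepareMetricQuery,
})
queries, err := qb.PrepareQueries(params) // params: the v3 query-range request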
@@ -8,7 +8,6 @@ import (
 	logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
 	metricsv3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
-	"github.com/SigNoz/signoz/pkg/query-service/featureManager"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/stretchr/testify/require"
 )
@@ -51,8 +50,7 @@ func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) {
 		qbOptions := QueryBuilderOptions{
 			BuildMetricQuery: metricsv3.PrepareMetricQuery,
 		}
-		fm := featureManager.StartManager()
-		qb := NewQueryBuilder(qbOptions, fm)
+		qb := NewQueryBuilder(qbOptions)

 		queries, err := qb.PrepareQueries(q)

@@ -93,8 +91,7 @@ func TestBuildQueryWithIncorrectQueryRef(t *testing.T) {
 		qbOptions := QueryBuilderOptions{
 			BuildMetricQuery: metricsv3.PrepareMetricQuery,
 		}
-		fm := featureManager.StartManager()
-		qb := NewQueryBuilder(qbOptions, fm)
+		qb := NewQueryBuilder(qbOptions)

 		_, err := qb.PrepareQueries(q)

@@ -168,8 +165,7 @@ func TestBuildQueryWithThreeOrMoreQueriesRefAndFormula(t *testing.T) {
 		qbOptions := QueryBuilderOptions{
 			BuildMetricQuery: metricsv3.PrepareMetricQuery,
 		}
-		fm := featureManager.StartManager()
-		qb := NewQueryBuilder(qbOptions, fm)
+		qb := NewQueryBuilder(qbOptions)

 		queries, err := qb.PrepareQueries(q)

@@ -338,8 +334,7 @@ func TestBuildQueryWithThreeOrMoreQueriesRefAndFormula(t *testing.T) {
 		qbOptions := QueryBuilderOptions{
 			BuildMetricQuery: metricsv3.PrepareMetricQuery,
 		}
-		fm := featureManager.StartManager()
-		qb := NewQueryBuilder(qbOptions, fm)
+		qb := NewQueryBuilder(qbOptions)

 		queries, err := qb.PrepareQueries(q)
 		require.Contains(t, queries["F1"], "SELECT A.`os.type` as `os.type`, A.`ts` as `ts`, A.value + B.value as value FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036080000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) as A INNER JOIN (SELECT * FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.network.io'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system.network.io'] AND unix_milli >= 1735036020000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) HAVING value > 4) as B ON A.`os.type` = B.`os.type` AND A.`ts` = B.`ts`")
@@ -498,8 +493,7 @@ func TestDeltaQueryBuilder(t *testing.T) {
 		qbOptions := QueryBuilderOptions{
 			BuildMetricQuery: metricsv3.PrepareMetricQuery,
 		}
-		fm := featureManager.StartManager()
-		qb := NewQueryBuilder(qbOptions, fm)
+		qb := NewQueryBuilder(qbOptions)
 		queries, err := qb.PrepareQueries(c.query)

 		require.NoError(t, err)
@@ -703,8 +697,7 @@ func TestLogsQueryWithFormula(t *testing.T) {
 	qbOptions := QueryBuilderOptions{
 		BuildLogQuery: logsV3.PrepareLogsQuery,
 	}
-	fm := featureManager.StartManager()
-	qb := NewQueryBuilder(qbOptions, fm)
+	qb := NewQueryBuilder(qbOptions)

 	for _, test := range testLogsWithFormula {
 		t.Run(test.Name, func(t *testing.T) {
@@ -914,8 +907,7 @@ func TestLogsQueryWithFormulaV2(t *testing.T) {
 	qbOptions := QueryBuilderOptions{
 		BuildLogQuery: logsV4.PrepareLogsQuery,
 	}
-	fm := featureManager.StartManager()
-	qb := NewQueryBuilder(qbOptions, fm)
+	qb := NewQueryBuilder(qbOptions)

 	for _, test := range testLogsWithFormulaV2 {
 		t.Run(test.Name, func(t *testing.T) {
@@ -151,7 +151,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 		reader,
 		c,
 		serverOptions.DisableRules,
-		fm,
 		serverOptions.UseLogsNewSchema,
 		serverOptions.UseTraceNewSchema,
 		serverOptions.SigNoz.SQLStore,
@@ -483,7 +482,6 @@ func makeRulesManager(
 	ch interfaces.Reader,
 	cache cache.Cache,
 	disableRules bool,
-	fm interfaces.FeatureLookup,
 	useLogsNewSchema bool,
 	useTraceNewSchema bool,
 	sqlstore sqlstore.SQLStore,
@@ -499,7 +497,6 @@ func makeRulesManager(
 		Context:      context.Background(),
 		Logger:       zap.L(),
 		DisableRules: disableRules,
-		FeatureFlags: fm,
 		Reader:       ch,
 		Cache:        cache,
 		EvalDelay:    constants.GetEvalDelay(),
@@ -1,4 +1,4 @@
-package model
+package smart

 type SpanForTraceDetails struct {
 	TimeUnixNano uint64 `json:"timestamp"`
@@ -15,8 +15,3 @@ type SpanForTraceDetails struct {
 	HasError     bool                    `json:"hasError"`
 	Children     []*SpanForTraceDetails  `json:"children"`
 }
-
-type GetSpansSubQueryDBResponse struct {
-	SpanID  string `ch:"spanID"`
-	TraceID string `ch:"traceID"`
-}
@@ -1,17 +1,16 @@
-package db
+package smart

 import (
 	"errors"
 	"strconv"

-	"github.com/SigNoz/signoz/ee/query-service/model"
 	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
 	"go.uber.org/zap"
 )

 // SmartTraceAlgorithm is an algorithm to find the target span and build a tree of spans around it with the given levelUp and levelDown parameters and the given spanLimit
 func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) {
-	var spans []*model.SpanForTraceDetails
+	var spans []*SpanForTraceDetails

 	// if targetSpanId is null or not present then randomly select a span as targetSpanId
 	if (targetSpanId == "" || targetSpanId == "null") && len(payload) > 0 {
@@ -24,7 +23,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
 		if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
 			parentID = spanItem.References[0].SpanId
 		}
-		span := &model.SpanForTraceDetails{
+		span := &SpanForTraceDetails{
 			TimeUnixNano: spanItem.TimeUnixNano,
 			SpanID:       spanItem.SpanID,
 			TraceID:      spanItem.TraceID,
@@ -45,7 +44,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
 	if err != nil {
 		return nil, err
 	}
-	targetSpan := &model.SpanForTraceDetails{}
+	targetSpan := &SpanForTraceDetails{}

 	// Find the target span in the span trees
 	for _, root := range roots {
@@ -65,7 +64,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
 	}

 	// Build the final result
-	parents := []*model.SpanForTraceDetails{}
+	parents := []*SpanForTraceDetails{}

 	// Get the parent spans of the target span up to the given levelUp parameter and spanLimit
 	preParent := targetSpan
@@ -90,11 +89,11 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
 	}

 	// Get the child spans of the target span until the given levelDown and spanLimit
-	preParents := []*model.SpanForTraceDetails{targetSpan}
-	children := []*model.SpanForTraceDetails{}
+	preParents := []*SpanForTraceDetails{targetSpan}
+	children := []*SpanForTraceDetails{}

 	for i := 0; i < levelDown && len(preParents) != 0 && spanLimit > 0; i++ {
-		parents := []*model.SpanForTraceDetails{}
+		parents := []*SpanForTraceDetails{}
 		for _, parent := range preParents {
 			if spanLimit-len(parent.Children) <= 0 {
 				children = append(children, parent.Children[:spanLimit]...)
@@ -108,7 +107,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
 	}

 	// Store the final list of spans in the resultSpanSet map to avoid duplicates
-	resultSpansSet := make(map[*model.SpanForTraceDetails]struct{})
+	resultSpansSet := make(map[*SpanForTraceDetails]struct{})
 	resultSpansSet[targetSpan] = struct{}{}
 	for _, parent := range parents {
 		resultSpansSet[parent] = struct{}{}
@@ -169,12 +168,12 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
 	}

 // buildSpanTrees builds trees of spans from a list of spans.
-func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTraceDetails, error) {
+func buildSpanTrees(spansPtr *[]*SpanForTraceDetails) ([]*SpanForTraceDetails, error) {

 	// Build a map of spanID to span for fast lookup
-	var roots []*model.SpanForTraceDetails
+	var roots []*SpanForTraceDetails
 	spans := *spansPtr
-	mapOfSpans := make(map[string]*model.SpanForTraceDetails, len(spans))
+	mapOfSpans := make(map[string]*SpanForTraceDetails, len(spans))

 	for _, span := range spans {
 		if span.ParentID == "" {
@@ -206,8 +205,8 @@ func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTra
 	}

 // breadthFirstSearch performs a breadth-first search on the span tree to find the target span.
-func breadthFirstSearch(spansPtr *model.SpanForTraceDetails, targetId string) (*model.SpanForTraceDetails, error) {
-	queue := []*model.SpanForTraceDetails{spansPtr}
+func breadthFirstSearch(spansPtr *SpanForTraceDetails, targetId string) (*SpanForTraceDetails, error) {
+	queue := []*SpanForTraceDetails{spansPtr}
 	visited := make(map[string]bool)

 	for len(queue) > 0 {
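With the algorithm moved out of the ee db package into its own smart package, it can be called from open-source code without an ee import. A minimal sketch of the call, using the exported signature shown above (payload and the target span ID are whatever the trace search produced; values here are illustrative):

// Sketch: trim a large trace to a window around one span.
spans, err := smart.SmartTraceAlgorithm(
	payload,  // []basemodel.SearchSpanResponseItem for one trace
	"abc123", // target span id; "" or "null" picks a span at random
	3,        // levelUp: how many ancestor levels to keep
	3,        // levelDown: how many descendant levels to keep
	100,      // spanLimit: cap on spans collected around the target
)
if err != nil {
	return err
}
_ = spans // []basemodel.SearchSpansResult, ready for the trace detail view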
@@ -439,7 +439,7 @@ func RegisterFirstUser(ctx context.Context, req *RegisterRequest) (*types.User,
 	}

 	user := &types.User{
-		ID:       uuid.NewString(),
+		ID:       uuid.New().String(),
 		Name:     req.Name,
 		Email:    req.Email,
 		Password: hash,
@@ -519,7 +519,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b
 	}

 	user := &types.User{
-		ID:       uuid.NewString(),
+		ID:       uuid.New().String(),
 		Name:     req.Name,
 		Email:    req.Email,
 		Password: hash,
@@ -3,6 +3,7 @@ package auth
 import (
 	"context"

+	errorsV2 "github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/dao"
 	"github.com/SigNoz/signoz/pkg/types"
@@ -51,7 +52,7 @@ func InitAuthCache(ctx context.Context) error {
 func GetUserFromReqContext(ctx context.Context) (*types.GettableUser, error) {
 	claims, ok := authtypes.ClaimsFromContext(ctx)
 	if !ok {
-		return nil, errors.New("no claims found in context")
+		return nil, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "no claims found in context")
 	}

 	user := &types.GettableUser{
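The pkg/errors flavour carries a type and a code alongside the message, so callers can branch on classification instead of matching error strings. Construction, as used above:

// Structured error: type + code + message, rather than a bare string.
err := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "no claims found in context")

Anything beyond this constructor (matching helpers, additional codes) is not shown in this diff, so it is not assumed here.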
@@ -22,8 +22,6 @@ type ContextKey string

 const ContextUserKey ContextKey = "user"

 var ConfigSignozIo = "https://config.signoz.io/api/v1"

 var DEFAULT_TELEMETRY_ANONYMOUS = false

 func IsOSSTelemetryEnabled() bool {
@@ -50,9 +48,6 @@ const TraceTTL = "traces"
 const MetricsTTL = "metrics"
 const LogsTTL = "logs"

-const DurationSort = "DurationSort"
-const TimestampSort = "TimestampSort"
-
 const SpanSearchScopeRoot = "isroot"
 const SpanSearchScopeEntryPoint = "isentrypoint"

@@ -62,16 +57,9 @@ var TELEMETRY_ACTIVE_USER_DURATION_MINUTES = GetOrDefaultEnvInt("TELEMETRY_ACTIV

 var InviteEmailTemplate = GetOrDefaultEnv("INVITE_EMAIL_TEMPLATE", "/root/templates/invitation_email_template.html")

-var OTLPTarget = GetOrDefaultEnv("OTEL_EXPORTER_OTLP_ENDPOINT", "")
-var LogExportBatchSize = GetOrDefaultEnv("OTEL_BLRP_MAX_EXPORT_BATCH_SIZE", "512")
-
 // [Deprecated] SIGNOZ_LOCAL_DB_PATH is deprecated and scheduled for removal. Please use SIGNOZ_SQLSTORE_SQLITE_PATH instead.
 var RELATIONAL_DATASOURCE_PATH = GetOrDefaultEnv("SIGNOZ_LOCAL_DB_PATH", "/var/lib/signoz/signoz.db")

-var DurationSortFeature = GetOrDefaultEnv("DURATION_SORT_FEATURE", "true")
-
-var TimestampSortFeature = GetOrDefaultEnv("TIMESTAMP_SORT_FEATURE", "true")
-
 var MetricsExplorerClickhouseThreads = GetOrDefaultEnvInt("METRICS_EXPLORER_CLICKHOUSE_THREADS", 8)
 var UpdatedMetricsMetadataCachePrefix = GetOrDefaultEnv("METRICS_UPDATED_METADATA_CACHE_KEY", "UPDATED_METRICS_METADATA")

@@ -80,44 +68,9 @@ func UseMetricsPreAggregation() bool {
 	return GetOrDefaultEnv("USE_METRICS_PRE_AGGREGATION", "true") == "true"
 }

-func EnableHostsInfraMonitoring() bool {
-	return GetOrDefaultEnv("ENABLE_INFRA_METRICS", "true") == "true"
-}
-
 var KafkaSpanEval = GetOrDefaultEnv("KAFKA_SPAN_EVAL", "false")

-func IsDurationSortFeatureEnabled() bool {
-	isDurationSortFeatureEnabledStr := DurationSortFeature
-	isDurationSortFeatureEnabledBool, err := strconv.ParseBool(isDurationSortFeatureEnabledStr)
-	if err != nil {
-		return false
-	}
-	return isDurationSortFeatureEnabledBool
-}
-
-func IsTimestampSortFeatureEnabled() bool {
-	isTimestampSortFeatureEnabledStr := TimestampSortFeature
-	isTimestampSortFeatureEnabledBool, err := strconv.ParseBool(isTimestampSortFeatureEnabledStr)
-	if err != nil {
-		return false
-	}
-	return isTimestampSortFeatureEnabledBool
-}
-
 var DEFAULT_FEATURE_SET = model.FeatureSet{
-	model.Feature{
-		Name:       DurationSort,
-		Active:     IsDurationSortFeatureEnabled(),
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	}, model.Feature{
-		Name:       TimestampSort,
-		Active:     IsTimestampSortFeatureEnabled(),
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
 	model.Feature{
 		Name:   model.UseSpanMetrics,
 		Active: false,
@@ -32,13 +32,7 @@ func (fm *FeatureManager) CheckFeature(featureKey string) error {

 // GetFeatureFlags returns current features
 func (fm *FeatureManager) GetFeatureFlags() (model.FeatureSet, error) {
-	features := append(constants.DEFAULT_FEATURE_SET, model.Feature{
-		Name:       model.OSS,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	})
+	features := constants.DEFAULT_FEATURE_SET
 	return features, nil
 }
@@ -39,6 +39,7 @@ type Reader interface {
 	GetNextPrevErrorIDs(ctx context.Context, params *model.GetErrorParams) (*model.NextPrevErrorIDs, *model.ApiError)

 	// Search Interfaces
+	SearchTraces(ctx context.Context, params *model.SearchTracesParams) (*[]model.SearchSpansResult, error)
 	GetWaterfallSpansForTraceWithMetadata(ctx context.Context, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, *model.ApiError)
 	GetFlamegraphSpansForTrace(ctx context.Context, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, *model.ApiError)
@@ -9,58 +9,11 @@ type Feature struct {
 	Route      string `db:"route" json:"route"`
 }

-const CustomMetricsFunction = "CUSTOM_METRICS_FUNCTION"
-const DisableUpsell = "DISABLE_UPSELL"
-const OSS = "OSS"
-const QueryBuilderPanels = "QUERY_BUILDER_PANELS"
-const QueryBuilderAlerts = "QUERY_BUILDER_ALERTS"
 const UseSpanMetrics = "USE_SPAN_METRICS"
-const AlertChannelSlack = "ALERT_CHANNEL_SLACK"
-const AlertChannelWebhook = "ALERT_CHANNEL_WEBHOOK"
-const AlertChannelPagerduty = "ALERT_CHANNEL_PAGERDUTY"
-const AlertChannelMsTeams = "ALERT_CHANNEL_MSTEAMS"
-const AlertChannelOpsgenie = "ALERT_CHANNEL_OPSGENIE"
-const AlertChannelEmail = "ALERT_CHANNEL_EMAIL"
 const AnomalyDetection = "ANOMALY_DETECTION"
-const HostsInfraMonitoring = "HOSTS_INFRA_MONITORING"
 const TraceFunnels = "TRACE_FUNNELS"

 var BasicPlan = FeatureSet{
-	Feature{
-		Name:       OSS,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       DisableUpsell,
-		Active:     false,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       CustomMetricsFunction,
-		Active:     false,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       QueryBuilderPanels,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       QueryBuilderAlerts,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
 	Feature{
 		Name:   UseSpanMetrics,
 		Active: false,
@@ -68,48 +21,6 @@ var BasicPlan = FeatureSet{
 		UsageLimit: -1,
 		Route:      "",
 	},
-	Feature{
-		Name:       AlertChannelSlack,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       AlertChannelWebhook,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       AlertChannelPagerduty,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       AlertChannelOpsgenie,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       AlertChannelEmail,
-		Active:     true,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
-	Feature{
-		Name:       AlertChannelMsTeams,
-		Active:     false,
-		Usage:      0,
-		UsageLimit: -1,
-		Route:      "",
-	},
 	Feature{
 		Name:   AnomalyDetection,
 		Active: false,
@@ -248,6 +248,7 @@ func (q TagType) Validate() error {
 type FilterAttributeKeyRequest struct {
 	DataSource         DataSource        `json:"dataSource"`
 	AggregateOperator  AggregateOperator `json:"aggregateOperator"`
+	TagType            TagType           `json:"tagType"`
 	AggregateAttribute string            `json:"aggregateAttribute"`
 	SearchText         string            `json:"searchText"`
 	Limit              int               `json:"limit"`
@@ -36,7 +36,6 @@ type PrepareTaskOptions struct {
 	Logger      *zap.Logger
 	Reader      interfaces.Reader
 	Cache       cache.Cache
-	FF          interfaces.FeatureLookup
 	ManagerOpts *ManagerOptions
 	NotifyFunc  NotifyFunc
 	SQLStore    sqlstore.SQLStore
@@ -50,7 +49,6 @@ type PrepareTestRuleOptions struct {
 	Logger      *zap.Logger
 	Reader      interfaces.Reader
 	Cache       cache.Cache
-	FF          interfaces.FeatureLookup
 	ManagerOpts *ManagerOptions
 	NotifyFunc  NotifyFunc
 	SQLStore    sqlstore.SQLStore
@@ -89,7 +87,6 @@ type ManagerOptions struct {
 	Logger       *zap.Logger
 	ResendDelay  time.Duration
 	DisableRules bool
-	FeatureFlags interfaces.FeatureLookup
 	Reader       interfaces.Reader
 	Cache        cache.Cache

@@ -114,9 +111,7 @@ type Manager struct {
 	// datastore to store alert definitions
 	ruleDB RuleDB

-	logger *zap.Logger
-
-	featureFlags    interfaces.FeatureLookup
+	logger          *zap.Logger
 	reader          interfaces.Reader
 	cache           cache.Cache
 	prepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
@@ -156,7 +151,6 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
 		tr, err := NewThresholdRule(
 			ruleId,
 			opts.Rule,
-			opts.FF,
 			opts.Reader,
 			opts.UseLogsNewSchema,
 			opts.UseTraceNewSchema,
@@ -214,7 +208,6 @@ func NewManager(o *ManagerOptions) (*Manager, error) {
 		opts:            o,
 		block:           make(chan struct{}),
 		logger:          o.Logger,
-		featureFlags:    o.FeatureFlags,
 		reader:          o.Reader,
 		cache:           o.Cache,
 		prepareTaskFunc: o.PrepareTaskFunc,
@@ -391,7 +384,6 @@ func (m *Manager) editTask(rule *PostableRule, taskName string) error {
 		Logger:      m.logger,
 		Reader:      m.reader,
 		Cache:       m.cache,
-		FF:          m.featureFlags,
 		ManagerOpts: m.opts,
 		NotifyFunc:  m.prepareNotifyFunc(),
 		SQLStore:    m.sqlstore,
@@ -575,7 +567,6 @@ func (m *Manager) addTask(rule *PostableRule, taskName string) error {
 		Logger:      m.logger,
 		Reader:      m.reader,
 		Cache:       m.cache,
-		FF:          m.featureFlags,
 		ManagerOpts: m.opts,
 		NotifyFunc:  m.prepareNotifyFunc(),
 		SQLStore:    m.sqlstore,
@@ -954,7 +945,6 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m
 		Logger:      m.logger,
 		Reader:      m.reader,
 		Cache:       m.cache,
-		FF:          m.featureFlags,
 		ManagerOpts: m.opts,
 		NotifyFunc:  m.prepareTestNotifyFunc(),
 		SQLStore:    m.sqlstore,
@@ -46,7 +46,6 @@ func defaultTestNotification(opts PrepareTestRuleOptions) (int, *model.ApiError)
 	rule, err = NewThresholdRule(
 		alertname,
 		parsedRule,
-		opts.FF,
 		opts.Reader,
 		opts.UseLogsNewSchema,
 		opts.UseTraceNewSchema,
@@ -58,7 +58,6 @@ type ThresholdRule struct {
 func NewThresholdRule(
 	id string,
 	p *PostableRule,
-	featureFlags interfaces.FeatureLookup,
 	reader interfaces.Reader,
 	useLogsNewSchema bool,
 	useTraceNewSchema bool,
@@ -82,7 +81,6 @@ func NewThresholdRule(
 		Reader:            reader,
 		Cache:             nil,
 		KeyGenerator:      queryBuilder.NewKeyGenerator(),
-		FeatureLookup:     featureFlags,
 		UseLogsNewSchema:  useLogsNewSchema,
 		UseTraceNewSchema: useTraceNewSchema,
 	}
@@ -91,7 +89,6 @@ func NewThresholdRule(
 		Reader:            reader,
 		Cache:             nil,
 		KeyGenerator:      queryBuilder.NewKeyGenerator(),
-		FeatureLookup:     featureFlags,
 		UseLogsNewSchema:  useLogsNewSchema,
 		UseTraceNewSchema: useTraceNewSchema,
 	}
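The feature-lookup parameter between the rule and the reader is gone from the constructor. A sketch of the post-change call, mirroring the updated tests below (the postableRule and reader values are whatever the caller already has; reader may be nil in pure-parsing tests):

rule, err := NewThresholdRule(
	"69",          // rule id
	&postableRule, // *PostableRule
	reader,        // interfaces.Reader
	true,          // useLogsNewSchema
	true,          // useTraceNewSchema
	WithEvalDelay(2*time.Minute), // optional
)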
@@ -18,7 +18,6 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/featureManager"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -796,13 +795,12 @@ func TestThresholdRuleShouldAlert(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
fm := featureManager.StartManager()
|
||||
for idx, c := range cases {
|
||||
postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
|
||||
rule, err := NewThresholdRule("69", &postableRule, fm, nil, true, true, WithEvalDelay(2*time.Minute))
|
||||
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
|
||||
if err != nil {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
@@ -889,9 +887,8 @@ func TestPrepareLinksToLogs(t *testing.T) {
|
||||
SelectedQuery: "A",
|
||||
},
|
||||
}
|
||||
fm := featureManager.StartManager()
|
||||
|
||||
rule, err := NewThresholdRule("69", &postableRule, fm, nil, true, true, WithEvalDelay(2*time.Minute))
|
||||
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
|
||||
if err != nil {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
@@ -931,9 +928,8 @@ func TestPrepareLinksToTraces(t *testing.T) {
|
||||
SelectedQuery: "A",
|
||||
},
|
||||
}
|
||||
fm := featureManager.StartManager()
|
||||
|
||||
-        rule, err := NewThresholdRule("69", &postableRule, fm, nil, true, true, WithEvalDelay(2*time.Minute))
+        rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
         if err != nil {
             assert.NoError(t, err)
         }
@@ -1003,13 +999,12 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
         },
     }
 
-    fm := featureManager.StartManager()
     for idx, c := range cases {
         postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp)
         postableRule.RuleCondition.MatchType = MatchType(c.matchType)
         postableRule.RuleCondition.Target = &c.target
 
-        rule, err := NewThresholdRule("69", &postableRule, fm, nil, true, true, WithEvalDelay(2*time.Minute))
+        rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
         if err != nil {
             assert.NoError(t, err)
         }
@@ -1060,9 +1055,8 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
         },
     }
 
-    fm := featureManager.StartManager()
     for idx, c := range cases {
-        rule, err := NewThresholdRule("69", &postableRule, fm, nil, true, true) // no eval delay
+        rule, err := NewThresholdRule("69", &postableRule, nil, true, true) // no eval delay
         if err != nil {
             assert.NoError(t, err)
         }
@@ -1109,9 +1103,8 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
         },
     }
 
-    fm := featureManager.StartManager()
     for idx, c := range cases {
-        rule, err := NewThresholdRule("69", &postableRule, fm, nil, true, true, WithEvalDelay(2*time.Minute))
+        rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
         if err != nil {
             assert.NoError(t, err)
         }
@@ -1158,7 +1151,6 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
             },
         },
     }
-    fm := featureManager.StartManager()
    telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
 
     cols := make([]cmock.ColumnType, 0)
@@ -1252,7 +1244,7 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
         readerCache, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{TTL: DefaultFrequency}})
         require.NoError(t, err)
         reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", true, true, time.Duration(time.Second), readerCache)
-        rule, err := NewThresholdRule("69", &postableRule, fm, reader, true, true)
+        rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
         rule.TemporalityMap = map[string]map[v3.Temporality]bool{
             "signoz_calls_total": {
                 v3.Delta: true,
@@ -1309,7 +1301,6 @@ func TestThresholdRuleNoData(t *testing.T) {
             AlertOnAbsent: true,
         },
     }
-    fm := featureManager.StartManager()
     telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
 
     cols := make([]cmock.ColumnType, 0)
@@ -1350,7 +1341,7 @@ func TestThresholdRuleNoData(t *testing.T) {
     options := clickhouseReader.NewOptions("", "", "archiveNamespace")
     reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", true, true, time.Duration(time.Second), readerCache)
 
-    rule, err := NewThresholdRule("69", &postableRule, fm, reader, true, true)
+    rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
     rule.TemporalityMap = map[string]map[v3.Temporality]bool{
         "signoz_calls_total": {
             v3.Delta: true,
@@ -1411,7 +1402,6 @@ func TestThresholdRuleTracesLink(t *testing.T) {
             },
         },
     }
-    fm := featureManager.StartManager()
     telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
 
     metaCols := make([]cmock.ColumnType, 0)
@@ -1455,7 +1445,7 @@ func TestThresholdRuleTracesLink(t *testing.T) {
     options := clickhouseReader.NewOptions("", "", "archiveNamespace")
     reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", true, true, time.Duration(time.Second), nil)
 
-    rule, err := NewThresholdRule("69", &postableRule, fm, reader, true, true)
+    rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
     rule.TemporalityMap = map[string]map[v3.Temporality]bool{
         "signoz_calls_total": {
             v3.Delta: true,
@@ -1521,7 +1511,6 @@ func TestThresholdRuleLogsLink(t *testing.T) {
             },
         },
     }
-    fm := featureManager.StartManager()
     telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
 
     attrMetaCols := make([]cmock.ColumnType, 0)
@@ -1577,7 +1566,7 @@ func TestThresholdRuleLogsLink(t *testing.T) {
     options := clickhouseReader.NewOptions("", "", "archiveNamespace")
     reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", true, true, time.Duration(time.Second), nil)
 
-    rule, err := NewThresholdRule("69", &postableRule, fm, reader, true, true)
+    rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
     rule.TemporalityMap = map[string]map[v3.Temporality]bool{
         "signoz_calls_total": {
             v3.Delta: true,
@@ -1653,7 +1642,7 @@ func TestThresholdRuleShiftBy(t *testing.T) {
         },
     }
 
-    rule, err := NewThresholdRule("69", &postableRule, nil, nil, true, true)
+    rule, err := NewThresholdRule("69", &postableRule, nil, true, true)
     if err != nil {
         assert.NoError(t, err)
     }

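Note: every hunk above makes the same mechanical change — NewThresholdRule no longer takes a feature-manager argument, so each call site drops both the fm parameter and the featureManager.StartManager() setup. A minimal call-site sketch under that reading, reusing the postableRule and reader fixtures from these tests:

    // before: fm := featureManager.StartManager()
    // before: rule, err := NewThresholdRule("69", &postableRule, fm, reader, true, true)
    rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
    if err != nil {
        assert.NoError(t, err)
    }

Variadic options such as WithEvalDelay(2*time.Minute) are still passed at the end and are untouched by this refactor.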
@@ -35,7 +35,7 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
     )
 
     // Should be able to generate a connection url from UI - initializing an integration account
-    testAccountConfig := cloudintegrations.AccountConfig{
+    testAccountConfig := types.AccountConfig{
         EnabledRegions: []string{"us-east-1", "us-east-2"},
     }
     connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
@@ -65,8 +65,8 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
     testAWSAccountId := "4563215233"
     agentCheckInResp := testbed.CheckInAsAgentWithQS(
         "aws", cloudintegrations.AgentCheckInRequest{
-            AccountId: testAccountId,
-            CloudAccountId: testAWSAccountId,
+            ID: testAccountId,
+            AccountID: testAWSAccountId,
         },
     )
     require.Equal(testAccountId, agentCheckInResp.AccountId)
@@ -91,20 +91,20 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
     require.Equal(testAWSAccountId, accountsListResp2.Accounts[0].CloudAccountId)
 
     // Should be able to update account config from UI
-    testAccountConfig2 := cloudintegrations.AccountConfig{
+    testAccountConfig2 := types.AccountConfig{
         EnabledRegions: []string{"us-east-2", "us-west-1"},
     }
     latestAccount := testbed.UpdateAccountConfigWithQS(
         "aws", testAccountId, testAccountConfig2,
     )
-    require.Equal(testAccountId, latestAccount.Id)
+    require.Equal(testAccountId, latestAccount.ID.StringValue())
     require.Equal(testAccountConfig2, *latestAccount.Config)
 
     // The agent should now receive latest account config.
     agentCheckInResp1 := testbed.CheckInAsAgentWithQS(
         "aws", cloudintegrations.AgentCheckInRequest{
-            AccountId: testAccountId,
-            CloudAccountId: testAWSAccountId,
+            ID: testAccountId,
+            AccountID: testAWSAccountId,
         },
     )
     require.Equal(testAccountId, agentCheckInResp1.AccountId)
@@ -114,14 +114,14 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
     // Should be able to disconnect/remove account from UI.
     tsBeforeDisconnect := time.Now()
     latestAccount = testbed.DisconnectAccountWithQS("aws", testAccountId)
-    require.Equal(testAccountId, latestAccount.Id)
+    require.Equal(testAccountId, latestAccount.ID.StringValue())
     require.LessOrEqual(tsBeforeDisconnect, *latestAccount.RemovedAt)
 
     // The agent should receive the disconnected status in account config post disconnection
     agentCheckInResp2 := testbed.CheckInAsAgentWithQS(
         "aws", cloudintegrations.AgentCheckInRequest{
-            AccountId: testAccountId,
-            CloudAccountId: testAWSAccountId,
+            ID: testAccountId,
+            AccountID: testAWSAccountId,
         },
     )
     require.Equal(testAccountId, agentCheckInResp2.AccountId)
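Note: the agent check-in payload fields are renamed throughout — AccountId becomes ID and CloudAccountId becomes AccountID. A hedged sketch of the struct change implied by these hunks (the definition itself is not part of this diff; field types are inferred from the string test values, and any JSON tags are assumptions):

    // implied shape of cloudintegrations.AgentCheckInRequest after the rename
    type AgentCheckInRequest struct {
        ID        string // was AccountId
        AccountID string // was CloudAccountId
    }

The response side is unchanged: assertions still read agentCheckInResp.AccountId.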
@@ -157,13 +157,13 @@ func TestAWSIntegrationServices(t *testing.T) {
     testAWSAccountId := "389389489489"
     testbed.CheckInAsAgentWithQS(
         "aws", cloudintegrations.AgentCheckInRequest{
-            AccountId: testAccountId,
-            CloudAccountId: testAWSAccountId,
+            ID: testAccountId,
+            AccountID: testAWSAccountId,
         },
     )
 
-    testSvcConfig := cloudintegrations.CloudServiceConfig{
-        Metrics: &cloudintegrations.CloudServiceMetricsConfig{
+    testSvcConfig := types.CloudServiceConfig{
+        Metrics: &types.CloudServiceMetricsConfig{
             Enabled: true,
         },
     }
@@ -199,7 +199,7 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
     testbed := NewCloudIntegrationsTestBed(t, nil)
 
     // configure a connected account
-    testAccountConfig := cloudintegrations.AccountConfig{
+    testAccountConfig := types.AccountConfig{
         EnabledRegions: []string{"us-east-1", "us-east-2"},
     }
     connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
@@ -218,8 +218,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
     testAWSAccountId := "389389489489"
     checkinResp := testbed.CheckInAsAgentWithQS(
         "aws", cloudintegrations.AgentCheckInRequest{
-            AccountId: testAccountId,
-            CloudAccountId: testAWSAccountId,
+            ID: testAccountId,
+            AccountID: testAWSAccountId,
         },
     )
 
@@ -237,14 +237,14 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
 
     // helper
     setServiceConfig := func(svcId string, metricsEnabled bool, logsEnabled bool) {
-        testSvcConfig := cloudintegrations.CloudServiceConfig{}
+        testSvcConfig := types.CloudServiceConfig{}
         if metricsEnabled {
-            testSvcConfig.Metrics = &cloudintegrations.CloudServiceMetricsConfig{
+            testSvcConfig.Metrics = &types.CloudServiceMetricsConfig{
                 Enabled: metricsEnabled,
             }
         }
         if logsEnabled {
-            testSvcConfig.Logs = &cloudintegrations.CloudServiceLogsConfig{
+            testSvcConfig.Logs = &types.CloudServiceLogsConfig{
                 Enabled: logsEnabled,
             }
         }
@@ -262,8 +262,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
 
     checkinResp = testbed.CheckInAsAgentWithQS(
         "aws", cloudintegrations.AgentCheckInRequest{
-            AccountId: testAccountId,
-            CloudAccountId: testAWSAccountId,
+            ID: testAccountId,
+            AccountID: testAWSAccountId,
         },
     )
 
@@ -292,13 +292,13 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
     require.True(strings.HasPrefix(logGroupPrefixes[0], "/aws/rds"))
 
     // change regions and update service configs and validate config changes for agent
-    testAccountConfig2 := cloudintegrations.AccountConfig{
+    testAccountConfig2 := types.AccountConfig{
         EnabledRegions: []string{"us-east-2", "us-west-1"},
     }
     latestAccount := testbed.UpdateAccountConfigWithQS(
         "aws", testAccountId, testAccountConfig2,
     )
-    require.Equal(testAccountId, latestAccount.Id)
+    require.Equal(testAccountId, latestAccount.ID.StringValue())
     require.Equal(testAccountConfig2, *latestAccount.Config)
 
     // disable metrics for one and logs for the other.
@@ -308,8 +308,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
 
     checkinResp = testbed.CheckInAsAgentWithQS(
         "aws", cloudintegrations.AgentCheckInRequest{
-            AccountId: testAccountId,
-            CloudAccountId: testAWSAccountId,
+            ID: testAccountId,
+            AccountID: testAWSAccountId,
         },
     )
     require.Equal(testAccountId, checkinResp.AccountId)
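Note: alongside the field renames, the account and service config types move out of the cloudintegrations package into a shared types package — types.AccountConfig, types.CloudServiceConfig, types.CloudServiceMetricsConfig, and types.CloudServiceLogsConfig — while AgentCheckInRequest stays in cloudintegrations. A construction sketch using only the fields visible in these hunks:

    svcConfig := types.CloudServiceConfig{
        Metrics: &types.CloudServiceMetricsConfig{Enabled: true},
        Logs:    &types.CloudServiceLogsConfig{Enabled: true},
    }
    accountConfig := types.AccountConfig{
        EnabledRegions: []string{"us-east-2", "us-west-1"},
    }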
@@ -453,8 +453,8 @@ func (tb *CloudIntegrationsTestBed) CheckInAsAgentWithQS(
 }
 
 func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
-    cloudProvider string, accountId string, newConfig cloudintegrations.AccountConfig,
-) *cloudintegrations.AccountRecord {
+    cloudProvider string, accountId string, newConfig types.AccountConfig,
+) *types.CloudIntegration {
     respDataJson := tb.RequestQS(
         fmt.Sprintf(
             "/api/v1/cloud-integrations/%s/accounts/%s/config",
@@ -464,7 +464,7 @@ func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
         },
     )
 
-    var resp cloudintegrations.AccountRecord
+    var resp types.CloudIntegration
     err := json.Unmarshal(respDataJson, &resp)
     if err != nil {
         tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")
@@ -475,7 +475,7 @@ func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
 
 func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
     cloudProvider string, accountId string,
-) *cloudintegrations.AccountRecord {
+) *types.CloudIntegration {
     respDataJson := tb.RequestQS(
         fmt.Sprintf(
             "/api/v1/cloud-integrations/%s/accounts/%s/disconnect",
@@ -483,7 +483,7 @@ func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
         ), map[string]any{},
     )
 
-    var resp cloudintegrations.AccountRecord
+    var resp types.CloudIntegration
     err := json.Unmarshal(respDataJson, &resp)
     if err != nil {
         tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")

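Note: the testbed helpers now return *types.CloudIntegration rather than *cloudintegrations.AccountRecord, and the record identifier is no longer a plain string field — assertions move from latestAccount.Id to latestAccount.ID.StringValue(), which suggests ID is now a wrapped value type (its definition is not shown in this diff). A hedged usage sketch based on the updated assertions:

    latestAccount := testbed.DisconnectAccountWithQS("aws", testAccountId)
    // ID is a typed value; per the updated assertions, StringValue() yields the raw string
    require.Equal(testAccountId, latestAccount.ID.StringValue())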
Some files were not shown because too many files have changed in this diff.