Compare commits

...

49 Commits

Author SHA1 Message Date
ahmadshaheer
660c70d1e4 chore: remove trace funnels feature flag 2025-04-16 08:51:12 +04:30
Nityananda Gohain
e723399f7f fix: add check for empty services (#7611) 2025-04-15 16:39:33 +00:00
Nityananda Gohain
48936bed9b chore: multitenancy in integrations (#7507)
* chore: multitenancy in integrations

* chore: multitenancy in cloud integration accounts

* chore: changes to cloudintegrationservice

* chore: rename migration

* chore: update scan function

* chore: update scan function

* chore: fix migration

* chore: fix struct

* chore: remove unwanted code

* chore: update scan function

* chore: migrate user and pat for integrations

* fix: changes to the user for integrations

* fix: address comments

* fix: copy created_at

* fix: update non revoked token

* chore: don't allow deleting pat and user for integrations

* fix: address comments

* chore: address comments

* chore: add checks for fk in dialect

* fix: service migration

* fix: don't update user if user is already migrated

* fix: update correct service config

* fix: remove unwanted code

* fix: remove migration for multiple identical services, which is not required

* fix: fix migration and disable dashboard if metrics disabled

* fix: don't use ee types

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-04-15 15:35:36 +00:00
Srikanth Chekuri
ee70474cc7 fix: missing receivers in json payload for legacy postableAlert (#7603) 2025-04-14 13:20:39 +00:00
Srikanth Chekuri
c3fa7144ee chore: add tag type filter support in attribute keys (#7522) 2025-04-14 18:43:15 +05:30
Nityananda Gohain
5dd02a5b8e fix: remove unnecessary code for email domain check error (#7566)
* fix: proper check for emailComponents

* fix: correct error handling
2025-04-14 11:15:21 +05:30
Srikanth Chekuri
c0f01e4cb9 chore: add metadatastore implementation for logs and traces (#7559)
* chore: add metadatastore implementation for logs and traces

* chore: use telemetrystore mock
2025-04-11 19:41:02 +05:30
Srikanth Chekuri
fed84cb50a chore: add condition builder attributes metadata (#7558) 2025-04-11 16:20:27 +05:30
Srikanth Chekuri
80545c4d07 chore: add materialized field extractor from table schema (#7557) 2025-04-11 15:53:55 +05:30
Srikanth Chekuri
0b1faec092 chore: add condition builder for span index v3 (#7556) 2025-04-11 15:13:04 +05:30
Srikanth Chekuri
ba6f31b1c3 chore: add virtual fields table (#7586) 2025-04-11 07:36:31 +05:30
Srikanth Chekuri
eed92978a4 chore: add non-json condition builder for logs v2 (#7555) 2025-04-10 18:23:01 +00:00
Prashant Shahi
41cbd316b5 Feat/staging (#7585)
### Summary

- Non-production build workflow using Primus
- Staging CD: new staging app and dev staging deployments
- clean up Docker resources on the stagingapp/testingapp machines

---------

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2025-04-10 17:46:13 +05:30
Vishal Sharma
8d7d33393d feat: include tenant_url in event attributes for logging (#7582) 2025-04-10 15:17:14 +05:30
sawhil
8d143b44b1 feat: removed ff for tp-api-monitoring from fe - 1 2025-04-09 15:44:42 +05:30
sawhil
423aebd6eb feat: removed ff for tp-api-monitoring from fe 2025-04-09 15:44:42 +05:30
primus-bot[bot]
8d630707af chore(release): bump to v0.78.1 (#7573)
#### Summary
 - Release SigNoz v0.78.1

 Created by [Primus-Bot](https://github.com/apps/primus-bot)

Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-04-09 15:26:04 +05:30
Prashant Shahi
a5b52431b7 chore(build): include missing LDFLAGS in the community/enterprise builds (#7571)
### Summary

- include missing LDFLAGS in the community/enterprise builds

---------

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2025-04-09 15:10:49 +05:30
primus-bot[bot]
0138d757c8 chore(release): bump to v0.78.0 (#7568)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-04-09 12:28:18 +05:30
SagarRajput-7
844195b84f Revert "fix: reevaluated newdashboard bool to ensure that widget doesn't exis…" (#7567)
This reverts commit e53d3d1269.
2025-04-09 10:38:31 +05:30
Srikanth Chekuri
8ff05b2e8f chore: add field type definitions for qb v5 (#7552) 2025-04-08 22:34:58 +05:30
Srikanth Chekuri
c8c56c544e chore: add generated parser files for go (#7538) 2025-04-08 13:52:40 +00:00
Vikrant Gupta
1c43655336 fix(ff): alert creation disabled for non metric alerts (#7561) 2025-04-08 13:21:59 +00:00
Prashant Shahi
c269c8c6b8 feat: publish signoz to multiple registries using primus (#7504)
### Summary

- publish signoz images to multiple registries using primus
- deprecate old build workflow

---------

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2025-04-08 18:06:22 +05:30
Vibhu Pandey
3142b6cc6d chore(ff): remove some more unused ffs (#7532)
### Summary

remove unused feature flags
- ENTERPRISE_PLAN = 'ENTERPRISE_PLAN',
- BASIC_PLAN = 'BASIC_PLAN',
- ALERT_CHANNEL_SLACK = 'ALERT_CHANNEL_SLACK',
- ALERT_CHANNEL_WEBHOOK = 'ALERT_CHANNEL_WEBHOOK',
- ALERT_CHANNEL_PAGERDUTY = 'ALERT_CHANNEL_PAGERDUTY',
- ALERT_CHANNEL_OPSGENIE = 'ALERT_CHANNEL_OPSGENIE',
- ALERT_CHANNEL_MSTEAMS = 'ALERT_CHANNEL_MSTEAMS',
- CUSTOM_METRICS_FUNCTION = 'CUSTOM_METRICS_FUNCTION',
- QUERY_BUILDER_PANELS = 'QUERY_BUILDER_PANELS',
- QUERY_BUILDER_ALERTS = 'QUERY_BUILDER_ALERTS',
- DISABLE_UPSELL = 'DISABLE_UPSELL',
- OSS = 'OSS',
- QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
- AWS_INTEGRATION = 'AWS_INTEGRATION',

remove ProPlan concept
2025-04-07 22:58:46 +05:30
Vibhu Pandey
58e141685a revert(smart): add smart trace detail search back (#7547)
### Summary

- Added the trace search endpoint back
- Enabled the `smart` algorithm for all users
- Added `"algo":"smart"` for telemetry events in the old endpoint
2025-04-07 17:19:39 +00:00
Srikanth Chekuri
e17f63a50c chore: error log on query error (#7553)
* chore: error log on query error

* chore: no error on context cancel
2025-04-07 15:15:49 +00:00
Vibhu Pandey
838ef5dcc5 feat(valuer): add string valuer (#7554) 2025-04-07 20:36:04 +05:30
SagarRajput-7
e53d3d1269 fix: reevaluated newdashboard bool to ensure that widget doesn't exist, in case of a prior PUT call (#7525)
* fix: reevaluated newdashboard bool to ensure that widget doesn't exist, in case of a prior PUT call

* fix: resolved comment
2025-04-07 15:31:08 +05:30
Vibhu Pandey
2330420c0d fix(querier): remove ff (#7531) 2025-04-05 18:08:06 +00:00
Vibhu Pandey
65ac277074 fix(duration|timestamp): remove duration/timestamp sort (#7530) 2025-04-05 23:26:50 +05:30
Vibhu Pandey
b7982ca348 fix(ff): remove feature interface from ruler (#7529)
### Summary

remove feature interface from ruler
2025-04-05 12:52:26 +00:00
dependabot[bot]
2748b49a44 chore(deps): bump github.com/golang-jwt/jwt/v5 from 5.2.1 to 5.2.2 (#7401)
Bumps [github.com/golang-jwt/jwt/v5](https://github.com/golang-jwt/jwt) from 5.2.1 to 5.2.2.
- [Release notes](https://github.com/golang-jwt/jwt/releases)
- [Changelog](https://github.com/golang-jwt/jwt/blob/main/VERSION_HISTORY.md)
- [Commits](https://github.com/golang-jwt/jwt/compare/v5.2.1...v5.2.2)

---
updated-dependencies:
- dependency-name: github.com/golang-jwt/jwt/v5
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-04-05 01:43:43 +00:00
Vibhu Pandey
7345027762 fix(ff): remove prefer rpm (#7528) 2025-04-04 23:38:16 +05:30
Vibhu Pandey
68f874e433 chore(ff): remove unused SMART_TRACE_DETAIL feature flag (#7527) 2025-04-04 20:28:54 +05:30
Vibhu Pandey
54a82b1664 fix(dashboards): remove ff interface (#7526) 2025-04-04 19:12:31 +05:30
Yunus M
93dc585145 fix: disable sidenav items for cloud users whose license has expired (#7524) 2025-04-04 18:33:55 +05:30
Vikrant Gupta
6a143efd2c feat(sqlmigration): update the user related tables according to new schema (#7518)
* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): make the preference package multi tenant

* feat(preference): address nitpick comments

* feat(preference): added the cascade delete for preferences

* feat(sqlmigration): update apdex and TTL status tables  (#7481)

* feat(sqlmigration): update the apdex and ttl tables

* feat(sqlmigration): register the new migration and rename table

* feat(sqlmigration): fix the ttl queries

* feat(sqlmigration): update the TTL and apdex tables

* feat(sqlmigration): update the TTL and apdex tables

* feat(sqlmigration): fix the reset password and pat tables (#7482)

* feat(sqlmigration): fix the reset password and pat tables

* feat(sqlmigration): revert PAT changes

* feat(sqlmigration): register and rename the new migration

* feat(sqlmigration): handle updates for user tables

* feat(sqlmigration): remove unwanted changes
2025-04-04 01:46:28 +05:30
Vikrant Gupta
0116eb20ab feat(sqlmigration): update apdex and TTL status tables (#7517)
* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): make the preference package multi tenant

* feat(preference): address nitpick comments

* feat(preference): added the cascade delete for preferences

* feat(sqlmigration): update apdex and TTL status tables  (#7481)

* feat(sqlmigration): update the apdex and ttl tables

* feat(sqlmigration): register the new migration and rename table

* feat(sqlmigration): fix the ttl queries

* feat(sqlmigration): update the TTL and apdex tables

* feat(sqlmigration): update the TTL and apdex tables
2025-04-04 01:36:47 +05:30
Vikrant Gupta
79e9d1b357 feat(preference): multi tenant preference module (#7516)
* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): make the preference package multi tenant

* feat(preference): address nitpick comments

* feat(preference): added the cascade delete for preferences
2025-04-04 01:25:24 +05:30
Vikrant Gupta
b89ce82e25 feat(sqlmigration): update the alertmanager tables (#7513)
* feat(sqlmigration): update the alertmanager tables
2025-04-03 17:56:49 +00:00
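
The sqlmigration commits above (alertmanager, preference, apdex/TTL, user and PAT tables) all register schema-update steps against the shared SQL store. As a rough, hypothetical illustration only — written against bun's public migrate package rather than the project's actual sqlmigration interface, with the table and column names (personal_access_tokens, org_id) borrowed from elsewhere in this diff — one tenant-scoping step could look like:

```go
package sqlmigration

import (
	"context"

	"github.com/uptrace/bun"
	"github.com/uptrace/bun/migrate"
)

// Migrations is a hypothetical registry; the real project wires its
// migrations through its own sqlmigration package, which is not shown here.
var Migrations = migrate.NewMigrations()

func init() {
	Migrations.MustRegister(
		// up: add an org_id column so existing rows can be scoped to a tenant.
		func(ctx context.Context, db *bun.DB) error {
			_, err := db.ExecContext(ctx,
				`ALTER TABLE personal_access_tokens ADD COLUMN org_id TEXT`)
			return err
		},
		// down: dropping columns is awkward on older SQLite, so leave it as a no-op.
		func(ctx context.Context, db *bun.DB) error {
			return nil
		},
	)
}
```
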
dependabot[bot]
b43a198fd8 chore(deps): bump github.com/expr-lang/expr from 1.16.9 to 1.17.0 (#7342)
Bumps [github.com/expr-lang/expr](https://github.com/expr-lang/expr) from 1.16.9 to 1.17.0.
- [Release notes](https://github.com/expr-lang/expr/releases)
- [Commits](https://github.com/expr-lang/expr/compare/v1.16.9...v1.17.0)

---
updated-dependencies:
- dependency-name: github.com/expr-lang/expr
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-04-03 08:17:40 +00:00
Srikanth Chekuri
b40ca4baf3 fix: do not crash service on panic during rule eval (#7514) 2025-04-03 13:13:58 +05:30
SagarRajput-7
8df77c9221 fix: fixed trace funnel - header style overriding other pages (#7512)
* fix: fixed trace funnel - header style overriding other pages

* fix: fixed trace funnel - header style overriding other pages

* fix: handled nesting
2025-04-03 07:29:39 +00:00
Srikanth Chekuri
f67555576f chore: add info icon tool tips for webhook/routing/integration key (#7405) 2025-04-03 09:40:41 +05:30
Srikanth Chekuri
f0a4c37073 fix: handle maintenance windows that cross day boundaries (#7494) 2025-04-02 20:48:01 +00:00
Vikrant Gupta
7972261237 Revert "fix: use search v2 component for traces data source & minor improveme…" (#7511)
This reverts commit d7a6607a25.
2025-04-03 01:15:20 +05:30
primus-bot[bot]
3b4a8e5e0f chore(release): bump to v0.77.0 (#7502)
#### Summary
 - Release SigNoz v0.77.0
 - Bump SigNoz OTel Collector to v0.111.37

 Created by [Primus-Bot](https://github.com/apps/primus-bot)
2025-04-02 12:19:03 +05:30
Yunus M
5ef3b8ee3f feat: support request data source and improve layout (#7485)
* feat: support request data source and improve layout

* feat: update config

* feat: update config with related keywords

* update config

---------

Co-authored-by: makeavish <makeavish786@gmail.com>
2025-04-02 11:59:53 +05:30
201 changed files with 15834 additions and 3440 deletions

.github/workflows/build-community.yaml

@@ -0,0 +1,81 @@
name: build-community
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
defaults:
run:
shell: bash
env:
PRIMUS_HOME: .primus
MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
jobs:
prepare:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.build-info.outputs.version }}
hash: ${{ steps.build-info.outputs.hash }}
time: ${{ steps.build-info.outputs.time }}
branch: ${{ steps.build-info.outputs.branch }}
steps:
- name: self-checkout
uses: actions/checkout@v4
- id: token
name: github-token-gen
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.PRIMUS_APP_ID }}
private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
owner: ${{ github.repository_owner }}
- name: primus-checkout
uses: actions/checkout@v4
with:
repository: signoz/primus
ref: main
path: .primus
token: ${{ steps.token.outputs.token }}
- name: build-info
id: build-info
run: |
echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
js-build:
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
needs: prepare
secrets: inherit
with:
PRIMUS_REF: main
JS_SRC: frontend
JS_OUTPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
go-build:
uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
needs: [prepare, js-build]
secrets: inherit
with:
PRIMUS_REF: main
GO_NAME: signoz-community
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./pkg/query-service
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=community
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
DOCKER_MANIFEST: true
DOCKER_PROVIDERS: dockerhub
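
The `-ldflags -X` values in GO_BUILD_FLAGS stamp build metadata into package-level string variables at link time; the enterprise and staging workflows below use the same pattern. A minimal sketch of a version package compatible with these flags — the import path and variable names are taken from the `-X` targets above, while the defaults and the Info helper are assumptions:

```go
// Package version carries build metadata injected at link time, e.g.
//   -ldflags "-X github.com/SigNoz/signoz/pkg/version.version=v0.78.1"
package version

import "fmt"

// Defaults apply to a plain `go build` run without the workflow's ldflags.
var (
	version = "dev"
	variant = "community"
	hash    = "unknown"
	time    = "unknown"
	branch  = "unknown"
)

// Info renders the injected values as a single human-readable string.
func Info() string {
	return fmt.Sprintf("%s (variant=%s, commit=%s, built=%s, branch=%s)",
		version, variant, hash, time, branch)
}
```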

.github/workflows/build-enterprise.yaml

@@ -0,0 +1,113 @@
name: build-enterprise
on:
push:
tags:
- v*
defaults:
run:
shell: bash
env:
PRIMUS_HOME: .primus
MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
jobs:
prepare:
runs-on: ubuntu-latest
outputs:
docker_providers: ${{ steps.set-docker-providers.outputs.providers }}
version: ${{ steps.build-info.outputs.version }}
hash: ${{ steps.build-info.outputs.hash }}
time: ${{ steps.build-info.outputs.time }}
branch: ${{ steps.build-info.outputs.branch }}
steps:
- name: self-checkout
uses: actions/checkout@v4
- id: token
name: github-token-gen
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.PRIMUS_APP_ID }}
private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
owner: ${{ github.repository_owner }}
- name: primus-checkout
uses: actions/checkout@v4
with:
repository: signoz/primus
ref: main
path: .primus
token: ${{ steps.token.outputs.token }}
- name: build-info
id: build-info
run: |
echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
- name: set-docker-providers
id: set-docker-providers
run: |
if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ || ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]+$ ]]; then
echo "providers=dockerhub gcp" >> $GITHUB_OUTPUT
else
echo "providers=gcp" >> $GITHUB_OUTPUT
fi
- name: create-dotenv
run: |
mkdir -p frontend
echo 'CI=1' > frontend/.env
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
path: frontend/.env
key: dotenv-${{ github.sha }}
js-build:
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
needs: prepare
secrets: inherit
with:
PRIMUS_REF: main
JS_SRC: frontend
JS_INPUT_ARTIFACT_CACHE_KEY: dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_PATH: frontend/.env
JS_OUTPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
go-build:
uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
needs: [prepare, js-build]
secrets: inherit
with:
PRIMUS_REF: main
GO_INPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
DOCKER_MANIFEST: true
DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}

.github/workflows/build-staging.yaml

@@ -0,0 +1,131 @@
name: build-staging
on:
push:
branches:
- main
pull_request:
types: [labeled]
defaults:
run:
shell: bash
env:
PRIMUS_HOME: .primus
MAKE: make --no-print-directory --makefile=.primus/src/make/main.mk
jobs:
prepare:
runs-on: ubuntu-latest
if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
outputs:
version: ${{ steps.build-info.outputs.version }}
hash: ${{ steps.build-info.outputs.hash }}
time: ${{ steps.build-info.outputs.time }}
branch: ${{ steps.build-info.outputs.branch }}
deployment: ${{ steps.build-info.outputs.deployment }}
steps:
- name: self-checkout
uses: actions/checkout@v4
- id: token
name: github-token-gen
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.PRIMUS_APP_ID }}
private-key: ${{ secrets.PRIMUS_PRIVATE_KEY }}
owner: ${{ github.repository_owner }}
- name: primus-checkout
uses: actions/checkout@v4
with:
repository: signoz/primus
ref: main
path: .primus
token: ${{ steps.token.outputs.token }}
- name: build-info
id: build-info
run: |
echo "version=$($MAKE info-version)" >> $GITHUB_OUTPUT
echo "hash=$($MAKE info-commit-short)" >> $GITHUB_OUTPUT
echo "time=$($MAKE info-timestamp)" >> $GITHUB_OUTPUT
echo "branch=$($MAKE info-branch)" >> $GITHUB_OUTPUT
staging_label="${{ github.event.label.name }}"
if [[ "${staging_label}" == "staging:"* ]]; then
deployment=${staging_label#"staging:"}
elif [[ "${{ github.event.ref }}" == "refs/heads/main" ]]; then
deployment="staging"
else
echo "error: not able to determine deployment - please verify the PR label or the branch"
exit 1
fi
echo "deployment=${deployment}" >> $GITHUB_OUTPUT
- name: create-dotenv
run: |
mkdir -p frontend
echo 'CI=1' > frontend/.env
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
path: frontend/.env
key: dotenv-${{ github.sha }}
js-build:
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
needs: prepare
secrets: inherit
with:
PRIMUS_REF: main
JS_SRC: frontend
JS_INPUT_ARTIFACT_CACHE_KEY: dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_PATH: frontend/.env
JS_OUTPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
go-build:
uses: signoz/primus.workflows/.github/workflows/go-build.yaml@main
needs: [prepare, js-build]
secrets: inherit
with:
PRIMUS_REF: main
GO_INPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
DOCKER_MANIFEST: true
DOCKER_PROVIDERS: gcp
staging:
if: ${{ contains(github.event.label.name, 'staging:') || github.event.ref == 'refs/heads/main' }}
uses: signoz/primus.workflows/.github/workflows/github-trigger.yaml@main
secrets: inherit
needs: [prepare, go-build]
with:
PRIMUS_REF: main
GITHUB_ENVIRONMENT: staging
GITHUB_SILENT: true
GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
GITHUB_EVENT_NAME: releaser
GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"


@@ -1,122 +0,0 @@
name: build
on:
push:
branches:
- main
tags:
- v*
jobs:
enterprise:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v4
- name: setup
uses: actions/setup-go@v5
with:
go-version: "1.22"
- name: setup-qemu
uses: docker/setup-qemu-action@v3
- name: setup-buildx
uses: docker/setup-buildx-action@v3
with:
version: latest
- name: docker-login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: create-env-file
run: |
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
- name: github-ref-info
shell: bash
run: |
GH_REF=${{ github.ref }}
if [[ "${{ github.ref_type }}" == "tag" ]]; then
PREFIX="refs/tags/"
echo "GH_IS_TAG=true" >> $GITHUB_ENV
echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
else
PREFIX="refs/heads/"
echo "GH_IS_TAG=false" >> $GITHUB_ENV
echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
fi
- name: set-version
run: |
if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
echo "VERSION=latest" >> $GITHUB_ENV
else
echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
fi
- name: cross-compilation-tools
run: |
set -ex
sudo apt-get update
sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
- name: publish
run: make docker-buildx-enterprise
community:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v4
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
- name: setup-qemu
uses: docker/setup-qemu-action@v3
- name: setup-buildx
uses: docker/setup-buildx-action@v3
with:
version: latest
- name: docker-login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: github-ref-info
shell: bash
run: |
GH_REF=${{ github.ref }}
if [[ "${{ github.ref_type }}" == "tag" ]]; then
PREFIX="refs/tags/"
echo "GH_IS_TAG=true" >> $GITHUB_ENV
echo "GH_TAG=${GH_REF#$PREFIX}" >> $GITHUB_ENV
else
PREFIX="refs/heads/"
echo "GH_IS_TAG=false" >> $GITHUB_ENV
echo "GH_BRANCH_NAME=${GH_REF#$PREFIX}" >> $GITHUB_ENV
fi
- name: set-version
run: |
if [ '${{ env.GH_IS_TAG }}' == 'true' ]; then
echo "VERSION=${{ env.GH_TAG }}" >> $GITHUB_ENV
elif [ '${{ env.GH_BRANCH_NAME }}' == 'main' ]; then
echo "VERSION=latest" >> $GITHUB_ENV
else
echo "VERSION=${{ env.GH_BRANCH_NAME }}" >> $GITHUB_ENV
fi
- name: cross-compilation-tools
run: |
set -ex
sudo apt-get update
sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
- name: publish
run: make docker-buildx-community


@@ -36,12 +36,17 @@ jobs:
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
export OTELCOL_TAG="main"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
export KAFKA_SPAN_EVAL="true"
docker system prune --force
docker pull signoz/signoz-otel-collector:main
docker pull signoz/signoz-schema-migrator:main
docker system prune --force --all
OTELCOL_TAG=$(curl -s https://api.github.com/repos/SigNoz/signoz-otel-collector/releases/latest | jq -r '.tag_name // "not-found"')
if [[ "${OTELCOL_TAG}" == "not-found" ]]; then
echo "warning: unable to determine latest SigNoz OtelCollector release tag, skipping latest otelcol deployment"
else
export OTELCOL_TAG=${OTELCOL_TAG}
docker pull signoz/signoz-otel-collector:${OTELCOL_TAG}
docker pull signoz/signoz-schema-migrator:${OTELCOL_TAG}
fi
cd ~/signoz
git status
git add .


@@ -38,7 +38,7 @@ jobs:
export VERSION="${GITHUB_SHA:0:7}" # needed for child process to access it
export DEV_BUILD="1"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
docker system prune --force --all
cd ~/signoz
git status
git add .

.versions/alpine

@@ -0,0 +1,17 @@
#### Auto generated by make docker-version-alpine. DO NOT EDIT! ####
amd64=029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85
unknown=5fea95373b9ec85974843f31446fa6a9df4492dddae4e1cb056193c34a20a5be
arm=b4aef1a899e0271f06d948c9a8fa626ecdb2202d3a178bc14775dd559e23df8e
unknown=a4d1e27e63a9d6353046eb25a2f0ec02945012b217f4364cd83a73fe6dfb0b15
arm=4fdafe217d0922f3c3e2b4f64cf043f8403a4636685cd9c51fea2cbd1f419740
unknown=7f21ac2018d95b2c51a5779c1d5ca6c327504adc3b0fdc747a6725d30b3f13c2
arm64=ea3c5a9671f7b3f7eb47eab06f73bc6591df978b0d5955689a9e6f943aa368c0
unknown=a8ba68c1a9e6eea8041b4b8f996c235163440808b9654a865976fdcbede0f433
386=dea9f02e103e837849f984d5679305c758aba7fea1b95b7766218597f61a05ab
unknown=3c6629bec05c8273a927d46b77428bf4a378dad911a0ae284887becdc149b734
ppc64le=0880443bffa028dfbbc4094a32dd6b7ac25684e4c0a3d50da9e0acae355c5eaf
unknown=bb48308f976b266e3ab39bbf9af84521959bd9c295d3c763690cf41f8df2a626
riscv64=d76e6fbe348ff20c2931bb7f101e49379648e026de95dd37f96e00ce1909dcf7
unknown=dd807544365f6dc187cbe6de0806adce2ea9de3e7124717d1d8e8b7a18b77b64
s390x=b815fadf80495594eb6296a6af0bc647ae5f193e0044e07acec7e5b378c9ce2d
unknown=74681be74a280a88abb53ff1e048eb1fb624b30d0066730df6d8afd02ba82e01


@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.76.2
image: signoz/signoz:v0.78.1
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
@@ -208,7 +208,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.34
image: signoz/signoz-otel-collector:v0.111.38
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -232,7 +232,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.34
image: signoz/signoz-schema-migrator:v0.111.38
deploy:
restart_policy:
condition: on-failure


@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.76.2
image: signoz/signoz:v0.78.1
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
@@ -143,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.34
image: signoz/signoz-otel-collector:v0.111.38
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -167,7 +167,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.34
image: signoz/signoz-schema-migrator:v0.111.38
deploy:
restart_policy:
condition: on-failure


@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.76.2}
image: signoz/signoz:${VERSION:-v0.78.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -212,7 +212,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -238,7 +238,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-sync
command:
- sync
@@ -249,7 +249,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-async
command:
- async


@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.76.2}
image: signoz/signoz:${VERSION:-v0.78.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -146,7 +146,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -168,7 +168,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-sync
command:
- sync
@@ -180,7 +180,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-async
command:
- async


@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.76.2}
image: signoz/signoz:${VERSION:-v0.78.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-async
command:
- async


@@ -18,4 +18,4 @@ COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]
CMD ["-config", "/root/config/prometheus.yml"]


@@ -0,0 +1,22 @@
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"
FROM alpine@sha256:${ALPINE_SHA}
LABEL maintainer="signoz"
WORKDIR /root
ARG OS="linux"
ARG ARCH
RUN apk update && \
apk add ca-certificates && \
rm -rf /var/cache/apk/*
COPY ./target/${OS}-${ARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]


@@ -28,11 +28,10 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
}
dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
Reader: dp.reader,
Cache: dp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: dp.fluxInterval,
FeatureLookup: dp.ff,
Reader: dp.reader,
Cache: dp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: dp.fluxInterval,
})
return dp


@@ -28,11 +28,10 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
}
hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
Reader: hp.reader,
Cache: hp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: hp.fluxInterval,
FeatureLookup: hp.ff,
Reader: hp.reader,
Cache: hp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: hp.fluxInterval,
})
return hp


@@ -38,12 +38,6 @@ func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericPr
}
}
func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().ff = ff
}
}
func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().reader = reader
@@ -56,7 +50,6 @@ type BaseSeasonalProvider struct {
fluxInterval time.Duration
cache cache.Cache
keyGenerator cache.KeyGenerator
ff interfaces.FeatureLookup
}
func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {


@@ -27,11 +27,10 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
}
wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
Reader: wp.reader,
Cache: wp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: wp.fluxInterval,
FeatureLookup: wp.ff,
Reader: wp.reader,
Cache: wp.cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: wp.fluxInterval,
})
return wp


@@ -11,6 +11,8 @@ import (
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/modules/preference"
preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -21,6 +23,7 @@ import (
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -54,6 +57,7 @@ type APIHandler struct {
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
@@ -71,6 +75,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
UseTraceNewSchema: opts.UseTraceNewSchema,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
Signoz: signoz,
Preference: preference,
})
if err != nil {
@@ -157,7 +162,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
// PAT APIs
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)


@@ -153,9 +153,11 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
// TODO(nitya): there should be orgId here
integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
}
@@ -170,9 +172,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
)
newUser := &types.User{
ID: cloudIntegrationUserId,
Name: fmt.Sprintf("%s integration", cloudProvider),
Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
ID: uuid.New().String(),
Name: cloudIntegrationUser,
Email: email,
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},


@@ -5,14 +5,19 @@ import (
"encoding/json"
"fmt"
"net/http"
"slices"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/errors"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
@@ -55,7 +60,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
ah.Respond(w, &pat)
}
func validatePATRequest(req types.GettablePAT) error {
func validatePATRequest(req eeTypes.GettablePAT) error {
if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
return fmt.Errorf("valid role is required")
}
@@ -71,12 +76,19 @@ func validatePATRequest(req types.GettablePAT) error {
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := types.GettablePAT{}
req := eeTypes.GettablePAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
@@ -86,6 +98,25 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
return
}
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
err = validatePATRequest(req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
@@ -93,7 +124,6 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
}
req.UpdatedByUserID = user.ID
id := mux.Vars(r)["id"]
req.UpdatedAt = time.Now()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
@@ -126,7 +156,12 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
id := mux.Vars(r)["id"]
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
@@ -136,7 +171,26 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
return
}
zap.L().Info("Revoke PAT with id", zap.String("id", id))
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
RespondError(w, apierr, nil)
return


@@ -88,28 +88,24 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
)
case anomaly.SeasonalityDaily:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
)
case anomaly.SeasonalityHourly:
provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
)
default:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
)
}
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})


@@ -1,33 +0,0 @@
package api
import (
"net/http"
"github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/model"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.SmartTraceDetail) {
zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
ah.APIHandler.SearchTraces(w, r)
return
}
searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
if ah.HandleError(w, err, http.StatusBadRequest) {
return
}
ah.WriteJSON(w, r, result)
}


@@ -5,36 +5,33 @@ import (
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/prometheus"
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
)
type ClickhouseReader struct {
conn clickhouse.Conn
appdb *sqlx.DB
appdb sqlstore.SQLStore
*basechr.ClickHouseReader
}
func NewDataConnector(
localDB *sqlx.DB,
sqlDB sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
lm interfaces.FeatureLookup,
cluster string,
useLogsNewSchema bool,
useTraceNewSchema bool,
fluxIntervalForTraceDetail time.Duration,
cache cache.Cache,
) *ClickhouseReader {
chReader := basechr.NewReader(localDB, telemetryStore, prometheus, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
return &ClickhouseReader{
conn: telemetryStore.ClickhouseDB(),
appdb: localDB,
appdb: sqlDB,
ClickHouseReader: chReader,
}
}


@@ -44,7 +44,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
@@ -116,10 +115,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
return nil, err
}
if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
return nil, err
}
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
@@ -144,10 +139,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
reader := db.NewDataConnector(
serverOptions.SigNoz.SQLStore.SQLxDB(),
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
lm,
serverOptions.Cluster,
serverOptions.UseLogsNewSchema,
serverOptions.UseTraceNewSchema,
@@ -178,7 +172,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
reader,
c,
serverOptions.DisableRules,
lm,
serverOptions.UseLogsNewSchema,
serverOptions.UseTraceNewSchema,
serverOptions.SigNoz.Alertmanager,
@@ -538,7 +531,6 @@ func makeRulesManager(
ch baseint.Reader,
cache cache.Cache,
disableRules bool,
fm baseint.FeatureLookup,
useLogsNewSchema bool,
useTraceNewSchema bool,
alertmanager alertmanager.Alertmanager,
@@ -555,7 +547,6 @@ func makeRulesManager(
Context: context.Background(),
Logger: zap.L(),
DisableRules: disableRules,
FeatureFlags: fm,
Reader: ch,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),


@@ -8,8 +8,8 @@ import (
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/uptrace/bun"
)
@@ -36,10 +36,9 @@ type ModelDao interface {
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)
CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id string) basemodel.BaseApiError
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id string) (*types.GettablePAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
}


@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"net/url"
"strings"
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
@@ -44,7 +43,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
}
user := &types.User{
ID: uuid.NewString(),
ID: uuid.New().String(),
Name: "",
Email: email,
Password: hash,
@@ -162,12 +161,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
// find domain from email
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
var emailDomain string
emailComponents := strings.Split(email, "@")
if len(emailComponents) > 0 {
emailDomain = emailComponents[1]
}
zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
return resp, apierr
}


@@ -9,12 +9,14 @@ import (
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
p.StorablePersonalAccessToken.OrgID = orgID
p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
_, err := m.DB().NewInsert().
Model(&p.StorablePersonalAccessToken).
Exec(ctx)
@@ -46,11 +48,11 @@ func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.Gettable
return p, nil
}
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id string) basemodel.BaseApiError {
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
_, err := m.DB().NewUpdate().
Model(&p.StorablePersonalAccessToken).
Column("role", "name", "updated_at", "updated_by_user_id").
Where("id = ?", id).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Where("revoked = false").
Exec(ctx)
@@ -127,14 +129,14 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
return patsWithUsers, nil
}
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError {
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().NewUpdate().
Model(&types.StorablePersonalAccessToken{}).
Set("revoked = ?", true).
Set("updated_by_user_id = ?", userID).
Set("updated_at = ?", updatedAt).
Where("id = ?", id).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Exec(ctx)
if err != nil {
@@ -169,12 +171,12 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT
return &patWithUser, nil
}
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*types.GettablePAT, basemodel.BaseApiError) {
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
Model(&pats).
Where("id = ?", id).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Where("revoked = false").
Scan(ctx); err != nil {
@@ -194,27 +196,3 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*ty
return &patWithUser, nil
}
// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError) {
users := []ossTypes.GettableUser{}
if err := m.DB().NewSelect().
Model(&users).
Column("u.id", "u.name", "u.email", "u.password", "u.created_at", "u.profile_picture_url", "u.org_id", "u.group_id").
Join("JOIN personal_access_tokens p ON u.id = p.user_id").
Where("p.token = ?", token).
Where("p.expires_at >= strftime('%s', 'now')").
Where("p.org_id = ?", orgID).
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
}
if len(users) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple users with same PAT token"),
}
}
return &users[0], nil
}


@@ -157,8 +157,6 @@ func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
}
switch planName {
case PlanNameTeams:
features = append(features, ProPlan...)
case PlanNameEnterprise:
features = append(features, EnterprisePlan...)
case PlanNameBasic:


@@ -74,21 +74,21 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "Parse the entire license properly",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
"name": "ENTERPRISE",
},
"category": "FREE",
"status": "ACTIVE",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameTeams,
PlanName: PlanNameEnterprise,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "ACTIVE",
@@ -98,14 +98,14 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "Fallback to basic plan if license status is invalid",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
"name": "ENTERPRISE",
},
"category": "FREE",
"status": "INVALID",
@@ -122,21 +122,21 @@ func TestNewLicenseV3(t *testing.T) {
},
{
name: "fallback states for validFrom and validUntil",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
"name": "ENTERPRISE",
},
"valid_from": 1234.456,
"valid_until": 5678.567,
"category": "FREE",
"status": "ACTIVE",
},
PlanName: PlanNameTeams,
PlanName: PlanNameEnterprise,
ValidFrom: 1234,
ValidUntil: 5678,
Status: "ACTIVE",
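
A usage sketch mirroring the updated fixture (imports and error handling elided; the payload shape is copied from the test data above):

payload := map[string]interface{}{
	"id":          "does-not-matter",
	"key":         "does-not-matter-key",
	"category":    "FREE",
	"status":      "ACTIVE",
	"plan":        map[string]interface{}{"name": "ENTERPRISE"},
	"valid_from":  float64(1730899309),
	"valid_until": float64(-1),
}
if license, err := NewLicenseV3(payload); err == nil {
	// expected to resolve to PlanNameEnterprise, with the ENTERPRISE feature set
	// appended by the switch shown earlier in this change
	fmt.Println(license.PlanName)
}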


@@ -1,30 +1,26 @@
package model
import (
"github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
const SSO = "SSO"
const Basic = "BASIC_PLAN"
const Pro = "PRO_PLAN"
const Enterprise = "ENTERPRISE_PLAN"
var (
PlanNameEnterprise = "ENTERPRISE"
PlanNameTeams = "TEAMS"
PlanNameBasic = "BASIC"
)
var (
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameTeams: Pro, PlanNameEnterprise: Enterprise}
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
)
var (
LicenseStatusInvalid = "INVALID"
)
const DisableUpsell = "DISABLE_UPSELL"
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
@@ -38,90 +34,6 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: DisableUpsell,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
@@ -150,149 +62,7 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var ProPlan = basemodel.FeatureSet{
basemodel.Feature{
Name: SSO,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var EnterprisePlan = basemodel.FeatureSet{
@@ -303,83 +73,6 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
@@ -422,18 +115,5 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
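
With the TEAMS set removed, a plan name now resolves to one of the two remaining feature sets. A hypothetical lookup helper (not part of this change; the fallback choice is an assumption) reduces to:

func featuresForPlan(planName string) basemodel.FeatureSet {
	switch planName {
	case PlanNameEnterprise:
		return EnterprisePlan
	case PlanNameBasic:
		return BasicPlan
	default:
		// unknown or legacy names, including the removed TEAMS, fall back to basic
		return BasicPlan
	}
}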


@@ -53,7 +53,6 @@ type AnomalyRule struct {
func NewAnomalyRule(
id string,
p *baserules.PostableRule,
featureFlags interfaces.FeatureLookup,
reader interfaces.Reader,
cache cache.Cache,
opts ...baserules.RuleOption,
@@ -89,10 +88,9 @@ func NewAnomalyRule(
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FeatureLookup: featureFlags,
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
@@ -102,21 +100,18 @@ func NewAnomalyRule(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
)
}
return &t, nil


@@ -23,7 +23,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
tr, err := baserules.NewThresholdRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
opts.UseTraceNewSchema,
@@ -66,7 +65,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
ar, err := NewAnomalyRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
@@ -123,7 +121,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
rule, err = baserules.NewThresholdRule(
alertname,
parsedRule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
opts.UseTraceNewSchema,
@@ -160,7 +157,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
rule, err = NewAnomalyRule(
alertname,
parsedRule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithSendAlways(),


@@ -4,10 +4,30 @@ import (
"context"
"fmt"
"reflect"
"slices"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/uptrace/bun"
)
var (
Identity = "id"
Integer = "bigint"
Text = "text"
)
var (
Org = "org"
User = "user"
CloudIntegration = "cloud_integration"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
)
type dialect struct {
}
@@ -175,7 +195,10 @@ func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table inte
return true, nil
}
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, references []string, cb func(context.Context) error) error {
if len(references) == 0 {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
exists, err := dialect.TableExists(ctx, bun, newModel)
if err != nil {
return err
@@ -184,12 +207,27 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
return nil
}
_, err = bun.
var fkReferences []string
for _, reference := range references {
if reference == Org && !slices.Contains(fkReferences, OrgReference) {
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
}
}
createTable := bun.
NewCreateTable().
IfNotExists().
Model(newModel).
Exec(ctx)
Model(newModel)
for _, fk := range fkReferences {
createTable = createTable.ForeignKey(fk)
}
_, err = createTable.Exec(ctx)
if err != nil {
return err
}
@@ -218,3 +256,115 @@ func (dialect *dialect) AddNotNullDefaultToColumn(ctx context.Context, bun bun.I
}
return nil
}
func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
if reference == "" {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
if err != nil {
return err
}
if columnType == Text {
return nil
}
fkReference := ""
if reference == Org {
fkReference = OrgReference
} else if reference == User {
fkReference = UserReference
}
_, err = bun.
NewCreateTable().
IfNotExists().
Model(newModel).
ForeignKey(fkReference).
Exec(ctx)
if err != nil {
return err
}
err = cb(ctx)
if err != nil {
return err
}
_, err = bun.
NewDropTable().
IfExists().
Model(oldModel).
Exec(ctx)
if err != nil {
return err
}
_, err = bun.
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
if err != nil {
return err
}
return nil
}
func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
if reference == "" {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
if err != nil {
return err
}
if identityExists {
return nil
}
fkReference := ""
if reference == Org {
fkReference = OrgReference
} else if reference == User {
fkReference = UserReference
}
_, err = bun.
NewCreateTable().
IfNotExists().
Model(newModel).
ForeignKey(fkReference).
Exec(ctx)
if err != nil {
return err
}
err = cb(ctx)
if err != nil {
return err
}
_, err = bun.
NewDropTable().
IfExists().
Model(oldModel).
Exec(ctx)
if err != nil {
return err
}
_, err = bun.
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
if err != nil {
return err
}
return nil
}
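
A hypothetical migration-side sketch of the new RenameTableAndModifyModel signature (the dialect value d, the bun IDB handle db, both model types, and copyRows are placeholders): foreign keys are recreated on the new table from the logical references passed in.

err := d.RenameTableAndModifyModel(ctx, db, new(OldToken), new(NewToken),
	[]string{Org, User},
	func(ctx context.Context) error {
		// copy data into the new table inside the migration callback
		return copyRows(ctx, db)
	},
)
if err != nil {
	return err
}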


@@ -6,6 +6,7 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
@@ -28,11 +29,10 @@ func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
}
type StorablePersonalAccessToken struct {
bun.BaseModel `bun:"table:personal_access_tokens"`
bun.BaseModel `bun:"table:personal_access_token"`
types.Identifiable
types.TimeAuditable
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
ID int `json:"id" bun:"id,pk,autoincrement"`
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
Token string `json:"token" bun:"token,type:text,notnull,unique"`
@@ -69,5 +69,8 @@ func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64)
CreatedAt: now,
UpdatedAt: now,
},
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
}
}
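
Usage sketch (imports elided; the token name, role, userID, and 30-day expiry are arbitrary example values): the primary key is now a UUID generated at construction time rather than a database auto-increment, so it is available before the row is inserted.

pat := NewStorablePersonalAccessToken("ci-token", "ADMIN", userID, time.Now().Add(30*24*time.Hour).Unix())
// pat.ID is a valuer.UUID populated via valuer.GenerateUUID()
fmt.Println(pat.ID.StringValue())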


@@ -18,6 +18,13 @@
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
"tooltip_email_to": "Enter email addresses separated by commas.",
"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
"field_slack_recipient": "Recipient",
"field_slack_title": "Title",
"field_slack_description": "Description",


@@ -18,6 +18,12 @@
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
"tooltip_email_to": "Enter email addresses separated by commas.",
"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
"field_slack_recipient": "Recipient",
"field_slack_title": "Title",
"field_slack_description": "Description",


@@ -11,9 +11,12 @@ const logEvent = async (
rateLimited?: boolean,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
try {
// add tenant_url to attributes
const { hostname } = window.location;
const updatedAttributes = { ...attributes, tenant_url: hostname };
const response = await axios.post('/event', {
eventName,
attributes,
attributes: updatedAttributes,
eventType: eventType || 'track',
rateLimited: rateLimited || false, // TODO: Update this once we have a proper way to handle rate limiting
});


@@ -1,30 +1,11 @@
// keep this consistent with backend constants.go
export enum FeatureKeys {
SSO = 'SSO',
ENTERPRISE_PLAN = 'ENTERPRISE_PLAN',
BASIC_PLAN = 'BASIC_PLAN',
ALERT_CHANNEL_SLACK = 'ALERT_CHANNEL_SLACK',
ALERT_CHANNEL_WEBHOOK = 'ALERT_CHANNEL_WEBHOOK',
ALERT_CHANNEL_PAGERDUTY = 'ALERT_CHANNEL_PAGERDUTY',
ALERT_CHANNEL_OPSGENIE = 'ALERT_CHANNEL_OPSGENIE',
ALERT_CHANNEL_MSTEAMS = 'ALERT_CHANNEL_MSTEAMS',
DurationSort = 'DurationSort',
TimestampSort = 'TimestampSort',
SMART_TRACE_DETAIL = 'SMART_TRACE_DETAIL',
CUSTOM_METRICS_FUNCTION = 'CUSTOM_METRICS_FUNCTION',
QUERY_BUILDER_PANELS = 'QUERY_BUILDER_PANELS',
QUERY_BUILDER_ALERTS = 'QUERY_BUILDER_ALERTS',
DISABLE_UPSELL = 'DISABLE_UPSELL',
USE_SPAN_METRICS = 'USE_SPAN_METRICS',
OSS = 'OSS',
ONBOARDING = 'ONBOARDING',
CHAT_SUPPORT = 'CHAT_SUPPORT',
GATEWAY = 'GATEWAY',
PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
ANOMALY_DETECTION = 'ANOMALY_DETECTION',
AWS_INTEGRATION = 'AWS_INTEGRATION',
ONBOARDING_V3 = 'ONBOARDING_V3',
THIRD_PARTY_API = 'THIRD_PARTY_API',
TRACE_FUNNELS = 'TRACE_FUNNELS',
}


@@ -31,6 +31,10 @@ jest.mock('hooks/useNotifications', () => ({
})),
}));
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));
describe('Create Alert Channel', () => {
afterEach(() => {
jest.clearAllMocks();


@@ -18,6 +18,10 @@ import { render, screen } from 'tests/test-utils';
import { testLabelInputAndHelpValue } from './testUtils';
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));
describe('Create Alert Channel (Normal User)', () => {
afterEach(() => {
jest.clearAllMocks();


@@ -20,6 +20,10 @@ jest.mock('hooks/useNotifications', () => ({
})),
}));
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));
describe('Should check if the edit alert channel is properly displayed ', () => {
beforeEach(() => {
render(<EditAlertChannels initialValue={editAlertChannelInitialValue} />);


@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import React from 'react';
import { useTranslation } from 'react-i18next';
@@ -9,7 +10,20 @@ function MsTeams({ setSelectedConfig }: MsTeamsProps): JSX.Element {
return (
<>
<Form.Item name="webhook_url" label={t('field_webhook_url')}>
<Form.Item
name="webhook_url"
label={t('field_webhook_url')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_ms_teams_url')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
>
<Input
onChange={(event): void => {
setSelectedConfig((value) => ({


@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { useTranslation } from 'react-i18next';
import { OpsgenieChannel } from '../../CreateAlertChannels/config';
@@ -19,7 +20,21 @@ function OpsgenieForm({ setSelectedConfig }: OpsgenieFormProps): JSX.Element {
return (
<>
<Form.Item name="api_key" label={t('field_opsgenie_api_key')} required>
<Form.Item
name="api_key"
label={t('field_opsgenie_api_key')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_opsgenie_api_key')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
required
>
<Input
onChange={handleInputChange('api_key')}
data-testid="opsgenie-api-key-textbox"


@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { Dispatch, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
@@ -10,7 +11,20 @@ function PagerForm({ setSelectedConfig }: PagerFormProps): JSX.Element {
const { t } = useTranslation('channels');
return (
<>
<Form.Item name="routing_key" label={t('field_pager_routing_key')} required>
<Form.Item
name="routing_key"
label={t('field_pager_routing_key')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_pager_routing_key')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
>
<Input
onChange={(event): void => {
setSelectedConfig((value) => ({


@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { Dispatch, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
@@ -11,7 +12,20 @@ function Slack({ setSelectedConfig }: SlackProps): JSX.Element {
return (
<>
<Form.Item name="api_url" label={t('field_webhook_url')}>
<Form.Item
name="api_url"
label={t('field_webhook_url')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_slack_url')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
>
<Input
onChange={(event): void => {
setSelectedConfig((value) => ({


@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { Dispatch, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
@@ -9,7 +10,20 @@ function WebhookSettings({ setSelectedConfig }: WebhookProps): JSX.Element {
return (
<>
<Form.Item name="api_url" label={t('field_webhook_url')}>
<Form.Item
name="api_url"
label={t('field_webhook_url')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_webhook_url')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
>
<Input
onChange={(event): void => {
setSelectedConfig((value) => ({


@@ -1,6 +1,5 @@
import { Form, FormInstance, Input, Select, Switch, Typography } from 'antd';
import { Store } from 'antd/lib/form/interface';
import { FeatureKeys } from 'constants/features';
import ROUTES from 'constants/routes';
import {
ChannelType,
@@ -11,11 +10,8 @@ import {
WebhookChannel,
} from 'container/CreateAlertChannels/config';
import history from 'lib/history';
import { useAppContext } from 'providers/App/App';
import { Dispatch, ReactElement, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
import { FeatureFlagProps } from 'types/api/features/getFeaturesFlags';
import { isFeatureKeys } from 'utils/app';
import EmailSettings from './Settings/Email';
import MsTeamsSettings from './Settings/MsTeams';
@@ -39,17 +35,6 @@ function FormAlertChannels({
editing = false,
}: FormAlertChannelsProps): JSX.Element {
const { t } = useTranslation('channels');
const { featureFlags } = useAppContext();
const feature = `ALERT_CHANNEL_${type.toUpperCase()}`;
const featureKey = isFeatureKeys(feature)
? feature
: FeatureKeys.ALERT_CHANNEL_SLACK;
const hasFeature = featureFlags?.find(
(flag: FeatureFlagProps) => flag.name === featureKey,
);
const renderSettings = (): ReactElement | null => {
switch (type) {
@@ -146,7 +131,7 @@ function FormAlertChannels({
<Form.Item>
<Button
disabled={savingState || !hasFeature}
disabled={savingState}
loading={savingState}
type="primary"
onClick={(): void => onSaveHandler(type)}
@@ -154,7 +139,7 @@ function FormAlertChannels({
{t('button_save_channel')}
</Button>
<Button
disabled={testingState || !hasFeature}
disabled={testingState}
loading={testingState}
onClick={(): void => onTestHandler(type)}
>


@@ -467,10 +467,6 @@ function FormAlertRules({
panelType,
]);
const isAlertAvailable =
!featureFlags?.find((flag) => flag.name === FeatureKeys.QUERY_BUILDER_ALERTS)
?.active || false;
const saveRule = useCallback(async () => {
if (!isFormValid()) {
return;
@@ -688,11 +684,6 @@ function FormAlertRules({
const isAlertNameMissing = !formInstance.getFieldValue('alert');
const isAlertAvailableToSave =
isAlertAvailable &&
currentQuery.queryType === EQueryType.QUERY_BUILDER &&
alertType !== AlertTypes.METRICS_BASED_ALERT;
const onUnitChangeHandler = (value: string): void => {
setYAxisUnit(value);
// reset target unit
@@ -865,7 +856,6 @@ function FormAlertRules({
icon={<SaveOutlined />}
disabled={
isAlertNameMissing ||
isAlertAvailableToSave ||
!isChannelConfigurationValid ||
queryStatus === 'error'
}


@@ -5,7 +5,6 @@ import { WarningOutlined } from '@ant-design/icons';
import { Button, Flex, Modal, Space, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import {
initialQueriesMap,
@@ -27,7 +26,6 @@ import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';
import { useAppContext } from 'providers/App/App';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import {
getNextWidgets,
@@ -79,8 +77,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
const { t } = useTranslation(['dashboard']);
const { featureFlags } = useAppContext();
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
const {
@@ -566,12 +562,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const isQueryBuilderActive =
!featureFlags?.find((flag) => flag.name === FeatureKeys.QUERY_BUILDER_PANELS)
?.active || false;
const isNewTraceLogsAvailable =
isQueryBuilderActive &&
currentQuery.queryType === EQueryType.QUERY_BUILDER &&
currentQuery.builder.queryData.find(
(query) => query.dataSource !== DataSource.METRICS,


@@ -15,11 +15,12 @@ import {
import logEvent from 'api/common/logEvent';
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
import ROUTES from 'constants/routes';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import history from 'lib/history';
import { isEmpty } from 'lodash-es';
import { ArrowRight, X } from 'lucide-react';
import { CheckIcon, Goal, UserPlus, X } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import React, { useEffect, useRef, useState } from 'react';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import OnboardingIngestionDetails from '../IngestionDetails/IngestionDetails';
import InviteTeamMembers from '../InviteTeamMembers/InviteTeamMembers';
@@ -68,6 +69,7 @@ interface Entity {
};
};
tags: string[];
relatedSearchKeywords?: string[];
link?: string;
}
@@ -99,8 +101,11 @@ const ONBOARDING_V3_ANALYTICS_EVENTS_MAP = {
GET_EXPERT_ASSISTANCE_BUTTON_CLICKED: 'Get expert assistance clicked',
INVITE_TEAM_MEMBER_BUTTON_CLICKED: 'Invite team member clicked',
CLOSE_ONBOARDING_CLICKED: 'Close onboarding clicked',
DATA_SOURCE_REQUESTED: 'Datasource requested',
DATA_SOURCE_SEARCHED: 'Searched',
};
// eslint-disable-next-line sonarjs/cognitive-complexity
function OnboardingAddDataSource(): JSX.Element {
const [groupedDataSources, setGroupedDataSources] = useState<{
[tag: string]: Entity[];
@@ -110,6 +115,8 @@ function OnboardingAddDataSource(): JSX.Element {
const [setupStepItems, setSetupStepItems] = useState(setupStepItemsBase);
const [searchQuery, setSearchQuery] = useState<string>('');
const question2Ref = useRef<HTMLDivElement | null>(null);
const question3Ref = useRef<HTMLDivElement | null>(null);
const configureProdRef = useRef<HTMLDivElement | null>(null);
@@ -120,8 +127,15 @@ function OnboardingAddDataSource(): JSX.Element {
const [currentStep, setCurrentStep] = useState(1);
const [dataSourceRequest, setDataSourceRequest] = useState<string>('');
const [hasMoreQuestions, setHasMoreQuestions] = useState<boolean>(true);
const [
showRequestDataSourceModal,
setShowRequestDataSourceModal,
] = useState<boolean>(false);
const [
showInviteTeamMembersModal,
setShowInviteTeamMembersModal,
@@ -145,6 +159,11 @@ function OnboardingAddDataSource(): JSX.Element {
const [selectedCategory, setSelectedCategory] = useState<string>('All');
const [
dataSourceRequestSubmitted,
setDataSourceRequestSubmitted,
] = useState<boolean>(false);
const handleScrollToStep = (ref: React.RefObject<HTMLDivElement>): void => {
setTimeout(() => {
ref.current?.scrollIntoView({
@@ -286,8 +305,10 @@ function OnboardingAddDataSource(): JSX.Element {
setGroupedDataSources(groupedDataSources);
}, []);
const handleSearch = (e: React.ChangeEvent<HTMLInputElement>): void => {
const query = e.target.value.toLowerCase();
const debouncedUpdate = useDebouncedFn((query) => {
setSearchQuery(query as string);
setDataSourceRequestSubmitted(false);
if (query === '') {
setGroupedDataSources(
@@ -298,15 +319,35 @@ function OnboardingAddDataSource(): JSX.Element {
const filteredDataSources = onboardingConfigWithLinks.filter(
(dataSource) =>
dataSource.label.toLowerCase().includes(query) ||
dataSource.tags.some((tag) => tag.toLowerCase().includes(query)),
dataSource.label.toLowerCase().includes(query as string) ||
dataSource.tags.some((tag) =>
tag.toLowerCase().includes(query as string),
) ||
dataSource.relatedSearchKeywords?.some((keyword) =>
keyword?.toLowerCase().includes(query as string),
),
);
setGroupedDataSources(
groupDataSourcesByTags(filteredDataSources as Entity[]),
);
};
logEvent(
`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.DATA_SOURCE_SEARCHED}`,
{
searchedDataSource: query,
},
);
}, 300);
const handleSearch = useCallback(
(e: React.ChangeEvent<HTMLInputElement>): void => {
const query = e.target.value.trim().toLowerCase();
debouncedUpdate(query || '');
},
[debouncedUpdate],
);
const handleFilterByCategory = (category: string): void => {
setSelectedDataSource(null);
setSelectedFramework(null);
@@ -409,6 +450,129 @@ function OnboardingAddDataSource(): JSX.Element {
setShowInviteTeamMembersModal(true);
};
const handleSubmitDataSourceRequest = (): void => {
logEvent(
`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.DATA_SOURCE_REQUESTED}`,
{
requestedDataSource: dataSourceRequest,
},
);
setShowRequestDataSourceModal(false);
setDataSourceRequestSubmitted(true);
};
const handleRequestDataSource = (): void => {
setShowRequestDataSourceModal(true);
};
const handleRaiseRequest = (): void => {
logEvent(
`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.DATA_SOURCE_REQUESTED}`,
{
requestedDataSource: searchQuery,
},
);
setDataSourceRequestSubmitted(true);
};
const renderRequestDataSource = (): JSX.Element => {
const isSearchQueryEmpty = searchQuery.length === 0;
const isNoResultsFound = Object.keys(groupedDataSources).length === 0;
return (
<div className="request-data-source-container">
{!isNoResultsFound && (
<>
<Typography.Text>Can't find what you're looking for?</Typography.Text>
<svg
xmlns="http://www.w3.org/2000/svg"
width="279"
height="2"
viewBox="0 0 279 2"
fill="none"
>
<path
d="M0 1L279 1"
stroke="#7190F9"
strokeOpacity="0.2"
strokeDasharray="4 4"
/>
</svg>
{!dataSourceRequestSubmitted && (
<Button
type="default"
className="periscope-btn request-data-source-btn secondary"
icon={<Goal size={16} />}
onClick={handleRequestDataSource}
>
Request Data Source
</Button>
)}
{dataSourceRequestSubmitted && (
<Button
type="default"
className="periscope-btn request-data-source-btn success"
icon={<CheckIcon size={16} />}
>
Request raised
</Button>
)}
</>
)}
{isNoResultsFound && !isSearchQueryEmpty && (
<>
<Typography.Text>
Our team can help add{' '}
<span className="request-data-source-search-query">{searchQuery}</span>{' '}
support for you
</Typography.Text>
<svg
xmlns="http://www.w3.org/2000/svg"
width="279"
height="2"
viewBox="0 0 279 2"
fill="none"
>
<path
d="M0 1L279 1"
stroke="#7190F9"
strokeOpacity="0.2"
strokeDasharray="4 4"
/>
</svg>
{!dataSourceRequestSubmitted && (
<Button
type="default"
className="periscope-btn request-data-source-btn secondary"
icon={<Goal size={16} />}
onClick={handleRaiseRequest}
>
Raise request
</Button>
)}
{dataSourceRequestSubmitted && (
<Button
type="default"
className="periscope-btn request-data-source-btn success"
icon={<CheckIcon size={16} />}
>
Request raised
</Button>
)}
</>
)}
</div>
);
};
return (
<div className="onboarding-v2">
<Layout>
@@ -433,6 +597,15 @@ function OnboardingAddDataSource(): JSX.Element {
</div>
<div className="header-right-section">
<Button
type="default"
className="periscope-btn invite-teammate-btn outlined"
onClick={handleShowInviteTeamMembersModal}
icon={<UserPlus size={16} />}
>
Invite a teammate
</Button>
<LaunchChatSupport
attributes={{
dataSource: selectedDataSource?.dataSource,
@@ -442,7 +615,7 @@ function OnboardingAddDataSource(): JSX.Element {
}}
eventName={`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.GET_HELP_BUTTON_CLICKED}`}
message=""
buttonText="Get Help"
buttonText="Contact Support"
className="periscope-btn get-help-btn outlined"
/>
</div>
@@ -461,7 +634,11 @@ function OnboardingAddDataSource(): JSX.Element {
</Header>
<div className="onboarding-product-setup-container">
<div className="onboarding-product-setup-container_left-section">
<div
className={`onboarding-product-setup-container_left-section ${
currentStep === 1 ? 'step-id-1' : 'step-id-2'
}`}
>
<div className="perlian-bg" />
{currentStep === 1 && (
@@ -491,6 +668,7 @@ function OnboardingAddDataSource(): JSX.Element {
<div className="onboarding-data-source-search">
<Input
placeholder="Search"
maxLength={20}
onChange={handleSearch}
addonAfter={<SearchOutlined />}
/>
@@ -525,6 +703,14 @@ function OnboardingAddDataSource(): JSX.Element {
</div>
</div>
))}
{Object.keys(groupedDataSources).length === 0 && (
<div className="no-results-found-container">
<Typography.Text>No results for {searchQuery} :/</Typography.Text>
</div>
)}
{!selectedDataSource && renderRequestDataSource()}
</div>
<div className="data-source-categories-filter-container">
@@ -534,33 +720,66 @@ function OnboardingAddDataSource(): JSX.Element {
Filters{' '}
</Typography.Title>
<Typography.Title
level={5}
className={`onboarding-filters-item-title ${
selectedCategory === 'All' ? 'selected' : ''
}`}
<div
key="all"
className="onboarding-data-source-category-item"
onClick={(): void => handleFilterByCategory('All')}
role="button"
tabIndex={0}
onKeyDown={(e): void => {
if (e.key === 'Enter' || e.key === ' ') {
handleFilterByCategory('All');
}
}}
>
All ({onboardingConfigWithLinks.length})
</Typography.Title>
<Typography.Title
level={5}
className={`onboarding-filters-item-title ${
selectedCategory === 'All' ? 'selected' : ''
}`}
>
All
</Typography.Title>
<div className="line-divider" />
<Typography.Text className="onboarding-filters-item-count">
{onboardingConfigWithLinks.length}
</Typography.Text>
</div>
{Object.keys(groupedDataSources).map((tag) => (
<div key={tag} className="onboarding-data-source-category-item">
<div
key={tag}
className="onboarding-data-source-category-item"
onClick={(): void => handleFilterByCategory(tag)}
role="button"
tabIndex={0}
onKeyDown={(e): void => {
if (e.key === 'Enter' || e.key === ' ') {
handleFilterByCategory(tag);
}
}}
>
<Typography.Title
level={5}
className={`onboarding-filters-item-title ${
selectedCategory === tag ? 'selected' : ''
}`}
onClick={(): void => handleFilterByCategory(tag)}
>
{tag} ({groupedDataSources[tag].length})
{tag}
</Typography.Title>
<div className="line-divider" />
<Typography.Text className="onboarding-filters-item-count">
{groupedDataSources[tag].length}
</Typography.Text>
</div>
))}
</div>
</div>
</div>
{selectedDataSource &&
selectedDataSource?.question &&
!isEmpty(selectedDataSource?.question) && (
@@ -615,7 +834,6 @@ function OnboardingAddDataSource(): JSX.Element {
)}
</div>
)}
{selectedFramework &&
selectedFramework?.question &&
!isEmpty(selectedFramework?.question) && (
@@ -659,7 +877,6 @@ function OnboardingAddDataSource(): JSX.Element {
)}
</div>
)}
{!hasMoreQuestions && showConfigureProduct && (
<div className="questionaire-footer" ref={configureProdRef}>
<Button
@@ -767,39 +984,6 @@ function OnboardingAddDataSource(): JSX.Element {
</div>
<div className="onboarding-product-setup-container_right-section">
{currentStep === 1 && (
<div className="invite-user-section-content">
<Button
type="default"
shape="round"
className="invite-user-section-content-button"
onClick={handleShowInviteTeamMembersModal}
>
Invite a team member to help with this step
<ArrowRight size={14} />
</Button>
<div className="need-help-section-content-divider">Or</div>
<div className="need-help-section-content">
<Typography.Text>
Need help with setup? Upgrade now and get expert assistance.
</Typography.Text>
<LaunchChatSupport
attributes={{
dataSource: selectedDataSource?.dataSource,
framework: selectedFramework?.label,
environment: selectedEnvironment?.label,
currentPage: setupStepItems[currentStep]?.title || '',
}}
eventName={`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.GET_EXPERT_ASSISTANCE_BUTTON_CLICKED}`}
message=""
buttonText="Get Expert Assistance"
className="periscope-btn get-help-btn rounded-btn outlined"
/>
</div>
</div>
)}
{currentStep === 2 && <OnboardingIngestionDetails />}
</div>
</div>
@@ -824,6 +1008,46 @@ function OnboardingAddDataSource(): JSX.Element {
/>
</div>
</Modal>
<Modal
className="request-data-source-modal"
title={<span className="title">Request Data Source</span>}
open={showRequestDataSourceModal}
closable
onCancel={(): void => setShowRequestDataSourceModal(false)}
width="640px"
footer={[
<Button
type="default"
className="periscope-btn outlined"
key="back"
onClick={(): void => setShowRequestDataSourceModal(false)}
icon={<X size={16} />}
>
Cancel
</Button>,
<Button
key="submit"
type="primary"
className="periscope-btn primary"
disabled={dataSourceRequest.length <= 0}
onClick={handleSubmitDataSourceRequest}
icon={<CheckIcon size={16} />}
>
Submit request
</Button>,
]}
destroyOnClose
>
<div className="request-data-source-modal-content">
<Typography.Text>Enter your request</Typography.Text>
<Input
placeholder="Eg: Kotlin"
className="request-data-source-modal-input"
onChange={(e): void => setDataSourceRequest(e.target.value)}
/>
</div>
</Modal>
</Layout>
</div>
);


@@ -4,14 +4,15 @@
&__header {
background: rgba(11, 12, 14, 0.7);
border-bottom: 1px solid var(--bg-slate-500);
backdrop-filter: blur(20px);
padding: 16px 0px 0px 0px;
padding: 12px 0px;
&--sticky {
display: flex;
align-items: center;
padding: 0px 1rem;
// margin-top: 16px;
margin-top: 12px;
background: rgba(11, 12, 14, 0.7);
backdrop-filter: blur(20px);
@@ -323,7 +324,8 @@
}
}
.get-help-btn {
.get-help-btn,
.invite-teammate-btn {
font-size: 11px;
padding: 6px 16px;
border: 1px solid var(--bg-slate-400) !important;
@@ -610,15 +612,61 @@
display: flex;
.data-sources-container {
flex: 0 0 70%;
max-width: 70%;
flex: 0 0 80%;
max-width: 80%;
margin-right: 32px;
}
.data-source-categories-filter-container {
flex: 0 0 30%;
max-width: 30%;
flex: 0 0 20%;
max-width: 20%;
.onboarding-data-source-category {
.onboarding-data-source-category-item {
display: flex;
align-items: center;
justify-content: space-between;
margin-bottom: 8px;
cursor: pointer;
}
.onboarding-filters-item-title {
display: flex;
align-items: center;
justify-content: space-between;
margin-bottom: 0px !important;
}
.line-divider {
height: 1px;
margin: 0 16px;
flex-grow: 1;
border-top: 2px dotted var(--bg-slate-400);
}
.onboarding-filters-item-count {
color: var(--text-vanilla-400);
font-family: Inter;
font-size: 10px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 150% */
background-color: var(--bg-ink-400);
border-radius: 4px;
width: 24px;
height: 24px;
border-radius: 50%;
display: flex;
align-items: center;
justify-content: center;
}
}
}
}
@@ -643,8 +691,14 @@
max-width: 70%;
display: flex;
flex-direction: column;
gap: 24px;
&.step-id-1 {
flex: 0 0 90%;
max-width: 90%;
}
// border-right: 1px solid var(--Greyscale-Slate-400, #1d212d);
.perlian-bg {
@@ -678,7 +732,7 @@
&_right-section {
flex: 1;
max-width: 30%;
height: calc(100vh - 120px);
height: calc(100vh - 130px);
display: flex;
flex-direction: column;
@@ -972,6 +1026,52 @@
}
}
.no-results-found-container {
.ant-typography {
color: rgba(192, 193, 195, 0.6);
font-family: Inter;
font-size: 11px;
font-style: normal;
line-height: 18px; /* 150% */
letter-spacing: 0.48px;
text-transform: uppercase;
}
}
.request-data-source-container {
display: flex;
align-items: center;
gap: 16px;
margin: 36px 0;
display: flex;
width: fit-content;
gap: 24px;
padding: 12px 12px 12px 16px;
border-radius: 6px;
background: rgba(171, 189, 255, 0.06);
.request-data-source-search-query {
border-radius: 2px;
border: 1px solid rgba(173, 127, 88, 0.1);
background: rgba(173, 127, 88, 0.1);
color: var(--Sienna-400, #bd9979);
font-size: 13px;
padding: 2px;
line-height: 20px; /* 142.857% */
}
.request-data-source-btn {
border-radius: 2px;
border: 1px solid var(--Slate-200, #2c3140);
background: var(--Ink-200, #23262e);
}
}
.onboarding-data-source-category-container {
flex: 1;
max-width: 30%;
@@ -996,7 +1096,7 @@
}
.onboarding-configure-container {
height: calc(100vh - 120px);
height: calc(100vh - 130px);
width: 100%;
display: flex;
flex-direction: column;
@@ -1070,7 +1170,8 @@
height: 18px;
}
.invite-team-member-modal {
.invite-team-member-modal,
.request-data-source-modal {
.ant-modal-content {
background-color: var(--bg-ink-500);
}
@@ -1079,9 +1180,26 @@
background-color: var(--bg-ink-500);
}
.invite-team-member-modal-content {
.invite-team-member-modal-content,
.request-data-source-modal-content {
background-color: var(--bg-ink-500);
}
.request-data-source-modal-content {
padding: 12px 0;
}
.request-data-source-modal-input {
margin-top: 8px;
}
}
.request-data-source-modal {
.ant-modal-footer {
display: flex;
justify-content: flex-end;
align-items: center;
}
}
.ingestion-setup-details-links {
@@ -1262,7 +1380,8 @@
}
.onboarding-v2 {
.get-help-btn {
.get-help-btn,
.invite-teammate-btn {
border: 1px solid var(--bg-vanilla-300) !important;
color: var(--bg-ink-300) !important;


@@ -13,11 +13,7 @@ function OrganizationSettings(): JSX.Element {
const isNotSSO =
!featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false;
const isNoUpSell =
!featureFlags?.find((flag) => flag.name === FeatureKeys.DISABLE_UPSELL)
?.active || false;
const isAuthDomain = !isNoUpSell || (isNoUpSell && !isNotSSO);
const isAuthDomain = !isNotSSO;
if (!org) {
return <div />;


@@ -453,7 +453,7 @@ export const Query = memo(function Query({
</Col>
)}
<Col flex="1" className="qb-search-container">
{[DataSource.LOGS, DataSource.TRACES].includes(query.dataSource) ? (
{query.dataSource === DataSource.LOGS ? (
<QueryBuilderSearchV2
query={query}
onChange={handleChangeTagFilters}


@@ -2,7 +2,6 @@
import './QueryBuilderSearchV2.styles.scss';
import { Typography } from 'antd';
import cx from 'classnames';
import {
ArrowDown,
ArrowUp,
@@ -26,7 +25,6 @@ interface ICustomDropdownProps {
exampleQueries: TagFilter[];
onChange: (value: TagFilter) => void;
currentFilterItem?: ITag;
isLogsDataSource: boolean;
}
export default function QueryBuilderSearchDropdown(
@@ -40,14 +38,11 @@ export default function QueryBuilderSearchDropdown(
exampleQueries,
options,
onChange,
isLogsDataSource,
} = props;
const userOs = getUserOperatingSystem();
return (
<>
<div
className={cx('content', { 'non-logs-data-source': !isLogsDataSource })}
>
<div className="content">
{!currentFilterItem?.key ? (
<div className="suggested-filters">Suggested Filters</div>
) : !currentFilterItem?.op ? (


@@ -11,11 +11,6 @@
.rc-virtual-list-holder {
height: 115px;
}
&.non-logs-data-source {
.rc-virtual-list-holder {
height: 256px;
}
}
}
}


@@ -689,29 +689,12 @@ function QueryBuilderSearchV2(
})),
);
} else {
setDropdownOptions([
// Add user typed option if it doesn't exist in the payload
...(!isEmpty(tagKey) &&
!data?.payload?.attributeKeys?.some((val) => isEqual(val.key, tagKey))
? [
{
label: tagKey,
value: {
key: tagKey,
dataType: DataTypes.EMPTY,
type: '',
isColumn: false,
isJSON: false,
},
},
]
: []),
// Map existing attribute keys from payload
...(data?.payload?.attributeKeys?.map((key) => ({
setDropdownOptions(
data?.payload?.attributeKeys?.map((key) => ({
label: key.key,
value: key,
})) || []),
]);
})) || [],
);
}
}
if (currentState === DropdownState.OPERATOR) {
@@ -981,7 +964,6 @@ function QueryBuilderSearchV2(
exampleQueries={suggestionsData?.payload?.example_queries || []}
tags={tags}
currentFilterItem={currentFilterItem}
isLogsDataSource={isLogsDataSource}
/>
)}
>


@@ -284,16 +284,6 @@ function SideNav(): JSX.Element {
manageLicenseMenuItem,
];
const isApiMonitoringEnabled = featureFlags?.find(
(flag) => flag.name === FeatureKeys.THIRD_PARTY_API,
)?.active;
if (!isApiMonitoringEnabled) {
updatedMenuItems = updatedMenuItems.filter(
(item) => item.key !== ROUTES.API_MONITORING,
);
}
if (isCloudUser || isEnterpriseSelfHostedUser) {
const isOnboardingEnabled =
featureFlags?.find((feature) => feature.name === FeatureKeys.ONBOARDING)


@@ -170,7 +170,11 @@ export const useOptions = (
(option, index, self) =>
index ===
self.findIndex(
(o) => o.label === option.label && o.value === option.value, // to remove duplicate & empty options from list
(o) =>
// to remove duplicate & empty options from list
o.label === option.label &&
o.value === option.value &&
o.dataType?.toLowerCase() === option.dataType?.toLowerCase(), // handle case sensitivity
) && option.value !== '',
) || []
).map((option) => {


@@ -1,9 +1,9 @@
.header {
.traces-funnels-header {
display: flex;
flex-direction: column;
gap: 4px;
&__title {
.traces-funnels-header-title {
color: var(--bg-vanilla-100);
font-size: 18px;
font-style: normal;
@@ -13,7 +13,7 @@
margin: 0;
}
&__subtitle {
.traces-funnels-header-subtitle {
color: var(--bg-vanilla-400);
font-size: 14px;
line-height: 20px;
@@ -21,13 +21,13 @@
}
.lightMode {
.header {
&__title {
.traces-funnels-header {
.traces-funnels-header-title {
color: var(--bg-ink-500);
}
&__subtitle {
.traces-funnels-header-subtitle {
color: var(--bg-ink-400);
}
}
}
}

View File

@@ -1,8 +1,10 @@
function Header(): JSX.Element {
return (
<div className="header">
<div className="header__title">Funnels</div>
<div className="header__subtitle">Create and manage tracing funnels.</div>
<div className="traces-funnels-header">
<div className="traces-funnels-header-title">Funnels</div>
<div className="traces-funnels-header-subtitle">
Create and manage tracing funnels.
</div>
</div>
);
}


@@ -2,24 +2,17 @@ import './TracesModulePage.styles.scss';
import RouteTab from 'components/RouteTab';
import { TabRoutes } from 'components/RouteTab/types';
import { FeatureKeys } from 'constants/features';
import history from 'lib/history';
import { useAppContext } from 'providers/App/App';
import { useLocation } from 'react-router-dom';
import { tracesExplorer, tracesFunnel, tracesSaveView } from './constants';
function TracesModulePage(): JSX.Element {
const { pathname } = useLocation();
const { featureFlags } = useAppContext();
const isTraceFunnelsEnabled =
featureFlags?.find((flag) => flag.name === FeatureKeys.TRACE_FUNNELS)
?.active || false;
const routes: TabRoutes[] = [
tracesExplorer,
isTraceFunnelsEnabled ? tracesFunnel : null,
process.env.NODE_ENV === 'development' ? tracesFunnel : null,
tracesSaveView,
].filter(Boolean) as TabRoutes[];


@@ -45,6 +45,13 @@
font-size: 11px;
font-weight: 400;
}
&.success {
color: var(--bg-forest-400) !important;
border-radius: 2px;
border: 1px solid rgba(37, 225, 146, 0.1);
background: rgba(37, 225, 146, 0.1) !important;
}
}
.periscope-tab {


@@ -21,6 +21,7 @@ import { useQuery } from 'react-query';
import { FeatureFlagProps as FeatureFlags } from 'types/api/features/getFeaturesFlags';
import { PayloadProps as LicensesResModel } from 'types/api/licenses/getAll';
import {
LicensePlatform,
LicenseState,
LicenseV3ResModel,
TrialInfo,
@@ -145,7 +146,8 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
).unix(),
onTrial: isOnTrial,
workSpaceBlock:
activeLicenseV3Data.payload.state === LicenseState.EVALUATION_EXPIRED,
activeLicenseV3Data.payload.state === LicenseState.EVALUATION_EXPIRED &&
activeLicenseV3Data.payload.platform === LicensePlatform.CLOUD,
trialConvertedToSubscription:
activeLicenseV3Data.payload.state !== LicenseState.ISSUED &&
activeLicenseV3Data.payload.state !== LicenseState.EVALUATING &&

View File

@@ -186,83 +186,6 @@ export function getAppContextMock(
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.OSS,
active: false,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.DISABLE_UPSELL,
active: false,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.SMART_TRACE_DETAIL,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.CUSTOM_METRICS_FUNCTION,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.QUERY_BUILDER_PANELS,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.QUERY_BUILDER_ALERTS,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.ALERT_CHANNEL_SLACK,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.ALERT_CHANNEL_WEBHOOK,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.ALERT_CHANNEL_PAGERDUTY,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.ALERT_CHANNEL_OPSGENIE,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.ALERT_CHANNEL_MSTEAMS,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.USE_SPAN_METRICS,
active: false,
@@ -291,20 +214,6 @@ export function getAppContextMock(
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.DurationSort,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.TimestampSort,
active: true,
usage: 0,
usage_limit: -1,
route: '',
},
{
name: FeatureKeys.ONBOARDING,
active: true,

go.mod

@@ -22,12 +22,13 @@ require (
github.com/go-redis/redismock/v8 v8.11.5
github.com/go-viper/mapstructure/v2 v2.1.0
github.com/gojek/heimdall/v7 v7.0.3
github.com/golang-jwt/jwt/v5 v5.2.1
github.com/golang-jwt/jwt/v5 v5.2.2
github.com/google/uuid v1.6.0
github.com/gorilla/handlers v1.5.1
github.com/gorilla/mux v1.8.1
github.com/gorilla/websocket v1.5.0
github.com/gosimple/slug v1.10.0
github.com/huandu/go-sqlbuilder v1.35.0
github.com/jackc/pgx/v5 v5.7.2
github.com/jmoiron/sqlx v1.3.4
github.com/json-iterator/go v1.1.12
@@ -77,7 +78,6 @@ require (
gopkg.in/segmentio/analytics-go.v3 v3.1.0
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
honnef.co/go/tools v0.0.1-2020.1.4
k8s.io/apimachinery v0.31.3
)
@@ -89,10 +89,10 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
github.com/BurntSushi/toml v0.3.1 // indirect
github.com/ClickHouse/ch-go v0.61.5 // indirect
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
github.com/andybalholm/brotli v1.1.1 // indirect
github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
github.com/armon/go-metrics v0.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/aws/aws-sdk-go v1.55.5 // indirect
@@ -110,7 +110,7 @@ require (
github.com/ebitengine/purego v0.8.0 // indirect
github.com/edsrzf/mmap-go v1.2.0 // indirect
github.com/elastic/lunes v0.1.0 // indirect
github.com/expr-lang/expr v1.16.9 // indirect
github.com/expr-lang/expr v1.17.0 // indirect
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.8.0 // indirect
@@ -152,6 +152,7 @@ require (
github.com/hashicorp/golang-lru v1.0.2 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/memberlist v0.5.1 // indirect
github.com/huandu/xstrings v1.4.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect

18
go.sum
View File

@@ -83,7 +83,6 @@ github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJ
github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4=
@@ -114,6 +113,8 @@ github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b/go.mod h1:fvzegU4
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ=
github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw=
github.com/antonmedv/expr v1.15.3 h1:q3hOJZNvLvhqE8OHBs1cFRdbXFNKuA+bHmRaI+AmRmI=
github.com/antonmedv/expr v1.15.3/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
@@ -232,8 +233,8 @@ github.com/envoyproxy/go-control-plane v0.13.1/go.mod h1:X45hY0mufo6Fd0KW3rqsGvQ
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/envoyproxy/protoc-gen-validate v1.1.0 h1:tntQDh69XqOCOZsDz0lVJQez/2L6Uu2PdjCQwWCJ3bM=
github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4=
github.com/expr-lang/expr v1.16.9 h1:WUAzmR0JNI9JCiF0/ewwHB1gmcGw5wW7nWt8gc6PpCI=
github.com/expr-lang/expr v1.16.9/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4=
github.com/expr-lang/expr v1.17.0 h1:+vpszOyzKLQXC9VF+wA8cVA0tlA984/Wabc/1hF9Whg=
github.com/expr-lang/expr v1.17.0/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4=
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb h1:IT4JYU7k4ikYg1SCxNI1/Tieq/NFvh6dzLdgi7eu0tM=
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb/go.mod h1:bH6Xx7IW64qjjJq8M2u4dxNaBiDfKK+z/3eGDpXEQhc=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
@@ -334,8 +335,8 @@ github.com/gojek/heimdall/v7 v7.0.3 h1:+5sAhl8S0m+qRRL8IVeHCJudFh/XkG3wyO++nvOg+
github.com/gojek/heimdall/v7 v7.0.3/go.mod h1:Z43HtMid7ysSjmsedPTXAki6jcdcNVnjn5pmsTyiMic=
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf h1:5xRGbUdOmZKoDXkGx5evVLehuCMpuO1hl701bEQqXOM=
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf/go.mod h1:QzhUKaYKJmcbTnCYCAVQrroCOY7vOOI8cSQ4NbuhYf0=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@@ -538,6 +539,12 @@ github.com/hetznercloud/hcloud-go/v2 v2.13.1/go.mod h1:dhix40Br3fDiBhwaSG/zgaYOF
github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs=
github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/huandu/go-assert v1.1.6 h1:oaAfYxq9KNDi9qswn/6aE0EydfxSa+tWZC1KabNitYs=
github.com/huandu/go-assert v1.1.6/go.mod h1:JuIfbmYG9ykwvuxoJ3V8TB5QP+3+ajIA54Y44TmkMxs=
github.com/huandu/go-sqlbuilder v1.35.0 h1:ESvxFHN8vxCTudY1Vq63zYpU5yJBESn19sf6k4v2T5Q=
github.com/huandu/go-sqlbuilder v1.35.0/go.mod h1:mS0GAtrtW+XL6nM2/gXHRJax2RwSW1TraavWDFAc1JA=
github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU=
github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
@@ -1653,7 +1660,6 @@ honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4 h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
k8s.io/api v0.31.3 h1:umzm5o8lFbdN/hIXbrK9oRpOproJO62CV1zqxXrLgk8=
k8s.io/api v0.31.3/go.mod h1:UJrkIp9pnMOI9K2nlL6vwpxRzzEX5sWgn8kGQe92kCE=

221
grammar/FilterQuery.g4 Normal file
View File

@@ -0,0 +1,221 @@
grammar FilterQuery;
/*
* Parser Rules
*/
query
: expression
EOF
;
// Expression with standard boolean precedence:
// - parentheses > NOT > AND > OR
// - consecutive expressions with no AND/OR => implicit AND
expression
: orExpression
;
// OR expressions
orExpression
: andExpression ( OR andExpression )*
;
// AND expressions + optional chaining with implicit AND if no OR is present
andExpression
: unaryExpression ( AND unaryExpression | unaryExpression )*
;
// A unary expression handles optional NOT
unaryExpression
: NOT? primary
;
// Primary constructs: grouped expressions, a comparison (key op value),
// a function call, or a full-text string
primary
: LPAREN orExpression RPAREN
| comparison
| functionCall
| fullText
| key
;
/*
* Comparison-like filters
*
* Includes all operators: =, !=, <>, <, <=, >, >=, [NOT] LIKE, [NOT] ILIKE,
* [NOT] BETWEEN, [NOT] IN, [NOT] EXISTS, [NOT] REGEXP, [NOT] CONTAINS, etc.
*/
comparison
: key EQUALS value
| key (NOT_EQUALS | NEQ) value
| key LT value
| key LE value
| key GT value
| key GE value
| key (LIKE | ILIKE) value
| key (NOT_LIKE | NOT_ILIKE) value
| key BETWEEN value AND value
| key NOT BETWEEN value AND value
| key inClause
| key notInClause
| key EXISTS
| key NOT EXISTS
| key REGEXP value
| key NOT REGEXP value
| key CONTAINS value
| key NOT CONTAINS value
;
// in(...) or in[...]
inClause
: IN LPAREN valueList RPAREN
| IN LBRACK valueList RBRACK
;
notInClause
: NOT IN LPAREN valueList RPAREN
| NOT IN LBRACK valueList RBRACK
;
// List of values for in(...) or in[...]
valueList
: value ( COMMA value )*
;
// Full-text search: a standalone quoted string is allowed as a "primary"
// e.g. `"Waiting for response" http.status_code=200`
fullText
: QUOTED_TEXT
| FREETEXT
;
/*
* Function calls like:
* has(payload.user_ids, 123)
* hasAny(payload.user_ids, [123, 456])
* ...
*/
functionCall
: (HAS | HASANY | HASALL | HASNONE) LPAREN functionParamList RPAREN
;
// Function parameters can be keys, single scalar values, or arrays
functionParamList
: functionParam ( COMMA functionParam )*
;
functionParam
: key
| value
| array
;
// An array: [ item1, item2, item3 ]
array
: LBRACK valueList RBRACK
;
/*
* A 'value' can be a string literal (double or single-quoted),
 * a numeric literal, boolean, or a "bare" token as needed.
*/
value
: QUOTED_TEXT
| NUMBER
| BOOL
| KEY
;
/*
* A key can include letters, digits, underscores, dots, brackets
* E.g. service.name, query_log.query_duration_ms, proto.user_objects[].name
*/
key
: KEY
;
/*
* Lexer Rules
*/
// Common punctuation / symbols
LPAREN : '(' ;
RPAREN : ')' ;
LBRACK : '[' ;
RBRACK : ']' ;
COMMA : ',' ;
EQUALS : '=' | '==' ;
NOT_EQUALS : '!=' ;
NEQ : '<>' ; // alternate not-equals operator
LT : '<' ;
LE : '<=' ;
GT : '>' ;
GE : '>=' ;
// Operators that are made of multiple keywords
LIKE : [Ll][Ii][Kk][Ee] ;
NOT_LIKE : [Nn][Oo][Tt] [ \t]+ [Ll][Ii][Kk][Ee] ;
ILIKE : [Ii][Ll][Ii][Kk][Ee] ;
NOT_ILIKE : [Nn][Oo][Tt] [ \t]+ [Ii][Ll][Ii][Kk][Ee] ;
BETWEEN : [Bb][Ee][Tt][Ww][Ee][Ee][Nn] ;
EXISTS : [Ee][Xx][Ii][Ss][Tt][Ss]? ;
REGEXP : [Rr][Ee][Gg][Ee][Xx][Pp] ;
CONTAINS : [Cc][Oo][Nn][Tt][Aa][Ii][Nn][Ss]? ;
IN : [Ii][Nn] ;
// Boolean logic
NOT : [Nn][Oo][Tt] ;
AND : [Aa][Nn][Dd] ;
OR : [Oo][Rr] ;
// For easy referencing in function calls
HAS : [Hh][Aa][Ss] ;
HASANY : [Hh][Aa][Ss][Aa][Nn][Yy] ;
HASALL : [Hh][Aa][Ss][Aa][Ll][Ll] ;
HASNONE : [Hh][Aa][Ss][Nn][Oo][Nn][Ee] ;
// Potential boolean constants
BOOL
: [Tt][Rr][Uu][Ee]
| [Ff][Aa][Ll][Ss][Ee]
;
// Numbers (integer or float). Adjust as needed for your domain.
NUMBER
: DIGIT+ ( '.' DIGIT+ )?
;
// Double/single-quoted text, capturing full text search strings, values, etc.
QUOTED_TEXT
: ( '"' ( ~["\\] | '\\' . )* '"' // double-quoted
| '\'' ( ~['\\] | '\\' . )* '\'' // single-quoted
)
;
// Keys can have letters, digits, underscores, dots, and bracket pairs
// e.g. service.name, service.namespace, db.queries[].query_duration
KEY
: [a-zA-Z0-9_] [a-zA-Z0-9_.[\]]*
;
// Ignore whitespace
WS
: [ \t\r\n]+ -> skip
;
// Digits used by NUMBER
fragment DIGIT
: [0-9]
;
FREETEXT : (~[ \t\r\n=()'"<>![\]])+ ;
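
A minimal, hedged sketch of driving the generated Go parser with this grammar. The import path of the generated package is an assumption, and the parser constructor name is assumed to follow the standard ANTLR naming (as the generated lexer below does).

package main

import (
    "fmt"

    "github.com/antlr4-go/antlr/v4"

    parser "github.com/SigNoz/signoz/pkg/parser/grammar" // assumed location of the generated code
)

func main() {
    // Implicit AND between the quoted full-text term and the comparison, per the grammar above.
    input := antlr.NewInputStream(`"Waiting for response" http.status_code = 200`)
    lexer := parser.NewFilterQueryLexer(input)
    tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
    p := parser.NewFilterQueryParser(tokens)
    tree := p.Query() // top-level rule: expression EOF
    fmt.Println(tree.ToStringTree(nil, p))
}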

View File

@@ -6,6 +6,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
var (
@@ -33,16 +34,16 @@ type Alertmanager interface {
ListAllChannels(context.Context) ([]*alertmanagertypes.Channel, error)
// GetChannelByID gets a channel for the organization.
GetChannelByID(context.Context, string, int) (*alertmanagertypes.Channel, error)
GetChannelByID(context.Context, string, valuer.UUID) (*alertmanagertypes.Channel, error)
// UpdateChannel updates a channel for the organization.
UpdateChannelByReceiverAndID(context.Context, string, alertmanagertypes.Receiver, int) error
UpdateChannelByReceiverAndID(context.Context, string, alertmanagertypes.Receiver, valuer.UUID) error
// CreateChannel creates a channel for the organization.
CreateChannel(context.Context, string, alertmanagertypes.Receiver) error
// DeleteChannelByID deletes a channel for the organization.
DeleteChannelByID(context.Context, string, int) error
DeleteChannelByID(context.Context, string, valuer.UUID) error
// SetConfig sets the config for the organization.
SetConfig(context.Context, *alertmanagertypes.Config) error

View File

@@ -8,6 +8,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/tidwall/gjson"
"github.com/uptrace/bun"
)
@@ -99,7 +100,7 @@ func (store *config) CreateChannel(ctx context.Context, channel *alertmanagertyp
}, opts...)
}
func (store *config) GetChannelByID(ctx context.Context, orgID string, id int) (*alertmanagertypes.Channel, error) {
func (store *config) GetChannelByID(ctx context.Context, orgID string, id valuer.UUID) (*alertmanagertypes.Channel, error) {
channel := new(alertmanagertypes.Channel)
err := store.
@@ -108,11 +109,11 @@ func (store *config) GetChannelByID(ctx context.Context, orgID string, id int) (
NewSelect().
Model(channel).
Where("org_id = ?", orgID).
Where("id = ?", id).
Where("id = ?", id.StringValue()).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return nil, errors.Newf(errors.TypeNotFound, alertmanagertypes.ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %d", id)
return nil, errors.Newf(errors.TypeNotFound, alertmanagertypes.ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %s", id.StringValue())
}
return nil, err
}
@@ -136,7 +137,7 @@ func (store *config) UpdateChannel(ctx context.Context, orgID string, channel *a
}, opts...)
}
func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id int, opts ...alertmanagertypes.StoreOption) error {
func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id valuer.UUID, opts ...alertmanagertypes.StoreOption) error {
return store.wrap(ctx, func(ctx context.Context) error {
channel := new(alertmanagertypes.Channel)
@@ -146,7 +147,7 @@ func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id int
NewDelete().
Model(channel).
Where("org_id = ?", orgID).
Where("id = ?", id).
Where("id = ?", id.StringValue()).
Exec(ctx); err != nil {
return err
}

View File

@@ -4,13 +4,13 @@ import (
"context"
"io"
"net/http"
"strconv"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
@@ -140,9 +140,9 @@ func (api *API) GetChannelByID(rw http.ResponseWriter, req *http.Request) {
return
}
id, err := strconv.Atoi(idString)
id, err := valuer.NewUUID(idString)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
@@ -177,9 +177,9 @@ func (api *API) UpdateChannelByID(rw http.ResponseWriter, req *http.Request) {
return
}
id, err := strconv.Atoi(idString)
id, err := valuer.NewUUID(idString)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
@@ -227,9 +227,9 @@ func (api *API) DeleteChannelByID(rw http.ResponseWriter, req *http.Request) {
return
}
id, err := strconv.Atoi(idString)
id, err := valuer.NewUUID(idString)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
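
A hedged sketch of the ID round-trip these handlers now rely on: channel IDs arrive as uuid-v7 strings, are validated with valuer.NewUUID, and are bound in SQL as strings via StringValue(). The literal below is a placeholder and the standalone main is for illustration only.

package main

import (
    "fmt"

    "github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
    id, err := valuer.NewUUID("0195f9d2-0000-7000-8000-000000000000") // placeholder uuid-v7 string
    if err != nil {
        fmt.Println("id is not a valid uuid-v7:", err)
        return
    }
    // e.g. Where("id = ?", id.StringValue()) in the bun queries above
    fmt.Println(id.StringValue())
}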

View File

@@ -16,6 +16,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/tidwall/gjson"
)
@@ -24,6 +25,25 @@ type postableAlert struct {
Receivers []string `json:"receivers"`
}
func (pa *postableAlert) MarshalJSON() ([]byte, error) {
// Marshal the embedded PostableAlert to get its JSON representation.
alertJSON, err := json.Marshal(pa.PostableAlert)
if err != nil {
return nil, err
}
// Unmarshal that JSON into a map so we can add extra fields.
var m map[string]interface{}
if err := json.Unmarshal(alertJSON, &m); err != nil {
return nil, err
}
// Add the Receivers field.
m["receivers"] = pa.Receivers
return json.Marshal(m)
}
const (
alertsPath string = "/v1/alerts"
routesPath string = "/v1/routes"
@@ -269,11 +289,11 @@ func (provider *provider) ListAllChannels(ctx context.Context) ([]*alertmanagert
return channels, nil
}
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID int) (*alertmanagertypes.Channel, error) {
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) (*alertmanagertypes.Channel, error) {
return provider.configStore.GetChannelByID(ctx, orgID, channelID)
}
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id int) error {
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id valuer.UUID) error {
channel, err := provider.configStore.GetChannelByID(ctx, orgID, id)
if err != nil {
return err
@@ -378,7 +398,7 @@ func (provider *provider) CreateChannel(ctx context.Context, orgID string, recei
}))
}
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID int) error {
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) error {
channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID)
if err != nil {
return err

View File

@@ -0,0 +1,35 @@
package legacyalertmanager
import (
"encoding/json"
"testing"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/prometheus/alertmanager/api/v2/models"
"github.com/stretchr/testify/assert"
)
func TestProvider_TestAlert(t *testing.T) {
pa := &postableAlert{
PostableAlert: &alertmanagertypes.PostableAlert{
Alert: models.Alert{
Labels: models.LabelSet{
"alertname": "test",
},
GeneratorURL: "http://localhost:9090/graph?g0.expr=up&g0.tab=1",
},
Annotations: models.LabelSet{
"summary": "test",
},
},
Receivers: []string{"receiver1", "receiver2"},
}
body, err := json.Marshal(pa)
if err != nil {
t.Fatalf("failed to marshal postable alert: %v", err)
}
assert.Contains(t, string(body), "receiver1")
assert.Contains(t, string(body), "receiver2")
}

View File

@@ -10,6 +10,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type provider struct {
@@ -99,11 +100,11 @@ func (provider *provider) ListAllChannels(ctx context.Context) ([]*alertmanagert
return nil, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "not supported by provider signoz")
}
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID int) (*alertmanagertypes.Channel, error) {
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) (*alertmanagertypes.Channel, error) {
return provider.configStore.GetChannelByID(ctx, orgID, channelID)
}
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id int) error {
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id valuer.UUID) error {
channel, err := provider.configStore.GetChannelByID(ctx, orgID, id)
if err != nil {
return err
@@ -127,7 +128,7 @@ func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgI
}))
}
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID int) error {
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) error {
channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID)
if err != nil {
return err

View File

@@ -0,0 +1,149 @@
package preference
import (
"encoding/json"
"net/http"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
"github.com/gorilla/mux"
)
type API interface {
GetOrgPreference(http.ResponseWriter, *http.Request)
UpdateOrgPreference(http.ResponseWriter, *http.Request)
GetAllOrgPreferences(http.ResponseWriter, *http.Request)
GetUserPreference(http.ResponseWriter, *http.Request)
UpdateUserPreference(http.ResponseWriter, *http.Request)
GetAllUserPreferences(http.ResponseWriter, *http.Request)
}
type preferenceAPI struct {
usecase Usecase
}
func NewAPI(usecase Usecase) API {
return &preferenceAPI{usecase: usecase}
}
func (p *preferenceAPI) GetOrgPreference(rw http.ResponseWriter, r *http.Request) {
preferenceId := mux.Vars(r)["preferenceId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
preference, err := p.usecase.GetOrgPreference(
r.Context(), preferenceId, claims.OrgID,
)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, preference)
}
func (p *preferenceAPI) UpdateOrgPreference(rw http.ResponseWriter, r *http.Request) {
preferenceId := mux.Vars(r)["preferenceId"]
req := preferencetypes.UpdatablePreference{}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
err := json.NewDecoder(r.Body).Decode(&req)
if err != nil {
render.Error(rw, err)
return
}
err = p.usecase.UpdateOrgPreference(r.Context(), preferenceId, req.PreferenceValue, claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusNoContent, nil)
}
func (p *preferenceAPI) GetAllOrgPreferences(rw http.ResponseWriter, r *http.Request) {
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
preferences, err := p.usecase.GetAllOrgPreferences(
r.Context(), claims.OrgID,
)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, preferences)
}
func (p *preferenceAPI) GetUserPreference(rw http.ResponseWriter, r *http.Request) {
preferenceId := mux.Vars(r)["preferenceId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
preference, err := p.usecase.GetUserPreference(
r.Context(), preferenceId, claims.OrgID, claims.UserID,
)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, preference)
}
func (p *preferenceAPI) UpdateUserPreference(rw http.ResponseWriter, r *http.Request) {
preferenceId := mux.Vars(r)["preferenceId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
req := preferencetypes.UpdatablePreference{}
err := json.NewDecoder(r.Body).Decode(&req)
if err != nil {
render.Error(rw, err)
return
}
err = p.usecase.UpdateUserPreference(r.Context(), preferenceId, req.PreferenceValue, claims.UserID)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusNoContent, nil)
}
func (p *preferenceAPI) GetAllUserPreferences(rw http.ResponseWriter, r *http.Request) {
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
preferences, err := p.usecase.GetAllUserPreferences(
r.Context(), claims.OrgID, claims.UserID,
)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, preferences)
}
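
For orientation, a hedged sketch of how these handlers could be mounted on a mux router. The URL paths and the RegisterRoutes helper are assumptions for illustration; the actual route registration is not part of this file.

package preference

import (
    "net/http"

    "github.com/gorilla/mux"
)

// RegisterRoutes is a hypothetical helper showing one way to mount the API above.
func RegisterRoutes(router *mux.Router, api API) {
    router.HandleFunc("/api/v1/org/preferences", api.GetAllOrgPreferences).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/org/preferences/{preferenceId}", api.GetOrgPreference).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/org/preferences/{preferenceId}", api.UpdateOrgPreference).Methods(http.MethodPut)
    router.HandleFunc("/api/v1/user/preferences", api.GetAllUserPreferences).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/user/preferences/{preferenceId}", api.GetUserPreference).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/user/preferences/{preferenceId}", api.UpdateUserPreference).Methods(http.MethodPut)
}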

View File

@@ -0,0 +1,278 @@
package core
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type usecase struct {
store preferencetypes.PreferenceStore
defaultMap map[string]preferencetypes.Preference
}
func NewPreference(store preferencetypes.PreferenceStore, defaultMap map[string]preferencetypes.Preference) preference.Usecase {
return &usecase{store: store, defaultMap: defaultMap}
}
func (usecase *usecase) GetOrgPreference(ctx context.Context, preferenceID string, orgID string) (*preferencetypes.GettablePreference, error) {
preference, seen := usecase.defaultMap[preferenceID]
if !seen {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
}
isPreferenceEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
if !isPreferenceEnabled {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at org scope: %s", preferenceID))
}
orgPreference, err := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
if err != nil {
if err == sql.ErrNoRows {
return &preferencetypes.GettablePreference{
PreferenceID: preferenceID,
PreferenceValue: preference.DefaultValue,
}, nil
}
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the org preference: %s", preferenceID))
}
return &preferencetypes.GettablePreference{
PreferenceID: preferenceID,
PreferenceValue: preference.SanitizeValue(orgPreference.PreferenceValue),
}, nil
}
func (usecase *usecase) UpdateOrgPreference(ctx context.Context, preferenceID string, preferenceValue interface{}, orgID string) error {
preference, seen := usecase.defaultMap[preferenceID]
if !seen {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
}
isPreferenceEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
if !isPreferenceEnabled {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at org scope: %s", preferenceID))
}
err := preference.IsValidValue(preferenceValue)
if err != nil {
return err
}
storablePreferenceValue, encodeErr := json.Marshal(preferenceValue)
if encodeErr != nil {
return errors.Wrapf(encodeErr, errors.TypeInvalidInput, errors.CodeInvalidInput, "error in encoding the preference value")
}
orgPreference, dberr := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
if dberr != nil && dberr != sql.ErrNoRows {
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in getting the preference value")
}
if dberr != nil {
orgPreference.ID = valuer.GenerateUUID()
orgPreference.PreferenceID = preferenceID
orgPreference.PreferenceValue = string(storablePreferenceValue)
orgPreference.OrgID = orgID
} else {
orgPreference.PreferenceValue = string(storablePreferenceValue)
}
dberr = usecase.store.UpsertOrgPreference(ctx, orgPreference)
if dberr != nil {
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in setting the preference value")
}
return nil
}
func (usecase *usecase) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*preferencetypes.PreferenceWithValue, error) {
allOrgPreferences := []*preferencetypes.PreferenceWithValue{}
orgPreferences, err := usecase.store.GetAllOrgPreferences(ctx, orgID)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in fetching all org preference values")
}
preferenceValueMap := map[string]interface{}{}
for _, preferenceValue := range orgPreferences {
preferenceValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
}
for _, preference := range usecase.defaultMap {
isEnabledForOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
if isEnabledForOrgScope {
preferenceWithValue := &preferencetypes.PreferenceWithValue{}
preferenceWithValue.Key = preference.Key
preferenceWithValue.Name = preference.Name
preferenceWithValue.Description = preference.Description
preferenceWithValue.AllowedScopes = preference.AllowedScopes
preferenceWithValue.AllowedValues = preference.AllowedValues
preferenceWithValue.DefaultValue = preference.DefaultValue
preferenceWithValue.Range = preference.Range
preferenceWithValue.ValueType = preference.ValueType
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
value, seen := preferenceValueMap[preference.Key]
if seen {
preferenceWithValue.Value = value
} else {
preferenceWithValue.Value = preference.DefaultValue
}
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
allOrgPreferences = append(allOrgPreferences, preferenceWithValue)
}
}
return allOrgPreferences, nil
}
func (usecase *usecase) GetUserPreference(ctx context.Context, preferenceID string, orgID string, userID string) (*preferencetypes.GettablePreference, error) {
preference, seen := usecase.defaultMap[preferenceID]
if !seen {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
}
preferenceValue := preferencetypes.GettablePreference{
PreferenceID: preferenceID,
PreferenceValue: preference.DefaultValue,
}
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
if !isPreferenceEnabledAtUserScope {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at user scope: %s", preferenceID))
}
isPreferenceEnabledAtOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
if isPreferenceEnabledAtOrgScope {
orgPreference, err := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
if err != nil && err != sql.ErrNoRows {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the org preference: %s", preferenceID))
}
if err == nil {
preferenceValue.PreferenceValue = orgPreference.PreferenceValue
}
}
userPreference, err := usecase.store.GetUserPreference(ctx, userID, preferenceID)
if err != nil && err != sql.ErrNoRows {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the user preference: %s", preferenceID))
}
if err == nil {
preferenceValue.PreferenceValue = userPreference.PreferenceValue
}
return &preferencetypes.GettablePreference{
PreferenceID: preferenceValue.PreferenceID,
PreferenceValue: preference.SanitizeValue(preferenceValue.PreferenceValue),
}, nil
}
func (usecase *usecase) UpdateUserPreference(ctx context.Context, preferenceID string, preferenceValue interface{}, userID string) error {
preference, seen := usecase.defaultMap[preferenceID]
if !seen {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
}
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
if !isPreferenceEnabledAtUserScope {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at user scope: %s", preferenceID))
}
err := preference.IsValidValue(preferenceValue)
if err != nil {
return err
}
storablePreferenceValue, encodeErr := json.Marshal(preferenceValue)
if encodeErr != nil {
return errors.Wrapf(encodeErr, errors.TypeInvalidInput, errors.CodeInvalidInput, "error in encoding the preference value")
}
userPreference, dberr := usecase.store.GetUserPreference(ctx, userID, preferenceID)
if dberr != nil && dberr != sql.ErrNoRows {
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in getting the preference value")
}
if dberr != nil {
userPreference.ID = valuer.GenerateUUID()
userPreference.PreferenceID = preferenceID
userPreference.PreferenceValue = string(storablePreferenceValue)
userPreference.UserID = userID
} else {
userPreference.PreferenceValue = string(storablePreferenceValue)
}
dberr = usecase.store.UpsertUserPreference(ctx, userPreference)
if dberr != nil {
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in setting the preference value")
}
return nil
}
func (usecase *usecase) GetAllUserPreferences(ctx context.Context, orgID string, userID string) ([]*preferencetypes.PreferenceWithValue, error) {
allUserPreferences := []*preferencetypes.PreferenceWithValue{}
orgPreferences, err := usecase.store.GetAllOrgPreferences(ctx, orgID)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in fetching all org preference values")
}
preferenceOrgValueMap := map[string]interface{}{}
for _, preferenceValue := range orgPreferences {
preferenceOrgValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
}
userPreferences, err := usecase.store.GetAllUserPreferences(ctx, userID)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in fetching all user preference values")
}
preferenceUserValueMap := map[string]interface{}{}
for _, preferenceValue := range userPreferences {
preferenceUserValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
}
for _, preference := range usecase.defaultMap {
isEnabledForUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
if isEnabledForUserScope {
preferenceWithValue := &preferencetypes.PreferenceWithValue{}
preferenceWithValue.Key = preference.Key
preferenceWithValue.Name = preference.Name
preferenceWithValue.Description = preference.Description
preferenceWithValue.AllowedScopes = preference.AllowedScopes
preferenceWithValue.AllowedValues = preference.AllowedValues
preferenceWithValue.DefaultValue = preference.DefaultValue
preferenceWithValue.Range = preference.Range
preferenceWithValue.ValueType = preference.ValueType
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
preferenceWithValue.Value = preference.DefaultValue
isEnabledForOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
if isEnabledForOrgScope {
value, seen := preferenceOrgValueMap[preference.Key]
if seen {
preferenceWithValue.Value = value
}
}
value, seen := preferenceUserValueMap[preference.Key]
if seen {
preferenceWithValue.Value = value
}
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
allUserPreferences = append(allUserPreferences, preferenceWithValue)
}
}
return allUserPreferences, nil
}
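
A small hedged usage sketch of the resolution order implemented above for user-scoped preferences: the user's stored value wins, then the org's stored value (when the preference is also org-scoped), then the default from the preference map. The preference ID and the surrounding function are placeholders, not part of this change set.

package core

import (
    "context"
    "fmt"

    "github.com/SigNoz/signoz/pkg/modules/preference"
)

// resolveExample is a hypothetical caller illustrating the fallback order above.
func resolveExample(ctx context.Context, uc preference.Usecase, orgID, userID string) error {
    pref, err := uc.GetUserPreference(ctx, "welcome_checklist_do_later", orgID, userID) // placeholder preference ID
    if err != nil {
        return err
    }
    fmt.Printf("%s resolved to %v\n", pref.PreferenceID, pref.PreferenceValue)
    return nil
}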

View File

@@ -0,0 +1,116 @@
package core
import (
"context"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
)
type store struct {
store sqlstore.SQLStore
}
func NewStore(db sqlstore.SQLStore) preferencetypes.PreferenceStore {
return &store{store: db}
}
func (store *store) GetOrgPreference(ctx context.Context, orgID string, preferenceID string) (*preferencetypes.StorableOrgPreference, error) {
orgPreference := new(preferencetypes.StorableOrgPreference)
err := store.
store.
BunDB().
NewSelect().
Model(orgPreference).
Where("preference_id = ?", preferenceID).
Where("org_id = ?", orgID).
Scan(ctx)
if err != nil {
return orgPreference, err
}
return orgPreference, nil
}
func (store *store) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*preferencetypes.StorableOrgPreference, error) {
orgPreferences := make([]*preferencetypes.StorableOrgPreference, 0)
err := store.
store.
BunDB().
NewSelect().
Model(&orgPreferences).
Where("org_id = ?", orgID).
Scan(ctx)
if err != nil {
return orgPreferences, err
}
return orgPreferences, nil
}
func (store *store) UpsertOrgPreference(ctx context.Context, orgPreference *preferencetypes.StorableOrgPreference) error {
_, err := store.
store.
BunDB().
NewInsert().
Model(orgPreference).
On("CONFLICT (id) DO UPDATE").
Exec(ctx)
if err != nil {
return err
}
return nil
}
func (store *store) GetUserPreference(ctx context.Context, userID string, preferenceID string) (*preferencetypes.StorableUserPreference, error) {
userPreference := new(preferencetypes.StorableUserPreference)
err := store.
store.
BunDB().
NewSelect().
Model(userPreference).
Where("preference_id = ?", preferenceID).
Where("user_id = ?", userID).
Scan(ctx)
if err != nil {
return userPreference, err
}
return userPreference, nil
}
func (store *store) GetAllUserPreferences(ctx context.Context, userID string) ([]*preferencetypes.StorableUserPreference, error) {
userPreferences := make([]*preferencetypes.StorableUserPreference, 0)
err := store.
store.
BunDB().
NewSelect().
Model(&userPreferences).
Where("user_id = ?", userID).
Scan(ctx)
if err != nil {
return userPreferences, err
}
return userPreferences, nil
}
func (store *store) UpsertUserPreference(ctx context.Context, userPreference *preferencetypes.StorableUserPreference) error {
_, err := store.
store.
BunDB().
NewInsert().
Model(userPreference).
On("CONFLICT (id) DO UPDATE").
Exec(ctx)
if err != nil {
return err
}
return nil
}

View File

@@ -0,0 +1,17 @@
package preference
import (
"context"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
)
type Usecase interface {
GetOrgPreference(ctx context.Context, preferenceId string, orgId string) (*preferencetypes.GettablePreference, error)
UpdateOrgPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, orgId string) error
GetAllOrgPreferences(ctx context.Context, orgId string) ([]*preferencetypes.PreferenceWithValue, error)
GetUserPreference(ctx context.Context, preferenceId string, orgId string, userId string) (*preferencetypes.GettablePreference, error)
UpdateUserPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, userId string) error
GetAllUserPreferences(ctx context.Context, orgId string, userId string) ([]*preferencetypes.PreferenceWithValue, error)
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,45 @@
LPAREN=1
RPAREN=2
LBRACK=3
RBRACK=4
COMMA=5
EQUALS=6
NOT_EQUALS=7
NEQ=8
LT=9
LE=10
GT=11
GE=12
LIKE=13
NOT_LIKE=14
ILIKE=15
NOT_ILIKE=16
BETWEEN=17
EXISTS=18
REGEXP=19
CONTAINS=20
IN=21
NOT=22
AND=23
OR=24
HAS=25
HASANY=26
HASALL=27
HASNONE=28
BOOL=29
NUMBER=30
QUOTED_TEXT=31
KEY=32
WS=33
FREETEXT=34
'('=1
')'=2
'['=3
']'=4
','=5
'!='=7
'<>'=8
'<'=9
'<='=10
'>'=11
'>='=12

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,45 @@
LPAREN=1
RPAREN=2
LBRACK=3
RBRACK=4
COMMA=5
EQUALS=6
NOT_EQUALS=7
NEQ=8
LT=9
LE=10
GT=11
GE=12
LIKE=13
NOT_LIKE=14
ILIKE=15
NOT_ILIKE=16
BETWEEN=17
EXISTS=18
REGEXP=19
CONTAINS=20
IN=21
NOT=22
AND=23
OR=24
HAS=25
HASANY=26
HASALL=27
HASNONE=28
BOOL=29
NUMBER=30
QUOTED_TEXT=31
KEY=32
WS=33
FREETEXT=34
'('=1
')'=2
'['=3
']'=4
','=5
'!='=7
'<>'=8
'<'=9
'<='=10
'>'=11
'>='=12

View File

@@ -0,0 +1,124 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // FilterQuery
import "github.com/antlr4-go/antlr/v4"
// BaseFilterQueryListener is a complete listener for a parse tree produced by FilterQueryParser.
type BaseFilterQueryListener struct{}
var _ FilterQueryListener = &BaseFilterQueryListener{}
// VisitTerminal is called when a terminal node is visited.
func (s *BaseFilterQueryListener) VisitTerminal(node antlr.TerminalNode) {}
// VisitErrorNode is called when an error node is visited.
func (s *BaseFilterQueryListener) VisitErrorNode(node antlr.ErrorNode) {}
// EnterEveryRule is called when any rule is entered.
func (s *BaseFilterQueryListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}
// ExitEveryRule is called when any rule is exited.
func (s *BaseFilterQueryListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}
// EnterQuery is called when production query is entered.
func (s *BaseFilterQueryListener) EnterQuery(ctx *QueryContext) {}
// ExitQuery is called when production query is exited.
func (s *BaseFilterQueryListener) ExitQuery(ctx *QueryContext) {}
// EnterExpression is called when production expression is entered.
func (s *BaseFilterQueryListener) EnterExpression(ctx *ExpressionContext) {}
// ExitExpression is called when production expression is exited.
func (s *BaseFilterQueryListener) ExitExpression(ctx *ExpressionContext) {}
// EnterOrExpression is called when production orExpression is entered.
func (s *BaseFilterQueryListener) EnterOrExpression(ctx *OrExpressionContext) {}
// ExitOrExpression is called when production orExpression is exited.
func (s *BaseFilterQueryListener) ExitOrExpression(ctx *OrExpressionContext) {}
// EnterAndExpression is called when production andExpression is entered.
func (s *BaseFilterQueryListener) EnterAndExpression(ctx *AndExpressionContext) {}
// ExitAndExpression is called when production andExpression is exited.
func (s *BaseFilterQueryListener) ExitAndExpression(ctx *AndExpressionContext) {}
// EnterUnaryExpression is called when production unaryExpression is entered.
func (s *BaseFilterQueryListener) EnterUnaryExpression(ctx *UnaryExpressionContext) {}
// ExitUnaryExpression is called when production unaryExpression is exited.
func (s *BaseFilterQueryListener) ExitUnaryExpression(ctx *UnaryExpressionContext) {}
// EnterPrimary is called when production primary is entered.
func (s *BaseFilterQueryListener) EnterPrimary(ctx *PrimaryContext) {}
// ExitPrimary is called when production primary is exited.
func (s *BaseFilterQueryListener) ExitPrimary(ctx *PrimaryContext) {}
// EnterComparison is called when production comparison is entered.
func (s *BaseFilterQueryListener) EnterComparison(ctx *ComparisonContext) {}
// ExitComparison is called when production comparison is exited.
func (s *BaseFilterQueryListener) ExitComparison(ctx *ComparisonContext) {}
// EnterInClause is called when production inClause is entered.
func (s *BaseFilterQueryListener) EnterInClause(ctx *InClauseContext) {}
// ExitInClause is called when production inClause is exited.
func (s *BaseFilterQueryListener) ExitInClause(ctx *InClauseContext) {}
// EnterNotInClause is called when production notInClause is entered.
func (s *BaseFilterQueryListener) EnterNotInClause(ctx *NotInClauseContext) {}
// ExitNotInClause is called when production notInClause is exited.
func (s *BaseFilterQueryListener) ExitNotInClause(ctx *NotInClauseContext) {}
// EnterValueList is called when production valueList is entered.
func (s *BaseFilterQueryListener) EnterValueList(ctx *ValueListContext) {}
// ExitValueList is called when production valueList is exited.
func (s *BaseFilterQueryListener) ExitValueList(ctx *ValueListContext) {}
// EnterFullText is called when production fullText is entered.
func (s *BaseFilterQueryListener) EnterFullText(ctx *FullTextContext) {}
// ExitFullText is called when production fullText is exited.
func (s *BaseFilterQueryListener) ExitFullText(ctx *FullTextContext) {}
// EnterFunctionCall is called when production functionCall is entered.
func (s *BaseFilterQueryListener) EnterFunctionCall(ctx *FunctionCallContext) {}
// ExitFunctionCall is called when production functionCall is exited.
func (s *BaseFilterQueryListener) ExitFunctionCall(ctx *FunctionCallContext) {}
// EnterFunctionParamList is called when production functionParamList is entered.
func (s *BaseFilterQueryListener) EnterFunctionParamList(ctx *FunctionParamListContext) {}
// ExitFunctionParamList is called when production functionParamList is exited.
func (s *BaseFilterQueryListener) ExitFunctionParamList(ctx *FunctionParamListContext) {}
// EnterFunctionParam is called when production functionParam is entered.
func (s *BaseFilterQueryListener) EnterFunctionParam(ctx *FunctionParamContext) {}
// ExitFunctionParam is called when production functionParam is exited.
func (s *BaseFilterQueryListener) ExitFunctionParam(ctx *FunctionParamContext) {}
// EnterArray is called when production array is entered.
func (s *BaseFilterQueryListener) EnterArray(ctx *ArrayContext) {}
// ExitArray is called when production array is exited.
func (s *BaseFilterQueryListener) ExitArray(ctx *ArrayContext) {}
// EnterValue is called when production value is entered.
func (s *BaseFilterQueryListener) EnterValue(ctx *ValueContext) {}
// ExitValue is called when production value is exited.
func (s *BaseFilterQueryListener) ExitValue(ctx *ValueContext) {}
// EnterKey is called when production key is entered.
func (s *BaseFilterQueryListener) EnterKey(ctx *KeyContext) {}
// ExitKey is called when production key is exited.
func (s *BaseFilterQueryListener) ExitKey(ctx *KeyContext) {}
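
A hedged sketch of the usual listener pattern on top of the base listener above: embed it, override only the callbacks you need, and walk the parse tree. CollectKeys is a hypothetical helper, and the parser constructor name is assumed to follow ANTLR's convention (as the generated lexer does).

package parser

import "github.com/antlr4-go/antlr/v4"

// keyListener records every key referenced in a filter expression.
type keyListener struct {
    BaseFilterQueryListener
    keys []string
}

func (l *keyListener) EnterKey(ctx *KeyContext) {
    l.keys = append(l.keys, ctx.GetText())
}

// CollectKeys parses the query and collects keys via a tree walk.
func CollectKeys(query string) []string {
    lexer := NewFilterQueryLexer(antlr.NewInputStream(query))
    p := NewFilterQueryParser(antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel))
    l := &keyListener{}
    antlr.ParseTreeWalkerDefault.Walk(l, p.Query())
    return l.keys
}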

View File

@@ -0,0 +1,77 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // FilterQuery
import "github.com/antlr4-go/antlr/v4"
type BaseFilterQueryVisitor struct {
*antlr.BaseParseTreeVisitor
}
func (v *BaseFilterQueryVisitor) VisitQuery(ctx *QueryContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitExpression(ctx *ExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitOrExpression(ctx *OrExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitAndExpression(ctx *AndExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitUnaryExpression(ctx *UnaryExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitPrimary(ctx *PrimaryContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitComparison(ctx *ComparisonContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitInClause(ctx *InClauseContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitNotInClause(ctx *NotInClauseContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitValueList(ctx *ValueListContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitFullText(ctx *FullTextContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitFunctionCall(ctx *FunctionCallContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitFunctionParamList(ctx *FunctionParamListContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitFunctionParam(ctx *FunctionParamContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitArray(ctx *ArrayContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitValue(ctx *ValueContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitKey(ctx *KeyContext) interface{} {
return v.VisitChildren(ctx)
}

View File

@@ -0,0 +1,271 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser
import (
"fmt"
"github.com/antlr4-go/antlr/v4"
"sync"
"unicode"
)
// Suppress unused import error
var _ = fmt.Printf
var _ = sync.Once{}
var _ = unicode.IsLetter
type FilterQueryLexer struct {
*antlr.BaseLexer
channelNames []string
modeNames []string
// TODO: EOF string
}
var FilterQueryLexerLexerStaticData struct {
once sync.Once
serializedATN []int32
ChannelNames []string
ModeNames []string
LiteralNames []string
SymbolicNames []string
RuleNames []string
PredictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
func filterquerylexerLexerInit() {
staticData := &FilterQueryLexerLexerStaticData
staticData.ChannelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
staticData.ModeNames = []string{
"DEFAULT_MODE",
}
staticData.LiteralNames = []string{
"", "'('", "')'", "'['", "']'", "','", "", "'!='", "'<>'", "'<'", "'<='",
"'>'", "'>='",
}
staticData.SymbolicNames = []string{
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
"KEY", "WS", "FREETEXT",
}
staticData.RuleNames = []string{
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
"KEY", "WS", "DIGIT", "FREETEXT",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 34, 280, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 1, 0, 1, 0, 1, 1,
1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 85, 8,
5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1,
10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13,
1, 13, 1, 13, 1, 13, 4, 13, 112, 8, 13, 11, 13, 12, 13, 113, 1, 13, 1,
13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15,
1, 15, 1, 15, 1, 15, 4, 15, 131, 8, 15, 11, 15, 12, 15, 132, 1, 15, 1,
15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 155, 8,
17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19,
1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 172, 8, 19, 1, 20, 1, 20, 1,
20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23,
1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1,
25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27,
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1,
28, 1, 28, 1, 28, 1, 28, 1, 28, 3, 28, 223, 8, 28, 1, 29, 4, 29, 226, 8,
29, 11, 29, 12, 29, 227, 1, 29, 1, 29, 4, 29, 232, 8, 29, 11, 29, 12, 29,
233, 3, 29, 236, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 242, 8, 30,
10, 30, 12, 30, 245, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 252,
8, 30, 10, 30, 12, 30, 255, 9, 30, 1, 30, 3, 30, 258, 8, 30, 1, 31, 1,
31, 5, 31, 262, 8, 31, 10, 31, 12, 31, 265, 9, 31, 1, 32, 4, 32, 268, 8,
32, 11, 32, 12, 32, 269, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 4, 34, 277,
8, 34, 11, 34, 12, 34, 278, 0, 0, 35, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11,
6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15,
31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24,
49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33,
67, 0, 69, 34, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105,
2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110,
2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2,
0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0,
83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0,
80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68,
68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85,
85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39,
92, 92, 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 6, 0, 46, 46, 48, 57, 65,
91, 93, 93, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57,
7, 0, 9, 10, 13, 13, 32, 34, 39, 41, 60, 62, 91, 91, 93, 93, 295, 0, 1,
1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9,
1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0,
17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0,
0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0,
0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0,
0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1,
0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55,
1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0,
63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 1, 71, 1, 0, 0, 0,
3, 73, 1, 0, 0, 0, 5, 75, 1, 0, 0, 0, 7, 77, 1, 0, 0, 0, 9, 79, 1, 0, 0,
0, 11, 84, 1, 0, 0, 0, 13, 86, 1, 0, 0, 0, 15, 89, 1, 0, 0, 0, 17, 92,
1, 0, 0, 0, 19, 94, 1, 0, 0, 0, 21, 97, 1, 0, 0, 0, 23, 99, 1, 0, 0, 0,
25, 102, 1, 0, 0, 0, 27, 107, 1, 0, 0, 0, 29, 120, 1, 0, 0, 0, 31, 126,
1, 0, 0, 0, 33, 140, 1, 0, 0, 0, 35, 148, 1, 0, 0, 0, 37, 156, 1, 0, 0,
0, 39, 163, 1, 0, 0, 0, 41, 173, 1, 0, 0, 0, 43, 176, 1, 0, 0, 0, 45, 180,
1, 0, 0, 0, 47, 184, 1, 0, 0, 0, 49, 187, 1, 0, 0, 0, 51, 191, 1, 0, 0,
0, 53, 198, 1, 0, 0, 0, 55, 205, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 225,
1, 0, 0, 0, 61, 257, 1, 0, 0, 0, 63, 259, 1, 0, 0, 0, 65, 267, 1, 0, 0,
0, 67, 273, 1, 0, 0, 0, 69, 276, 1, 0, 0, 0, 71, 72, 5, 40, 0, 0, 72, 2,
1, 0, 0, 0, 73, 74, 5, 41, 0, 0, 74, 4, 1, 0, 0, 0, 75, 76, 5, 91, 0, 0,
76, 6, 1, 0, 0, 0, 77, 78, 5, 93, 0, 0, 78, 8, 1, 0, 0, 0, 79, 80, 5, 44,
0, 0, 80, 10, 1, 0, 0, 0, 81, 85, 5, 61, 0, 0, 82, 83, 5, 61, 0, 0, 83,
85, 5, 61, 0, 0, 84, 81, 1, 0, 0, 0, 84, 82, 1, 0, 0, 0, 85, 12, 1, 0,
0, 0, 86, 87, 5, 33, 0, 0, 87, 88, 5, 61, 0, 0, 88, 14, 1, 0, 0, 0, 89,
90, 5, 60, 0, 0, 90, 91, 5, 62, 0, 0, 91, 16, 1, 0, 0, 0, 92, 93, 5, 60,
0, 0, 93, 18, 1, 0, 0, 0, 94, 95, 5, 60, 0, 0, 95, 96, 5, 61, 0, 0, 96,
20, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 22, 1, 0, 0, 0, 99, 100, 5, 62,
0, 0, 100, 101, 5, 61, 0, 0, 101, 24, 1, 0, 0, 0, 102, 103, 7, 0, 0, 0,
103, 104, 7, 1, 0, 0, 104, 105, 7, 2, 0, 0, 105, 106, 7, 3, 0, 0, 106,
26, 1, 0, 0, 0, 107, 108, 7, 4, 0, 0, 108, 109, 7, 5, 0, 0, 109, 111, 7,
6, 0, 0, 110, 112, 7, 7, 0, 0, 111, 110, 1, 0, 0, 0, 112, 113, 1, 0, 0,
0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115,
116, 7, 0, 0, 0, 116, 117, 7, 1, 0, 0, 117, 118, 7, 2, 0, 0, 118, 119,
7, 3, 0, 0, 119, 28, 1, 0, 0, 0, 120, 121, 7, 1, 0, 0, 121, 122, 7, 0,
0, 0, 122, 123, 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0,
125, 30, 1, 0, 0, 0, 126, 127, 7, 4, 0, 0, 127, 128, 7, 5, 0, 0, 128, 130,
7, 6, 0, 0, 129, 131, 7, 7, 0, 0, 130, 129, 1, 0, 0, 0, 131, 132, 1, 0,
0, 0, 132, 130, 1, 0, 0, 0, 132, 133, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0,
134, 135, 7, 1, 0, 0, 135, 136, 7, 0, 0, 0, 136, 137, 7, 1, 0, 0, 137,
138, 7, 2, 0, 0, 138, 139, 7, 3, 0, 0, 139, 32, 1, 0, 0, 0, 140, 141, 7,
8, 0, 0, 141, 142, 7, 3, 0, 0, 142, 143, 7, 6, 0, 0, 143, 144, 7, 9, 0,
0, 144, 145, 7, 3, 0, 0, 145, 146, 7, 3, 0, 0, 146, 147, 7, 4, 0, 0, 147,
34, 1, 0, 0, 0, 148, 149, 7, 3, 0, 0, 149, 150, 7, 10, 0, 0, 150, 151,
7, 1, 0, 0, 151, 152, 7, 11, 0, 0, 152, 154, 7, 6, 0, 0, 153, 155, 7, 11,
0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 36, 1, 0, 0, 0,
156, 157, 7, 12, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159, 7, 13, 0, 0, 159,
160, 7, 3, 0, 0, 160, 161, 7, 10, 0, 0, 161, 162, 7, 14, 0, 0, 162, 38,
1, 0, 0, 0, 163, 164, 7, 15, 0, 0, 164, 165, 7, 5, 0, 0, 165, 166, 7, 4,
0, 0, 166, 167, 7, 6, 0, 0, 167, 168, 7, 16, 0, 0, 168, 169, 7, 1, 0, 0,
169, 171, 7, 4, 0, 0, 170, 172, 7, 11, 0, 0, 171, 170, 1, 0, 0, 0, 171,
172, 1, 0, 0, 0, 172, 40, 1, 0, 0, 0, 173, 174, 7, 1, 0, 0, 174, 175, 7,
4, 0, 0, 175, 42, 1, 0, 0, 0, 176, 177, 7, 4, 0, 0, 177, 178, 7, 5, 0,
0, 178, 179, 7, 6, 0, 0, 179, 44, 1, 0, 0, 0, 180, 181, 7, 16, 0, 0, 181,
182, 7, 4, 0, 0, 182, 183, 7, 17, 0, 0, 183, 46, 1, 0, 0, 0, 184, 185,
7, 5, 0, 0, 185, 186, 7, 12, 0, 0, 186, 48, 1, 0, 0, 0, 187, 188, 7, 18,
0, 0, 188, 189, 7, 16, 0, 0, 189, 190, 7, 11, 0, 0, 190, 50, 1, 0, 0, 0,
191, 192, 7, 18, 0, 0, 192, 193, 7, 16, 0, 0, 193, 194, 7, 11, 0, 0, 194,
195, 7, 16, 0, 0, 195, 196, 7, 4, 0, 0, 196, 197, 7, 19, 0, 0, 197, 52,
1, 0, 0, 0, 198, 199, 7, 18, 0, 0, 199, 200, 7, 16, 0, 0, 200, 201, 7,
11, 0, 0, 201, 202, 7, 16, 0, 0, 202, 203, 7, 0, 0, 0, 203, 204, 7, 0,
0, 0, 204, 54, 1, 0, 0, 0, 205, 206, 7, 18, 0, 0, 206, 207, 7, 16, 0, 0,
207, 208, 7, 11, 0, 0, 208, 209, 7, 4, 0, 0, 209, 210, 7, 5, 0, 0, 210,
211, 7, 4, 0, 0, 211, 212, 7, 3, 0, 0, 212, 56, 1, 0, 0, 0, 213, 214, 7,
6, 0, 0, 214, 215, 7, 12, 0, 0, 215, 216, 7, 20, 0, 0, 216, 223, 7, 3,
0, 0, 217, 218, 7, 21, 0, 0, 218, 219, 7, 16, 0, 0, 219, 220, 7, 0, 0,
0, 220, 221, 7, 11, 0, 0, 221, 223, 7, 3, 0, 0, 222, 213, 1, 0, 0, 0, 222,
217, 1, 0, 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 67, 33, 0, 225, 224,
1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 227, 228, 1, 0,
0, 0, 228, 235, 1, 0, 0, 0, 229, 231, 5, 46, 0, 0, 230, 232, 3, 67, 33,
0, 231, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 233,
234, 1, 0, 0, 0, 234, 236, 1, 0, 0, 0, 235, 229, 1, 0, 0, 0, 235, 236,
1, 0, 0, 0, 236, 60, 1, 0, 0, 0, 237, 243, 5, 34, 0, 0, 238, 242, 8, 22,
0, 0, 239, 240, 5, 92, 0, 0, 240, 242, 9, 0, 0, 0, 241, 238, 1, 0, 0, 0,
241, 239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243,
244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 258,
5, 34, 0, 0, 247, 253, 5, 39, 0, 0, 248, 252, 8, 23, 0, 0, 249, 250, 5,
92, 0, 0, 250, 252, 9, 0, 0, 0, 251, 248, 1, 0, 0, 0, 251, 249, 1, 0, 0,
0, 252, 255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254,
256, 1, 0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 258, 5, 39, 0, 0, 257, 237,
1, 0, 0, 0, 257, 247, 1, 0, 0, 0, 258, 62, 1, 0, 0, 0, 259, 263, 7, 24,
0, 0, 260, 262, 7, 25, 0, 0, 261, 260, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0,
263, 261, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 64, 1, 0, 0, 0, 265, 263,
1, 0, 0, 0, 266, 268, 7, 26, 0, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0,
0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0,
271, 272, 6, 32, 0, 0, 272, 66, 1, 0, 0, 0, 273, 274, 7, 27, 0, 0, 274,
68, 1, 0, 0, 0, 275, 277, 8, 28, 0, 0, 276, 275, 1, 0, 0, 0, 277, 278,
1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 70, 1, 0,
0, 0, 18, 0, 84, 113, 132, 154, 171, 222, 227, 233, 235, 241, 243, 251,
253, 257, 263, 269, 278, 1, 6, 0, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// FilterQueryLexerInit initializes any static state used to implement FilterQueryLexer. By default the
// static state used to implement the lexer is lazily initialized during the first call to
// NewFilterQueryLexer(). You can call this function if you wish to initialize the static state ahead
// of time.
func FilterQueryLexerInit() {
staticData := &FilterQueryLexerLexerStaticData
staticData.once.Do(filterquerylexerLexerInit)
}
// NewFilterQueryLexer produces a new lexer instance for the optional input antlr.CharStream.
func NewFilterQueryLexer(input antlr.CharStream) *FilterQueryLexer {
FilterQueryLexerInit()
l := new(FilterQueryLexer)
l.BaseLexer = antlr.NewBaseLexer(input)
staticData := &FilterQueryLexerLexerStaticData
l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
l.channelNames = staticData.ChannelNames
l.modeNames = staticData.ModeNames
l.RuleNames = staticData.RuleNames
l.LiteralNames = staticData.LiteralNames
l.SymbolicNames = staticData.SymbolicNames
l.GrammarFileName = "FilterQuery.g4"
// TODO: l.EOF = antlr.TokenEOF
return l
}
// FilterQueryLexer tokens.
const (
FilterQueryLexerLPAREN = 1
FilterQueryLexerRPAREN = 2
FilterQueryLexerLBRACK = 3
FilterQueryLexerRBRACK = 4
FilterQueryLexerCOMMA = 5
FilterQueryLexerEQUALS = 6
FilterQueryLexerNOT_EQUALS = 7
FilterQueryLexerNEQ = 8
FilterQueryLexerLT = 9
FilterQueryLexerLE = 10
FilterQueryLexerGT = 11
FilterQueryLexerGE = 12
FilterQueryLexerLIKE = 13
FilterQueryLexerNOT_LIKE = 14
FilterQueryLexerILIKE = 15
FilterQueryLexerNOT_ILIKE = 16
FilterQueryLexerBETWEEN = 17
FilterQueryLexerEXISTS = 18
FilterQueryLexerREGEXP = 19
FilterQueryLexerCONTAINS = 20
FilterQueryLexerIN = 21
FilterQueryLexerNOT = 22
FilterQueryLexerAND = 23
FilterQueryLexerOR = 24
FilterQueryLexerHAS = 25
FilterQueryLexerHASANY = 26
FilterQueryLexerHASALL = 27
FilterQueryLexerHASNONE = 28
FilterQueryLexerBOOL = 29
FilterQueryLexerNUMBER = 30
FilterQueryLexerQUOTED_TEXT = 31
FilterQueryLexerKEY = 32
FilterQueryLexerWS = 33
FilterQueryLexerFREETEXT = 34
)
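
A hedged, illustrative usage sketch (not part of the generated file): assuming the companion parser follows standard ANTLR Go naming, with a NewFilterQueryParser constructor and a Query() entry rule, a filter expression can be tokenized and parsed roughly like this.

// Illustrative sketch, not generated code. It assumes it lives alongside the
// generated files in package parser and that NewFilterQueryParser / Query()
// exist with their usual ANTLR-generated signatures.
package parser

import (
	"fmt"

	"github.com/antlr4-go/antlr/v4"
)

func parseFilterExample() {
	FilterQueryLexerInit() // optional: initialize lexer static state ahead of first use

	input := antlr.NewInputStream(`service.name = "api" AND duration > 100`)
	lexer := NewFilterQueryLexer(input)
	tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)

	p := NewFilterQueryParser(tokens) // assumed generated constructor
	tree := p.Query()                 // assumed top-level 'query' rule
	fmt.Println(tree.ToStringTree(nil, p))
}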


@@ -0,0 +1,112 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // FilterQuery
import "github.com/antlr4-go/antlr/v4"
// FilterQueryListener is a complete listener for a parse tree produced by FilterQueryParser.
type FilterQueryListener interface {
antlr.ParseTreeListener
// EnterQuery is called when entering the query production.
EnterQuery(c *QueryContext)
// EnterExpression is called when entering the expression production.
EnterExpression(c *ExpressionContext)
// EnterOrExpression is called when entering the orExpression production.
EnterOrExpression(c *OrExpressionContext)
// EnterAndExpression is called when entering the andExpression production.
EnterAndExpression(c *AndExpressionContext)
// EnterUnaryExpression is called when entering the unaryExpression production.
EnterUnaryExpression(c *UnaryExpressionContext)
// EnterPrimary is called when entering the primary production.
EnterPrimary(c *PrimaryContext)
// EnterComparison is called when entering the comparison production.
EnterComparison(c *ComparisonContext)
// EnterInClause is called when entering the inClause production.
EnterInClause(c *InClauseContext)
// EnterNotInClause is called when entering the notInClause production.
EnterNotInClause(c *NotInClauseContext)
// EnterValueList is called when entering the valueList production.
EnterValueList(c *ValueListContext)
// EnterFullText is called when entering the fullText production.
EnterFullText(c *FullTextContext)
// EnterFunctionCall is called when entering the functionCall production.
EnterFunctionCall(c *FunctionCallContext)
// EnterFunctionParamList is called when entering the functionParamList production.
EnterFunctionParamList(c *FunctionParamListContext)
// EnterFunctionParam is called when entering the functionParam production.
EnterFunctionParam(c *FunctionParamContext)
// EnterArray is called when entering the array production.
EnterArray(c *ArrayContext)
// EnterValue is called when entering the value production.
EnterValue(c *ValueContext)
// EnterKey is called when entering the key production.
EnterKey(c *KeyContext)
// ExitQuery is called when exiting the query production.
ExitQuery(c *QueryContext)
// ExitExpression is called when exiting the expression production.
ExitExpression(c *ExpressionContext)
// ExitOrExpression is called when exiting the orExpression production.
ExitOrExpression(c *OrExpressionContext)
// ExitAndExpression is called when exiting the andExpression production.
ExitAndExpression(c *AndExpressionContext)
// ExitUnaryExpression is called when exiting the unaryExpression production.
ExitUnaryExpression(c *UnaryExpressionContext)
// ExitPrimary is called when exiting the primary production.
ExitPrimary(c *PrimaryContext)
// ExitComparison is called when exiting the comparison production.
ExitComparison(c *ComparisonContext)
// ExitInClause is called when exiting the inClause production.
ExitInClause(c *InClauseContext)
// ExitNotInClause is called when exiting the notInClause production.
ExitNotInClause(c *NotInClauseContext)
// ExitValueList is called when exiting the valueList production.
ExitValueList(c *ValueListContext)
// ExitFullText is called when exiting the fullText production.
ExitFullText(c *FullTextContext)
// ExitFunctionCall is called when exiting the functionCall production.
ExitFunctionCall(c *FunctionCallContext)
// ExitFunctionParamList is called when exiting the functionParamList production.
ExitFunctionParamList(c *FunctionParamListContext)
// ExitFunctionParam is called when exiting the functionParam production.
ExitFunctionParam(c *FunctionParamContext)
// ExitArray is called when exiting the array production.
ExitArray(c *ArrayContext)
// ExitValue is called when exiting the value production.
ExitValue(c *ValueContext)
// ExitKey is called when exiting the key production.
ExitKey(c *KeyContext)
}
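
A hedged sketch of how this listener interface is typically consumed, assuming ANTLR also emitted the usual BaseFilterQueryListener companion type (not shown in this diff): embed the base, override only the callbacks of interest, and walk a parsed tree with it.

// Illustrative sketch, not generated code. BaseFilterQueryListener is assumed
// to be part of the generated output; its no-op methods cover every callback
// this partial listener does not override.
type comparisonCounter struct {
	*BaseFilterQueryListener
	count int
}

func (l *comparisonCounter) EnterComparison(c *ComparisonContext) {
	l.count++
}

// Walking a parsed tree with it:
//
//	listener := &comparisonCounter{BaseFilterQueryListener: &BaseFilterQueryListener{}}
//	antlr.ParseTreeWalkerDefault.Walk(listener, tree)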

File diff suppressed because it is too large


@@ -0,0 +1,61 @@
// Code generated from grammar/FilterQuery.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // FilterQuery
import "github.com/antlr4-go/antlr/v4"
// A complete Visitor for a parse tree produced by FilterQueryParser.
type FilterQueryVisitor interface {
antlr.ParseTreeVisitor
// Visit a parse tree produced by FilterQueryParser#query.
VisitQuery(ctx *QueryContext) interface{}
// Visit a parse tree produced by FilterQueryParser#expression.
VisitExpression(ctx *ExpressionContext) interface{}
// Visit a parse tree produced by FilterQueryParser#orExpression.
VisitOrExpression(ctx *OrExpressionContext) interface{}
// Visit a parse tree produced by FilterQueryParser#andExpression.
VisitAndExpression(ctx *AndExpressionContext) interface{}
// Visit a parse tree produced by FilterQueryParser#unaryExpression.
VisitUnaryExpression(ctx *UnaryExpressionContext) interface{}
// Visit a parse tree produced by FilterQueryParser#primary.
VisitPrimary(ctx *PrimaryContext) interface{}
// Visit a parse tree produced by FilterQueryParser#comparison.
VisitComparison(ctx *ComparisonContext) interface{}
// Visit a parse tree produced by FilterQueryParser#inClause.
VisitInClause(ctx *InClauseContext) interface{}
// Visit a parse tree produced by FilterQueryParser#notInClause.
VisitNotInClause(ctx *NotInClauseContext) interface{}
// Visit a parse tree produced by FilterQueryParser#valueList.
VisitValueList(ctx *ValueListContext) interface{}
// Visit a parse tree produced by FilterQueryParser#fullText.
VisitFullText(ctx *FullTextContext) interface{}
// Visit a parse tree produced by FilterQueryParser#functionCall.
VisitFunctionCall(ctx *FunctionCallContext) interface{}
// Visit a parse tree produced by FilterQueryParser#functionParamList.
VisitFunctionParamList(ctx *FunctionParamListContext) interface{}
// Visit a parse tree produced by FilterQueryParser#functionParam.
VisitFunctionParam(ctx *FunctionParamContext) interface{}
// Visit a parse tree produced by FilterQueryParser#array.
VisitArray(ctx *ArrayContext) interface{}
// Visit a parse tree produced by FilterQueryParser#value.
VisitValue(ctx *ValueContext) interface{}
// Visit a parse tree produced by FilterQueryParser#key.
VisitKey(ctx *KeyContext) interface{}
}
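
And the visitor side, again hedged on the generated BaseFilterQueryVisitor existing (ANTLR's usual -visitor companion): embed the base so unhandled productions fall through to its defaults, and override only what you need.

// Illustrative sketch, not generated code. BaseFilterQueryVisitor is assumed
// to be generated alongside this interface.
type keyCollector struct {
	*BaseFilterQueryVisitor
	keys []string
}

func (v *keyCollector) VisitKey(ctx *KeyContext) interface{} {
	v.keys = append(v.keys, ctx.GetText())
	return nil
}

// Dispatch goes through the generated contexts, e.g. keyCtx.Accept(collector)
// reaches VisitKey because keyCollector satisfies FilterQueryVisitor via the
// embedded base.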


@@ -0,0 +1,22 @@
ARG ALPINE_SHA="pass-a-valid-docker-sha-otherwise-this-will-fail"
FROM alpine@sha256:${ALPINE_SHA}
LABEL maintainer="signoz"
WORKDIR /root
ARG OS="linux"
ARG ARCH
RUN apk update && \
apk add ca-certificates && \
rm -rf /var/cache/apk/*
COPY ./target/${OS}-${ARCH}/signoz-community /root/signoz-community
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz-community
ENTRYPOINT ["./signoz-community"]
CMD ["-config", "/root/config/prometheus.yml"]
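For reference, a hedged example invocation (digest, architecture, and tag are placeholders, and the community binary under target/ plus frontend/build/ are expected to exist beforehand): docker build --build-arg ALPINE_SHA=<alpine-image-digest> --build-arg ARCH=amd64 -t signoz-community ., adding -f to point at this Dockerfile if it is not the default one.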

File diff suppressed because it is too large


@@ -8,68 +8,59 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/google/uuid"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type cloudProviderAccountsRepository interface {
listConnected(ctx context.Context, cloudProvider string) ([]AccountRecord, *model.ApiError)
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
get(ctx context.Context, cloudProvider string, id string) (*AccountRecord, *model.ApiError)
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
getConnectedCloudAccount(
ctx context.Context, cloudProvider string, cloudAccountId string,
) (*AccountRecord, *model.ApiError)
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
// Insert an account or update it by (cloudProvider, id)
// for specified non-empty fields
upsert(
ctx context.Context,
cloudProvider string,
orgId string,
provider string,
id *string,
config *AccountConfig,
cloudAccountId *string,
agentReport *AgentReport,
config *types.AccountConfig,
accountId *string,
agentReport *types.AgentReport,
removedAt *time.Time,
) (*AccountRecord, *model.ApiError)
) (*types.CloudIntegration, *model.ApiError)
}
func newCloudProviderAccountsRepository(db *sqlx.DB) (
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
*cloudProviderAccountsSQLRepository, error,
) {
return &cloudProviderAccountsSQLRepository{
db: db,
store: store,
}, nil
}
type cloudProviderAccountsSQLRepository struct {
db *sqlx.DB
store sqlstore.SQLStore
}
func (r *cloudProviderAccountsSQLRepository) listConnected(
ctx context.Context, cloudProvider string,
) ([]AccountRecord, *model.ApiError) {
accounts := []AccountRecord{}
ctx context.Context, orgId string, cloudProvider string,
) ([]types.CloudIntegration, *model.ApiError) {
accounts := []types.CloudIntegration{}
err := r.store.BunDB().NewSelect().
Model(&accounts).
Where("org_id = ?", orgId).
Where("provider = ?", cloudProvider).
Where("removed_at is NULL").
Where("account_id is not NULL").
Where("last_agent_report is not NULL").
Order("created_at").
Scan(ctx)
err := r.db.SelectContext(
ctx, &accounts, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and removed_at is NULL
and cloud_account_id is not NULL
and last_agent_report_json is not NULL
order by created_at
`, cloudProvider,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query connected cloud accounts: %w", err,
@@ -80,27 +71,16 @@ func (r *cloudProviderAccountsSQLRepository) listConnected(
}
func (r *cloudProviderAccountsSQLRepository) get(
ctx context.Context, cloudProvider string, id string,
) (*AccountRecord, *model.ApiError) {
var result AccountRecord
ctx context.Context, orgId string, provider string, id string,
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
err := r.db.GetContext(
ctx, &result, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and id=$2
`,
cloudProvider, id,
)
err := r.store.BunDB().NewSelect().
Model(&result).
Where("org_id = ?", orgId).
Where("provider = ?", provider).
Where("id = ?", id).
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
@@ -116,33 +96,22 @@ func (r *cloudProviderAccountsSQLRepository) get(
}
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
ctx context.Context, cloudProvider string, cloudAccountId string,
) (*AccountRecord, *model.ApiError) {
var result AccountRecord
ctx context.Context, orgId string, provider string, accountId string,
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
err := r.db.GetContext(
ctx, &result, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and cloud_account_id=$2
and last_agent_report_json is not NULL
and removed_at is NULL
`,
cloudProvider, cloudAccountId,
)
err := r.store.BunDB().NewSelect().
Model(&result).
Where("org_id = ?", orgId).
Where("provider = ?", provider).
Where("account_id = ?", accountId).
Where("last_agent_report is not NULL").
Where("removed_at is NULL").
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find connected cloud account %s", cloudAccountId,
"couldn't find connected cloud account %s", accountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
@@ -155,17 +124,18 @@ func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
func (r *cloudProviderAccountsSQLRepository) upsert(
ctx context.Context,
cloudProvider string,
orgId string,
provider string,
id *string,
config *AccountConfig,
cloudAccountId *string,
agentReport *AgentReport,
config *types.AccountConfig,
accountId *string,
agentReport *types.AgentReport,
removedAt *time.Time,
) (*AccountRecord, *model.ApiError) {
) (*types.CloudIntegration, *model.ApiError) {
// Insert
if id == nil {
newId := uuid.NewString()
id = &newId
temp := valuer.GenerateUUID().StringValue()
id = &temp
}
// Prepare clause for setting values in `on conflict do update`
@@ -176,19 +146,19 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
if config != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("config_json"),
onConflictSetStmts, setColStatement("config"),
)
}
if cloudAccountId != nil {
if accountId != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("cloud_account_id"),
onConflictSetStmts, setColStatement("account_id"),
)
}
if agentReport != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("last_agent_report_json"),
onConflictSetStmts, setColStatement("last_agent_report"),
)
}
@@ -198,37 +168,45 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
)
}
// set updated_at to current timestamp if it's an upsert
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("updated_at"),
)
onConflictClause := ""
if len(onConflictSetStmts) > 0 {
onConflictClause = fmt.Sprintf(
"on conflict(cloud_provider, id) do update SET\n%s",
"conflict(id, provider, org_id) do update SET\n%s",
strings.Join(onConflictSetStmts, ",\n"),
)
}
insertQuery := fmt.Sprintf(`
INSERT INTO cloud_integrations_accounts (
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
removed_at
) values ($1, $2, $3, $4, $5, $6)
%s`, onConflictClause,
)
integration := types.CloudIntegration{
OrgID: orgId,
Provider: provider,
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Config: config,
AccountID: accountId,
LastAgentReport: agentReport,
RemovedAt: removedAt,
}
_, dbErr := r.store.BunDB().NewInsert().
Model(&integration).
On(onConflictClause).
Exec(ctx)
_, dbErr := r.db.ExecContext(
ctx, insertQuery,
cloudProvider, id, config, cloudAccountId, agentReport, removedAt,
)
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud account record: %w", dbErr,
))
}
upsertedAccount, apiErr := r.get(ctx, cloudProvider, *id)
upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
if apiErr != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted account by id: %w", apiErr.ToError(),


@@ -33,12 +33,12 @@ type Controller struct {
func NewController(sqlStore sqlstore.SQLStore) (
*Controller, error,
) {
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore.SQLxDB())
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore)
if err != nil {
return nil, fmt.Errorf("couldn't create cloud provider accounts repo: %w", err)
}
serviceConfigRepo, err := newServiceConfigRepository(sqlStore.SQLxDB())
serviceConfigRepo, err := newServiceConfigRepository(sqlStore)
if err != nil {
return nil, fmt.Errorf("couldn't create cloud provider service config repo: %w", err)
}
@@ -49,19 +49,12 @@ func NewController(sqlStore sqlstore.SQLStore) (
}, nil
}
type Account struct {
Id string `json:"id"`
CloudAccountId string `json:"cloud_account_id"`
Config AccountConfig `json:"config"`
Status AccountStatus `json:"status"`
}
type ConnectedAccountsListResponse struct {
Accounts []Account `json:"accounts"`
Accounts []types.Account `json:"accounts"`
}
func (c *Controller) ListConnectedAccounts(
ctx context.Context, cloudProvider string,
ctx context.Context, orgId string, cloudProvider string,
) (
*ConnectedAccountsListResponse, *model.ApiError,
) {
@@ -69,14 +62,14 @@ func (c *Controller) ListConnectedAccounts(
return nil, apiErr
}
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgId, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud accounts")
}
connectedAccounts := []Account{}
connectedAccounts := []types.Account{}
for _, a := range accountRecords {
connectedAccounts = append(connectedAccounts, a.account())
connectedAccounts = append(connectedAccounts, a.Account())
}
return &ConnectedAccountsListResponse{
@@ -88,7 +81,7 @@ type GenerateConnectionUrlRequest struct {
// Optional. To be specified for updates.
AccountId *string `json:"account_id,omitempty"`
AccountConfig AccountConfig `json:"account_config"`
AccountConfig types.AccountConfig `json:"account_config"`
AgentConfig SigNozAgentConfig `json:"agent_config"`
}
@@ -109,7 +102,7 @@ type GenerateConnectionUrlResponse struct {
}
func (c *Controller) GenerateConnectionUrl(
ctx context.Context, cloudProvider string, req GenerateConnectionUrlRequest,
ctx context.Context, orgId string, cloudProvider string, req GenerateConnectionUrlRequest,
) (*GenerateConnectionUrlResponse, *model.ApiError) {
// Account connection with a simple connection URL may not be available for all providers.
if cloudProvider != "aws" {
@@ -117,7 +110,7 @@ func (c *Controller) GenerateConnectionUrl(
}
account, apiErr := c.accountsRepo.upsert(
ctx, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
ctx, orgId, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
@@ -135,7 +128,7 @@ func (c *Controller) GenerateConnectionUrl(
"param_SigNozIntegrationAgentVersion": agentVersion,
"param_SigNozApiUrl": req.AgentConfig.SigNozAPIUrl,
"param_SigNozApiKey": req.AgentConfig.SigNozAPIKey,
"param_SigNozAccountId": account.Id,
"param_SigNozAccountId": account.ID.StringValue(),
"param_IngestionUrl": req.AgentConfig.IngestionUrl,
"param_IngestionKey": req.AgentConfig.IngestionKey,
"stackName": "signoz-integration",
@@ -148,19 +141,19 @@ func (c *Controller) GenerateConnectionUrl(
}
return &GenerateConnectionUrlResponse{
AccountId: account.Id,
AccountId: account.ID.StringValue(),
ConnectionUrl: connectionUrl,
}, nil
}
type AccountStatusResponse struct {
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status AccountStatus `json:"status"`
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status types.AccountStatus `json:"status"`
}
func (c *Controller) GetAccountStatus(
ctx context.Context, cloudProvider string, accountId string,
ctx context.Context, orgId string, cloudProvider string, accountId string,
) (
*AccountStatusResponse, *model.ApiError,
) {
@@ -168,23 +161,23 @@ func (c *Controller) GetAccountStatus(
return nil, apiErr
}
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
if apiErr != nil {
return nil, apiErr
}
resp := AccountStatusResponse{
Id: account.Id,
CloudAccountId: account.CloudAccountId,
Status: account.status(),
Id: account.ID.StringValue(),
CloudAccountId: account.AccountID,
Status: account.Status(),
}
return &resp, nil
}
type AgentCheckInRequest struct {
AccountId string `json:"account_id"`
CloudAccountId string `json:"cloud_account_id"`
ID string `json:"account_id"`
AccountID string `json:"cloud_account_id"`
// Arbitrary cloud specific Agent data
Data map[string]any `json:"data,omitempty"`
}
@@ -204,35 +197,35 @@ type IntegrationConfigForAgent struct {
}
func (c *Controller) CheckInAsAgent(
ctx context.Context, cloudProvider string, req AgentCheckInRequest,
ctx context.Context, orgId string, cloudProvider string, req AgentCheckInRequest,
) (*AgentCheckInResponse, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
existingAccount, apiErr := c.accountsRepo.get(ctx, cloudProvider, req.AccountId)
if existingAccount != nil && existingAccount.CloudAccountId != nil && *existingAccount.CloudAccountId != req.CloudAccountId {
existingAccount, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, req.ID)
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
cloudProvider, req.CloudAccountId, existingAccount.Id, cloudProvider, *existingAccount.CloudAccountId,
cloudProvider, req.AccountID, existingAccount.ID.StringValue(), cloudProvider, *existingAccount.AccountID,
))
}
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, cloudProvider, req.CloudAccountId)
if existingAccount != nil && existingAccount.Id != req.AccountId {
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, orgId, cloudProvider, req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
cloudProvider, req.CloudAccountId, req.AccountId, existingAccount.Id,
cloudProvider, req.AccountID, req.ID, existingAccount.ID.StringValue(),
))
}
agentReport := AgentReport{
agentReport := types.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, apiErr := c.accountsRepo.upsert(
ctx, cloudProvider, &req.AccountId, nil, &req.CloudAccountId, &agentReport, nil,
ctx, orgId, cloudProvider, &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
@@ -265,7 +258,7 @@ func (c *Controller) CheckInAsAgent(
}
svcConfigs, apiErr := c.serviceConfigRepo.getAllForAccount(
ctx, cloudProvider, *account.CloudAccountId,
ctx, orgId, account.ID.StringValue(),
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -298,54 +291,55 @@ func (c *Controller) CheckInAsAgent(
}
return &AgentCheckInResponse{
AccountId: account.Id,
CloudAccountId: *account.CloudAccountId,
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: agentConfig,
}, nil
}
type UpdateAccountConfigRequest struct {
Config AccountConfig `json:"config"`
Config types.AccountConfig `json:"config"`
}
func (c *Controller) UpdateAccountConfig(
ctx context.Context,
orgId string,
cloudProvider string,
accountId string,
req UpdateAccountConfigRequest,
) (*Account, *model.ApiError) {
) (*types.Account, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
accountRecord, apiErr := c.accountsRepo.upsert(
ctx, cloudProvider, &accountId, &req.Config, nil, nil, nil,
ctx, orgId, cloudProvider, &accountId, &req.Config, nil, nil, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
}
account := accountRecord.account()
account := accountRecord.Account()
return &account, nil
}
func (c *Controller) DisconnectAccount(
ctx context.Context, cloudProvider string, accountId string,
) (*AccountRecord, *model.ApiError) {
ctx context.Context, orgId string, cloudProvider string, accountId string,
) (*types.CloudIntegration, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
}
tsNow := time.Now()
account, apiErr = c.accountsRepo.upsert(
ctx, cloudProvider, &accountId, nil, nil, nil, &tsNow,
ctx, orgId, cloudProvider, &accountId, nil, nil, nil, &tsNow,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
@@ -360,6 +354,7 @@ type ListServicesResponse struct {
func (c *Controller) ListServices(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId *string,
) (*ListServicesResponse, *model.ApiError) {
@@ -373,10 +368,16 @@ func (c *Controller) ListServices(
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
}
svcConfigs := map[string]*CloudServiceConfig{}
svcConfigs := map[string]*types.CloudServiceConfig{}
if cloudAccountId != nil {
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't get active account")
}
svcConfigs, apiErr = c.serviceConfigRepo.getAllForAccount(
ctx, cloudProvider, *cloudAccountId,
ctx, orgID, activeAccount.ID.StringValue(),
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -400,6 +401,7 @@ func (c *Controller) ListServices(
func (c *Controller) GetServiceDetails(
ctx context.Context,
orgID string,
cloudProvider string,
serviceId string,
cloudAccountId *string,
@@ -415,8 +417,16 @@ func (c *Controller) GetServiceDetails(
}
if cloudAccountId != nil {
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't get active account")
}
config, apiErr := c.serviceConfigRepo.get(
ctx, cloudProvider, *cloudAccountId, serviceId,
ctx, orgID, activeAccount.ID.StringValue(), serviceId,
)
if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
return nil, model.WrapApiError(apiErr, "couldn't fetch service config")
@@ -425,15 +435,22 @@ func (c *Controller) GetServiceDetails(
if config != nil {
service.Config = config
enabled := false
if config.Metrics != nil && config.Metrics.Enabled {
// add links to service dashboards, making them clickable.
for i, d := range service.Assets.Dashboards {
dashboardUuid := c.dashboardUuid(
cloudProvider, serviceId, d.Id,
)
enabled = true
}
// add links to service dashboards, making them clickable.
for i, d := range service.Assets.Dashboards {
dashboardUuid := c.dashboardUuid(
cloudProvider, serviceId, d.Id,
)
if enabled {
service.Assets.Dashboards[i].Url = fmt.Sprintf(
"/dashboard/%s", dashboardUuid,
)
} else {
service.Assets.Dashboards[i].Url = ""
}
}
}
@@ -443,17 +460,18 @@ func (c *Controller) GetServiceDetails(
}
type UpdateServiceConfigRequest struct {
CloudAccountId string `json:"cloud_account_id"`
Config CloudServiceConfig `json:"config"`
CloudAccountId string `json:"cloud_account_id"`
Config types.CloudServiceConfig `json:"config"`
}
type UpdateServiceConfigResponse struct {
Id string `json:"id"`
Config CloudServiceConfig `json:"config"`
Id string `json:"id"`
Config types.CloudServiceConfig `json:"config"`
}
func (c *Controller) UpdateServiceConfig(
ctx context.Context,
orgID string,
cloudProvider string,
serviceId string,
req UpdateServiceConfigRequest,
@@ -465,7 +483,7 @@ func (c *Controller) UpdateServiceConfig(
// can only update config for a connected cloud account id
_, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, cloudProvider, req.CloudAccountId,
ctx, orgID, cloudProvider, req.CloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't find connected cloud account")
@@ -478,7 +496,7 @@ func (c *Controller) UpdateServiceConfig(
}
updatedConfig, apiErr := c.serviceConfigRepo.upsert(
ctx, cloudProvider, req.CloudAccountId, serviceId, req.Config,
ctx, orgID, cloudProvider, req.CloudAccountId, serviceId, req.Config,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't update service config")
@@ -492,13 +510,13 @@ func (c *Controller) UpdateServiceConfig(
// All dashboards that are available based on cloud integrations configuration
// across all cloud providers
func (c *Controller) AvailableDashboards(ctx context.Context) (
func (c *Controller) AvailableDashboards(ctx context.Context, orgId string) (
[]types.Dashboard, *model.ApiError,
) {
allDashboards := []types.Dashboard{}
for _, provider := range []string{"aws"} {
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, provider)
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, provider)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, fmt.Sprintf("couldn't get available dashboards for %s", provider),
@@ -512,10 +530,10 @@ func (c *Controller) AvailableDashboards(ctx context.Context) (
}
func (c *Controller) AvailableDashboardsForCloudProvider(
ctx context.Context, cloudProvider string,
ctx context.Context, orgID string, cloudProvider string,
) ([]types.Dashboard, *model.ApiError) {
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgID, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list connected cloud accounts")
}
@@ -524,9 +542,9 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.CloudAccountId != nil {
if ar.AccountID != nil {
configsBySvcId, apiErr := c.serviceConfigRepo.getAllForAccount(
ctx, cloudProvider, *ar.CloudAccountId,
ctx, orgID, ar.ID.StringValue(),
)
if apiErr != nil {
return nil, apiErr
@@ -574,6 +592,7 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
}
func (c *Controller) GetDashboardById(
ctx context.Context,
orgId string,
dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
cloudProvider, _, _, apiErr := c.parseDashboardUuid(dashboardUuid)
@@ -581,7 +600,7 @@ func (c *Controller) GetDashboardById(
return nil, apiErr
}
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, cloudProvider)
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, fmt.Sprintf("couldn't list available dashboards"),


@@ -4,23 +4,30 @@ import (
"context"
"testing"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)
func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
require := require.New(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// should be able to generate connection url for
// same account id again with updated config
testAccountConfig1 := AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
testAccountConfig1 := types.AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
resp1, apiErr := controller.GenerateConnectionUrl(
context.TODO(), "aws", GenerateConnectionUrlRequest{
context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
AccountConfig: testAccountConfig1,
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
},
@@ -31,14 +38,14 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
testAccountId := resp1.AccountId
account, apiErr := controller.accountsRepo.get(
context.TODO(), "aws", testAccountId,
context.TODO(), user.OrgID, "aws", testAccountId,
)
require.Nil(apiErr)
require.Equal(testAccountConfig1, *account.Config)
testAccountConfig2 := AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
testAccountConfig2 := types.AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
resp2, apiErr := controller.GenerateConnectionUrl(
context.TODO(), "aws", GenerateConnectionUrlRequest{
context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
AccountId: &testAccountId,
AccountConfig: testAccountConfig2,
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
@@ -48,7 +55,7 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
require.Equal(testAccountId, resp2.AccountId)
account, apiErr = controller.accountsRepo.get(
context.TODO(), "aws", testAccountId,
context.TODO(), user.OrgID, "aws", testAccountId,
)
require.Nil(apiErr)
require.Equal(testAccountConfig2, *account.Config)
@@ -56,18 +63,21 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
func TestAgentCheckIns(t *testing.T) {
require := require.New(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// An agent should be able to check in from a cloud account even
// if no connection url was requested (no account with agent's account id exists)
testAccountId1 := uuid.NewString()
testCloudAccountId1 := "546311234"
resp1, apiErr := controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId1,
CloudAccountId: testCloudAccountId1,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId1,
AccountID: testCloudAccountId1,
},
)
require.Nil(apiErr)
@@ -78,9 +88,9 @@ func TestAgentCheckIns(t *testing.T) {
// cloud account id for the same account.
testCloudAccountId2 := "99999999"
_, apiErr = controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId1,
CloudAccountId: testCloudAccountId2,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId1,
AccountID: testCloudAccountId2,
},
)
require.NotNil(apiErr)
@@ -90,18 +100,18 @@ func TestAgentCheckIns(t *testing.T) {
// i.e. there can't be 2 connected account records for the same cloud account id
// at any point in time.
existingConnected, apiErr := controller.accountsRepo.getConnectedCloudAccount(
context.TODO(), "aws", testCloudAccountId1,
context.TODO(), user.OrgID, "aws", testCloudAccountId1,
)
require.Nil(apiErr)
require.NotNil(existingConnected)
require.Equal(testCloudAccountId1, *existingConnected.CloudAccountId)
require.Equal(testCloudAccountId1, *existingConnected.AccountID)
require.Nil(existingConnected.RemovedAt)
testAccountId2 := uuid.NewString()
_, apiErr = controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
},
)
require.NotNil(apiErr)
@@ -109,29 +119,29 @@ func TestAgentCheckIns(t *testing.T) {
// After disconnecting the existing account record, the agent should be able to
// connect again for a particular cloud account id
_, apiErr = controller.DisconnectAccount(
context.TODO(), "aws", testAccountId1,
context.TODO(), user.OrgID, "aws", testAccountId1,
)
existingConnected, apiErr = controller.accountsRepo.getConnectedCloudAccount(
context.TODO(), "aws", testCloudAccountId1,
context.TODO(), user.OrgID, "aws", testCloudAccountId1,
)
require.Nil(existingConnected)
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
_, apiErr = controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
},
)
require.Nil(apiErr)
// should be able to keep checking in
_, apiErr = controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
},
)
require.Nil(apiErr)
@@ -139,13 +149,16 @@ func TestAgentCheckIns(t *testing.T) {
func TestCantDisconnectNonExistentAccount(t *testing.T) {
require := require.New(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// Attempting to disconnect a non-existent account should return error
account, apiErr := controller.DisconnectAccount(
context.TODO(), "aws", uuid.NewString(),
context.TODO(), user.OrgID, "aws", uuid.NewString(),
)
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
@@ -154,15 +167,23 @@ func TestCantDisconnectNonExistentAccount(t *testing.T) {
func TestConfigureService(t *testing.T) {
require := require.New(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// create a connected account
testCloudAccountId := "546311234"
testConnectedAccount := makeTestConnectedAccount(t, user.OrgID, controller, testCloudAccountId)
require.Nil(testConnectedAccount.RemovedAt)
require.NotEmpty(testConnectedAccount.AccountID)
require.Equal(testCloudAccountId, *testConnectedAccount.AccountID)
// should start out without any service config
svcListResp, apiErr := controller.ListServices(
context.TODO(), "aws", &testCloudAccountId,
context.TODO(), user.OrgID, "aws", &testCloudAccountId,
)
require.Nil(apiErr)
@@ -170,25 +191,20 @@ func TestConfigureService(t *testing.T) {
require.Nil(svcListResp.Services[0].Config)
svcDetails, apiErr := controller.GetServiceDetails(
context.TODO(), "aws", testSvcId, &testCloudAccountId,
context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
)
require.Nil(apiErr)
require.Equal(testSvcId, svcDetails.Id)
require.Nil(svcDetails.Config)
// should be able to configure a service for a connected account
testConnectedAccount := makeTestConnectedAccount(t, controller, testCloudAccountId)
require.Nil(testConnectedAccount.RemovedAt)
require.NotNil(testConnectedAccount.CloudAccountId)
require.Equal(testCloudAccountId, *testConnectedAccount.CloudAccountId)
testSvcConfig := CloudServiceConfig{
Metrics: &CloudServiceMetricsConfig{
testSvcConfig := types.CloudServiceConfig{
Metrics: &types.CloudServiceMetricsConfig{
Enabled: true,
},
}
updateSvcConfigResp, apiErr := controller.UpdateServiceConfig(
context.TODO(), "aws", testSvcId, UpdateServiceConfigRequest{
context.TODO(), user.OrgID, "aws", testSvcId, UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
},
@@ -198,14 +214,14 @@ func TestConfigureService(t *testing.T) {
require.Equal(testSvcConfig, updateSvcConfigResp.Config)
svcDetails, apiErr = controller.GetServiceDetails(
context.TODO(), "aws", testSvcId, &testCloudAccountId,
context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
)
require.Nil(apiErr)
require.Equal(testSvcId, svcDetails.Id)
require.Equal(testSvcConfig, *svcDetails.Config)
svcListResp, apiErr = controller.ListServices(
context.TODO(), "aws", &testCloudAccountId,
context.TODO(), user.OrgID, "aws", &testCloudAccountId,
)
require.Nil(apiErr)
for _, svc := range svcListResp.Services {
@@ -216,12 +232,12 @@ func TestConfigureService(t *testing.T) {
// should not be able to configure service after cloud account has been disconnected
_, apiErr = controller.DisconnectAccount(
context.TODO(), "aws", testConnectedAccount.Id,
context.TODO(), user.OrgID, "aws", testConnectedAccount.ID.StringValue(),
)
require.Nil(apiErr)
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), "aws", testSvcId,
context.TODO(), user.OrgID, "aws", testSvcId,
UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
@@ -231,7 +247,7 @@ func TestConfigureService(t *testing.T) {
// should not be able to configure a service for a cloud account id that is not connected yet
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), "aws", testSvcId,
context.TODO(), user.OrgID, "aws", testSvcId,
UpdateServiceConfigRequest{
CloudAccountId: "9999999999",
Config: testSvcConfig,
@@ -241,7 +257,7 @@ func TestConfigureService(t *testing.T) {
// should not be able to set config for an unsupported service
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), "aws", "bad-service", UpdateServiceConfigRequest{
context.TODO(), user.OrgID, "aws", "bad-service", UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
},
@@ -250,22 +266,54 @@ func TestConfigureService(t *testing.T) {
}
func makeTestConnectedAccount(t *testing.T, controller *Controller, cloudAccountId string) *AccountRecord {
func makeTestConnectedAccount(t *testing.T, orgId string, controller *Controller, cloudAccountId string) *types.CloudIntegration {
require := require.New(t)
// a check in from SigNoz agent creates or updates a connected account.
testAccountId := uuid.NewString()
resp, apiErr := controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: cloudAccountId,
context.TODO(), orgId, "aws", AgentCheckInRequest{
ID: testAccountId,
AccountID: cloudAccountId,
},
)
require.Nil(apiErr)
require.Equal(testAccountId, resp.AccountId)
require.Equal(cloudAccountId, resp.CloudAccountId)
acc, err := controller.accountsRepo.get(context.TODO(), "aws", resp.AccountId)
acc, err := controller.accountsRepo.get(context.TODO(), orgId, "aws", resp.AccountId)
require.Nil(err)
return acc
}
func createTestUser() (*types.User, *model.ApiError) {
// Create a test user for auth
ctx := context.Background()
org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
Name: "test",
})
if apiErr != nil {
return nil, apiErr
}
group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
if apiErr != nil {
return nil, apiErr
}
auth.InitAuthCache(ctx)
userId := uuid.NewString()
return dao.DB().CreateUser(
ctx,
&types.User{
ID: userId,
Name: "test",
Email: userId[:8] + "test@test.com",
Password: "test",
OrgID: org.ID,
GroupID: group.ID,
},
true,
)
}


@@ -1,123 +1,11 @@
package cloudintegrations
import (
"database/sql/driver"
"encoding/json"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/types"
)
// Represents a cloud provider account for cloud integrations
type AccountRecord struct {
CloudProvider string `json:"cloud_provider" db:"cloud_provider"`
Id string `json:"id" db:"id"`
Config *AccountConfig `json:"config" db:"config_json"`
CloudAccountId *string `json:"cloud_account_id" db:"cloud_account_id"`
LastAgentReport *AgentReport `json:"last_agent_report" db:"last_agent_report_json"`
CreatedAt time.Time `json:"created_at" db:"created_at"`
RemovedAt *time.Time `json:"removed_at" db:"removed_at"`
}
type AccountConfig struct {
EnabledRegions []string `json:"regions"`
}
func DefaultAccountConfig() AccountConfig {
return AccountConfig{
EnabledRegions: []string{},
}
}
// For serializing from db
func (c *AccountConfig) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &c)
}
// For serializing to db
func (c *AccountConfig) Value() (driver.Value, error) {
if c == nil {
return nil, nil
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize cloud account config to JSON: %w", err,
)
}
return serialized, nil
}
type AgentReport struct {
TimestampMillis int64 `json:"timestamp_millis"`
Data map[string]any `json:"data"`
}
// For serializing from db
func (r *AgentReport) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &r)
}
// For serializing to db
func (r *AgentReport) Value() (driver.Value, error) {
if r == nil {
return nil, nil
}
serialized, err := json.Marshal(r)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize agent report to JSON: %w", err,
)
}
return serialized, nil
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
func (a *AccountRecord) status() AccountStatus {
status := AccountStatus{}
if a.LastAgentReport != nil {
lastHeartbeat := a.LastAgentReport.TimestampMillis
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
}
return status
}
func (a *AccountRecord) account() Account {
ca := Account{Id: a.Id, Status: a.status()}
if a.CloudAccountId != nil {
ca.CloudAccountId = *a.CloudAccountId
}
if a.Config != nil {
ca.Config = *a.Config
} else {
ca.Config = DefaultAccountConfig()
}
return ca
}
type CloudServiceSummary struct {
Id string `json:"id"`
Title string `json:"title"`
@@ -125,7 +13,7 @@ type CloudServiceSummary struct {
// Present only if the service has been configured in the
// context of a cloud provider account.
Config *CloudServiceConfig `json:"config,omitempty"`
Config *types.CloudServiceConfig `json:"config,omitempty"`
}
type CloudServiceDetails struct {
@@ -144,44 +32,6 @@ type CloudServiceDetails struct {
TelemetryCollectionStrategy *CloudTelemetryCollectionStrategy `json:"telemetry_collection_strategy"`
}
type CloudServiceConfig struct {
Logs *CloudServiceLogsConfig `json:"logs,omitempty"`
Metrics *CloudServiceMetricsConfig `json:"metrics,omitempty"`
}
// For serializing from db
func (c *CloudServiceConfig) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &c)
}
// For serializing to db
func (c *CloudServiceConfig) Value() (driver.Value, error) {
if c == nil {
return nil, nil
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize cloud service config to JSON: %w", err,
)
}
return serialized, nil
}
type CloudServiceLogsConfig struct {
Enabled bool `json:"enabled"`
}
type CloudServiceMetricsConfig struct {
Enabled bool `json:"enabled"`
}
type CloudServiceAssets struct {
Dashboards []CloudServiceDashboard `json:"dashboards"`
}


@@ -4,161 +4,161 @@ import (
"context"
"database/sql"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type serviceConfigRepository interface {
get(
ctx context.Context,
cloudProvider string,
orgID string,
cloudAccountId string,
serviceId string,
) (*CloudServiceConfig, *model.ApiError)
serviceType string,
) (*types.CloudServiceConfig, *model.ApiError)
upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config CloudServiceConfig,
) (*CloudServiceConfig, *model.ApiError)
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError)
getAllForAccount(
ctx context.Context,
cloudProvider string,
orgID string,
cloudAccountId string,
) (
configsBySvcId map[string]*CloudServiceConfig,
configsBySvcId map[string]*types.CloudServiceConfig,
apiErr *model.ApiError,
)
}
func newServiceConfigRepository(db *sqlx.DB) (
func newServiceConfigRepository(store sqlstore.SQLStore) (
*serviceConfigSQLRepository, error,
) {
return &serviceConfigSQLRepository{
db: db,
store: store,
}, nil
}
type serviceConfigSQLRepository struct {
db *sqlx.DB
store sqlstore.SQLStore
}
func (r *serviceConfigSQLRepository) get(
ctx context.Context,
cloudProvider string,
orgID string,
cloudAccountId string,
serviceId string,
) (*CloudServiceConfig, *model.ApiError) {
serviceType string,
) (*types.CloudServiceConfig, *model.ApiError) {
var result CloudServiceConfig
var result types.CloudIntegrationService
err := r.db.GetContext(
ctx, &result, `
select
config_json
from cloud_integrations_service_configs
where
cloud_provider=$1
and cloud_account_id=$2
and service_id=$3
`,
cloudProvider, cloudAccountId, serviceId,
)
err := r.store.BunDB().NewSelect().
Model(&result).
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
Where("ci.org_id = ?", orgID).
Where("ci.id = ?", cloudAccountId).
Where("cis.type = ?", serviceType).
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find %s %s config for %s",
cloudProvider, serviceId, cloudAccountId,
"couldn't find config for cloud account %s",
cloudAccountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud service config: %w", err,
))
}
return &result, nil
return &result.Config, nil
}
func (r *serviceConfigSQLRepository) upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config CloudServiceConfig,
) (*CloudServiceConfig, *model.ApiError) {
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError) {
query := `
INSERT INTO cloud_integrations_service_configs (
cloud_provider,
cloud_account_id,
service_id,
config_json
) values ($1, $2, $3, $4)
on conflict(cloud_provider, cloud_account_id, service_id)
do update set config_json=excluded.config_json
`
_, dbErr := r.db.ExecContext(
ctx, query,
cloudProvider, cloudAccountId, serviceId, &config,
)
if dbErr != nil {
// get cloud integration id from account id
// if the account is not connected, we don't need to upsert the config
var cloudIntegrationId string
err := r.store.BunDB().NewSelect().
Model((*types.CloudIntegration)(nil)).
Column("id").
Where("provider = ?", cloudProvider).
Where("account_id = ?", cloudAccountId).
Where("org_id = ?", orgID).
Where("removed_at is NULL").
Where("last_agent_report is not NULL").
Scan(ctx, &cloudIntegrationId)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud service config: %w", dbErr,
"couldn't query cloud integration id: %w", err,
))
}
upsertedConfig, apiErr := r.get(ctx, cloudProvider, cloudAccountId, serviceId)
if apiErr != nil {
serviceConfig := types.CloudIntegrationService{
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Config: config,
Type: serviceId,
CloudIntegrationID: cloudIntegrationId,
}
_, err = r.store.BunDB().NewInsert().
Model(&serviceConfig).
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
Exec(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted service config: %w", apiErr.ToError(),
"could not upsert cloud service config: %w", err,
))
}
return upsertedConfig, nil
return &serviceConfig.Config, nil
}
func (r *serviceConfigSQLRepository) getAllForAccount(
ctx context.Context,
cloudProvider string,
orgID string,
cloudAccountId string,
) (map[string]*CloudServiceConfig, *model.ApiError) {
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
type ScannedServiceConfigRecord struct {
ServiceId string `db:"service_id"`
Config CloudServiceConfig `db:"config_json"`
}
serviceConfigs := []types.CloudIntegrationService{}
records := []ScannedServiceConfigRecord{}
err := r.db.SelectContext(
ctx, &records, `
select
service_id,
config_json
from cloud_integrations_service_configs
where
cloud_provider=$1
and cloud_account_id=$2
`,
cloudProvider, cloudAccountId,
)
err := r.store.BunDB().NewSelect().
Model(&serviceConfigs).
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
Where("ci.id = ?", cloudAccountId).
Where("ci.org_id = ?", orgID).
Scan(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query service configs from db: %w", err,
))
}
result := map[string]*CloudServiceConfig{}
result := map[string]*types.CloudServiceConfig{}
for _, r := range records {
result[r.ServiceId] = &r.Config
for _, r := range serviceConfigs {
result[r.Type] = &r.Config
}
return result, nil
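
For readers less familiar with bun, a hedged standalone sketch of the join-scoped lookup getAllForAccount now performs; the ci/cis aliases follow the diff and are assumed to come from the models' bun table tags.

// Illustrative sketch, not part of the diff: fetch all service configs for a
// cloud integration, scoped to an org via a join on the parent row. Indexing
// the slice avoids taking the address of a reused loop variable.
package cloudintegrations

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types"
	"github.com/uptrace/bun"
)

func serviceConfigsByType(ctx context.Context, db bun.IDB, orgID, integrationID string) (map[string]*types.CloudServiceConfig, error) {
	services := []types.CloudIntegrationService{}
	err := db.NewSelect().
		Model(&services).
		Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
		Where("ci.org_id = ?", orgID).
		Where("ci.id = ?", integrationID).
		Scan(ctx)
	if err != nil {
		return nil, err
	}
	out := make(map[string]*types.CloudServiceConfig, len(services))
	for i := range services {
		out[services[i].Type] = &services[i].Config
	}
	return out, nil
}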


@@ -9,7 +9,6 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
@@ -41,7 +40,7 @@ func InitDB(sqlStore sqlstore.SQLStore) error {
}
// CreateDashboard creates a new dashboard
func CreateDashboard(ctx context.Context, orgID string, email string, data map[string]interface{}, fm interfaces.FeatureLookup) (*types.Dashboard, *model.ApiError) {
func CreateDashboard(ctx context.Context, orgID string, email string, data map[string]interface{}) (*types.Dashboard, *model.ApiError) {
dash := &types.Dashboard{
Data: data,
}
@@ -77,7 +76,7 @@ func GetDashboards(ctx context.Context, orgID string) ([]types.Dashboard, *model
return dashboards, nil
}
func DeleteDashboard(ctx context.Context, orgID, uuid string, fm interfaces.FeatureLookup) *model.ApiError {
func DeleteDashboard(ctx context.Context, orgID, uuid string) *model.ApiError {
dashboard, dErr := GetDashboard(ctx, orgID, uuid)
if dErr != nil {
@@ -116,7 +115,7 @@ func GetDashboard(ctx context.Context, orgID, uuid string) (*types.Dashboard, *m
return &dashboard, nil
}
func UpdateDashboard(ctx context.Context, orgID, userEmail, uuid string, data map[string]interface{}, fm interfaces.FeatureLookup) (*types.Dashboard, *model.ApiError) {
func UpdateDashboard(ctx context.Context, orgID, userEmail, uuid string, data map[string]interface{}) (*types.Dashboard, *model.ApiError) {
mapData, err := json.Marshal(data)
if err != nil {


@@ -21,6 +21,8 @@ import (
"github.com/SigNoz/signoz/pkg/alertmanager"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -36,7 +38,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
"github.com/SigNoz/signoz/pkg/query-service/app/explorer"
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations/thirdPartyApi"
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
@@ -44,7 +45,6 @@ import (
logsv4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
"github.com/SigNoz/signoz/pkg/query-service/app/metrics"
metricsv3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -142,6 +142,8 @@ type APIHandler struct {
AlertmanagerAPI *alertmanager.API
Signoz *signoz.SigNoz
Preference preference.API
}
type APIHandlerOpts struct {
@@ -187,6 +189,8 @@ type APIHandlerOpts struct {
AlertmanagerAPI *alertmanager.API
Signoz *signoz.SigNoz
Preference preference.API
}
// NewAPIHandler returns an APIHandler
@@ -196,7 +200,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
Cache: opts.Cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: opts.FluxInterval,
FeatureLookup: opts.FeatureFlags,
UseLogsNewSchema: opts.UseLogsNewSchema,
UseTraceNewSchema: opts.UseTraceNewSchema,
}
@@ -206,7 +209,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
Cache: opts.Cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FluxInterval: opts.FluxInterval,
FeatureLookup: opts.FeatureFlags,
UseLogsNewSchema: opts.UseLogsNewSchema,
UseTraceNewSchema: opts.UseTraceNewSchema,
}
@@ -257,6 +259,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
SummaryService: summaryService,
AlertmanagerAPI: opts.AlertmanagerAPI,
Signoz: opts.Signoz,
Preference: opts.Preference,
}
logsQueryBuilder := logsv3.PrepareLogsQuery
@@ -274,7 +277,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
BuildTraceQuery: tracesQueryBuilder,
BuildLogQuery: logsQueryBuilder,
}
aH.queryBuilder = queryBuilder.NewQueryBuilder(builderOpts, aH.featureFlags)
aH.queryBuilder = queryBuilder.NewQueryBuilder(builderOpts)
// check if at least one user is created
hasUsers, err := aH.appDao.GetUsersWithOpts(context.Background(), 1)
@@ -1079,14 +1082,14 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
}
ic := aH.IntegrationsController
installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context())
installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context(), claims.OrgID)
if err != nil {
zap.L().Error("failed to get dashboards for installed integrations", zap.Error(err))
} else {
allDashboards = append(allDashboards, installedIntegrationDashboards...)
}
cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context())
cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context(), claims.OrgID)
if err != nil {
zap.L().Error("failed to get cloud dashboards", zap.Error(err))
} else {
@@ -1143,7 +1146,7 @@ func (aH *APIHandler) deleteDashboard(w http.ResponseWriter, r *http.Request) {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
err := dashboards.DeleteDashboard(r.Context(), claims.OrgID, uuid, aH.featureFlags)
err := dashboards.DeleteDashboard(r.Context(), claims.OrgID, uuid)
if err != nil {
RespondError(w, err, nil)
@@ -1235,7 +1238,7 @@ func (aH *APIHandler) updateDashboard(w http.ResponseWriter, r *http.Request) {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
dashboard, apiError := dashboards.UpdateDashboard(r.Context(), claims.OrgID, claims.Email, uuid, postData, aH.featureFlags)
dashboard, apiError := dashboards.UpdateDashboard(r.Context(), claims.OrgID, claims.Email, uuid, postData)
if apiError != nil {
RespondError(w, apiError, nil)
return
@@ -1264,7 +1267,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(uuid) {
dashboard, apiError = aH.CloudIntegrationsController.GetDashboardById(
r.Context(), uuid,
r.Context(), claims.OrgID, uuid,
)
if apiError != nil {
RespondError(w, apiError, nil)
@@ -1273,7 +1276,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
} else {
dashboard, apiError = aH.IntegrationsController.GetInstalledIntegrationDashboardById(
r.Context(), uuid,
r.Context(), claims.OrgID, uuid,
)
if apiError != nil {
RespondError(w, apiError, nil)
@@ -1308,7 +1311,7 @@ func (aH *APIHandler) createDashboards(w http.ResponseWriter, r *http.Request) {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
dash, apiErr := dashboards.CreateDashboard(r.Context(), claims.OrgID, claims.Email, postData, aH.featureFlags)
dash, apiErr := dashboards.CreateDashboard(r.Context(), claims.OrgID, claims.Email, postData)
if apiErr != nil {
RespondError(w, apiErr, nil)
@@ -1723,14 +1726,13 @@ func (aH *APIHandler) getServicesList(w http.ResponseWriter, r *http.Request) {
}
func (aH *APIHandler) SearchTraces(w http.ResponseWriter, r *http.Request) {
params, err := ParseSearchTracesParams(r)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}
result, err := aH.reader.SearchTraces(r.Context(), params, nil)
result, err := aH.reader.SearchTraces(r.Context(), params)
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
@@ -1865,8 +1867,15 @@ func (aH *APIHandler) setTTL(w http.ResponseWriter, r *http.Request) {
return
}
ctx := r.Context()
claims, ok := authtypes.ClaimsFromContext(ctx)
if !ok {
RespondError(w, &model.ApiError{Err: errors.New("failed to get org id from context"), Typ: model.ErrorInternal}, nil)
return
}
// Context is not used here as TTL is long duration DB operation
result, apiErr := aH.reader.SetTTL(context.Background(), ttlParams)
result, apiErr := aH.reader.SetTTL(context.Background(), claims.OrgID, ttlParams)
if apiErr != nil {
if apiErr.Typ == model.ErrorConflict {
aH.HandleError(w, apiErr.Err, http.StatusConflict)
@@ -1886,7 +1895,14 @@ func (aH *APIHandler) getTTL(w http.ResponseWriter, r *http.Request) {
return
}
result, apiErr := aH.reader.GetTTL(r.Context(), ttlParams)
ctx := r.Context()
claims, ok := authtypes.ClaimsFromContext(ctx)
if !ok {
RespondError(w, &model.ApiError{Err: errors.New("failed to get org id from context"), Typ: model.ErrorInternal}, nil)
return
}
result, apiErr := aH.reader.GetTTL(r.Context(), claims.OrgID, ttlParams)
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
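
Both TTL handlers now resolve the org from request claims before calling the reader. A minimal sketch of that claims-from-context pattern follows; the Claims struct and context key are illustrative assumptions, not the authtypes implementation.

// Sketch only: Claims and the context key are illustrative, not authtypes.
package main

import (
	"context"
	"errors"
	"fmt"
)

type Claims struct {
	OrgID  string
	UserID string
	Email  string
}

type claimsKey struct{}

// ClaimsFromContext mimics pulling authenticated claims off the request context.
func ClaimsFromContext(ctx context.Context) (Claims, bool) {
	c, ok := ctx.Value(claimsKey{}).(Claims)
	return c, ok
}

// setTTLForOrg shows the handler shape: fail if claims are missing, otherwise
// pass claims.OrgID to the org-aware store call.
func setTTLForOrg(ctx context.Context) error {
	claims, ok := ClaimsFromContext(ctx)
	if !ok {
		return errors.New("failed to get org id from context")
	}
	fmt.Println("setting TTL for org", claims.OrgID) // stand-in for reader.SetTTL(ctx, claims.OrgID, params)
	return nil
}

func main() {
	ctx := context.WithValue(context.Background(), claimsKey{}, Claims{OrgID: "org-123"})
	_ = setTTLForOrg(ctx)
}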
@@ -2191,6 +2207,11 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
old.ProfilePictureURL = update.ProfilePictureURL
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(old.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be updated"))
return
}
_, apiErr = dao.DB().EditUser(ctx, &types.User{
ID: old.ID,
Name: old.Name,
@@ -2222,6 +2243,11 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be deleted"))
return
}
if user == nil {
RespondError(w, &model.ApiError{
Typ: model.ErrorNotFound,
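
The editUser and deleteUser hunks add a guard that refuses to modify accounts backing integrations. A small sketch of that guard follows, with a hypothetical stand-in for types.AllIntegrationUserEmails.

// Sketch only: the reserved email list is an illustrative assumption.
package main

import (
	"errors"
	"fmt"
	"slices"
)

// reservedIntegrationEmails stands in for types.AllIntegrationUserEmails.
var reservedIntegrationEmails = []string{"aws-integration@example.com"}

// guardIntegrationUser mirrors the check added to editUser/deleteUser:
// accounts that back integrations must not be changed through the user API.
func guardIntegrationUser(email string) error {
	if slices.Contains(reservedIntegrationEmails, email) {
		return errors.New("integration user cannot be updated or deleted")
	}
	return nil
}

func main() {
	fmt.Println(guardIntegrationUser("aws-integration@example.com")) // rejected
	fmt.Println(guardIntegrationUser("person@example.com"))          // allowed
}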
@@ -3415,132 +3441,37 @@ func (aH *APIHandler) getProducerConsumerEval(
func (aH *APIHandler) getUserPreference(
w http.ResponseWriter, r *http.Request,
) {
preferenceId := mux.Vars(r)["preferenceId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
preference, apiErr := preferences.GetUserPreference(
r.Context(), preferenceId, claims.OrgID, claims.UserID,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, preference)
aH.Preference.GetUserPreference(w, r)
}
func (aH *APIHandler) updateUserPreference(
w http.ResponseWriter, r *http.Request,
) {
preferenceId := mux.Vars(r)["preferenceId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
req := preferences.UpdatePreference{}
err := json.NewDecoder(r.Body).Decode(&req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
preference, apiErr := preferences.UpdateUserPreference(r.Context(), preferenceId, req.PreferenceValue, claims.UserID)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, preference)
aH.Preference.UpdateUserPreference(w, r)
}
func (aH *APIHandler) getAllUserPreferences(
w http.ResponseWriter, r *http.Request,
) {
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
preference, apiErr := preferences.GetAllUserPreferences(
r.Context(), claims.OrgID, claims.UserID,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, preference)
aH.Preference.GetAllUserPreferences(w, r)
}
func (aH *APIHandler) getOrgPreference(
w http.ResponseWriter, r *http.Request,
) {
preferenceId := mux.Vars(r)["preferenceId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
preference, apiErr := preferences.GetOrgPreference(
r.Context(), preferenceId, claims.OrgID,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, preference)
aH.Preference.GetOrgPreference(w, r)
}
func (aH *APIHandler) updateOrgPreference(
w http.ResponseWriter, r *http.Request,
) {
preferenceId := mux.Vars(r)["preferenceId"]
req := preferences.UpdatePreference{}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
err := json.NewDecoder(r.Body).Decode(&req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
preference, apiErr := preferences.UpdateOrgPreference(r.Context(), preferenceId, req.PreferenceValue, claims.OrgID)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, preference)
aH.Preference.UpdateOrgPreference(w, r)
}
func (aH *APIHandler) getAllOrgPreferences(
w http.ResponseWriter, r *http.Request,
) {
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
preference, apiErr := preferences.GetAllOrgPreferences(
r.Context(), claims.OrgID,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, preference)
aH.Preference.GetAllOrgPreferences(w, r)
}
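
All of the preference handlers above collapse into one-line delegations to the injected preference module; auth checks, request decoding, and responses now live inside that module. A sketch of the delegation pattern with illustrative names:

// Sketch only: interface and type names are illustrative assumptions.
package main

import (
	"fmt"
	"net/http"
)

// PreferenceAPI is a stand-in for the injected preference module.
type PreferenceAPI interface {
	GetOrgPreference(w http.ResponseWriter, r *http.Request)
}

type APIHandler struct {
	Preference PreferenceAPI
}

// getOrgPreference is now just a pass-through to the module.
func (aH *APIHandler) getOrgPreference(w http.ResponseWriter, r *http.Request) {
	aH.Preference.GetOrgPreference(w, r)
}

type stubPreferenceAPI struct{}

func (stubPreferenceAPI) GetOrgPreference(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintln(w, `{"preference":"stub"}`)
}

func main() {
	aH := &APIHandler{Preference: stubPreferenceAPI{}}
	http.HandleFunc("/api/v1/org/preferences/example", aH.getOrgPreference)
	// http.ListenAndServe(":8080", nil) // left commented; sketch only
}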
// RegisterIntegrationRoutes Registers all Integrations
@@ -3576,9 +3507,14 @@ func (aH *APIHandler) ListIntegrations(
for k, values := range r.URL.Query() {
params[k] = values[0]
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.IntegrationsController.ListIntegrations(
r.Context(), params,
r.Context(), claims.OrgID, params,
)
if apiErr != nil {
RespondError(w, apiErr, "Failed to fetch integrations")
@@ -3591,8 +3527,13 @@ func (aH *APIHandler) GetIntegration(
w http.ResponseWriter, r *http.Request,
) {
integrationId := mux.Vars(r)["integrationId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
integration, apiErr := aH.IntegrationsController.GetIntegration(
r.Context(), integrationId,
r.Context(), claims.OrgID, integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "Failed to fetch integration details")
@@ -3606,8 +3547,13 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
w http.ResponseWriter, r *http.Request,
) {
integrationId := mux.Vars(r)["integrationId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
isInstalled, apiErr := aH.IntegrationsController.IsIntegrationInstalled(
r.Context(), integrationId,
r.Context(), claims.OrgID, integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "failed to check if integration is installed")
@@ -3621,7 +3567,7 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
}
connectionTests, apiErr := aH.IntegrationsController.GetIntegrationConnectionTests(
r.Context(), integrationId,
r.Context(), claims.OrgID, integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "failed to fetch integration connection tests")
@@ -3820,8 +3766,14 @@ func (aH *APIHandler) InstallIntegration(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
integration, apiErr := aH.IntegrationsController.Install(
r.Context(), &req,
r.Context(), claims.OrgID, &req,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
@@ -3842,7 +3794,13 @@ func (aH *APIHandler) UninstallIntegration(
return
}
apiErr := aH.IntegrationsController.Uninstall(r.Context(), &req)
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
apiErr := aH.IntegrationsController.Uninstall(r.Context(), claims.OrgID, &req)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
@@ -3898,8 +3856,14 @@ func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
r.Context(), cloudProvider,
r.Context(), claims.OrgID, cloudProvider,
)
if apiErr != nil {
@@ -3920,8 +3884,14 @@ func (aH *APIHandler) CloudIntegrationsGenerateConnectionUrl(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
r.Context(), cloudProvider, req,
r.Context(), claims.OrgID, cloudProvider, req,
)
if apiErr != nil {
@@ -3938,8 +3908,14 @@ func (aH *APIHandler) CloudIntegrationsGetAccountStatus(
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
r.Context(), cloudProvider, accountId,
r.Context(), claims.OrgID, cloudProvider, accountId,
)
if apiErr != nil {
@@ -3960,8 +3936,14 @@ func (aH *APIHandler) CloudIntegrationsAgentCheckIn(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.CheckInAsAgent(
r.Context(), cloudProvider, req,
r.Context(), claims.OrgID, cloudProvider, req,
)
if apiErr != nil {
@@ -3984,8 +3966,14 @@ func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
r.Context(), cloudProvider, accountId, req,
r.Context(), claims.OrgID, cloudProvider, accountId, req,
)
if apiErr != nil {
@@ -4002,8 +3990,14 @@ func (aH *APIHandler) CloudIntegrationsDisconnectAccount(
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.DisconnectAccount(
r.Context(), cloudProvider, accountId,
r.Context(), claims.OrgID, cloudProvider, accountId,
)
if apiErr != nil {
@@ -4026,8 +4020,14 @@ func (aH *APIHandler) CloudIntegrationsListServices(
cloudAccountId = &cloudAccountIdQP
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.ListServices(
r.Context(), cloudProvider, cloudAccountId,
r.Context(), claims.OrgID, cloudProvider, cloudAccountId,
)
if apiErr != nil {
@@ -4050,8 +4050,14 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
cloudAccountId = &cloudAccountIdQP
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.GetServiceDetails(
r.Context(), cloudProvider, serviceId, cloudAccountId,
r.Context(), claims.OrgID, cloudProvider, serviceId, cloudAccountId,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
@@ -4290,8 +4296,14 @@ func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.UpdateServiceConfig(
r.Context(), cloudProvider, serviceId, req,
r.Context(), claims.OrgID, cloudProvider, serviceId, req,
)
if apiErr != nil {

View File

@@ -18,7 +18,7 @@ type Controller struct {
func NewController(sqlStore sqlstore.SQLStore) (
*Controller, error,
) {
mgr, err := NewManager(sqlStore.SQLxDB())
mgr, err := NewManager(sqlStore)
if err != nil {
return nil, fmt.Errorf("couldn't create integrations manager: %w", err)
}
@@ -35,7 +35,7 @@ type IntegrationsListResponse struct {
}
func (c *Controller) ListIntegrations(
ctx context.Context, params map[string]string,
ctx context.Context, orgId string, params map[string]string,
) (
*IntegrationsListResponse, *model.ApiError,
) {
@@ -47,7 +47,7 @@ func (c *Controller) ListIntegrations(
}
}
integrations, apiErr := c.mgr.ListIntegrations(ctx, filters)
integrations, apiErr := c.mgr.ListIntegrations(ctx, orgId, filters)
if apiErr != nil {
return nil, apiErr
}
@@ -58,16 +58,15 @@ func (c *Controller) ListIntegrations(
}
func (c *Controller) GetIntegration(
ctx context.Context, integrationId string,
ctx context.Context, orgId string, integrationId string,
) (*Integration, *model.ApiError) {
return c.mgr.GetIntegration(ctx, integrationId)
return c.mgr.GetIntegration(ctx, orgId, integrationId)
}
func (c *Controller) IsIntegrationInstalled(
ctx context.Context,
integrationId string,
ctx context.Context, orgId string, integrationId string,
) (bool, *model.ApiError) {
installation, apiErr := c.mgr.getInstalledIntegration(ctx, integrationId)
installation, apiErr := c.mgr.getInstalledIntegration(ctx, orgId, integrationId)
if apiErr != nil {
return false, apiErr
}
@@ -76,9 +75,9 @@ func (c *Controller) IsIntegrationInstalled(
}
func (c *Controller) GetIntegrationConnectionTests(
ctx context.Context, integrationId string,
ctx context.Context, orgId string, integrationId string,
) (*IntegrationConnectionTests, *model.ApiError) {
return c.mgr.GetIntegrationConnectionTests(ctx, integrationId)
return c.mgr.GetIntegrationConnectionTests(ctx, orgId, integrationId)
}
type InstallIntegrationRequest struct {
@@ -87,10 +86,10 @@ type InstallIntegrationRequest struct {
}
func (c *Controller) Install(
ctx context.Context, req *InstallIntegrationRequest,
ctx context.Context, orgId string, req *InstallIntegrationRequest,
) (*IntegrationsListItem, *model.ApiError) {
res, apiErr := c.mgr.InstallIntegration(
ctx, req.IntegrationId, req.Config,
ctx, orgId, req.IntegrationId, req.Config,
)
if apiErr != nil {
return nil, apiErr
@@ -104,7 +103,7 @@ type UninstallIntegrationRequest struct {
}
func (c *Controller) Uninstall(
ctx context.Context, req *UninstallIntegrationRequest,
ctx context.Context, orgId string, req *UninstallIntegrationRequest,
) *model.ApiError {
if len(req.IntegrationId) < 1 {
return model.BadRequest(fmt.Errorf(
@@ -113,7 +112,7 @@ func (c *Controller) Uninstall(
}
apiErr := c.mgr.UninstallIntegration(
ctx, req.IntegrationId,
ctx, orgId, req.IntegrationId,
)
if apiErr != nil {
return apiErr
@@ -123,19 +122,19 @@ func (c *Controller) Uninstall(
}
func (c *Controller) GetPipelinesForInstalledIntegrations(
ctx context.Context,
ctx context.Context, orgId string,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
return c.mgr.GetPipelinesForInstalledIntegrations(ctx)
return c.mgr.GetPipelinesForInstalledIntegrations(ctx, orgId)
}
func (c *Controller) GetDashboardsForInstalledIntegrations(
ctx context.Context,
ctx context.Context, orgId string,
) ([]types.Dashboard, *model.ApiError) {
return c.mgr.GetDashboardsForInstalledIntegrations(ctx)
return c.mgr.GetDashboardsForInstalledIntegrations(ctx, orgId)
}
func (c *Controller) GetInstalledIntegrationDashboardById(
ctx context.Context, dashboardUuid string,
ctx context.Context, orgId string, dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
return c.mgr.GetInstalledIntegrationDashboardById(ctx, dashboardUuid)
return c.mgr.GetInstalledIntegrationDashboardById(ctx, orgId, dashboardUuid)
}
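
The controller changes above are mechanical: every method gains an orgId parameter and forwards it to the manager, which forwards it to the repo. A compact sketch of that layering with placeholder types:

// Sketch only: types and method bodies are illustrative assumptions.
package main

import (
	"context"
	"fmt"
)

type repo struct{}

// list would filter rows by org_id in a real implementation.
func (repo) list(ctx context.Context, orgID string) ([]string, error) {
	return []string{"aws-integration"}, nil
}

type Manager struct{ repo repo }

func (m *Manager) ListIntegrations(ctx context.Context, orgID string) ([]string, error) {
	return m.repo.list(ctx, orgID)
}

type Controller struct{ mgr *Manager }

// The controller threads orgId straight through to the manager.
func (c *Controller) ListIntegrations(ctx context.Context, orgID string) ([]string, error) {
	return c.mgr.ListIntegrations(ctx, orgID)
}

func main() {
	c := &Controller{mgr: &Manager{}}
	out, _ := c.ListIntegrations(context.Background(), "org-123")
	fmt.Println(out)
}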

View File

@@ -5,15 +5,14 @@ import (
"fmt"
"slices"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/jmoiron/sqlx"
)
type IntegrationAuthor struct {
@@ -105,16 +104,9 @@ type IntegrationsListItem struct {
IsInstalled bool `json:"is_installed"`
}
type InstalledIntegration struct {
IntegrationId string `json:"integration_id" db:"integration_id"`
Config InstalledIntegrationConfig `json:"config_json" db:"config_json"`
InstalledAt time.Time `json:"installed_at" db:"installed_at"`
}
type InstalledIntegrationConfig map[string]interface{}
type Integration struct {
IntegrationDetails
Installation *InstalledIntegration `json:"installation"`
Installation *types.InstalledIntegration `json:"installation"`
}
type Manager struct {
@@ -122,8 +114,8 @@ type Manager struct {
installedIntegrationsRepo InstalledIntegrationsRepo
}
func NewManager(db *sqlx.DB) (*Manager, error) {
iiRepo, err := NewInstalledIntegrationsSqliteRepo(db)
func NewManager(store sqlstore.SQLStore) (*Manager, error) {
iiRepo, err := NewInstalledIntegrationsSqliteRepo(store)
if err != nil {
return nil, fmt.Errorf(
"could not init sqlite DB for installed integrations: %w", err,
@@ -142,6 +134,7 @@ type IntegrationsFilter struct {
func (m *Manager) ListIntegrations(
ctx context.Context,
orgId string,
filter *IntegrationsFilter,
// Expected to have pagination over time.
) ([]IntegrationsListItem, *model.ApiError) {
@@ -152,22 +145,22 @@ func (m *Manager) ListIntegrations(
)
}
installed, apiErr := m.installedIntegrationsRepo.list(ctx)
installed, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, "could not fetch installed integrations",
)
}
installedIds := []string{}
installedTypes := []string{}
for _, ii := range installed {
installedIds = append(installedIds, ii.IntegrationId)
installedTypes = append(installedTypes, ii.Type)
}
result := []IntegrationsListItem{}
for _, ai := range available {
result = append(result, IntegrationsListItem{
IntegrationSummary: ai.IntegrationSummary,
IsInstalled: slices.Contains(installedIds, ai.Id),
IsInstalled: slices.Contains(installedTypes, ai.Id),
})
}
@@ -188,6 +181,7 @@ func (m *Manager) ListIntegrations(
func (m *Manager) GetIntegration(
ctx context.Context,
orgId string,
integrationId string,
) (*Integration, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(
@@ -198,7 +192,7 @@ func (m *Manager) GetIntegration(
}
installation, apiErr := m.getInstalledIntegration(
ctx, integrationId,
ctx, orgId, integrationId,
)
if apiErr != nil {
return nil, apiErr
@@ -212,6 +206,7 @@ func (m *Manager) GetIntegration(
func (m *Manager) GetIntegrationConnectionTests(
ctx context.Context,
orgId string,
integrationId string,
) (*IntegrationConnectionTests, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(
@@ -225,8 +220,9 @@ func (m *Manager) GetIntegrationConnectionTests(
func (m *Manager) InstallIntegration(
ctx context.Context,
orgId string,
integrationId string,
config InstalledIntegrationConfig,
config types.InstalledIntegrationConfig,
) (*IntegrationsListItem, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
if apiErr != nil {
@@ -234,7 +230,7 @@ func (m *Manager) InstallIntegration(
}
_, apiErr = m.installedIntegrationsRepo.upsert(
ctx, integrationId, config,
ctx, orgId, integrationId, config,
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -250,15 +246,17 @@ func (m *Manager) InstallIntegration(
func (m *Manager) UninstallIntegration(
ctx context.Context,
orgId string,
integrationId string,
) *model.ApiError {
return m.installedIntegrationsRepo.delete(ctx, integrationId)
return m.installedIntegrationsRepo.delete(ctx, orgId, integrationId)
}
func (m *Manager) GetPipelinesForInstalledIntegrations(
ctx context.Context,
orgId string,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
if apiErr != nil {
return nil, apiErr
}
@@ -308,6 +306,7 @@ func (m *Manager) parseDashboardUuid(dashboardUuid string) (
func (m *Manager) GetInstalledIntegrationDashboardById(
ctx context.Context,
orgId string,
dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
integrationId, dashboardId, apiErr := m.parseDashboardUuid(dashboardUuid)
@@ -315,7 +314,7 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
return nil, apiErr
}
integration, apiErr := m.GetIntegration(ctx, integrationId)
integration, apiErr := m.GetIntegration(ctx, orgId, integrationId)
if apiErr != nil {
return nil, apiErr
}
@@ -355,8 +354,9 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
func (m *Manager) GetDashboardsForInstalledIntegrations(
ctx context.Context,
orgId string,
) ([]types.Dashboard, *model.ApiError) {
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
if apiErr != nil {
return nil, apiErr
}
@@ -421,10 +421,11 @@ func (m *Manager) getIntegrationDetails(
func (m *Manager) getInstalledIntegration(
ctx context.Context,
orgId string,
integrationId string,
) (*InstalledIntegration, *model.ApiError) {
) (*types.InstalledIntegration, *model.ApiError) {
iis, apiErr := m.installedIntegrationsRepo.get(
ctx, []string{integrationId},
ctx, orgId, []string{integrationId},
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, fmt.Sprintf(
@@ -441,32 +442,33 @@ func (m *Manager) getInstalledIntegration(
func (m *Manager) getInstalledIntegrations(
ctx context.Context,
orgId string,
) (
map[string]Integration, *model.ApiError,
) {
installations, apiErr := m.installedIntegrationsRepo.list(ctx)
installations, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
if apiErr != nil {
return nil, apiErr
}
installedIds := utils.MapSlice(installations, func(i InstalledIntegration) string {
return i.IntegrationId
installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
return i.Type
})
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedIds)
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)
if apiErr != nil {
return nil, apiErr
}
result := map[string]Integration{}
for _, ii := range installations {
iDetails, exists := integrationDetails[ii.IntegrationId]
iDetails, exists := integrationDetails[ii.Type]
if !exists {
return nil, model.InternalError(fmt.Errorf(
"couldn't find integration details for %s", ii.IntegrationId,
"couldn't find integration details for %s", ii.Type,
))
}
result[ii.IntegrationId] = Integration{
result[ii.Type] = Integration{
Installation: &ii,
IntegrationDetails: iDetails,
}
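
getInstalledIntegrations now keys installations by Type rather than IntegrationId and joins them against the available-integrations catalogue. A standalone sketch of that join, with illustrative types and data:

// Sketch only: types and sample data are illustrative assumptions.
package main

import "fmt"

type InstalledIntegration struct {
	Type   string
	Config map[string]any
}

type IntegrationDetails struct {
	Title string
}

// joinInstalled maps each installed integration onto its catalogue details,
// erroring if an installed type has no matching details, as in the diff above.
func joinInstalled(installed []InstalledIntegration, details map[string]IntegrationDetails) (map[string]IntegrationDetails, error) {
	result := map[string]IntegrationDetails{}
	for _, ii := range installed {
		d, ok := details[ii.Type]
		if !ok {
			return nil, fmt.Errorf("couldn't find integration details for %s", ii.Type)
		}
		result[ii.Type] = d
	}
	return result, nil
}

func main() {
	out, _ := joinInstalled(
		[]InstalledIntegration{{Type: "nginx"}},
		map[string]IntegrationDetails{"nginx": {Title: "Nginx"}},
	)
	fmt.Println(out)
}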

Some files were not shown because too many files have changed in this diff.