Compare commits: platform-p...main

167 Commits (SHA1s):
529a9e7009, b00687b43f, 8771919de6, 497972f23c, a9e30919d1, 925c4c4a3d, e66bfe5961, 42943f72b7,
7a72a209e5, 44f00943a8, 8867e1ef38, c08e520941, 139cc4452d, 2f3baeb302, 3d42b0058e, ed70e3c5f5,
7d6918f8b6, 2885bc851e, 857258f8c3, ece5c2b7ad, 1078f98388, b4e2326f38, c79b154215, a59c0188cc,
3df426625a, 646f359f33, 81167c6947, bc1295b93a, 3db0e1f66a, d52b54aeb3, c8608c18ae, cde99ba1a0,
a7e9d442b7, 0b0d622f6b, 127e760b00, 63e333de0d, af57d11b6a, 8d61ee338b, 5d9dc17645, 5288022ffd,
cdc18af4a2, 918a90e3c1, e8ce7b22f5, b752fdd30a, d73b7fadab, bc4b65dbb9, e716a2a7b1, 891c56b059,
d01e6fc891, 17f8c1040f, ffa5a9725e, 92cab8e049, 7b9e6e3cbb, 4837ddb601, 9c818955af, 134a051196,
c904ab5d99, d53f9a7e16, 1b01b61026, 95a26cecba, 15af828005, e5b99703ac, f0941c7b2e, 12c9b921a7,
52228bc6c4, 79988b448f, 4bfd7ba3d7, 3349158213, 1c9f4efb9f, fd839ff1db, 09cbe4aa0d, 096e38ee91,
48590c03e2, 38af897bcc, 2b79678e63, a4f54baf1f, 4e6c42dd17, 39bd169b89, c7c2d2a7ef, 0cfb809605,
6a378ed7b4, 8e41847523, 779df62093, 3763794531, e9fa68e1f3, 7bd3e1c453, a48455b2b3, fbb66f14ba,
54b67d9cfd, 1a193015a7, 245179cbf7, dbb6b333c8, 56f8e53d88, 2f4e371dac, db75ec56bc, 02755a6527,
9f089e0784, fb9a7ad3cd, ad631d70b6, c44efeab33, e9743fa7ac, b7ece08d3e, e5f4f5cc72, 4437630127,
89639b239e, 785ae9f0bd, 8752022cef, c7e4a9c45d, bf92c92204, bd63633be7, 1158e1199b, 0a60c49314,
c25e3beb81, c9e0f2b9ca, 6d831849c1, 83eeb46f99, 287558dc9d, 83aad793c2, 3eff689c85, f5bcd65e2e,
e7772d93af, bbf987ebd7, 105c3a3b8c, c1a4a5b8db, c9591f4341, fd216fdee1, f5bf4293a1, 155a44a25d,
4b21c9d5f9, 5ef0a18867, c8266d1aec, adfd16ce1b, 6db74a5585, f8e0db0085, 01e0b36d62, e90bb016f7,
bdecbfb7f5, 3dced2b082, 1285666087, 1655397eaa, 718360a966, 2f5995b071, a061c9de0f, 7b1ca9a1a6,
0d1131e99f, 44d1d0f994, bdce97a727, 5f8cfbe474, 55c2f98768, 624bb5cc62, 95f8fa1566, fa97e63912,
c8419c1f82, e05ede3978, 437d0d1345, 64e379c413, d05d394f57, b4e5085a5a, 88f7502a15, b0442761ac,
d539ca9bab, c8194e9abb, c919102fee, 765370752c, db5c102f14, 8a0c5bc3c8, a28ccffd01
@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.12
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.12
container_name: schema-migrator-async
command:
- async
.github/CODEOWNERS (44 changed lines, vendored)
@@ -2,47 +2,11 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.

/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/frontend/ @YounixM @aks07

# Dashboard, Alert, Metrics, Service Map, Services
/frontend/src/container/ListOfDashboard/ @srikanthccv
/frontend/src/container/NewDashboard/ @srikanthccv
/frontend/src/pages/DashboardsListPage/ @srikanthccv
/frontend/src/pages/DashboardWidget/ @srikanthccv
/frontend/src/pages/NewDashboard/ @srikanthccv
/frontend/src/providers/Dashboard/ @srikanthccv

# Alerts
/frontend/src/container/AlertHistory/ @srikanthccv
/frontend/src/container/AllAlertChannels/ @srikanthccv
/frontend/src/container/AnomalyAlertEvaluationView/ @srikanthccv
/frontend/src/container/CreateAlertChannels/ @srikanthccv
/frontend/src/container/CreateAlertRule/ @srikanthccv
/frontend/src/container/EditAlertChannels/ @srikanthccv
/frontend/src/container/FormAlertChannels/ @srikanthccv
/frontend/src/container/FormAlertRules/ @srikanthccv
/frontend/src/container/ListAlertRules/ @srikanthccv
/frontend/src/container/TriggeredAlerts/ @srikanthccv
/frontend/src/pages/AlertChannelCreate/ @srikanthccv
/frontend/src/pages/AlertDetails/ @srikanthccv
/frontend/src/pages/AlertHistory/ @srikanthccv
/frontend/src/pages/AlertList/ @srikanthccv
/frontend/src/pages/CreateAlert/ @srikanthccv
/frontend/src/providers/Alert.tsx @srikanthccv

# Metrics
/frontend/src/container/MetricsExplorer/ @srikanthccv
/frontend/src/pages/MetricsApplication/ @srikanthccv
/frontend/src/pages/MetricsExplorer/ @srikanthccv

# Services and Service Map
/frontend/src/container/ServiceApplication/ @srikanthccv
/frontend/src/container/ServiceTable/ @srikanthccv
/frontend/src/pages/Services/ @srikanthccv
/frontend/src/pages/ServiceTopLevelOperations/ @srikanthccv
/frontend/src/container/Home/Services/ @srikanthccv
# Onboarding
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish

/deploy/ @SigNoz/devops
.github @SigNoz/devops
.github/workflows/build-community.yaml (7 changed lines, vendored)
@@ -3,8 +3,8 @@ name: build-community
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
- "v[0-9]+.[0-9]+.[0-9]+"
- "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"

defaults:
run:
@@ -69,14 +69,13 @@ jobs:
GO_BUILD_CONTEXT: ./cmd/community
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-ldflags='-s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=community
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./cmd/community/Dockerfile.multi-arch
DOCKER_MANIFEST: true
.github/workflows/build-enterprise.yaml (7 changed lines, vendored)
@@ -69,6 +69,7 @@ jobs:
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -84,7 +85,7 @@ jobs:
JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_PATH: frontend/.env
JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
go-build:
@@ -99,7 +100,7 @@ jobs:
GO_BUILD_CONTEXT: ./cmd/enterprise
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-ldflags='-s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
@@ -107,10 +108,8 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
DOCKER_MANIFEST: true
.github/workflows/build-staging.yaml (7 changed lines, vendored)
@@ -68,6 +68,7 @@ jobs:
echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.NP_PYLON_IDENTITY_SECRET }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -98,7 +99,7 @@ jobs:
GO_BUILD_CONTEXT: ./cmd/enterprise
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-ldflags='-s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
@@ -106,10 +107,8 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
DOCKER_MANIFEST: true
@@ -125,4 +124,4 @@ jobs:
GITHUB_SILENT: true
GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
GITHUB_EVENT_NAME: releaser
GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"
GITHUB_EVENT_PAYLOAD: '{"deployment": "${{ needs.prepare.outputs.deployment }}", "signoz_version": "${{ needs.prepare.outputs.version }}"}'
.github/workflows/goci.yaml (16 changed lines, vendored)
@@ -73,3 +73,19 @@ jobs:
shell: bash
run: |
make docker-build-enterprise
openapi:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
runs-on: ubuntu-latest
steps:
- name: self-checkout
uses: actions/checkout@v4
- name: go-install
uses: actions/setup-go@v5
with:
go-version: "1.24"
- name: generate-openapi
run: |
go run cmd/enterprise/*.go generate openapi
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
.github/workflows/gor-signoz.yaml (1 changed line, vendored)
@@ -35,6 +35,7 @@ jobs:
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
- name: build-frontend
run: make js-build
- name: upload-frontend-artifact
.github/workflows/integrationci.yaml (2 changed lines, vendored)
@@ -17,6 +17,8 @@ jobs:
- bootstrap
- passwordauthn
- callbackauthn
- cloudintegrations
- dashboard
- querier
- ttl
sqlstore-provider:
@@ -1,39 +1,63 @@
version: "2"
linters:
default: standard
default: none
enable:
- bodyclose
- depguard
- errcheck
- forbidigo
- govet
- iface
- ineffassign
- misspell
- nilnil
- sloglint
- depguard
- iface
- unparam
- forbidigo

linters-settings:
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
msg-style: lowercased
key-naming-case: snake
depguard:
rules:
nozap:
deny:
- pkg: "go.uber.org/zap"
desc: "Do not use zap logger. Use slog instead."
noerrors:
deny:
- pkg: "errors"
desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead."
iface:
enable:
- identical
issues:
exclude-dirs:
- "pkg/query-service"
- "ee/query-service"
- "scripts/"
- unused
settings:
depguard:
rules:
noerrors:
deny:
- pkg: errors
desc: Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead.
nozap:
deny:
- pkg: go.uber.org/zap
desc: Do not use zap logger. Use slog instead.
forbidigo:
forbid:
- pattern: fmt.Errorf
- pattern: ^(fmt\.Print.*|print|println)$
iface:
enable:
- identical
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
key-naming-case: snake
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- pkg/query-service
- ee/query-service
- scripts/
- tmp/
- third_party$
- builtin$
- examples$
formatters:
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$
Makefile (24 changed lines)
@@ -84,10 +84,9 @@ go-run-enterprise: ## Runs the enterprise go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
--config ./conf/prometheus.yml \
--cluster cluster
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go server

.PHONY: go-test
go-test: ## Runs go unit tests
@@ -102,10 +101,9 @@ go-run-community: ## Runs the community go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
--config ./conf/prometheus.yml \
--cluster cluster
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server

.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community
@@ -114,9 +112,9 @@ $(GO_BUILD_ARCHS_COMMUNITY): go-build-community-%: $(TARGET_DIR)
@mkdir -p $(TARGET_DIR)/$(OS)-$*
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)-community"
@if [ $* = "arm64" ]; then \
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
else \
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
fi

@@ -127,9 +125,9 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
@mkdir -p $(TARGET_DIR)/$(OS)-$*
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
@if [ $* = "arm64" ]; then \
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
else \
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
fi

.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
@@ -139,9 +137,9 @@ $(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
@mkdir -p $(TARGET_DIR)/$(OS)-$*
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
@if [ $* = "arm64" ]; then \
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
else \
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
fi

##############################################################
@@ -208,4 +206,4 @@ py-lint: ## Run lint for integration tests

.PHONY: py-test
py-test: ## Runs integration tests
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
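A brief usage sketch for the simplified run targets above — assuming a local ClickHouse reachable at tcp://127.0.0.1:9000, as the targets' environment variables indicate:

```sh
# Run the community backend with the new flag-free invocation
make go-run-community

# Or run the enterprise backend
make go-run-enterprise
```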
@@ -12,12 +12,6 @@ builds:
- id: signoz
binary: bin/signoz
main: ./cmd/community
env:
- CGO_ENABLED=1
- >-
{{- if eq .Os "linux" }}
{{- if eq .Arch "arm64" }}CC=aarch64-linux-gnu-gcc{{- end }}
{{- end }}
goos:
- linux
- darwin
@@ -36,8 +30,6 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }}
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
- >-
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
mod_timestamp: "{{ .CommitTimestamp }}"
tags:
- timetzdata
@@ -13,6 +13,7 @@ func main() {

// register a list of commands to the root command
registerServer(cmd.RootCmd, logger)
cmd.RegisterGenerate(cmd.RootCmd, logger)

cmd.Execute(logger)
}
@@ -5,9 +5,12 @@ import (
"log/slog"

"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -76,6 +79,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
@@ -12,12 +12,6 @@ builds:
- id: signoz
binary: bin/signoz
main: ./cmd/enterprise
env:
- CGO_ENABLED=1
- >-
{{- if eq .Os "linux" }}
{{- if eq .Arch "arm64" }}CC=aarch64-linux-gnu-gcc{{- end }}
{{- end }}
goos:
- linux
- darwin
@@ -37,11 +31,8 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
- >-
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
mod_timestamp: "{{ .CommitTimestamp }}"
tags:
- timetzdata
@@ -13,6 +13,7 @@ func main() {

// register a list of commands to the root command
registerServer(cmd.RootCmd, logger)
cmd.RegisterGenerate(cmd.RootCmd, logger)

cmd.Execute(logger)
}
@@ -8,6 +8,8 @@ import (
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
@@ -17,6 +19,7 @@ import (
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -105,6 +108,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e

return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
cmd/generate.go (21 changed lines, new file)
@@ -0,0 +1,21 @@
package cmd

import (
"log/slog"

"github.com/spf13/cobra"
)

func RegisterGenerate(parentCmd *cobra.Command, logger *slog.Logger) {
var generateCmd = &cobra.Command{
Use: "generate",
Short: "Generate artifacts",
SilenceUsage: true,
SilenceErrors: true,
CompletionOptions: cobra.CompletionOptions{DisableDefaultCmd: true},
}

registerGenerateOpenAPI(generateCmd)

parentCmd.AddCommand(generateCmd)
}
cmd/openapi.go (41 changed lines, new file)
@@ -0,0 +1,41 @@
package cmd

import (
"context"
"log/slog"

"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/version"
"github.com/spf13/cobra"
)

func registerGenerateOpenAPI(parentCmd *cobra.Command) {
openapiCmd := &cobra.Command{
Use: "openapi",
Short: "Generate OpenAPI schema for SigNoz",
RunE: func(currCmd *cobra.Command, args []string) error {
return runGenerateOpenAPI(currCmd.Context())
},
}

parentCmd.AddCommand(openapiCmd)
}

func runGenerateOpenAPI(ctx context.Context) error {
instrumentation, err := instrumentation.New(ctx, instrumentation.Config{Logs: instrumentation.LogsConfig{Level: slog.LevelInfo}}, version.Info, "signoz")
if err != nil {
return err
}

openapi, err := signoz.NewOpenAPI(ctx, instrumentation)
if err != nil {
return err
}

if err := openapi.CreateAndWrite("docs/api/openapi.yml"); err != nil {
return err
}

return nil
}
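For reference, the openapi job in goci.yaml above exercises this new subcommand; a local usage sketch along the same lines:

```sh
# Regenerate the OpenAPI spec (rewrites docs/api/openapi.yml), mirroring the CI job
go run cmd/enterprise/*.go generate openapi

# Review the regenerated spec before committing
git diff --compact-summary docs/api/openapi.yml
```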
@@ -1,5 +1,5 @@
##################### SigNoz Configuration Example #####################
#
#
# Do not modify this file
#
@@ -47,10 +47,10 @@ cache:
provider: memory
# memory: Uses in-memory caching.
memory:
# Time-to-live for cache entries in memory. Specify the duration in ns
ttl: 60000000000
# The interval at which the cache will be cleaned up
cleanup_interval: 1m
# Max items for the in-memory cache (10x the entries)
num_counters: 100000
# Total cost in bytes allocated bounded cache
max_cost: 67108864
# redis: Uses Redis as the caching backend.
redis:
# The hostname or IP address of the Redis server.
@@ -58,7 +58,7 @@ cache:
# The port on which the Redis server is running. Default is usually 6379.
port: 6379
# The password for authenticating with the Redis server, if required.
password:
password:
# The Redis database number to use
db: 0
@@ -71,6 +71,10 @@ sqlstore:
sqlite:
# The path to the SQLite database file.
path: /var/lib/signoz/signoz.db
# Mode is the mode to use for the sqlite database.
mode: delete
# BusyTimeout is the timeout for the sqlite database to wait for a lock.
busy_timeout: 10s

##################### APIServer #####################
apiserver:
@@ -238,7 +242,6 @@ statsreporter:
# Whether to collect identities and traits (emails).
identities: true

##################### Gateway (License only) #####################
gateway:
# The URL of the gateway's api.
@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.97.0
image: signoz/signoz:v0.104.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.12
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.12
deploy:
restart_policy:
condition: on-failure
@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.97.0
image: signoz/signoz:v0.104.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.12
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.12
deploy:
restart_policy:
condition: on-failure
@@ -1,3 +1,10 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -21,6 +28,10 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -66,6 +77,11 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -77,16 +93,20 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
exporters: [clickhousetraces, signozmeter]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]
@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.97.0}
image: signoz/signoz:${VERSION:-v0.104.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +250,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
container_name: schema-migrator-async
command:
- async
@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.97.0}
image: signoz/signoz:${VERSION:-v0.104.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
container_name: schema-migrator-async
command:
- async
@@ -1,3 +1,10 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -21,6 +28,10 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -66,6 +77,11 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -77,16 +93,20 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
exporters: [clickhousetraces, signozmeter]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]
docs/api/openapi.yml (2293 changed lines, new file)

@@ -13,8 +13,6 @@ Before diving in, make sure you have these tools installed:
- Download from [go.dev/dl](https://go.dev/dl/)
- Check [go.mod](../../go.mod#L3) for the minimum version

- **GCC** - Required for CGO dependencies
- Download from [gcc.gnu.org](https://gcc.gnu.org/)

- **Node** - Powers our frontend
- Download from [nodejs.org](https://nodejs.org)
@@ -103,9 +103,19 @@ Remember to replace the region and ingestion key with proper values as obtained

Both SigNoz and the OTel demo app (the frontend-proxy service, to be accurate) share the same port allocation at 8080. To prevent port allocation conflicts, modify the OTel demo application config to use port 8081 as the `ENVOY_PORT` value as shown below, and run the docker compose command.

Also, both SigNoz and the OTel demo app have the same `PROMETHEUS_PORT` configured; by default both try to start at `9090`, which may cause either of them to fail depending on which one acquires the port first. To prevent this, we need to modify the value of `PROMETHEUS_PORT` too.

```sh
ENVOY_PORT=8081 docker compose up -d
ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
```

Alternatively, we can set these values in the `.env` file, which reduces the command to just:

```sh
docker compose up -d
```
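A minimal sketch of the equivalent `.env` overrides — the variable names come from the commands above, while keeping them in the OTel demo's `.env` file is an assumption:

```sh
# Hypothetical overrides in the OTel demo's .env file
ENVOY_PORT=8081        # avoid the clash with SigNoz on port 8080
PROMETHEUS_PORT=9091   # avoid the clash with the default Prometheus port 9090
```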
This spins up multiple microservices with OpenTelemetry instrumentation enabled. You can verify this by running:

```sh
docker compose ps -a
```
@@ -129,6 +129,12 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
}

func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
return &authtypes.AuthNProviderInfo{
RelayStatePath: nil,
}
}

func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) {
if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" {
ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias)

@@ -99,6 +99,14 @@ func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*aut
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
}

func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
state := authtypes.NewState(&url.URL{Path: "login"}, authDomain.StorableAuthDomain().ID).URL.String()

return &authtypes.AuthNProviderInfo{
RelayStatePath: &state,
}
}

func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) {
certStore, err := a.getCertificateStore(authDomain)
if err != nil {
@@ -48,7 +48,26 @@ func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey)
}

func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}

tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}

err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}

return nil
}

func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
@@ -15,18 +15,18 @@ type anonymous

type role
relations
define assignee: [user]
define assignee: [user, anonymous]

define read: [user, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]

type resources
type metaresources
relations
define create: [user, role#assignee]
define list: [user, role#assignee]

type resource
type metaresource
relations
define read: [user, anonymous, role#assignee]
define update: [user, role#assignee]
@@ -35,6 +35,6 @@ type resource
define block: [user, role#assignee]

type telemetry
type telemetryresource
relations
define read: [user, anonymous, role#assignee]
define read: [user, role#assignee]
@@ -1,10 +1,10 @@
package licensing

import (
"fmt"
"sync"
"time"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
)

@@ -18,7 +18,7 @@ func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
once.Do(func() {
config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
if err := config.Validate(); err != nil {
panic(fmt.Errorf("invalid licensing config: %w", err))
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid licensing config"))
}
})
@@ -19,7 +19,12 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -56,6 +61,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
})

if err != nil {
@@ -88,10 +94,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// routes available only in ee version
router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)

// paid plans specific routes
router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.Signoz.Handlers.Session.CreateSessionBySAMLCallback)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/complete/oidc", am.OpenAccess(ah.Signoz.Handlers.Session.CreateSessionByOIDCCallback)).Methods(http.MethodGet)

// base overrides
router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)

@@ -99,6 +101,39 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)

// dashboards
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.CreatePublic)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.GetPublic)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.UpdatePublic)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.DeletePublic)).Methods(http.MethodDelete)

// public access for dashboards
router.HandleFunc("/api/v1/public/dashboards/{id}", am.CheckWithoutClaims(
ah.Signoz.Handlers.Dashboard.GetPublicData,
authtypes.RelationRead, authtypes.RelationRead,
dashboardtypes.TypeableMetaResourcePublicDashboard,
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
id, err := valuer.NewUUID(mux.Vars(req)["id"])
if err != nil {
return nil, valuer.UUID{}, err
}

return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
})).Methods(http.MethodGet)

router.HandleFunc("/api/v1/public/dashboards/{id}/widgets/{index}/query_range", am.CheckWithoutClaims(
ah.Signoz.Handlers.Dashboard.GetPublicWidgetQueryRange,
authtypes.RelationRead, authtypes.RelationRead,
dashboardtypes.TypeableMetaResourcePublicDashboard,
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
id, err := valuer.NewUUID(mux.Vars(req)["id"])
if err != nil {
return nil, valuer.UUID{}, err
}

return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
})).Methods(http.MethodGet)

// v3
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)
@@ -10,7 +10,6 @@ import (
"strings"
"time"

"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -77,7 +76,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}

ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
@@ -186,48 +185,37 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
return cloudIntegrationUser, nil
}

func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, string, *basemodel.ApiError,
) {
url := fmt.Sprintf(
"%s%s",
strings.TrimSuffix(constants.ZeusURL, "/"),
"/v2/deployments/me",
)

// TODO: remove this struct from here
type deploymentResponse struct {
Status string `json:"status"`
Error string `json:"error"`
Data struct {
Name string `json:"name"`

ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
} `json:"data"`
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}

resp, apiErr := requestAndParseResponse[deploymentResponse](
ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
)

if apiErr != nil {
return "", "", basemodel.WrapApiError(
apiErr, "couldn't query for deployment info",
)
}

if resp.Status != "success" {
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: status: %s, error: %s",
resp.Status, resp.Error,
"couldn't query for deployment info: error: %w", err,
))
}

regionDns := resp.Data.ClusterInfo.Region.DNS
deploymentName := resp.Data.Name
resp := new(deploymentResponse)

err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
}

regionDns := resp.ClusterInfo.Region.DNS
deploymentName := resp.Name

if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge
@@ -9,6 +9,7 @@ import (
_ "net/http/pprof" // http profiler
"slices"

"github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
@@ -74,13 +75,26 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
return nil, err
}

cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
Memory: cache.Memory{
NumCounters: 10 * 10000,
MaxCost: 1 << 27, // 128 MB
},
})
if err != nil {
return nil, err
}

reader := clickhouseReader.NewReader(
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.TelemetryStore.Cluster(),
config.Querier.FluxInterval,
cacheForTraceDetail,
signoz.Cache,
nil,
)

rm, err := makeRulesManager(
@@ -192,7 +206,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {

func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)

r.Use(otelmux.Middleware(
"apiserver",
@@ -229,6 +243,11 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.MetricExplorerRoutes(r, am)
apiHandler.RegisterTraceFunnelsRoutes(r, am)

err := s.signoz.APIServer.AddToRouter(r)
if err != nil {
return nil, err
}

c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"},
@@ -239,7 +258,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h

handler = handlers.CompressHandler(handler)

err := web.AddToRouter(r)
err = web.AddToRouter(r)
if err != nil {
return nil, err
}
@@ -10,9 +10,6 @@ var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")

// this is set via build time variable
var ZeusURL = "https://api.signoz.cloud"

func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)
if len(v) == 0 {
@@ -246,7 +246,9 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
			continue
		}
	}
	results, err := r.Threshold.ShouldAlert(*series, r.Unit())
	results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
		ActiveAlerts: r.ActiveAlertsLabelFP(),
	})
	if err != nil {
		return nil, err
	}
@@ -296,7 +298,9 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
			continue
		}
	}
	results, err := r.Threshold.ShouldAlert(*series, r.Unit())
	results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
		ActiveAlerts: r.ActiveAlertsLabelFP(),
	})
	if err != nil {
		return nil, err
	}
@@ -410,6 +414,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
			GeneratorURL: r.GeneratorURL(),
			Receivers:    ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
			Missing:      smpl.IsMissing,
			IsRecovering: smpl.IsRecovering,
		}
	}

@@ -422,6 +427,9 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro

		alert.Value = a.Value
		alert.Annotations = a.Annotations
		// Update the recovering and missing state of existing alert
		alert.IsRecovering = a.IsRecovering
		alert.Missing = a.Missing
		if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
			alert.Receivers = ruleReceiverMap[v]
		}
@@ -480,6 +488,30 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
				Value:        a.Value,
			})
		}

		// We need to change firing alert to recovering if the returned sample meets recovery threshold
		changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
		// We need to change recovering alerts to firing if the returned sample meets target threshold
		changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
		// in any of the above case we need to update the status of alert
		if changeFiringToRecovering || changeRecoveringToFiring {
			state := model.StateRecovering
			if changeRecoveringToFiring {
				state = model.StateFiring
			}
			a.State = state
			r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
			itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
				RuleID:       r.ID(),
				RuleName:     r.Name(),
				State:        state,
				StateChanged: true,
				UnixMilli:    ts.UnixMilli(),
				Labels:       model.LabelsString(labelsJSON),
				Fingerprint:  a.QueryResultLables.Hash(),
				Value:        a.Value,
			})
		}
	}

	currentState := r.State()

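For readers skimming the hunk above, the recovery handling added to AnomalyRule.Eval reduces to a small two-state machine between firing and recovering. The sketch below is illustrative only and uses local stand-in types, not the repository's model package:

// Hypothetical, self-contained restatement of the transition rule introduced above.
package main

import "fmt"

type state string

const (
	stateFiring     state = "firing"
	stateRecovering state = "recovering"
)

type sample struct {
	current      state
	isRecovering bool // latest sample met the recovery threshold
	missing      bool // latest sample was missing
}

// nextState mirrors changeFiringToRecovering / changeRecoveringToFiring above.
func nextState(s sample) (state, bool) {
	switch {
	case s.current == stateFiring && s.isRecovering:
		return stateRecovering, true
	case s.current == stateRecovering && !s.isRecovering && !s.missing:
		return stateFiring, true
	default:
		return s.current, false
	}
}

func main() {
	fmt.Println(nextState(sample{current: stateFiring, isRecovering: true})) // recovering true
	fmt.Println(nextState(sample{current: stateRecovering}))                 // firing true
	fmt.Println(nextState(sample{current: stateRecovering, missing: true}))  // recovering false
}

Whenever such a transition happens, the diff also appends a RuleStateHistory entry, which is what the itemsToAdd block above records.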
@@ -30,6 +30,8 @@ func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
		return sqlschema.DataTypeBoolean
	case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
		return sqlschema.DataTypeText
	case "BYTEA":
		return sqlschema.DataTypeBytea
	}

	return formatter.Formatter.DataTypeOf(dataType)

@@ -2,6 +2,7 @@ package postgressqlschema

import (
	"context"
	"database/sql"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlschema"
@@ -47,50 +48,45 @@ func (provider *provider) Operator() sqlschema.SQLOperator {
}

func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
	rows, err := provider.
	columns := []struct {
		ColumnName  string  `bun:"column_name"`
		Nullable    bool    `bun:"nullable"`
		SQLDataType string  `bun:"udt_name"`
		DefaultVal  *string `bun:"column_default"`
	}{}

	err := provider.
		sqlstore.
		BunDB().
		QueryContext(ctx, `
		NewRaw(`
SELECT
	c.column_name,
	c.is_nullable = 'YES',
	c.is_nullable = 'YES' as nullable,
	c.udt_name,
	c.column_default
FROM
	information_schema.columns AS c
WHERE
	c.table_name = ?`, string(tableName))
	c.table_name = ?`, string(tableName)).
		Scan(ctx, &columns)
	if err != nil {
		return nil, nil, err
	}
	if len(columns) == 0 {
		return nil, nil, sql.ErrNoRows
	}

	defer func() {
		if err := rows.Close(); err != nil {
			provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
		}
	}()

	columns := make([]*sqlschema.Column, 0)
	for rows.Next() {
		var (
			name        string
			sqlDataType string
			nullable    bool
			defaultVal  *string
		)
		if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
			return nil, nil, err
		}

	sqlschemaColumns := make([]*sqlschema.Column, 0)
	for _, column := range columns {
		columnDefault := ""
		if defaultVal != nil {
			columnDefault = *defaultVal
		if column.DefaultVal != nil {
			columnDefault = *column.DefaultVal
		}

		columns = append(columns, &sqlschema.Column{
			Name:     sqlschema.ColumnName(name),
			Nullable: nullable,
			DataType: provider.fmter.DataTypeOf(sqlDataType),
		sqlschemaColumns = append(sqlschemaColumns, &sqlschema.Column{
			Name:     sqlschema.ColumnName(column.ColumnName),
			Nullable: column.Nullable,
			DataType: provider.fmter.DataTypeOf(column.SQLDataType),
			Default:  columnDefault,
		})
	}
@@ -208,7 +204,7 @@ WHERE

	return &sqlschema.Table{
		Name:                  tableName,
		Columns:               columns,
		Columns:               sqlschemaColumns,
		PrimaryKeyConstraint:  primaryKeyConstraint,
		ForeignKeyConstraints: foreignKeyConstraints,
	}, uniqueConstraints, nil

153
ee/sqlstore/postgressqlstore/formatter.go
Normal file
@@ -0,0 +1,153 @@
package postgressqlstore

import (
	"strings"

	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun/schema"
)

type formatter struct {
	bunf schema.Formatter
}

func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
	return &formatter{bunf: schema.NewFormatter(dialect)}
}

func (f *formatter) JSONExtractString(column, path string) []byte {
	var sql []byte
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgres(path)...)
	return sql
}

func (f *formatter) JSONType(column, path string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_typeof("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ')')
	return sql
}

func (f *formatter) JSONIsArray(column, path string) []byte {
	var sql []byte
	sql = append(sql, f.JSONType(column, path)...)
	sql = append(sql, " = "...)
	sql = schema.Append(f.bunf, sql, "array")
	return sql
}

func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_array_elements("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, []byte(alias)
}

func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_array_elements_text("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, append([]byte(alias), "::text"...)
}

func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_each("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, append([]byte(alias), ".key"...)
}

func (f *formatter) JSONArrayAgg(expression string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_agg("...)
	sql = append(sql, expression...)
	sql = append(sql, ')')
	return sql
}

func (f *formatter) JSONArrayLiteral(values ...string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_build_array("...)
	for idx, value := range values {
		if idx > 0 {
			sql = append(sql, ", "...)
		}
		sql = schema.Append(f.bunf, sql, value)
	}
	sql = append(sql, ')')
	return sql
}

func (f *formatter) TextToJsonColumn(column string) []byte {
	var sql []byte
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, "::jsonb"...)
	return sql
}

func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
	return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}

func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
	path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")

	if path == "" {
		return nil
	}

	parts := strings.Split(path, ".")

	var validParts []string
	for _, part := range parts {
		if part != "" {
			validParts = append(validParts, part)
		}
	}

	if len(validParts) == 0 {
		return nil
	}

	var result []byte

	for idx, part := range validParts {
		if idx == len(validParts)-1 {
			if asText {
				result = append(result, "->>"...)
			} else {
				result = append(result, "->"...)
			}
			result = schema.Append(f.bunf, result, part)
			return result
		}

		result = append(result, "->"...)
		result = schema.Append(f.bunf, result, part)
	}

	return result
}

func (f *formatter) LowerExpression(expression string) []byte {
	var sql []byte
	sql = append(sql, "lower("...)
	sql = append(sql, expression...)
	sql = append(sql, ')')
	return sql
}
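As a quick reference for what this formatter emits (full coverage is in the test file that follows), here is an example-style sketch; it is not part of the change set and assumes it sits in a _test.go file inside the same postgressqlstore package, since newFormatter is unexported:

package postgressqlstore

import (
	"fmt"

	"github.com/uptrace/bun/dialect/pgdialect"
)

// ExampleFormatter is illustrative only: it prints the SQL fragments the
// formatter above produces for a few JSONPath-style selectors.
func ExampleFormatter() {
	f := newFormatter(pgdialect.New())
	fmt.Println(string(f.JSONExtractString("data", "$.user.name")))
	fmt.Println(string(f.JSONIsArray("data", "$.tags")))
	fmt.Println(string(f.TextToJsonColumn("data")))
	// Output:
	// "data"->'user'->>'name'
	// jsonb_typeof("data"->'tags') = 'array'
	// "data"::jsonb
}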
500
ee/sqlstore/postgressqlstore/formatter_test.go
Normal file
@@ -0,0 +1,500 @@
package postgressqlstore

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/uptrace/bun/dialect/pgdialect"
)

func TestJSONExtractString(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.field",
			expected: `"data"->>'field'`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.name",
			expected: `"metadata"->'user'->>'name'`,
		},
		{
			name:     "deeply nested path",
			column:   "json_col",
			path:     "$.level1.level2.level3",
			expected: `"json_col"->'level1'->'level2'->>'level3'`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `"json_col"`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `"data"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONExtractString(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONType(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.field",
			expected: `jsonb_typeof("data"->'field')`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.age",
			expected: `jsonb_typeof("metadata"->'user'->'age')`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `jsonb_typeof("json_col")`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `jsonb_typeof("data")`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONType(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONIsArray(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.items",
			expected: `jsonb_typeof("data"->'items') = 'array'`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.tags",
			expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `jsonb_typeof("json_col") = 'array'`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `jsonb_typeof("data") = 'array'`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONIsArray(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONArrayElements(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "elem",
			expected: `jsonb_array_elements("data") AS "elem"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "elem",
			expected: `jsonb_array_elements("data") AS "elem"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.items",
			alias:    "item",
			expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
		},
		{
			name:     "deeply nested path",
			column:   "json_col",
			path:     "$.user.tags",
			alias:    "tag",
			expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONArrayOfStrings(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "str",
			expected: `jsonb_array_elements_text("data") AS "str"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "str",
			expected: `jsonb_array_elements_text("data") AS "str"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.strings",
			alias:    "s",
			expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONKeys(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "k",
			expected: `jsonb_each("data") AS "k"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "k",
			expected: `jsonb_each("data") AS "k"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.object",
			alias:    "key",
			expected: `jsonb_each("metadata"->'object') AS "key"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONArrayAgg(t *testing.T) {
	tests := []struct {
		name       string
		expression string
		expected   string
	}{
		{
			name:       "simple column",
			expression: "id",
			expected:   "jsonb_agg(id)",
		},
		{
			name:       "expression with function",
			expression: "DISTINCT name",
			expected:   "jsonb_agg(DISTINCT name)",
		},
		{
			name:       "complex expression",
			expression: "data->>'field'",
			expected:   "jsonb_agg(data->>'field')",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONArrayAgg(tt.expression))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONArrayLiteral(t *testing.T) {
	tests := []struct {
		name     string
		values   []string
		expected string
	}{
		{
			name:     "empty array",
			values:   []string{},
			expected: "jsonb_build_array()",
		},
		{
			name:     "single value",
			values:   []string{"value1"},
			expected: "jsonb_build_array('value1')",
		},
		{
			name:     "multiple values",
			values:   []string{"value1", "value2", "value3"},
			expected: "jsonb_build_array('value1', 'value2', 'value3')",
		},
		{
			name:     "values with special characters",
			values:   []string{"test", "with space", "with-dash"},
			expected: "jsonb_build_array('test', 'with space', 'with-dash')",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONArrayLiteral(tt.values...))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
	tests := []struct {
		name     string
		jsonPath string
		asText   bool
		expected string
	}{
		{
			name:     "simple path as text",
			jsonPath: "$.field",
			asText:   true,
			expected: "->>'field'",
		},
		{
			name:     "simple path as json",
			jsonPath: "$.field",
			asText:   false,
			expected: "->'field'",
		},
		{
			name:     "nested path as text",
			jsonPath: "$.user.name",
			asText:   true,
			expected: "->'user'->>'name'",
		},
		{
			name:     "nested path as json",
			jsonPath: "$.user.name",
			asText:   false,
			expected: "->'user'->'name'",
		},
		{
			name:     "deeply nested as text",
			jsonPath: "$.a.b.c.d",
			asText:   true,
			expected: "->'a'->'b'->'c'->>'d'",
		},
		{
			name:     "root path",
			jsonPath: "$",
			asText:   true,
			expected: "",
		},
		{
			name:     "empty path",
			jsonPath: "",
			asText:   true,
			expected: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New()).(*formatter)
			got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestTextToJsonColumn(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		expected string
	}{
		{
			name:     "simple column name",
			column:   "data",
			expected: `"data"::jsonb`,
		},
		{
			name:     "column with underscore",
			column:   "user_data",
			expected: `"user_data"::jsonb`,
		},
		{
			name:     "column with special characters",
			column:   "json-col",
			expected: `"json-col"::jsonb`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.TextToJsonColumn(tt.column))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestLowerExpression(t *testing.T) {
	tests := []struct {
		name     string
		expr     string
		expected string
	}{
		{
			name:     "simple column name",
			expr:     "name",
			expected: "lower(name)",
		},
		{
			name:     "quoted column identifier",
			expr:     `"column_name"`,
			expected: `lower("column_name")`,
		},
		{
			name:     "jsonb text extraction",
			expr:     "data->>'field'",
			expected: "lower(data->>'field')",
		},
		{
			name:     "nested jsonb extraction",
			expr:     "metadata->'user'->>'name'",
			expected: "lower(metadata->'user'->>'name')",
		},
		{
			name:     "jsonb_typeof expression",
			expr:     "jsonb_typeof(data->'field')",
			expected: "lower(jsonb_typeof(data->'field'))",
		},
		{
			name:     "string concatenation",
			expr:     "first_name || ' ' || last_name",
			expected: "lower(first_name || ' ' || last_name)",
		},
		{
			name:     "CAST expression",
			expr:     "CAST(value AS TEXT)",
			expected: "lower(CAST(value AS TEXT))",
		},
		{
			name:     "COALESCE expression",
			expr:     "COALESCE(name, 'default')",
			expected: "lower(COALESCE(name, 'default'))",
		},
		{
			name:     "subquery column",
			expr:     "users.email",
			expected: "lower(users.email)",
		},
		{
			name:     "quoted identifier with special chars",
			expr:     `"user-name"`,
			expected: `lower("user-name")`,
		},
		{
			name:     "jsonb to text cast",
			expr:     "data::text",
			expected: "lower(data::text)",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.LowerExpression(tt.expr))
			assert.Equal(t, tt.expected, got)
		})
	}
}
@@ -15,10 +15,11 @@ import (
)

type provider struct {
	settings factory.ScopedProviderSettings
	sqldb    *sql.DB
	bundb    *sqlstore.BunDB
	dialect  *dialect
	settings  factory.ScopedProviderSettings
	sqldb     *sql.DB
	bundb     *sqlstore.BunDB
	dialect   *dialect
	formatter sqlstore.SQLFormatter
}

func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -55,11 +56,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config

	sqldb := stdlib.OpenDBFromPool(pool)

	pgDialect := pgdialect.New()
	bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
	return &provider{
		settings: settings,
		sqldb:    sqldb,
		bundb:    sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
		dialect:  new(dialect),
		settings:  settings,
		sqldb:     sqldb,
		bundb:     bunDB,
		dialect:   new(dialect),
		formatter: newFormatter(bunDB.Dialect()),
	}, nil
}

@@ -75,6 +79,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
	return provider.dialect
}

func (provider *provider) Formatter() sqlstore.SQLFormatter {
	return provider.formatter
}

func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
	return provider.bundb.BunDBCtx(ctx)
}

@@ -1,10 +1,10 @@
package zeus

import (
	"fmt"
	neturl "net/url"
	"sync"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/zeus"
)

@@ -24,17 +24,17 @@ func Config() zeus.Config {
	once.Do(func() {
		parsedURL, err := neturl.Parse(url)
		if err != nil {
			panic(fmt.Errorf("invalid zeus URL: %w", err))
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus URL"))
		}

		deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
		if err != nil {
			panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus deprecated URL"))
		}

		config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
		if err := config.Validate(); err != nil {
			panic(fmt.Errorf("invalid zeus config: %w", err))
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus config"))
		}
	})

@@ -1,5 +1,5 @@
module.exports = {
	ignorePatterns: ['src/parser/*.ts'],
	ignorePatterns: ['src/parser/*.ts', 'scripts/update-registry.js'],
	env: {
		browser: true,
		es2021: true,

@@ -3,5 +3,6 @@ BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/"
PYLON_APP_ID="pylon-app-id"
APPCUES_APP_ID="appcess-app-id"
PYLON_IDENTITY_SECRET="pylon-identity-secret"

CI="1"
@@ -14,7 +14,7 @@
    "jest": "jest",
    "jest:coverage": "jest --coverage",
    "jest:watch": "jest --watch",
    "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
    "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure) && node scripts/update-registry.js",
    "husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
    "commitlint": "commitlint --edit $1",
    "test": "jest",
@@ -38,7 +38,7 @@
    "@mdx-js/loader": "2.3.0",
    "@mdx-js/react": "2.3.0",
    "@monaco-editor/react": "^4.3.1",
    "@playwright/test": "1.54.1",
    "@playwright/test": "1.55.1",
    "@radix-ui/react-tabs": "1.0.4",
    "@radix-ui/react-tooltip": "1.0.7",
    "@sentry/react": "8.41.0",
@@ -47,6 +47,8 @@
    "@signozhq/button": "0.0.2",
    "@signozhq/calendar": "0.0.0",
    "@signozhq/callout": "0.0.2",
    "@signozhq/checkbox": "0.0.2",
    "@signozhq/command": "0.0.0",
    "@signozhq/design-tokens": "1.1.4",
    "@signozhq/input": "0.0.2",
    "@signozhq/popover": "0.0.0",
@@ -69,7 +71,7 @@
    "antd": "5.11.0",
    "antd-table-saveas-excel": "2.2.1",
    "antlr4": "4.13.2",
    "axios": "1.8.2",
    "axios": "1.12.0",
    "babel-eslint": "^10.1.0",
    "babel-jest": "^29.6.4",
    "babel-loader": "9.1.3",
@@ -83,6 +85,7 @@
    "color": "^4.2.1",
    "color-alpha": "1.1.3",
    "cross-env": "^7.0.3",
    "crypto-js": "4.2.0",
    "css-loader": "5.0.0",
    "css-minimizer-webpack-plugin": "5.0.1",
    "d3-hierarchy": "3.1.2",
@@ -102,7 +105,6 @@
    "i18next-http-backend": "^1.3.2",
    "jest": "^27.5.1",
    "js-base64": "^3.7.2",
    "kbar": "0.1.0-beta.48",
    "less": "^4.1.2",
    "less-loader": "^10.2.0",
    "lodash-es": "^4.17.21",
@@ -112,7 +114,7 @@
    "overlayscrollbars": "^2.8.1",
    "overlayscrollbars-react": "^0.5.6",
    "papaparse": "5.4.1",
    "posthog-js": "1.215.5",
    "posthog-js": "1.298.0",
    "rc-tween-one": "3.0.6",
    "react": "18.2.0",
    "react-addons-update": "15.6.3",
@@ -149,7 +151,6 @@
    "tsconfig-paths-webpack-plugin": "^3.5.1",
    "typescript": "^4.0.5",
    "uplot": "1.6.31",
    "userpilot": "1.3.9",
    "uuid": "^8.3.2",
    "web-vitals": "^0.2.4",
    "webpack": "5.94.0",
@@ -186,6 +187,7 @@
    "@types/color": "^3.0.3",
    "@types/compression-webpack-plugin": "^9.0.0",
    "@types/copy-webpack-plugin": "^8.0.1",
    "@types/crypto-js": "4.2.2",
    "@types/dompurify": "^2.4.0",
    "@types/event-source-polyfill": "^1.0.0",
    "@types/fontfaceobserver": "2.1.0",
@@ -279,6 +281,8 @@
    "prismjs": "1.30.0",
    "got": "11.8.5",
    "form-data": "4.0.4",
    "brace-expansion": "^2.0.2"
    "brace-expansion": "^2.0.2",
    "on-headers": "^1.1.0",
    "tmp": "0.2.4"
  }
}

1
frontend/public/Logos/amazon-bedrock.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" fill="currentColor" fill-rule="evenodd" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>AWS</title><path d="M6.763 11.212q.002.446.088.71c.064.176.144.368.256.576.04.063.056.127.056.183q.002.12-.152.24l-.503.335a.4.4 0 0 1-.208.072q-.12-.002-.239-.112a2.5 2.5 0 0 1-.287-.375 6 6 0 0 1-.248-.471q-.934 1.101-2.347 1.101c-.67 0-1.205-.191-1.596-.574-.39-.384-.59-.894-.59-1.533 0-.678.24-1.23.726-1.644.487-.415 1.133-.623 1.955-.623.272 0 .551.024.846.064.296.04.6.104.918.176v-.583q-.001-.908-.375-1.277c-.255-.248-.686-.367-1.3-.367-.28 0-.568.031-.863.103s-.583.16-.862.272a2 2 0 0 1-.28.104.5.5 0 0 1-.127.023q-.168.002-.168-.247v-.391c0-.128.016-.224.056-.28a.6.6 0 0 1 .224-.167 4.6 4.6 0 0 1 1.005-.36 4.8 4.8 0 0 1 1.246-.151c.95 0 1.644.216 2.091.647q.661.646.662 1.963v2.586zm-3.24 1.214c.263 0 .534-.048.822-.144a1.8 1.8 0 0 0 .758-.51 1.3 1.3 0 0 0 .272-.512c.047-.191.08-.423.08-.694v-.335a7 7 0 0 0-.735-.136 6 6 0 0 0-.75-.048c-.535 0-.926.104-1.19.32-.263.215-.39.518-.39.917 0 .375.095.655.295.846.191.2.47.296.838.296m6.41.862c-.144 0-.24-.024-.304-.08-.064-.048-.12-.16-.168-.311L7.586 6.726a1.4 1.4 0 0 1-.072-.32c0-.128.064-.2.191-.2h.783q.227-.001.31.08c.065.048.113.16.16.312l1.342 5.284 1.245-5.284q.058-.24.151-.312a.55.55 0 0 1 .32-.08h.638c.152 0 .256.025.32.08.063.048.12.16.151.312l1.261 5.348 1.381-5.348q.074-.24.16-.312a.52.52 0 0 1 .311-.08h.743c.127 0 .2.065.2.2 0 .04-.009.08-.017.128a1 1 0 0 1-.056.2l-1.923 6.17q-.072.24-.168.311a.5.5 0 0 1-.303.08h-.687c-.15 0-.255-.024-.32-.08-.063-.056-.119-.16-.15-.32L12.32 7.747l-1.23 5.14c-.04.16-.087.264-.15.32-.065.056-.177.08-.32.08zm10.256.215c-.415 0-.83-.048-1.229-.143-.399-.096-.71-.2-.918-.32-.128-.071-.215-.151-.247-.223a.6.6 0 0 1-.048-.224v-.407c0-.167.064-.247.183-.247q.072 0 .144.024c.048.016.12.048.2.08q.408.181.878.279c.32.064.63.096.95.096.502 0 .894-.088 1.165-.264a.86.86 0 0 0 .415-.758.78.78 0 0 0-.215-.559c-.144-.151-.416-.287-.807-.415l-1.157-.36c-.583-.183-1.014-.454-1.277-.813a1.9 1.9 0 0 1-.4-1.158q0-.502.216-.886c.144-.255.335-.479.575-.654.24-.184.51-.32.83-.415.32-.096.655-.136 1.006-.136.175 0 .36.008.535.032.183.024.35.056.518.088q.24.058.455.127.216.072.336.144a.7.7 0 0 1 .24.2.43.43 0 0 1 .071.263v.375q-.002.254-.184.256a.8.8 0 0 1-.303-.096 3.65 3.65 0 0 0-1.532-.311c-.455 0-.815.071-1.062.223s-.375.383-.375.71c0 .224.08.416.24.567.16.152.454.304.877.44l1.134.358c.574.184.99.44 1.237.767s.367.702.367 1.117c0 .343-.072.655-.207.926a2.2 2.2 0 0 1-.583.703c-.248.2-.543.343-.886.447-.36.111-.734.167-1.142.167"/><path fill="#f90" d="M.378 15.475c3.384 1.963 7.56 3.153 11.877 3.153 2.914 0 6.114-.607 9.06-1.852.44-.2.814.287.383.607-2.626 1.94-6.442 2.969-9.722 2.969-4.598 0-8.74-1.7-11.87-4.526-.247-.223-.024-.527.272-.351m23.531-.2c.287.36-.08 2.826-1.485 4.007-.215.184-.423.088-.327-.151l.175-.439c.343-.88.802-2.198.52-2.555-.336-.43-2.22-.207-3.074-.103-.255.032-.295-.192-.063-.36 1.5-1.053 3.967-.75 4.254-.399"/></svg>
After Width: | Height: | Size: 3.0 KiB |
1
frontend/public/Logos/autogen.svg
Normal file
After Width: | Height: | Size: 20 KiB |
1
frontend/public/Logos/azure-openai.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>Azure</title><path fill="url(#a)" d="M7.242 1.613A1.11 1.11 0 0 1 8.295.857h6.977L8.03 22.316a1.11 1.11 0 0 1-1.052.755h-5.43a1.11 1.11 0 0 1-1.053-1.466z"/><path fill="#0078d4" d="M18.397 15.296H7.4a.51.51 0 0 0-.347.882l7.066 6.595c.206.192.477.298.758.298h6.226z"/><path fill="url(#b)" d="M15.272.857H7.497L0 23.071h7.775l1.596-4.73 5.068 4.73h6.665l-2.707-7.775h-7.998z"/><path fill="url(#c)" d="M17.193 1.613a1.11 1.11 0 0 0-1.052-.756h-7.81.035c.477 0 .9.304 1.052.756l6.748 19.992a1.11 1.11 0 0 1-1.052 1.466h-.12 7.895a1.11 1.11 0 0 0 1.052-1.466z"/><defs><linearGradient id="a" x1="8.247" x2="1.002" y1="1.626" y2="23.03" gradientUnits="userSpaceOnUse"><stop stop-color="#114a8b"/><stop offset="1" stop-color="#0669bc"/></linearGradient><linearGradient id="b" x1="14.042" x2="12.324" y1="15.302" y2="15.888" gradientUnits="userSpaceOnUse"><stop stop-opacity=".3"/><stop offset=".071" stop-opacity=".2"/><stop offset=".321" stop-opacity=".1"/><stop offset=".623" stop-opacity=".05"/><stop offset="1" stop-opacity="0"/></linearGradient><linearGradient id="c" x1="12.841" x2="20.793" y1="1.626" y2="22.814" gradientUnits="userSpaceOnUse"><stop stop-color="#3ccbf4"/><stop offset="1" stop-color="#2892df"/></linearGradient></defs></svg>
After Width: | Height: | Size: 1.3 KiB |
1
frontend/public/Logos/crew-ai.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>CrewAI</title><path fill="#461816" d="M19.41 10.783a2.75 2.75 0 0 1 2.471 1.355c.483.806.622 1.772.385 2.68l-.136.522a10 10 0 0 1-3.156 5.058c-.605.517-1.283 1.062-2.083 1.524l-.028.017c-.402.232-.884.511-1.398.756-1.19.602-2.475.997-3.798 1.167-.854.111-1.716.155-2.577.132h-.018a8.6 8.6 0 0 1-5.046-1.87l-.012-.01-.012-.01A8.02 8.02 0 0 1 1.22 17.42a10.9 10.9 0 0 1-.102-3.779A15.6 15.6 0 0 1 2.88 8.4a21.8 21.8 0 0 1 2.432-3.678 15.4 15.4 0 0 1 3.56-3.182A10 10 0 0 1 12.44.104h.004l.003-.002c2.057-.384 3.743.374 5.024 1.26a8.3 8.3 0 0 1 2.395 2.513l.024.04.023.042a5.47 5.47 0 0 1 .508 4.012c-.239.97-.577 1.914-1.01 2.814z"/><path fill="#fff" d="M18.861 13.165a.748.748 0 0 1 1.256.031c.199.332.256.73.159 1.103l-.137.522a7.94 7.94 0 0 1-2.504 4.014c-.572.49-1.138.939-1.774 1.306-.427.247-.857.496-1.303.707a9.6 9.6 0 0 1-3.155.973 14.3 14.3 0 0 1-2.257.116 6.53 6.53 0 0 1-3.837-1.422 5.97 5.97 0 0 1-2.071-3.494 8.9 8.9 0 0 1-.085-3.08 13.6 13.6 0 0 1 1.54-4.568 19.7 19.7 0 0 1 2.212-3.348 13.4 13.4 0 0 1 3.088-2.76 7.9 7.9 0 0 1 2.832-1.14c1.307-.245 2.434.207 3.481.933a6.2 6.2 0 0 1 1.806 1.892c.423.767.536 1.668.314 2.515a12.4 12.4 0 0 1-.99 2.67l-.223.497q-.48 1.07-.97 2.137a.76.76 0 0 1-.97.467 3.39 3.39 0 0 1-2.283-2.49c-.095-.83.04-1.669.39-2.426.288-.746.61-1.477.933-2.208l.248-.563a.53.53 0 0 0-.204-.742 2.35 2.35 0 0 0-1.2.702 25 25 0 0 0-1.614 1.767 21.6 21.6 0 0 0-2.619 4.184 7.6 7.6 0 0 0-.816 2.753 7 7 0 0 0 .07 2.219 2.055 2.055 0 0 0 1.934 1.715c1.801.1 3.59-.363 5.116-1.328a19 19 0 0 0 1.675-1.294c.752-.71 1.376-1.519 1.958-2.36"/></svg>
After Width: | Height: | Size: 1.7 KiB |
1
frontend/public/Logos/dashboards.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><g transform="translate(25 25)"><rect width="60" height="55" fill="#fcd34d" rx="6"/><path stroke="#d97706" stroke-linecap="round" stroke-linejoin="round" stroke-width="4" d="m10 40 15-15 15 8 15-23"/><rect width="35" height="55" x="65" fill="#6ee7b7" rx="6"/><rect width="20" height="6" x="73" y="10" fill="#059669" rx="3"/><rect width="15" height="6" x="73" y="22" fill="#059669" opacity=".7" rx="3"/><rect width="18" height="6" x="73" y="34" fill="#059669" opacity=".7" rx="3"/><rect width="100" height="40" y="60" fill="#a5b4fc" rx="6"/><rect width="80" height="8" x="10" y="70" fill="#4f46e5" rx="4"/><rect width="50" height="8" x="20" y="83" fill="#6366f1" rx="4"/></g></svg>
After Width: | Height: | Size: 826 B |
1
frontend/public/Logos/envoy.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#b31aab" d="m33.172 61.48.176 7.325 7.797 4.785-.176-7.324Zm19.031 30.504-.176-7.18-6.84-4.206c-.085-.059-.203-.145-.289-.203l.176 7.207Zm-24.355 9.688L10.012 90.715l-.438-18.367 8.758-3.746-.172-7.352-13.969 5.969c-1.074.46-1.714 1.441-1.687 2.566l.523 22.055c.032 1.125.73 2.25 1.836 2.941l21.383 13.149c.992.605 2.215.78 3.203.46a.8.8 0 0 0 .29-.113l13.124-5.593-7.129-4.383Zm0 0"/><path fill="#d163ce" d="M85.488 61.047c-.031-1.328-.843-2.625-2.125-3.43L57.38 41.672l-.813.344.176 7.726 20.57 12.63.493 20.648 7.86 4.812.433-.172ZM54.383 97.289 30.262 82.47l-.582-24.883 11-4.7-.203-8.562-17.082 7.293c-1.25.547-2.008 1.672-1.977 3l.668 29.18c.031 1.324.844 2.625 2.125 3.402l28.281 17.387c1.164.723 2.563.894 3.754.547.117-.028.234-.086.348-.145l16.703-7.12-8.32-5.102Zm0 0"/><path fill="#e13eaf" d="M122.234 40.633 85.98 18.343c-1.335-.808-2.937-1.038-4.304-.605-.145.028-.262.086-.406.145l-35.383 15.11c-1.426.605-2.297 1.902-2.27 3.429l.903 37.371c.03 1.527.96 2.996 2.445 3.89l36.254 22.262c1.336.805 2.937 1.035 4.277.606.145-.031.262-.09.406-.145l35.383-15.11c1.426-.605 2.297-1.933 2.27-3.433l-.875-37.367c-.028-1.473-.961-2.969-2.446-3.863M85.398 91.64 53.891 72.293l-.79-32.496 30.727-13.121 31.512 19.347.785 32.497Zm0 0"/></svg>
After Width: | Height: | Size: 1.3 KiB |
1
frontend/public/Logos/fly-io.svg
Normal file
After Width: | Height: | Size: 6.0 KiB |
1
frontend/public/Logos/honeycomb.svg
Normal file
After Width: | Height: | Size: 5.5 KiB |
1
frontend/public/Logos/litellm.svg
Normal file
After Width: | Height: | Size: 19 KiB |
1
frontend/public/Logos/logs.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><g transform="translate(25 40)"><rect width="12" height="12" fill="#059669" opacity=".8" rx="3"/><rect width="80" height="12" x="20" fill="#10b981" rx="3"/><rect width="12" height="12" y="22" fill="#059669" opacity=".8" rx="3"/><rect width="65" height="12" x="20" y="22" fill="#10b981" rx="3"/><rect width="12" height="12" y="44" fill="#059669" opacity=".8" rx="3"/><rect width="75" height="12" x="20" y="44" fill="#10b981" rx="3"/><rect width="12" height="12" y="66" fill="#059669" opacity=".8" rx="3"/><rect width="50" height="12" x="20" y="66" fill="#10b981" rx="3"/></g></svg>
After Width: | Height: | Size: 726 B |
14
frontend/public/Logos/mastra.svg
Normal file
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" width="512" height="512">
<path d="M0 0 C1.01790619 0.00215515 2.03581238 0.0043103 3.08456421 0.00653076 C17.69363523 0.05620146 32.02575853 0.30709809 46.375 3.3125 C47.34985352 3.51117676 48.32470703 3.70985352 49.32910156 3.91455078 C97.7109774 14.03517194 141.69928269 35.9516877 177.375 70.3125 C178.3640332 71.26068604 178.3640332 71.26068604 179.37304688 72.22802734 C188.86130305 81.37247479 197.6012044 90.66984009 205.375 101.3125 C206.23306152 102.46355017 207.09113451 103.61459177 207.94921875 104.765625 C213.98736829 112.95680614 219.34595521 121.47054105 224.375 130.3125 C224.73948242 130.94430176 225.10396484 131.57610352 225.47949219 132.22705078 C247.08534998 169.97532572 256.85198095 212.74189997 256.6875 255.9375 C256.68534485 256.95540619 256.6831897 257.97331238 256.68096924 259.02206421 C256.63129854 273.63113523 256.38040191 287.96325853 253.375 302.3125 C253.07698486 303.77478027 253.07698486 303.77478027 252.77294922 305.26660156 C242.65232806 353.6484774 220.7358123 397.63678269 186.375 433.3125 C185.74287598 433.97185547 185.11075195 434.63121094 184.45947266 435.31054688 C175.31502521 444.79880305 166.01765991 453.5387044 155.375 461.3125 C154.22394983 462.17056152 153.07290823 463.02863451 151.921875 463.88671875 C143.73224019 469.92427396 135.22961724 475.30707302 126.375 480.3125 C125.66778809 480.7146875 124.96057617 481.116875 124.23193359 481.53125 C101.09654164 494.5566741 75.92537591 503.30551461 49.9375 508.625 C49.21860596 508.77235596 48.49971191 508.91971191 47.7590332 509.0715332 C33.08339502 511.86090125 18.55831162 512.66142619 3.64770508 512.62817383 C0.70811513 512.6250221 -2.2304558 512.64854926 -5.16992188 512.67382812 C-18.54306192 512.71848737 -31.43916711 511.51301542 -44.625 509.3125 C-46.33055054 509.03031616 -46.33055054 509.03031616 -48.07055664 508.74243164 C-91.33802759 500.97044532 -132.38793216 480.98345309 -165.625 452.3125 C-166.37007813 451.67828125 -167.11515625 451.0440625 -167.8828125 450.390625 C-181.38779833 438.57948485 -194.05918743 425.82889772 -204.625 411.3125 C-205.48149066 410.16255169 -206.33826433 409.01281411 -207.1953125 407.86328125 C-213.23534209 399.67315742 -218.61867614 391.16870382 -223.625 382.3125 C-224.0271875 381.60528809 -224.429375 380.89807617 -224.84375 380.16943359 C-237.8691741 357.03404164 -246.61801461 331.86287591 -251.9375 305.875 C-252.08485596 305.15610596 -252.23221191 304.43721191 -252.3840332 303.6965332 C-255.35789088 288.05024391 -255.99800362 272.57681954 -255.9375 256.6875 C-255.93534485 255.66959381 -255.9331897 254.65168762 -255.93096924 253.60293579 C-255.88129854 238.99386477 -255.63040191 224.66174147 -252.625 210.3125 C-252.42632324 209.33764648 -252.22764648 208.36279297 -252.02294922 207.35839844 C-241.90232806 158.9765226 -219.9858123 114.98821731 -185.625 79.3125 C-184.99287598 78.65314453 -184.36075195 77.99378906 -183.70947266 77.31445312 C-174.56502521 67.82619695 -165.26765991 59.0862956 -154.625 51.3125 C-153.47394983 50.45443848 -152.32290823 49.59636549 -151.171875 48.73828125 C-142.98224019 42.70072604 -134.47961724 37.31792698 -125.625 32.3125 C-124.91778809 31.9103125 -124.21057617 31.508125 -123.48193359 31.09375 C-100.34654164 18.0683259 -75.17537591 9.31948539 -49.1875 4 C-48.46860596 3.85264404 -47.74971191 3.70528809 -47.0090332 3.5534668 C-31.36274391 0.57960912 -15.88931954 -0.06050362 0 0 Z " fill="#FAFAFA" transform="translate(255.625,-0.3125)"/>
<path d="M0 0 C1.01790619 0.00215515 2.03581238 0.0043103 3.08456421 0.00653076 C17.69363523 0.05620146 32.02575853 0.30709809 46.375 3.3125 C47.34985352 3.51117676 48.32470703 3.70985352 49.32910156 3.91455078 C97.7109774 14.03517194 141.69928269 35.9516877 177.375 70.3125 C178.3640332 71.26068604 178.3640332 71.26068604 179.37304688 72.22802734 C188.86130305 81.37247479 197.6012044 90.66984009 205.375 101.3125 C206.23306152 102.46355017 207.09113451 103.61459177 207.94921875 104.765625 C213.98736829 112.95680614 219.34595521 121.47054105 224.375 130.3125 C224.73948242 130.94430176 225.10396484 131.57610352 225.47949219 132.22705078 C247.08534998 169.97532572 256.85198095 212.74189997 256.6875 255.9375 C256.68534485 256.95540619 256.6831897 257.97331238 256.68096924 259.02206421 C256.63129854 273.63113523 256.38040191 287.96325853 253.375 302.3125 C253.07698486 303.77478027 253.07698486 303.77478027 252.77294922 305.26660156 C242.65232806 353.6484774 220.7358123 397.63678269 186.375 433.3125 C185.74287598 433.97185547 185.11075195 434.63121094 184.45947266 435.31054688 C175.31502521 444.79880305 166.01765991 453.5387044 155.375 461.3125 C154.22394983 462.17056152 153.07290823 463.02863451 151.921875 463.88671875 C143.73224019 469.92427396 135.22961724 475.30707302 126.375 480.3125 C125.66778809 480.7146875 124.96057617 481.116875 124.23193359 481.53125 C101.09654164 494.5566741 75.92537591 503.30551461 49.9375 508.625 C49.21860596 508.77235596 48.49971191 508.91971191 47.7590332 509.0715332 C33.08339502 511.86090125 18.55831162 512.66142619 3.64770508 512.62817383 C0.70811513 512.6250221 -2.2304558 512.64854926 -5.16992188 512.67382812 C-18.54306192 512.71848737 -31.43916711 511.51301542 -44.625 509.3125 C-46.33055054 509.03031616 -46.33055054 509.03031616 -48.07055664 508.74243164 C-91.33802759 500.97044532 -132.38793216 480.98345309 -165.625 452.3125 C-166.37007813 451.67828125 -167.11515625 451.0440625 -167.8828125 450.390625 C-181.38779833 438.57948485 -194.05918743 425.82889772 -204.625 411.3125 C-205.48149066 410.16255169 -206.33826433 409.01281411 -207.1953125 407.86328125 C-213.23534209 399.67315742 -218.61867614 391.16870382 -223.625 382.3125 C-224.0271875 381.60528809 -224.429375 380.89807617 -224.84375 380.16943359 C-237.8691741 357.03404164 -246.61801461 331.86287591 -251.9375 305.875 C-252.08485596 305.15610596 -252.23221191 304.43721191 -252.3840332 303.6965332 C-255.35789088 288.05024391 -255.99800362 272.57681954 -255.9375 256.6875 C-255.93534485 255.66959381 -255.9331897 254.65168762 -255.93096924 253.60293579 C-255.88129854 238.99386477 -255.63040191 224.66174147 -252.625 210.3125 C-252.42632324 209.33764648 -252.22764648 208.36279297 -252.02294922 207.35839844 C-241.90232806 158.9765226 -219.9858123 114.98821731 -185.625 79.3125 C-184.99287598 78.65314453 -184.36075195 77.99378906 -183.70947266 77.31445312 C-174.56502521 67.82619695 -165.26765991 59.0862956 -154.625 51.3125 C-153.47394983 50.45443848 -152.32290823 49.59636549 -151.171875 48.73828125 C-142.98224019 42.70072604 -134.47961724 37.31792698 -125.625 32.3125 C-124.91778809 31.9103125 -124.21057617 31.508125 -123.48193359 31.09375 C-100.34654164 18.0683259 -75.17537591 9.31948539 -49.1875 4 C-48.46860596 3.85264404 -47.74971191 3.70528809 -47.0090332 3.5534668 C-31.36274391 0.57960912 -15.88931954 -0.06050362 0 0 Z M-132.625 100.3125 C-133.86636719 101.38757813 -133.86636719 101.38757813 -135.1328125 102.484375 C-165.63578284 129.44938326 -187.86448496 164.37287727 -198.01196289 203.92382812 C-198.4561255 
205.654488 -198.91264545 207.38199177 -199.37670898 209.10742188 C-206.39011482 235.81273152 -206.57174532 264.26106754 -201.625 291.3125 C-201.5022168 292.01439453 -201.37943359 292.71628906 -201.25292969 293.43945312 C-191.91867325 346.46214396 -160.92344717 393.42130233 -117.23217773 424.5078125 C-89.1520157 444.03658082 -57.46503366 455.81316968 -23.625 460.3125 C-22.6246875 460.4465625 -21.624375 460.580625 -20.59375 460.71875 C33.68154914 466.46554638 87.79464708 449.50556098 130.05859375 415.375 C172.73532881 380.04170204 197.99757236 331.33192114 204.83984375 276.59375 C205.14411827 273.15919925 205.29014212 269.75985138 205.375 266.3125 C205.40078125 265.47734863 205.4265625 264.64219727 205.453125 263.78173828 C207.08252124 205.19475973 185.21292028 153.59303729 145.375 111.3125 C144.28251953 110.13880859 144.28251953 110.13880859 143.16796875 108.94140625 C136.81651093 102.33070526 129.68927931 96.80369409 122.375 91.3125 C121.54226563 90.68472656 120.70953125 90.05695313 119.8515625 89.41015625 C44.39149597 34.6822923 -62.3071717 39.41419387 -132.625 100.3125 Z " fill="#000000" transform="translate(255.625,-0.3125)"/>
<path d="M0 0 C0.71623535 0.07524902 1.4324707 0.15049805 2.17041016 0.22802734 C11.41582457 1.31558193 17.53905875 5.69555803 24.37939453 11.60302734 C25.9717539 12.97565167 27.58779464 14.31330925 29.21484375 15.64453125 C39.45353655 24.07849539 49.89302189 32.86357336 58.390625 43.0859375 C59.52771819 44.43830561 60.70999313 45.7532854 61.921875 47.0390625 C78.86561041 65.03405157 92.54732646 86.67485183 103 109 C103.45761719 109.94875 103.91523438 110.8975 104.38671875 111.875 C105.65363083 114.56069375 106.84247298 117.26538924 108 120 C108.70576172 121.65902344 108.70576172 121.65902344 109.42578125 123.3515625 C115.29775985 137.75748334 119.44802749 153.51162791 121 169 C121.1393396 170.13598633 121.1393396 170.13598633 121.28149414 171.29492188 C123.04224228 187.01734848 123.04224228 187.01734848 118.6640625 192.58203125 C115.46027156 195.86702306 111.77976561 198.42626256 108 201 C106.70961638 201.94695663 105.4218661 202.89751165 104.13671875 203.8515625 C54.19558639 240.19218217 -10.15836211 250.82403206 -70.52783203 244.24707031 C-72.70701376 244.02928126 -74.87191381 243.88318722 -77.05859375 243.7734375 C-87.15317474 242.98567756 -93.49567628 238.26267641 -101 232 C-101.5775 231.5270752 -102.155 231.05415039 -102.75 230.56689453 C-113.80888326 221.47972622 -125.20747742 211.94987788 -134.3828125 200.92578125 C-136.24364652 198.70986054 -138.21467717 196.61674763 -140.1875 194.5 C-161.40351648 170.90663702 -177.09305866 142.66632254 -188 113 C-188.40734375 111.93007812 -188.8146875 110.86015625 -189.234375 109.7578125 C-194.48827127 95.16814672 -202.71297728 67.59681209 -195.89453125 52.5625 C-194.07223789 50.69071225 -192.19999029 49.41220499 -190 48 C-188.52096885 46.81314803 -187.04236305 45.62573122 -185.57055664 44.42993164 C-163.09739012 26.47222869 -136.79512656 14.12849151 -109 7 C-107.47898826 6.58659334 -105.95816766 6.17248301 -104.4375 5.7578125 C-70.37699256 -3.34276097 -34.91038892 -3.88896444 0 0 Z " fill="#010101" transform="translate(294,134)"/>
<path d="M0 0 C0.86409988 -0.0033284 1.72819977 -0.0066568 2.6184845 -0.01008606 C4.43616205 -0.0151155 6.2538487 -0.01749563 8.0715332 -0.01733398 C10.80761482 -0.01951182 13.54328676 -0.03768671 16.27929688 -0.05664062 C26.27760684 -0.08904371 35.78646484 0.35879187 45.64453125 2.16796875 C46.42820068 2.2980835 47.21187012 2.42819824 48.01928711 2.56225586 C67.48091134 5.91708002 86.4354731 13.58908312 103.64453125 23.16796875 C104.27649414 23.51827148 104.90845703 23.86857422 105.55957031 24.22949219 C119.56180482 32.08853758 131.62289424 41.70235966 143.20019531 52.78662109 C144.48254743 54.01304914 145.77750786 55.22626362 147.07421875 56.4375 C160.46125565 69.27666884 171.07943638 85.21359379 179.83203125 101.48046875 C180.23115723 102.21628174 180.6302832 102.95209473 181.04150391 103.71020508 C181.97584067 105.49613991 182.81943883 107.32900956 183.64453125 109.16796875 C183.31453125 109.82796875 182.98453125 110.48796875 182.64453125 111.16796875 C181.77828125 110.23984375 180.91203125 109.31171875 180.01953125 108.35546875 C151.77719444 80.22655498 111.24847041 62.18954649 72.55981445 54.76806641 C66.66500207 53.55440703 62.13035714 50.73600667 57.08203125 47.54296875 C55.29699984 46.44819083 53.51053116 45.35575303 51.72265625 44.265625 C50.85318359 43.73340332 49.98371094 43.20118164 49.08789062 42.65283203 C16.63913044 22.93327173 -19.64835388 13.71677996 -57.35546875 13.16796875 C-51.06422023 6.87672023 -37.42242063 5.35193024 -28.98046875 3.41796875 C-28.06620117 3.20744873 -27.15193359 2.99692871 -26.20996094 2.7800293 C-17.46378901 0.84110933 -8.95226967 0.0123532 0 0 Z " fill="#030303" transform="translate(244.35546875,67.83203125)"/>
<path d="M0 0 C1.3921875 1.2065625 1.3921875 1.2065625 2.8125 2.4375 C17.70081221 15.15211863 32.63519217 25.94043322 50 35 C50.65210449 35.34772461 51.30420898 35.69544922 51.97607422 36.05371094 C68.61826436 44.84435504 87.93410811 51.6622374 106.5 54.8125 C117.0495917 56.69262525 125.60341083 62.82215102 134.53027344 68.45361328 C166.82331519 88.67347552 203.35131846 97.02260308 241 98 C234.72437292 104.27562708 221.11199545 105.81088155 212.6875 107.75 C211.78161133 107.96052002 210.87572266 108.17104004 209.94238281 108.38793945 C198.42674064 110.9501174 187.15547289 111.18641307 175.40698242 111.18530273 C172.83560399 111.18748043 170.2646618 111.20566475 167.69335938 111.22460938 C157.60706378 111.25933256 147.96699734 110.68684053 138 109 C137.17717529 108.86456787 136.35435059 108.72913574 135.5065918 108.58959961 C99.40933725 102.18890888 66.71597085 83.54843429 40.46044922 58.39746094 C39.17158965 57.16418909 37.86994193 55.94431172 36.56640625 54.7265625 C21.17388217 39.96510447 9.14707063 21.15582183 0 2 C0 1.34 0 0.68 0 0 Z " fill="#030303" transform="translate(84,333)"/>
<path d="M0 0 C2.99943905 1.34572887 5.08076179 2.83537101 7.39160156 5.16455078 C8.0319635 5.80506378 8.67232544 6.44557678 9.33209229 7.10549927 C10.01140808 7.79513763 10.69072388 8.484776 11.390625 9.1953125 C12.09523865 9.90275604 12.79985229 10.61019958 13.52581787 11.33908081 C15.7719539 13.59639862 18.01103009 15.86057633 20.25 18.125 C21.77297717 19.65800953 23.29640939 21.19056715 24.8203125 22.72265625 C28.55216815 26.47654573 32.27825351 30.23608914 36 34 C40.8277897 29.9845347 45.3437641 25.77173317 49.73828125 21.2890625 C50.98229394 20.03242542 52.22643824 18.77591862 53.47070312 17.51953125 C55.40594863 15.55930686 57.33953935 13.59749882 59.27075195 11.63330078 C61.15145313 9.72257458 63.03818159 7.81797036 64.92578125 5.9140625 C65.50596512 5.32041901 66.08614899 4.72677551 66.68391418 4.11514282 C68.08510986 2.70514466 69.53810801 1.34696546 71 0 C73 0 73 0 74.62109375 1.609375 C75.22050781 2.31578125 75.81992187 3.0221875 76.4375 3.75 C77.03433594 4.44609375 77.63117188 5.1421875 78.24609375 5.859375 C79.88667532 7.86168836 81.44684965 9.92913287 83 12 C74.42 20.58 65.84 29.16 57 38 C86.7 38.495 86.7 38.495 117 39 C117 44.28 117 49.56 117 55 C97.53 55.33 78.06 55.66 58 56 C66.25 64.25 74.5 72.5 83 81 C81.16614077 84.66771845 79.74937171 86.46834996 76.875 89.25 C76.15054688 89.95640625 75.42609375 90.6628125 74.6796875 91.390625 C74.12539063 91.92171875 73.57109375 92.4528125 73 93 C70.00056095 91.65427113 67.91923821 90.16462899 65.60839844 87.83544922 C64.64785553 86.87467972 64.64785553 86.87467972 63.66790771 85.89450073 C62.98859192 85.20486237 62.30927612 84.515224 61.609375 83.8046875 C60.90476135 83.09724396 60.20014771 82.38980042 59.47418213 81.66091919 C57.2280461 79.40360138 54.98896991 77.13942367 52.75 74.875 C51.22702283 73.34199047 49.70359061 71.80943285 48.1796875 70.27734375 C44.44783185 66.52345427 40.72174649 62.76391086 37 59 C32.16701194 63.02748053 27.61889996 67.24379413 23.19140625 71.7109375 C22.57982162 72.32393707 21.968237 72.93693665 21.33811951 73.56851196 C19.41052441 75.50164564 17.48646072 77.43824987 15.5625 79.375 C14.24567596 80.69684699 12.92862074 82.01846372 11.61132812 83.33984375 C8.40487627 86.55724299 5.20158371 89.77775681 2 93 C-1.41059403 91.39464713 -3.61792484 89.59481099 -6.25 86.875 C-6.95640625 86.15054688 -7.6628125 85.42609375 -8.390625 84.6796875 C-8.92171875 84.12539063 -9.4528125 83.57109375 -10 83 C-8.41570736 79.09154201 -5.73402218 76.47642804 -2.7734375 73.578125 C-1.84450684 72.65418945 -0.91557617 71.73025391 0.04150391 70.77832031 C1.22405762 69.61397461 2.40661133 68.44962891 3.625 67.25 C7.37875 63.5375 11.1325 59.825 15 56 C-14.205 55.505 -14.205 55.505 -44 55 C-44 49.72 -44 44.44 -44 39 C-24.2 38.67 -4.4 38.34 16 38 C7.42 29.42 -1.16 20.84 -10 12 C-8.02 9.69 -6.04 7.38 -4 5 C-2.65807132 3.34024611 -1.32090525 1.67653359 0 0 Z " fill="#FBFBFB" transform="translate(220,210)"/>
<path d="M0 0 C0.66 0 1.32 0 2 0 C2.10320557 0.85489014 2.10320557 0.85489014 2.20849609 1.72705078 C7.84197412 45.87779718 28.1939551 86.6195367 56 121 C56.42941895 121.53383301 56.85883789 122.06766602 57.30126953 122.61767578 C66.40690991 133.83981661 76.68709229 143.89775823 87 154 C85 155 85 155 83.08325195 154.41430664 C82.30941162 154.08293701 81.53557129 153.75156738 80.73828125 153.41015625 C79.44216919 152.85835693 79.44216919 152.85835693 78.11987305 152.29541016 C77.19343994 151.8885498 76.26700684 151.48168945 75.3125 151.0625 C74.36221924 150.64790527 73.41193848 150.23331055 72.43286133 149.80615234 C39.14146569 135.09423719 7.08477782 112.00144189 -6.8203125 77.16015625 C-7.20960937 76.11730469 -7.59890625 75.07445312 -8 74 C-8.32742187 73.13246094 -8.65484375 72.26492188 -8.9921875 71.37109375 C-16.36268399 50.62841076 -12.86150198 26.97666813 -3.75 7.5625 C-2.52169606 5.02980009 -1.26750366 2.51347207 0 0 Z " fill="#040404" transform="translate(80,210)"/>
<path d="M0 0 C1.35764345 4.07293036 -0.06910614 6.67744348 -1.5625 10.5 C-1.97749756 11.57531982 -1.97749756 11.57531982 -2.40087891 12.67236328 C-6.47155806 22.9772079 -11.68698526 31.65423127 -19 40 C-19.59296875 40.68835938 -20.1859375 41.37671875 -20.796875 42.0859375 C-35.31857853 57.78675371 -58.56153437 66.83592642 -79.57324219 68.23852539 C-83.59327999 68.37727956 -87.60297433 68.43877365 -91.625 68.4375 C-92.70432297 68.43910126 -92.70432297 68.43910126 -93.80545044 68.44073486 C-120.19044633 68.3846361 -144.22754501 60.94698881 -168 50 C-168 49.67 -168 49.34 -168 49 C-167.23671387 48.98018066 -166.47342773 48.96036133 -165.68701172 48.93994141 C-135.41473945 48.1185831 -105.01412697 46.60137763 -75.89160156 37.58056641 C-74.17234947 37.05289714 -72.4420738 36.56148633 -70.7109375 36.07421875 C-45.39043035 28.79857935 -20.43492118 16.73681044 0 0 Z " fill="#040404" transform="translate(411,347)"/>
<path d="M0 0 C36.3336157 11.02984762 71.78280942 35.12814008 90.01855469 69.06640625 C96.50040978 81.83662823 99.34867561 93.65420512 99.25 107.9375 C99.26160156 109.61102539 99.26160156 109.61102539 99.2734375 111.31835938 C99.25018103 121.24305766 97.70881388 129.9522873 94.125 139.1875 C93.88136719 139.84685547 93.63773437 140.50621094 93.38671875 141.18554688 C91.65503117 145.7390777 89.50393006 149.82136348 87 154 C86.34 153.67 85.68 153.34 85 153 C84.50097656 151.18432617 84.50097656 151.18432617 84.140625 148.79296875 C83.99786133 147.90649658 83.85509766 147.02002441 83.70800781 146.10668945 C83.55686523 145.14335693 83.40572266 144.18002441 83.25 143.1875 C79.16249798 119.24447032 70.31517306 97.3963538 59 76 C58.48832275 75.02450195 58.48832275 75.02450195 57.96630859 74.02929688 C47.301887 53.8627614 33.39273041 35.47640491 17.7265625 18.9375 C15.86858655 16.97059803 14.11773128 14.95405655 12.41015625 12.85546875 C10.3273051 10.38777963 8.28787438 8.36653406 5.8125 6.3125 C3.31942756 4.23891203 1.82972289 2.74458434 0 0 Z " fill="#040404" transform="translate(345,147)"/>
<path d="M0 0 C1.26231445 0.00491455 2.52462891 0.0098291 3.82519531 0.01489258 C24.5153613 0.27226578 42.86797287 5.22314233 62.125 12.4375 C63.02718262 12.77418701 63.92936523 13.11087402 64.85888672 13.45776367 C66.12949951 13.94660034 66.12949951 13.94660034 67.42578125 14.4453125 C68.55173706 14.87674561 68.55173706 14.87674561 69.70043945 15.31689453 C71.99402845 16.3980708 73.83827422 17.73010316 75.8125 19.3125 C74.90935059 19.3218457 74.00620117 19.33119141 73.07568359 19.34082031 C46.66373358 19.68118909 20.60587941 21.22322233 -5.11425781 27.79150391 C-7.27539495 28.33458759 -9.44421442 28.83079049 -11.6171875 29.32421875 C-39.00925704 35.7635841 -65.52353508 48.25154961 -88.08203125 65.01171875 C-91.1875 67.3125 -91.1875 67.3125 -93.1875 68.3125 C-92.18113049 53.55241391 -82.67422534 39.13895373 -73.1875 28.3125 C-72.59453125 27.62414062 -72.0015625 26.93578125 -71.390625 26.2265625 C-53.12979089 6.48301109 -25.93856094 -0.12102791 0 0 Z " fill="#030303" transform="translate(193.1875,96.6875)"/>
<path d="M0 0 C0.99 0.33 1.98 0.66 3 1 C2.34 1.66 1.68 2.32 1 3 C0.67 2.01 0.34 1.02 0 0 Z M-9 10 C-9 13 -9 13 -9 13 Z " fill="#E8E8E8" transform="translate(220,210)"/>
</svg>
After Width: | Height: | Size: 20 KiB |
1
frontend/public/Logos/metrics.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><defs><linearGradient id="a" x1="0" x2="0" y1="0" y2="1"><stop offset="0%" stop-color="#f59e0b" stop-opacity=".5"/><stop offset="100%" stop-color="#f59e0b" stop-opacity=".05"/></linearGradient></defs><g transform="translate(25 35)"><path stroke="#d1d5db" stroke-linecap="round" stroke-width="2" d="M0 80h100M0 80V0"/><path fill="url(#a)" d="M2 78c18 0 28-28 48-23s30-35 48-40v63z"/><path stroke="#d97706" stroke-linecap="round" stroke-width="5" d="M2 78c18 0 28-28 48-23s30-35 48-40"/><circle cx="50" cy="55" r="4" fill="#f59e0b"/><circle cx="98" cy="15" r="4" fill="#f59e0b"/></g></svg>
After Width: | Height: | Size: 733 B |
1
frontend/public/Logos/pydantic-ai.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>PydanticAI</title><path fill="#e72564" d="M13.223 22.86c-.605.83-1.844.83-2.448 0L5.74 15.944a1.514 1.514 0 0 1 .73-2.322l5.035-1.738c.32-.11.668-.11.988 0l5.035 1.738c.962.332 1.329 1.5.73 2.322zm-1.224-1.259 4.688-6.439-4.688-1.618-4.688 1.618L12 21.602z"/><path fill="#e723a0" d="M23.71 13.463c.604.832.221 2.01-.756 2.328l-8.133 2.652a1.514 1.514 0 0 1-1.983-1.412l-.097-5.326c-.006-.338.101-.67.305-.94l3.209-4.25a1.514 1.514 0 0 1 2.434.022l5.022 6.926zm-1.574.775L17.46 7.79l-2.988 3.958.09 4.959z"/><path fill="#e520e9" d="M18.016.591a1.514 1.514 0 0 1 1.98 1.44l.009 8.554a1.514 1.514 0 0 1-1.956 1.45l-5.095-1.554a1.5 1.5 0 0 1-.8-.58l-3.05-4.366a1.514 1.514 0 0 1 .774-2.308zm.25 1.738L10.69 4.783l2.841 4.065 4.744 1.446-.008-7.965z"/><path fill="#e520e9" d="M5.99.595a1.514 1.514 0 0 0-1.98 1.44L4 10.588a1.514 1.514 0 0 0 1.956 1.45l5.095-1.554c.323-.098.605-.303.799-.58l3.052-4.366a1.514 1.514 0 0 0-.775-2.308zm-.25 1.738 7.577 2.454-2.842 4.065-4.743 1.446.007-7.965z"/><path fill="#e723a0" d="M.29 13.461a1.514 1.514 0 0 0 .756 2.329l8.133 2.651a1.514 1.514 0 0 0 1.983-1.412l.097-5.325a1.5 1.5 0 0 0-.305-.94L7.745 6.513a1.514 1.514 0 0 0-2.434.023L.289 13.461zm1.574.776L6.54 7.788l2.988 3.959-.09 4.958z"/><path fill="#ff96d1" d="m16.942 17.751 1.316-1.806q.178-.248.245-.523l-2.63.858-1.627 2.235a1.5 1.5 0 0 0 .575-.072zm-4.196-5.78.033 1.842 1.742.602-.034-1.843-1.741-.6zm7.257-3.622-1.314-1.812a1.5 1.5 0 0 0-.419-.393l.003 2.767 1.624 2.24q.107-.261.108-.566zm-5.038 2.746-1.762-.537 1.11-1.471 1.762.537zm-2.961-1.41 1.056-1.51-1.056-1.51-1.056 1.51zM9.368 3.509c.145-.122.316-.219.51-.282l2.12-.686 2.13.69c.191.062.36.157.503.276l-2.634.853zm1.433 7.053L9.691 9.09l-1.762.537 1.11 1.47 1.762-.537zm-6.696.584L5.733 8.9l.003-2.763c-.16.1-.305.232-.425.398L4.003 8.339l-.002 2.25q.002.299.104.557m7.149.824-1.741.601-.034 1.843 1.742-.601zM9.75 18.513l-1.628-2.237-2.629-.857q.068.276.247.525l1.313 1.804 2.126.693c.192.062.385.085.571.072"/></svg>
After Width: | Height: | Size: 2.1 KiB |
1
frontend/public/Logos/traces.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><rect width="100" height="14" x="25" y="45" fill="#4f46e5" rx="4"/><rect width="60" height="14" x="40" y="68" fill="#6366f1" rx="4"/><rect width="20" height="14" x="105" y="91" fill="#818cf8" rx="4"/><path stroke="#c7d2fe" stroke-width="2" d="M35 59v16m5 0h-5M100 59v39m5 0h-5"/></svg>
After Width: | Height: | Size: 431 B |
50
frontend/scripts/update-registry.js
Normal file
@@ -0,0 +1,50 @@
/* eslint-disable @typescript-eslint/no-var-requires, import/no-dynamic-require, simple-import-sort/imports, simple-import-sort/exports */
const fs = require('fs');
const path = require('path');

// 1. Define paths
const packageJsonPath = path.resolve(__dirname, '../package.json');
const registryPath = path.resolve(
	__dirname,
	'../src/auto-import-registry.d.ts',
);

// 2. Read package.json
const packageJson = require(packageJsonPath);

// 3. Combine dependencies and devDependencies
const allDeps = {
	...packageJson.dependencies,
	...packageJson.devDependencies,
};

// 4. Filter for @signozhq packages
const signozPackages = Object.keys(allDeps).filter((dep) =>
	dep.startsWith('@signozhq/'),
);

// 5. Generate file content
const fileContent = `// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------

${signozPackages.map((pkg) => `import '${pkg}';`).join('\n')}
`;

// 6. Write the file
try {
	fs.writeFileSync(registryPath, fileContent);
	console.log(
		`✅ Auto-import registry updated with ${signozPackages.length} @signozhq packages.`,
	);
} catch (err) {
	console.error('❌ Failed to update auto-import registry:', err);
}
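A note on wiring (an assumption, since the corresponding package.json change is not visible in this compare view): a generator like this is normally hooked into a postinstall script, e.g. "postinstall": "node scripts/update-registry.js", so that auto-import-registry.d.ts is regenerated on every 'yarn install' / 'npm install', as the header comment promises.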
@@ -245,6 +245,14 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
history.replace(newLocation);
|
||||
return;
|
||||
}
|
||||
|
||||
// if the current route is public dashboard then don't redirect to login
|
||||
const isPublicDashboard = currentRoute?.path === ROUTES.PUBLIC_DASHBOARD;
|
||||
|
||||
if (isPublicDashboard) {
|
||||
return;
|
||||
}
|
||||
|
||||
// if the current route
|
||||
if (currentRoute) {
|
||||
const { isPrivate, key } = currentRoute;
|
||||
|
||||
@@ -4,14 +4,15 @@ import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import AppLoading from 'components/AppLoading/AppLoading';
|
||||
import KBarCommandPalette from 'components/KBarCommandPalette/KBarCommandPalette';
|
||||
import { CmdKPalette } from 'components/cmdKPalette/cmdKPalette';
|
||||
import NotFound from 'components/NotFound';
|
||||
import Spinner from 'components/Spinner';
|
||||
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import ROUTES from 'constants/routes';
|
||||
import AppLayout from 'container/AppLayout';
|
||||
import Hex from 'crypto-js/enc-hex';
|
||||
import HmacSHA256 from 'crypto-js/hmac-sha256';
|
||||
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
|
||||
import { useThemeConfig } from 'hooks/useDarkMode';
|
||||
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
|
||||
@@ -21,19 +22,17 @@ import { StatusCodes } from 'http-status-codes';
|
||||
import history from 'lib/history';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import posthog from 'posthog-js';
|
||||
import AlertRuleProvider from 'providers/Alert';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { IUser } from 'providers/App/types';
|
||||
import { CmdKProvider } from 'providers/cmdKProvider';
|
||||
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
|
||||
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
|
||||
import { KBarCommandPaletteProvider } from 'providers/KBarCommandPaletteProvider';
|
||||
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
||||
import { QueryBuilderProvider } from 'providers/QueryBuilder';
|
||||
import { Suspense, useCallback, useEffect, useState } from 'react';
|
||||
import { Route, Router, Switch } from 'react-router-dom';
|
||||
import { CompatRouter } from 'react-router-dom-v5-compat';
|
||||
import { LicenseStatus } from 'types/api/licensesV3/getActive';
|
||||
import { Userpilot } from 'userpilot';
|
||||
import { extractDomain } from 'utils/app';
|
||||
|
||||
import { Home } from './pageComponents';
|
||||
@@ -84,9 +83,9 @@ function App(): JSX.Element {
|
||||
email,
|
||||
name: displayName,
|
||||
company_name: orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
deployment_name: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
deployment_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
role,
|
||||
@@ -94,9 +93,9 @@ function App(): JSX.Element {
|
||||
|
||||
const groupTraits = {
|
||||
name: orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
deployment_name: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
deployment_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
};
|
||||
@@ -111,37 +110,23 @@ function App(): JSX.Element {
|
||||
if (window && window.Appcues) {
|
||||
window.Appcues.identify(id, {
|
||||
name: displayName,
|
||||
|
||||
tenant_id: hostNameParts[0],
|
||||
deployment_name: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
deployment_url: hostname,
|
||||
company_domain: domain,
|
||||
|
||||
companyName: orgName,
|
||||
email,
|
||||
paidUser: !!trialInfo?.trialConvertedToSubscription,
|
||||
});
|
||||
}
|
||||
|
||||
Userpilot.identify(email, {
|
||||
email,
|
||||
name: displayName,
|
||||
orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
|
||||
});
|
||||
|
||||
posthog?.identify(id, {
|
||||
email,
|
||||
name: displayName,
|
||||
orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
deployment_name: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
deployment_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
|
||||
@@ -149,9 +134,9 @@ function App(): JSX.Element {
|
||||
|
||||
posthog?.group('company', orgId, {
|
||||
name: orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
deployment_name: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
deployment_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
|
||||
@@ -228,7 +213,10 @@ function App(): JSX.Element {
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (pathname === ROUTES.ONBOARDING) {
|
||||
if (
|
||||
pathname === ROUTES.ONBOARDING ||
|
||||
pathname.startsWith('/public/dashboard/')
|
||||
) {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
window.Pylon('hideChatBubble');
|
||||
@@ -270,11 +258,20 @@ function App(): JSX.Element {
|
||||
!showAddCreditCardModal &&
|
||||
(isCloudUser || isEnterpriseSelfHostedUser)
|
||||
) {
|
||||
const email = user.email || '';
|
||||
const secret = process.env.PYLON_IDENTITY_SECRET || '';
|
||||
let emailHash = '';
|
||||
|
||||
if (email && secret) {
|
||||
emailHash = HmacSHA256(email, Hex.parse(secret)).toString(Hex);
|
||||
}
|
||||
|
||||
window.pylon = {
|
||||
chat_settings: {
|
||||
app_id: process.env.PYLON_APP_ID,
|
||||
email: user.email,
|
||||
name: user.displayName,
|
||||
name: user.displayName || user.email,
|
||||
email_hash: emailHash,
|
||||
},
|
||||
};
|
||||
}
|
||||
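The email_hash computed above is what Pylon's verified-identity feature expects: an HMAC-SHA256 of the user's email, keyed with the hex-decoded PYLON_IDENTITY_SECRET (that is what Hex.parse implies). A minimal server-side sketch of the same digest using Node's built-in crypto, assuming the secret really is a hex string:

import { createHmac } from 'crypto';

// Mirrors HmacSHA256(email, Hex.parse(secret)).toString(Hex) from the snippet above.
function pylonEmailHash(email: string, hexSecret: string): string {
	return createHmac('sha256', Buffer.from(hexSecret, 'hex'))
		.update(email)
		.digest('hex');
}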
@@ -308,10 +305,6 @@ function App(): JSX.Element {
|
||||
});
|
||||
}
|
||||
|
||||
if (process.env.USERPILOT_KEY) {
|
||||
Userpilot.initialize(process.env.USERPILOT_KEY);
|
||||
}
|
||||
|
||||
if (!isSentryInitialized) {
|
||||
Sentry.init({
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
@@ -371,36 +364,33 @@ function App(): JSX.Element {
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
<CompatRouter>
|
||||
<KBarCommandPaletteProvider>
|
||||
<UserpilotRouteTracker />
|
||||
<KBarCommandPalette />
|
||||
<CmdKProvider>
|
||||
<NotificationProvider>
|
||||
<ErrorModalProvider>
|
||||
{isLoggedInState && <CmdKPalette userRole={user.role} />}
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<PreferenceContextProvider>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<Route exact path="/" component={Home} />
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</PreferenceContextProvider>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<PreferenceContextProvider>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<Route exact path="/" component={Home} />
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</PreferenceContextProvider>
|
||||
</AppLayout>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
@@ -408,7 +398,7 @@ function App(): JSX.Element {
|
||||
</PrivateRoute>
|
||||
</ErrorModalProvider>
|
||||
</NotificationProvider>
|
||||
</KBarCommandPaletteProvider>
|
||||
</CmdKProvider>
|
||||
</CompatRouter>
|
||||
</Router>
|
||||
</ConfigProvider>
|
||||
|
||||
@@ -295,3 +295,10 @@ export const MetricsExplorer = Loadable(
|
||||
export const ApiMonitoring = Loadable(
|
||||
() => import(/* webpackChunkName: "ApiMonitoring" */ 'pages/ApiMonitoring'),
|
||||
);
|
||||
|
||||
export const PublicDashboardPage = Loadable(
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "Public Dashboard Page" */ 'pages/PublicDashboard'
|
||||
),
|
||||
);
|
||||
|
||||
@@ -34,6 +34,7 @@ import {
|
||||
OrgOnboarding,
|
||||
PasswordReset,
|
||||
PipelinePage,
|
||||
PublicDashboardPage,
|
||||
ServiceMapPage,
|
||||
ServiceMetricsPage,
|
||||
ServicesTablePage,
|
||||
@@ -169,6 +170,13 @@ const routes: AppRoutes[] = [
|
||||
isPrivate: true,
|
||||
key: 'DASHBOARD',
|
||||
},
|
||||
{
|
||||
path: ROUTES.PUBLIC_DASHBOARD,
|
||||
exact: false,
|
||||
component: PublicDashboardPage,
|
||||
isPrivate: false,
|
||||
key: 'PUBLIC_DASHBOARD',
|
||||
},
|
||||
{
|
||||
path: ROUTES.DASHBOARD_WIDGET,
|
||||
exact: true,
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import { ApiBaseInstance as axios } from 'api';
|
||||
import { LogEventAxiosInstance as axios } from 'api';
|
||||
import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { EventSuccessPayloadProps } from 'types/api/events/types';
|
||||
|
||||
@@ -11,9 +13,14 @@ const logEvent = async (
|
||||
rateLimited?: boolean,
|
||||
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
// add tenant_url to attributes
|
||||
// add deployment_url and user_email to attributes
|
||||
const { hostname } = window.location;
|
||||
const updatedAttributes = { ...attributes, tenant_url: hostname };
|
||||
const userEmail = getLocalStorageApi(LOCALSTORAGE.LOGGED_IN_USER_EMAIL);
|
||||
const updatedAttributes = {
|
||||
...attributes,
|
||||
deployment_url: hostname,
|
||||
user_email: userEmail,
|
||||
};
|
||||
const response = await axios.post('/event', {
|
||||
eventName,
|
||||
attributes: updatedAttributes,
|
||||
|
||||
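With this change, callers of logEvent keep passing only event-specific attributes; deployment_url and user_email are now appended inside the helper itself. A hedged usage sketch (the event name and attribute below are placeholders, not taken from this diff):

import logEvent from 'api/common/logEvent';

// deployment_url and user_email are filled in automatically by logEvent.
logEvent('Dashboard: public link created', { dashboardId: 'example-id' });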
28
frontend/src/api/dashboard/public/createPublicDashboard.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { CreatePublicDashboardProps } from 'types/api/dashboard/public/create';
|
||||
|
||||
const createPublicDashboard = async (
|
||||
props: CreatePublicDashboardProps,
|
||||
): Promise<SuccessResponseV2<CreatePublicDashboardProps>> => {
|
||||
|
||||
const { dashboardId, timeRangeEnabled = false, defaultTimeRange = '30m' } = props;
|
||||
|
||||
try {
|
||||
const response = await axios.post(
|
||||
`/dashboards/${dashboardId}/public`,
|
||||
{ timeRangeEnabled, defaultTimeRange },
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default createPublicDashboard;
|
||||
20
frontend/src/api/dashboard/public/getPublicDashboardData.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { GetPublicDashboardDataProps, PayloadProps, PublicDashboardDataProps } from 'types/api/dashboard/public/get';
|
||||
|
||||
const getPublicDashboardData = async (props: GetPublicDashboardDataProps): Promise<SuccessResponseV2<PublicDashboardDataProps>> => {
|
||||
try {
|
||||
const response = await axios.get<PayloadProps>(`/public/dashboards/${props.id}`);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default getPublicDashboardData;
|
||||
20
frontend/src/api/dashboard/public/getPublicDashboardMeta.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { GetPublicDashboardMetaProps, PayloadProps, PublicDashboardMetaProps } from 'types/api/dashboard/public/getMeta';
|
||||
|
||||
const getPublicDashboardMeta = async (props: GetPublicDashboardMetaProps): Promise<SuccessResponseV2<PublicDashboardMetaProps>> => {
|
||||
try {
|
||||
const response = await axios.get<PayloadProps>(`/dashboards/${props.id}/public`);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default getPublicDashboardMeta;
|
||||
@@ -0,0 +1,27 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { MetricRangePayloadV5 } from 'api/v5/v5';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { GetPublicDashboardWidgetDataProps } from 'types/api/dashboard/public/getWidgetData';
|
||||
|
||||
|
||||
const getPublicDashboardWidgetData = async (props: GetPublicDashboardWidgetDataProps): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
|
||||
try {
|
||||
const response = await axios.get(`/public/dashboards/${props.id}/widgets/${props.index}/query_range`, {
|
||||
params: {
|
||||
startTime: props.startTime,
|
||||
endTime: props.endTime,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default getPublicDashboardWidgetData;
|
||||
@@ -0,0 +1,22 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { PayloadProps, RevokePublicDashboardAccessProps } from 'types/api/dashboard/public/delete';
|
||||
|
||||
const revokePublicDashboardAccess = async (
|
||||
props: RevokePublicDashboardAccessProps,
|
||||
): Promise<SuccessResponseV2<PayloadProps>> => {
|
||||
try {
|
||||
const response = await axios.delete<PayloadProps>(`/dashboards/${props.id}/public`);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default revokePublicDashboardAccess;
|
||||
28
frontend/src/api/dashboard/public/updatePublicDashboard.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { UpdatePublicDashboardProps } from 'types/api/dashboard/public/update';
|
||||
|
||||
const updatePublicDashboard = async (
|
||||
props: UpdatePublicDashboardProps,
|
||||
): Promise<SuccessResponseV2<UpdatePublicDashboardProps>> => {
|
||||
|
||||
const { dashboardId, timeRangeEnabled = false, defaultTimeRange = '30m' } = props;
|
||||
|
||||
try {
|
||||
const response = await axios.put(
|
||||
`/dashboards/${dashboardId}/public`,
|
||||
{ timeRangeEnabled, defaultTimeRange },
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default updatePublicDashboard;
|
||||
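Taken together, the new modules under frontend/src/api/dashboard/public cover the whole lifecycle: create, read metadata, fetch dashboard and per-widget data, update, and revoke. A hedged sketch of how a share flow could string them together (the meta.data.id field and the /public/dashboard/ URL shape are assumptions, the latter suggested only by the route added in AppRoutes above):

import createPublicDashboard from 'api/dashboard/public/createPublicDashboard';
import getPublicDashboardMeta from 'api/dashboard/public/getPublicDashboardMeta';
import revokePublicDashboardAccess from 'api/dashboard/public/revokePublicDashboardAccess';

async function enablePublicSharing(dashboardId: string): Promise<string> {
	// Enable public sharing with an explicit 30m default time range.
	await createPublicDashboard({ dashboardId, timeRangeEnabled: true, defaultTimeRange: '30m' });
	// Read back the public metadata; the exact response shape is assumed here.
	const meta = await getPublicDashboardMeta({ id: dashboardId });
	return `${window.location.origin}/public/dashboard/${meta.data.id}`;
}

async function disablePublicSharing(dashboardId: string): Promise<void> {
	await revokePublicDashboardAccess({ id: dashboardId });
}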
@@ -1,13 +1,11 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
|
||||
import { getFieldKeys } from '../getFieldKeys';
|
||||
|
||||
// Mock the API instance
|
||||
jest.mock('api', () => ({
|
||||
ApiBaseInstance: {
|
||||
get: jest.fn(),
|
||||
},
|
||||
get: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('getFieldKeys API', () => {
|
||||
@@ -31,33 +29,33 @@ describe('getFieldKeys API', () => {
|
||||
|
||||
it('should call API with correct parameters when no args provided', async () => {
|
||||
// Mock successful API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
|
||||
// Call function with no parameters
|
||||
await getFieldKeys();
|
||||
|
||||
// Verify API was called correctly with empty params object
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
params: {},
|
||||
});
|
||||
});
|
||||
|
||||
it('should call API with signal parameter when provided', async () => {
|
||||
// Mock successful API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
|
||||
// Call function with signal parameter
|
||||
await getFieldKeys('traces');
|
||||
|
||||
// Verify API was called with signal parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
params: { signal: 'traces' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call API with name parameter when provided', async () => {
|
||||
// Mock successful API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -72,14 +70,14 @@ describe('getFieldKeys API', () => {
|
||||
await getFieldKeys(undefined, 'service');
|
||||
|
||||
// Verify API was called with name parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
params: { name: 'service' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call API with both signal and name when provided', async () => {
|
||||
// Mock successful API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -94,14 +92,14 @@ describe('getFieldKeys API', () => {
|
||||
await getFieldKeys('logs', 'service');
|
||||
|
||||
// Verify API was called with both parameters
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
params: { signal: 'logs', name: 'service' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should return properly formatted response', async () => {
|
||||
// Mock API to return our response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
|
||||
// Call the function
|
||||
const result = await getFieldKeys('traces');
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
|
||||
import { getFieldValues } from '../getFieldValues';
|
||||
|
||||
// Mock the API instance
|
||||
jest.mock('api', () => ({
|
||||
ApiBaseInstance: {
|
||||
get: jest.fn(),
|
||||
},
|
||||
get: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('getFieldValues API', () => {
|
||||
@@ -17,7 +15,7 @@ describe('getFieldValues API', () => {
|
||||
|
||||
it('should call the API with correct parameters (no options)', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -34,14 +32,14 @@ describe('getFieldValues API', () => {
|
||||
await getFieldValues();
|
||||
|
||||
// Verify API was called correctly with empty params
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: {},
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with signal parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -58,14 +56,14 @@ describe('getFieldValues API', () => {
|
||||
await getFieldValues('traces');
|
||||
|
||||
// Verify API was called with signal parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { signal: 'traces' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with name parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -82,14 +80,14 @@ describe('getFieldValues API', () => {
|
||||
await getFieldValues(undefined, 'service.name');
|
||||
|
||||
// Verify API was called with name parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { name: 'service.name' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with value parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -106,14 +104,14 @@ describe('getFieldValues API', () => {
|
||||
await getFieldValues(undefined, 'service.name', 'front');
|
||||
|
||||
// Verify API was called with value parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { name: 'service.name', searchText: 'front' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with time range parameters', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -138,7 +136,7 @@ describe('getFieldValues API', () => {
|
||||
);
|
||||
|
||||
// Verify API was called with time range parameters (converted to milliseconds)
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: {
|
||||
signal: 'logs',
|
||||
name: 'service.name',
|
||||
@@ -165,7 +163,7 @@ describe('getFieldValues API', () => {
|
||||
},
|
||||
};
|
||||
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce(mockResponse);
|
||||
|
||||
// Call the function
|
||||
const result = await getFieldValues('traces', 'mixed.values');
|
||||
@@ -196,7 +194,7 @@ describe('getFieldValues API', () => {
|
||||
};
|
||||
|
||||
// Mock API to return our response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
|
||||
|
||||
// Call the function
|
||||
const result = await getFieldValues('traces', 'service.name');
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
@@ -24,7 +24,7 @@ export const getFieldKeys = async (
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await ApiBaseInstance.get('/fields/keys', { params });
|
||||
const response = await axios.get('/fields/keys', { params });
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
@@ -47,7 +47,7 @@ export const getFieldValues = async (
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await ApiBaseInstance.get('/fields/values', { params });
|
||||
const response = await axios.get('/fields/values', { params });
|
||||
|
||||
// Normalize values from different types (stringValues, boolValues, etc.)
|
||||
if (response.data?.data?.values) {
|
||||
|
||||
@@ -86,8 +86,9 @@ const interceptorRejected = async (
|
||||
|
||||
if (
|
||||
response.status === 401 &&
|
||||
// if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
|
||||
// if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
|
||||
response.config.url !== '/sessions/rotate' &&
|
||||
response.config.url !== '/sessions/email_password' &&
|
||||
!(
|
||||
response.config.url === '/sessions' && response.config.method === 'delete'
|
||||
)
|
||||
@@ -199,15 +200,15 @@ ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
|
||||
//
|
||||
|
||||
// axios Base
|
||||
export const ApiBaseInstance = axios.create({
|
||||
export const LogEventAxiosInstance = axios.create({
|
||||
baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
|
||||
});
|
||||
|
||||
ApiBaseInstance.interceptors.response.use(
|
||||
LogEventAxiosInstance.interceptors.response.use(
|
||||
interceptorsResponse,
|
||||
interceptorRejectedBase,
|
||||
);
|
||||
ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
|
||||
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
|
||||
//
|
||||
|
||||
// gateway Api V1
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError, AxiosResponse } from 'axios';
|
||||
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
|
||||
@@ -17,7 +17,7 @@ export const getHostAttributeKeys = async (
|
||||
try {
|
||||
const response: AxiosResponse<{
|
||||
data: IQueryAutocompleteResponse;
|
||||
}> = await ApiBaseInstance.get(
|
||||
}> = await axios.get(
|
||||
`/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
|
||||
{
|
||||
params: {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
@@ -20,7 +20,7 @@ const getOnboardingStatus = async (props: {
|
||||
}): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
|
||||
const { endpointService, ...rest } = props;
|
||||
try {
|
||||
const response = await ApiBaseInstance.post(
|
||||
const response = await axios.post(
|
||||
`/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
|
||||
rest,
|
||||
);
|
||||
|
||||
@@ -1,13 +1,20 @@
|
||||
import axios from 'api';
|
||||
import { ApiV2Instance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/metrics/getService';
|
||||
|
||||
const getService = async (props: Props): Promise<PayloadProps> => {
|
||||
const response = await axios.post(`/services`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
return response.data;
|
||||
try {
|
||||
const response = await ApiV2Instance.post(`/services`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default getService;
|
||||
|
||||
@@ -1,22 +1,27 @@
|
||||
import axios from 'api';
|
||||
import { ApiV2Instance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';
|
||||
|
||||
const getTopOperations = async (props: Props): Promise<PayloadProps> => {
|
||||
const endpoint = props.isEntryPoint
|
||||
? '/service/entry_point_operations'
|
||||
: '/service/top_operations';
|
||||
try {
|
||||
const endpoint = props.isEntryPoint
|
||||
? '/service/entry_point_operations'
|
||||
: '/service/top_operations';
|
||||
|
||||
const response = await axios.post(endpoint, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
service: props.service,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
const response = await ApiV2Instance.post(endpoint, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
service: props.service,
|
||||
tags: props.selectedTags,
|
||||
limit: 5000,
|
||||
});
|
||||
|
||||
if (props.isEntryPoint) {
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
return response.data;
|
||||
};
|
||||
|
||||
export default getTopOperations;
|
||||
|
||||
@@ -9,6 +9,7 @@ export interface UpdateMetricMetadataProps {
|
||||
metricType: MetricType;
|
||||
temporality?: Temporality;
|
||||
isMonotonic?: boolean;
|
||||
unit?: string;
|
||||
}
|
||||
|
||||
export interface UpdateMetricMetadataResponse {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
@@ -9,7 +9,7 @@ const getCustomFilters = async (
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
const { signal } = props;
|
||||
try {
|
||||
const response = await ApiBaseInstance.get(`orgs/me/filters/${signal}`);
|
||||
const response = await axios.get(`/orgs/me/filters/${signal}`);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
import { AxiosError } from 'axios';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
|
||||
@@ -6,7 +6,7 @@ import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFil
|
||||
const updateCustomFiltersAPI = async (
|
||||
props: UpdateCustomFiltersProps,
|
||||
): Promise<SuccessResponse<void> | AxiosError> =>
|
||||
ApiBaseInstance.put(`orgs/me/filters`, {
|
||||
axios.put(`/orgs/me/filters`, {
|
||||
...props.data,
|
||||
});
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ const setRetentionV2 = async ({
|
||||
type,
|
||||
defaultTTLDays,
|
||||
coldStorageVolume,
|
||||
coldStorageDuration,
|
||||
coldStorageDurationDays,
|
||||
ttlConditions,
|
||||
}: PropsV2): Promise<SuccessResponseV2<PayloadPropsV2>> => {
|
||||
try {
|
||||
@@ -16,7 +16,7 @@ const setRetentionV2 = async ({
|
||||
type,
|
||||
defaultTTLDays,
|
||||
coldStorageVolume,
|
||||
coldStorageDuration,
|
||||
coldStorageDurationDays,
|
||||
ttlConditions,
|
||||
});
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
@@ -9,15 +9,12 @@ const listOverview = async (
|
||||
): Promise<SuccessResponseV2<PayloadProps>> => {
|
||||
const { start, end, show_ip: showIp, filter } = props;
|
||||
try {
|
||||
const response = await ApiBaseInstance.post(
|
||||
`/third-party-apis/overview/list`,
|
||||
{
|
||||
start,
|
||||
end,
|
||||
show_ip: showIp,
|
||||
filter,
|
||||
},
|
||||
);
|
||||
const response = await axios.post(`/third-party-apis/overview/list`, {
|
||||
start,
|
||||
end,
|
||||
show_ip: showIp,
|
||||
filter,
|
||||
});
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
|
||||
28
frontend/src/api/trace/getSpanPercentiles.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import {
|
||||
GetSpanPercentilesProps,
|
||||
GetSpanPercentilesResponseDataProps,
|
||||
} from 'types/api/trace/getSpanPercentiles';
|
||||
|
||||
const getSpanPercentiles = async (
|
||||
props: GetSpanPercentilesProps,
|
||||
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
|
||||
try {
|
||||
const response = await axios.post('/span_percentile', {
|
||||
...props,
|
||||
});
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export default getSpanPercentiles;
|
||||
@@ -11,7 +11,7 @@ import {
|
||||
export const getQueryRangeV5 = async (
|
||||
props: QueryRangePayloadV5,
|
||||
version: string,
|
||||
signal: AbortSignal,
|
||||
signal?: AbortSignal,
|
||||
headers?: Record<string, string>,
|
||||
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
|
||||
try {
|
||||
|
||||
@@ -1,30 +1,30 @@
|
||||
interface ConfigureIconProps {
|
||||
width?: number;
|
||||
height?: number;
|
||||
fill?: string;
|
||||
color?: string;
|
||||
}
|
||||
|
||||
function ConfigureIcon({
|
||||
width,
|
||||
height,
|
||||
fill,
|
||||
color,
|
||||
}: ConfigureIconProps): JSX.Element {
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width={width}
|
||||
height={height}
|
||||
fill={fill}
|
||||
fill="none"
|
||||
>
|
||||
<path
|
||||
stroke="#C0C1C3"
|
||||
stroke={color}
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
strokeWidth="1.333"
|
||||
d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z"
|
||||
/>
|
||||
<path
|
||||
stroke="#C0C1C3"
|
||||
stroke={color}
|
||||
strokeLinecap="round"
|
||||
strokeWidth="1.333"
|
||||
d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3m5.417 7.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"
|
||||
@@ -36,6 +36,6 @@ function ConfigureIcon({
|
||||
ConfigureIcon.defaultProps = {
|
||||
width: 16,
|
||||
height: 16,
|
||||
fill: 'none',
|
||||
color: 'currentColor',
|
||||
};
|
||||
export default ConfigureIcon;
|
||||
|
||||
25
frontend/src/auto-import-registry.d.ts
vendored
Normal file
@@ -0,0 +1,25 @@
// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------

import '@signozhq/badge';
import '@signozhq/button';
import '@signozhq/calendar';
import '@signozhq/callout';
import '@signozhq/checkbox';
import '@signozhq/command';
import '@signozhq/design-tokens';
import '@signozhq/input';
import '@signozhq/popover';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/table';
import '@signozhq/tooltip';
@@ -62,6 +62,8 @@ interface CustomTimePickerProps {
|
||||
showLiveLogs?: boolean;
|
||||
onGoLive?: () => void;
|
||||
onExitLiveLogs?: () => void;
|
||||
/** When false, hides the "Recently Used" time ranges section */
|
||||
showRecentlyUsed?: boolean;
|
||||
}
|
||||
|
||||
function CustomTimePicker({
|
||||
@@ -81,6 +83,7 @@ function CustomTimePicker({
|
||||
onGoLive,
|
||||
onExitLiveLogs,
|
||||
showLiveLogs,
|
||||
showRecentlyUsed = true,
|
||||
}: CustomTimePickerProps): JSX.Element {
|
||||
const [
|
||||
selectedTimePlaceholderValue,
|
||||
@@ -395,6 +398,7 @@ function CustomTimePicker({
|
||||
setActiveView={setActiveView}
|
||||
setIsOpenedFromFooter={setIsOpenedFromFooter}
|
||||
isOpenedFromFooter={isOpenedFromFooter}
|
||||
showRecentlyUsed={showRecentlyUsed}
|
||||
/>
|
||||
) : (
|
||||
content
|
||||
@@ -464,4 +468,5 @@ CustomTimePicker.defaultProps = {
|
||||
onCustomTimeStatusUpdate: noop,
|
||||
onExitLiveLogs: noop,
|
||||
showLiveLogs: false,
|
||||
showRecentlyUsed: true,
|
||||
};
|
||||
|
||||
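A hedged usage sketch for the new prop: a caller that wants to suppress the local history section, for example on a public read-only view, passes showRecentlyUsed={false}; existing callers are unaffected because the prop defaults to true.

// Sketch only: otherProps stands in for the picker's existing required props.
<CustomTimePicker showRecentlyUsed={false} {...otherProps} />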
@@ -47,6 +47,7 @@ interface CustomTimePickerPopoverContentProps {
|
||||
isOpenedFromFooter: boolean;
|
||||
setIsOpenedFromFooter: Dispatch<SetStateAction<boolean>>;
|
||||
onExitLiveLogs: () => void;
|
||||
showRecentlyUsed: boolean;
|
||||
}
|
||||
|
||||
interface RecentlyUsedDateTimeRange {
|
||||
@@ -72,6 +73,7 @@ function CustomTimePickerPopoverContent({
|
||||
isOpenedFromFooter,
|
||||
setIsOpenedFromFooter,
|
||||
onExitLiveLogs,
|
||||
showRecentlyUsed = true,
|
||||
}: CustomTimePickerPopoverContentProps): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
|
||||
@@ -224,33 +226,35 @@ function CustomTimePickerPopoverContent({
|
||||
<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
|
||||
</div>
|
||||
|
||||
<div className="recently-used-container">
|
||||
<div className="time-heading">RECENTLY USED</div>
|
||||
<div className="recently-used-range">
|
||||
{recentlyUsedTimeRanges.map((range: RecentlyUsedDateTimeRange) => (
|
||||
<div
|
||||
className="recently-used-range-item"
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
{showRecentlyUsed && (
|
||||
<div className="recently-used-container">
|
||||
<div className="time-heading">RECENTLY USED</div>
|
||||
<div className="recently-used-range">
|
||||
{recentlyUsedTimeRanges.map((range: RecentlyUsedDateTimeRange) => (
|
||||
<div
|
||||
className="recently-used-range-item"
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
handleExitLiveLogs();
|
||||
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
|
||||
setIsOpen(false);
|
||||
}
|
||||
}}
|
||||
key={range.value}
|
||||
onClick={(): void => {
|
||||
handleExitLiveLogs();
|
||||
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
|
||||
setIsOpen(false);
|
||||
}
|
||||
}}
|
||||
key={range.value}
|
||||
onClick={(): void => {
|
||||
handleExitLiveLogs();
|
||||
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
|
||||
setIsOpen(false);
|
||||
}}
|
||||
>
|
||||
{range.label}
|
||||
</div>
|
||||
))}
|
||||
}}
|
||||
>
|
||||
{range.label}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
372
frontend/src/components/Graph/__tests__/yAxisConfig.test.ts
Normal file
@@ -0,0 +1,372 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { PrecisionOptionsEnum } from '../types';
|
||||
import { getYAxisFormattedValue } from '../yAxisConfig';
|
||||
|
||||
const testFullPrecisionGetYAxisFormattedValue = (
|
||||
value: string,
|
||||
format: string,
|
||||
): string => getYAxisFormattedValue(value, format, PrecisionOptionsEnum.FULL);
|
||||
|
||||
describe('getYAxisFormattedValue - none (full precision legacy assertions)', () => {
|
||||
test('large integers and decimals', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('250034', 'none')).toBe(
|
||||
'250034',
|
||||
);
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('250034897.12345', 'none'),
|
||||
).toBe('250034897.12345');
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('250034897.02354', 'none'),
|
||||
).toBe('250034897.02354');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('9999999.9999', 'none')).toBe(
|
||||
'9999999.9999',
|
||||
);
|
||||
});
|
||||
|
||||
test('preserves leading zeros after decimal until first non-zero', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.0000234', 'none')).toBe(
|
||||
'1.0000234',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.00003', 'none')).toBe(
|
||||
'0.00003',
|
||||
);
|
||||
});
|
||||
|
||||
test('trims to three significant decimals and removes trailing zeros', () => {
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'none'),
|
||||
).toBe('0.000000250034');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'none')).toBe(
|
||||
'0.00000025',
|
||||
);
|
||||
|
||||
// Big precision, limiting the javascript precision (~16 digits)
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'none'),
|
||||
).toBe('1');
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'none'),
|
||||
).toBe('1.005555555595958');
|
||||
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
|
||||
'0.000000001',
|
||||
);
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('0.000000250000', 'none'),
|
||||
).toBe('0.00000025');
|
||||
});
|
||||
|
||||
test('whole numbers normalize', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'none')).toBe('1000');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('99.5458', 'none')).toBe(
|
||||
'99.5458',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.234567', 'none')).toBe(
|
||||
'1.234567',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('99.998', 'none')).toBe(
|
||||
'99.998',
|
||||
);
|
||||
});
|
||||
|
||||
test('strip redundant decimal zeros', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1000.000', 'none')).toBe(
|
||||
'1000',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('99.500', 'none')).toBe(
|
||||
'99.5',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.000', 'none')).toBe('1');
|
||||
});
|
||||
|
||||
test('edge values', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0', 'none')).toBe('0');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-0', 'none')).toBe('0');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'none')).toBe('∞');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-Infinity', 'none')).toBe(
|
||||
'-∞',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('invalid', 'none')).toBe(
|
||||
'NaN',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('', 'none')).toBe('NaN');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('abc123', 'none')).toBe('NaN');
|
||||
});
|
||||
|
||||
test('small decimals keep precision as-is', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'none')).toBe(
|
||||
'0.0001',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-0.0001', 'none')).toBe(
|
||||
'-0.0001',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
|
||||
'0.000000001',
|
||||
);
|
||||
});
|
||||
|
||||
test('simple decimals preserved', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.1', 'none')).toBe('0.1');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.2', 'none')).toBe('0.2');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.3', 'none')).toBe('0.3');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.0000000001', 'none')).toBe(
|
||||
'1.0000000001',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getYAxisFormattedValue - units (full precision legacy assertions)', () => {
|
||||
test('ms', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'ms')).toBe('1.5 s');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('500', 'ms')).toBe('500 ms');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('60000', 'ms')).toBe('1 min');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('295.429', 'ms')).toBe(
|
||||
'295.429 ms',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('4353.81', 'ms')).toBe(
|
||||
'4.35381 s',
|
||||
);
|
||||
});
|
||||
|
||||
test('s', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('90', 's')).toBe('1.5 mins');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('30', 's')).toBe('30 s');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('3600', 's')).toBe('1 hour');
|
||||
});
|
||||
|
||||
test('m', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('90', 'm')).toBe('1.5 hours');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('30', 'm')).toBe('30 min');
|
expect(testFullPrecisionGetYAxisFormattedValue('1440', 'm')).toBe('1 day');
});

test('bytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'bytes')).toBe(
'1 KiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'bytes')).toBe('512 B');
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'bytes')).toBe(
'1.5 KiB',
);
});

test('mbytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'mbytes')).toBe(
'1 GiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'mbytes')).toBe(
'512 MiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'mbytes')).toBe(
'1.5 GiB',
);
});

test('kbytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'kbytes')).toBe(
'1 MiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'kbytes')).toBe(
'512 KiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'kbytes')).toBe(
'1.5 MiB',
);
});

test('short', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'short')).toBe('1 K');
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'short')).toBe(
'1.5 K',
);
expect(testFullPrecisionGetYAxisFormattedValue('999', 'short')).toBe('999');

expect(testFullPrecisionGetYAxisFormattedValue('1000000', 'short')).toBe(
'1 Mil',
);
expect(testFullPrecisionGetYAxisFormattedValue('1555600', 'short')).toBe(
'1.5556 Mil',
);
expect(testFullPrecisionGetYAxisFormattedValue('999999', 'short')).toBe(
'999.999 K',
);

expect(testFullPrecisionGetYAxisFormattedValue('1000000000', 'short')).toBe(
'1 Bil',
);
expect(testFullPrecisionGetYAxisFormattedValue('1500000000', 'short')).toBe(
'1.5 Bil',
);
expect(testFullPrecisionGetYAxisFormattedValue('999999999', 'short')).toBe(
'999.999999 Mil',
);
});

test('percent', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.15', 'percent')).toBe(
'0.15%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.1234', 'percent')).toBe(
'0.1234%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.123499', 'percent')).toBe(
'0.123499%',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.5', 'percent')).toBe(
'1.5%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'percent')).toBe(
'0.0001%',
);
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000001', 'percent'),
).toBe('1e-9%');
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'percent'),
).toBe('0.000000250034%');
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'percent')).toBe(
'0.00000025%',
);
// Big precision, limiting the javascript precision (~16 digits)
expect(
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'percent'),
).toBe('1%');
expect(
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'percent'),
).toBe('1.005555555595959%');
});

test('ratio', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.5', 'ratio')).toBe(
'0.5 ratio',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.25', 'ratio')).toBe(
'1.25 ratio',
);
expect(testFullPrecisionGetYAxisFormattedValue('2.0', 'ratio')).toBe(
'2 ratio',
);
});

test('temperature units', () => {
expect(testFullPrecisionGetYAxisFormattedValue('25', 'celsius')).toBe(
'25 °C',
);
expect(testFullPrecisionGetYAxisFormattedValue('0', 'celsius')).toBe('0 °C');
expect(testFullPrecisionGetYAxisFormattedValue('-10', 'celsius')).toBe(
'-10 °C',
);

expect(testFullPrecisionGetYAxisFormattedValue('77', 'fahrenheit')).toBe(
'77 °F',
);
expect(testFullPrecisionGetYAxisFormattedValue('32', 'fahrenheit')).toBe(
'32 °F',
);
expect(testFullPrecisionGetYAxisFormattedValue('14', 'fahrenheit')).toBe(
'14 °F',
);
});

test('ms edge cases', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'ms')).toBe('0 ms');
expect(testFullPrecisionGetYAxisFormattedValue('-1500', 'ms')).toBe('-1.5 s');
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'ms')).toBe('∞');
});

test('bytes edge cases', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'bytes')).toBe('0 B');
expect(testFullPrecisionGetYAxisFormattedValue('-1024', 'bytes')).toBe(
'-1 KiB',
);
});
});

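The assertions above go through a `testFullPrecisionGetYAxisFormattedValue` helper rather than calling the formatter directly. The helper's definition is not shown in this hunk; the sketch below is only an assumption of what it presumably does, inferred from how it is used (import paths are guesses as well):

```ts
// Hypothetical sketch: a thin wrapper that always requests full precision,
// so the tests only have to vary the value and the unit.
import { PrecisionOptionsEnum } from '../types';
import { getYAxisFormattedValue } from '../yAxisConfig';

function testFullPrecisionGetYAxisFormattedValue(
  value: string,
  format: string,
): string {
  // Assumption: forwards to the real formatter with FULL precision.
  return getYAxisFormattedValue(value, format, PrecisionOptionsEnum.FULL);
}
```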
describe('getYAxisFormattedValue - precision option tests', () => {
test('precision 0 drops decimal part', () => {
expect(getYAxisFormattedValue('1.2345', 'none', 0)).toBe('1');
expect(getYAxisFormattedValue('0.9999', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('12345.6789', 'none', 0)).toBe('12345');
expect(getYAxisFormattedValue('0.0000123456', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('1000.000', 'none', 0)).toBe('1000');
expect(getYAxisFormattedValue('0.000000250034', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 0)).toBe('1');

// with unit
expect(getYAxisFormattedValue('4353.81', 'ms', 0)).toBe('4 s');
});
test('precision 1,2,3,4 decimals', () => {
expect(getYAxisFormattedValue('1.2345', 'none', 1)).toBe('1.2');
expect(getYAxisFormattedValue('1.2345', 'none', 2)).toBe('1.23');
expect(getYAxisFormattedValue('1.2345', 'none', 3)).toBe('1.234');
expect(getYAxisFormattedValue('1.2345', 'none', 4)).toBe('1.2345');

expect(getYAxisFormattedValue('0.0000123456', 'none', 1)).toBe('0.00001');
expect(getYAxisFormattedValue('0.0000123456', 'none', 2)).toBe('0.000012');
expect(getYAxisFormattedValue('0.0000123456', 'none', 3)).toBe('0.0000123');
expect(getYAxisFormattedValue('0.0000123456', 'none', 4)).toBe('0.00001234');

expect(getYAxisFormattedValue('1000.000', 'none', 1)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 2)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 3)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 4)).toBe('1000');

expect(getYAxisFormattedValue('0.000000250034', 'none', 1)).toBe('0.0000002');
expect(getYAxisFormattedValue('0.000000250034', 'none', 2)).toBe(
'0.00000025',
); // leading zeros + 2 significant => same trimmed
expect(getYAxisFormattedValue('0.000000250034', 'none', 3)).toBe(
'0.00000025',
);
expect(getYAxisFormattedValue('0.000000250304', 'none', 4)).toBe(
'0.0000002503',
);

expect(getYAxisFormattedValue('1.00555555559595876', 'none', 1)).toBe(
'1.005',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 2)).toBe(
'1.0055',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 3)).toBe(
'1.00555',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 4)).toBe(
'1.005555',
);

// with unit
expect(getYAxisFormattedValue('4353.81', 'ms', 1)).toBe('4.4 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 2)).toBe('4.35 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 3)).toBe('4.354 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 4)).toBe('4.3538 s');

// Percentages
expect(getYAxisFormattedValue('0.123456', 'percent', 2)).toBe('0.12%');
expect(getYAxisFormattedValue('0.123456', 'percent', 4)).toBe('0.1235%'); // approximation
});

test('precision full uses up to DEFAULT_SIGNIFICANT_DIGITS significant digits', () => {
expect(
getYAxisFormattedValue(
'0.00002625429914148441',
'none',
PrecisionOptionsEnum.FULL,
),
).toBe('0.000026254299141');
expect(
getYAxisFormattedValue(
'0.000026254299141484417',
's',
PrecisionOptionsEnum.FULL,
),
).toBe('26.254299141484417 µs');

expect(
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL),
).toBe('4.35381 s');
expect(getYAxisFormattedValue('500', 'ms', PrecisionOptionsEnum.FULL)).toBe(
'500 ms',
);
});
});

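The expectations for very small values (for example, '0.000000250034' yielding '0.00000025' at both precision 2 and 3) follow from the leading-zeros rule implemented further down in `formatDecimalWithLeadingZeros`: precision counts significant digits after the run of leading zeros, not raw decimal places. A rough worked sketch of that arithmetic for one of the cases above, assumed to mirror the `decimalsToKeep` computation shown later in this diff:

```ts
// Worked example for getYAxisFormattedValue('0.000000250034', 'none', 2)
const decimalPart = '000000250034'; // decimal digits of 0.000000250034
const firstNonZeroIndex = decimalPart.search(/[^0]/); // 6 leading zeros
const precision = 2;
const decimalsToKeep = firstNonZeroIndex + precision; // 6 + 2 = 8
decimalPart.substring(0, decimalsToKeep); // '00000025' -> '0.00000025' after trimming
```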
@@ -78,3 +78,18 @@ export interface ITimeRange {
minTime: number | null;
maxTime: number | null;
}

export const DEFAULT_SIGNIFICANT_DIGITS = 15;

// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
export const MAX_DECIMALS = 15;

export enum PrecisionOptionsEnum {
ZERO = 0,
ONE = 1,
TWO = 2,
THREE = 3,
FOUR = 4,
FULL = 'full',
}
export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;

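For reference, these precision values map directly onto the third argument of `getYAxisFormattedValue`. A short illustrative sketch (call sites are hypothetical and import paths assumed; the expected strings are taken from the tests above):

```ts
import { PrecisionOptionsEnum } from './types';
import { getYAxisFormattedValue } from './yAxisConfig';

// Numeric precisions cap the significant decimal digits after leading zeros...
getYAxisFormattedValue('4353.81', 'ms', 2); // '4.35 s'
// ...while FULL keeps up to DEFAULT_SIGNIFICANT_DIGITS of them.
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL); // '4.35381 s'
```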
@@ -16,8 +16,12 @@ import {
} from './Plugin/IntersectionCursor';
import {
CustomChartOptions,
DEFAULT_SIGNIFICANT_DIGITS,
GraphOnClickHandler,
IAxisTimeConfig,
MAX_DECIMALS,
PrecisionOption,
PrecisionOptionsEnum,
StaticLineProps,
} from './types';
import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';
@@ -149,6 +153,7 @@ export const getGraphOptions = (
scales: {
x: {
stacked: isStacked,
offset: false,
grid: {
display: true,
color: getGridColor(),
@@ -241,3 +246,68 @@ declare module 'chart.js' {
custom: TooltipPositionerFunction<ChartType>;
}
}

/**
* Formats a number for display, preserving leading zeros after the decimal point
* and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
* It avoids scientific notation and removes unnecessary trailing zeros.
*
* @example
* formatDecimalWithLeadingZeros(1.2345, PrecisionOptionsEnum.FULL); // "1.2345"
* formatDecimalWithLeadingZeros(0.0012345, PrecisionOptionsEnum.FULL); // "0.0012345"
* formatDecimalWithLeadingZeros(5.0, PrecisionOptionsEnum.FULL); // "5"
*
* @param value The number to format.
* @param precision Decimal precision (0–4) or PrecisionOptionsEnum.FULL.
* @returns The formatted string.
*/
export const formatDecimalWithLeadingZeros = (
value: number,
precision: PrecisionOption,
): string => {
if (value === 0) {
return '0';
}

// Use toLocaleString to get a full decimal representation without scientific notation.
const numStr = value.toLocaleString('en-US', {
useGrouping: false,
maximumFractionDigits: 20,
});

const [integerPart, decimalPart = ''] = numStr.split('.');

// If there's no decimal part, the integer part is the result.
if (!decimalPart) {
return integerPart;
}

// Find the index of the first non-zero digit in the decimal part.
const firstNonZeroIndex = decimalPart.search(/[^0]/);

// If the decimal part consists only of zeros, return just the integer part.
if (firstNonZeroIndex === -1) {
return integerPart;
}

// Determine the number of decimals to keep: leading zeros + up to N significant digits.
const significantDigits =
precision === PrecisionOptionsEnum.FULL
? DEFAULT_SIGNIFICANT_DIGITS
: precision;
const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);

// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);

// If precision is 0, we drop the decimal part entirely.
if (precision === 0) {
return integerPart;
}

// Remove any trailing zeros from the result to keep it clean.
const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');

// Return the integer part, or the integer and decimal parts combined.
return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
};

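A quick usage sketch of the helper above, with outputs derived from its own logic (the import paths are assumptions for the sake of a self-contained example):

```ts
import { PrecisionOptionsEnum } from './types';
import { formatDecimalWithLeadingZeros } from './utils';

formatDecimalWithLeadingZeros(0.000000250034, 2); // '0.00000025' — 6 leading zeros + 2 significant digits
formatDecimalWithLeadingZeros(0.000000250034, PrecisionOptionsEnum.FULL); // '0.000000250034'
formatDecimalWithLeadingZeros(1000.0, 4); // '1000' — no decimal part survives trimming
```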
@@ -1,58 +1,97 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { formattedValueToString, getValueFormat } from '@grafana/data';
import * as Sentry from '@sentry/react';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { isUniversalUnit } from 'components/YAxisUnitSelector/utils';
import { isNaN } from 'lodash-es';

import { formatUniversalUnit } from '../YAxisUnitSelector/formatter';
import {
DEFAULT_SIGNIFICANT_DIGITS,
PrecisionOption,
PrecisionOptionsEnum,
} from './types';
import { formatDecimalWithLeadingZeros } from './utils';

/**
* Formats a Y-axis value based on a given format string.
*
* @param value The string value from the axis.
* @param format The format identifier (e.g. 'none', 'ms', 'bytes', 'short').
* @param precision Decimal precision (0–4) or PrecisionOptionsEnum.FULL; defaults to 2.
* @returns A formatted string ready for display.
*/
export const getYAxisFormattedValue = (
value: string,
format: string,
precision: PrecisionOption = 2, // default precision requested
): string => {
let decimalPrecision: number | undefined;
const parsedValue = getValueFormat(format)(
parseFloat(value),
undefined,
undefined,
undefined,
);
const numValue = parseFloat(value);

// Handle non-numeric or special values first.
if (isNaN(numValue)) return 'NaN';
if (numValue === Infinity) return '∞';
if (numValue === -Infinity) return '-∞';

// For all other standard formats, delegate to grafana/data's built-in formatter.
const computeDecimals = (): number | undefined => {
if (precision === PrecisionOptionsEnum.FULL) {
return DEFAULT_SIGNIFICANT_DIGITS;
}
return precision;
};

const fallbackFormat = (): string => {
if (precision === PrecisionOptionsEnum.FULL) return numValue.toString();
if (precision === 0) return Math.round(numValue).toString();
return precision !== undefined
? numValue
.toFixed(precision)
.replace(/(\.[0-9]*[1-9])0+$/, '$1') // trimming zeros
.replace(/\.$/, '')
: numValue.toString();
};

try {
const decimalSplitted = parsedValue.text.split('.');
if (decimalSplitted.length === 1) {
decimalPrecision = 0;
} else {
const decimalDigits = decimalSplitted[1].split('');
decimalPrecision = decimalDigits.length;
let nonZeroCtr = 0;
for (let idx = 0; idx < decimalDigits.length; idx += 1) {
if (decimalDigits[idx] !== '0') {
nonZeroCtr += 1;
if (nonZeroCtr >= 2) {
decimalPrecision = idx + 1;
}
} else if (nonZeroCtr) {
decimalPrecision = idx;
break;
}
}
// Use custom formatter for the 'none' format honoring precision
if (format === 'none') {
return formatDecimalWithLeadingZeros(numValue, precision);
}

return formattedValueToString(
getValueFormat(format)(
parseFloat(value),
decimalPrecision,
undefined,
undefined,
),
);
// Separate logic for universal units
if (format && isUniversalUnit(format)) {
const decimals = computeDecimals();
return formatUniversalUnit(
numValue,
format as UniversalYAxisUnit,
precision,
decimals,
);
}

const formatter = getValueFormat(format);
const formattedValue = formatter(numValue, computeDecimals(), undefined);
if (formattedValue.text && formattedValue.text.includes('.')) {
formattedValue.text = formatDecimalWithLeadingZeros(
parseFloat(formattedValue.text),
precision,
);
}

return formattedValueToString(formattedValue);
} catch (error) {
console.error(error);
Sentry.captureEvent({
message: `Error applying formatter: ${
error instanceof Error ? error.message : 'Unknown error'
}`,
level: 'error',
});
return fallbackFormat();
}
return `${parseFloat(value)}`;
};

export const getToolTipValue = (value: string, format?: string): string => {
try {
return formattedValueToString(
getValueFormat(format)(parseFloat(value), undefined, undefined, undefined),
);
} catch (error) {
console.error(error);
}
return `${value}`;
};
export const getToolTipValue = (
value: string | number,
format?: string,
precision?: PrecisionOption,
): string =>
getYAxisFormattedValue(value?.toString(), format || 'none', precision);

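Taken together, the new `getToolTipValue` signature means tooltips and axis ticks share one formatting path. An illustrative sketch (call sites are hypothetical; expected strings come from the tests earlier in this diff):

```ts
import { PrecisionOptionsEnum } from './types';
import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';

// Axis tick: default precision of 2 decimal digits.
getYAxisFormattedValue('0.123456', 'percent'); // '0.12%'

// Tooltip: numbers are accepted and forwarded as strings; 'none' is the fallback format.
getToolTipValue(1536, 'bytes', PrecisionOptionsEnum.FULL); // '1.5 KiB'
```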
@@ -60,6 +60,14 @@ function Metrics({
setElement,
} = useMultiIntersectionObserver(hostWidgetInfo.length, { threshold: 0.1 });

const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});

const queryPayloads = useMemo(
() =>
getHostQueryPayload(
@@ -147,6 +155,13 @@ function Metrics({
maxTimeScale: graphTimeIntervals[idx].end,
onDragSelect: (start, end) => onDragSelect(start, end, idx),
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
),
[

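The ref-plus-callback pair above keeps the legend's scroll offset across widget re-renders without itself triggering extra renders (a ref mutation does not re-render the host component). A minimal sketch of how a consuming chart panel might read and write it; the prop names are taken from the payload above, but the component itself is hypothetical:

```tsx
import React from 'react';

// Hypothetical consumer: restore the saved offset on mount, report it back on scroll.
function LegendContainer({
  legendScrollPosition,
  setLegendScrollPosition,
}: {
  legendScrollPosition: { scrollTop: number; scrollLeft: number };
  setLegendScrollPosition: (p: { scrollTop: number; scrollLeft: number }) => void;
}): JSX.Element {
  return (
    <div
      ref={(el): void => {
        if (el) {
          el.scrollTop = legendScrollPosition.scrollTop;
          el.scrollLeft = legendScrollPosition.scrollLeft;
        }
      }}
      onScroll={(e): void =>
        setLegendScrollPosition({
          scrollTop: e.currentTarget.scrollTop,
          scrollLeft: e.currentTarget.scrollLeft,
        })
      }
    >
      {/* legend items */}
    </div>
  );
}
```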
@@ -37,7 +37,6 @@

border-radius: 2px 0px 0px 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);

border-right: none;
border-left: none;
@@ -45,6 +44,12 @@
border-bottom-right-radius: 0px;
border-top-left-radius: 0px;
border-bottom-left-radius: 0px;
font-size: 12px !important;
line-height: 27px;
&::placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}

.close-btn {

@@ -1,152 +0,0 @@
.kbar-command-palette__positioner {
position: fixed;
inset: 0;
display: flex;
align-items: flex-start;
justify-content: center;
padding: 1rem;
background: rgba(0, 0, 0, 0.5);
backdrop-filter: blur(6px);
z-index: 50;
}

.kbar-command-palette__animator {
width: 100%;
max-width: 600px;
}

.kbar-command-palette__card {
background: var(--bg-ink-500);
color: var(--text-vanilla-100);
border-radius: 3px;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.15);
overflow: hidden;
display: flex;
flex-direction: column;
}

.kbar-command-palette__search {
padding: 12px 16px;
font-size: 13px;
border: none;
border-bottom: 1px solid var(--border-ink-200);
color: var(--text-vanilla-100);
outline: none;
background-color: var(--bg-ink-500);
}

.kbar-command-palette__section {
padding: 8px 16px 4px;
font-size: 12px;
font-weight: 600;
color: var(--text-robin-500);
font-family: 'Inter', sans-serif;
text-transform: uppercase;
letter-spacing: 0.02em;
}

.kbar-command-palette__item {
display: flex;
align-items: center;
gap: 8px;
padding: 10px 16px;
font-size: 13px;
cursor: pointer;
transition: background 0.15s ease;
}

.kbar-command-palette__item:hover,
.kbar-command-palette__item--active {
background: var(--bg-ink-400);
}

.kbar-command-palette__icon {
flex-shrink: 0;
width: 18px;
height: 18px;
color: #444;
}

.kbar-command-palette__shortcut {
margin-left: auto;
display: flex;
gap: 4px;
}

.kbar-command-palette__key {
padding: 2px 6px;
font-size: 12px;
border-radius: 4px;
background: var(--bg-ink-300);
color: var(--text-vanilla-300);
text-transform: uppercase;
font-family: 'Space Mono', monospace;
}

.kbar-command-palette__results-container {
div {
&::-webkit-scrollbar {
width: 0.3rem;
height: 0.3rem;
}

&::-webkit-scrollbar-track {
background: transparent;
}

&::-webkit-scrollbar-thumb {
background: var(--bg-slate-300);
}

&::-webkit-scrollbar-thumb:hover {
background: var(--bg-slate-200);
}
}
}

.lightMode {
.kbar-command-palette__positioner {
background: rgba(0, 0, 0, 0.5);
}

.kbar-command-palette__card {
background: #fff;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.15);
}

.kbar-command-palette__search {
border-bottom: 1px solid #e5e5e5;
color: var(--text-ink-500);
background-color: var(--bg-vanilla-100);
}

.kbar-command-palette__item {
color: var(--text-ink-500);
}

.kbar-command-palette__item:hover,
.kbar-command-palette__item--active {
background: #f5f5f5;
}

.kbar-command-palette__icon {
color: #444;
}

.kbar-command-palette__key {
background: #eee;
color: #555;
}

.kbar-command-palette__results-container {
div {
&::-webkit-scrollbar-thumb {
background: var(--bg-vanilla-300);
}

&::-webkit-scrollbar-thumb:hover {
background: var(--bg-vanilla-300);
}
}
}
}