Compare commits

57 Commits

Author SHA1 Message Date
Shivanshu Raj Shrivastava
7ddf002d46 fix: update clickhouse quries
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
31cd192f24 fix: fix clickhouse query
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
3aacb99adc fix: update clickhouse queries
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
40ca9adbfc fix: update all latency and duration to milliseconds precision
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
bfa7a06e90 chore: sync with main
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
189046865a fix: minor fixes
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
d5ee6ca2c3 fix: minor fixes
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
62fb05ac5a fix: further improve clickhouse queries
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
5a3ed26f01 fix: improved clickhouse query performance
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
8e490e4089 fix: improved clickhouse query performance
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
d3adc319ad fix: improved clickhouse query performance
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
6b58e859b5 chore: updated to global inner join
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
ahmadshaheer
3d3a1eaaf2 chore: remove dev env check 2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
361640fd22 chore: fix typo
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
89de68f987 chore: tf testing
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
ahmadshaheer
71b098776b feat: add timestamp to funnel description payload and update mutation type 2025-06-02 12:17:02 +05:30
ahmadshaheer
6b3e2759a1 refactor: update funnel description endpoint from POST /save to PUT /{funnel_id} 2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
ea46639f59 feat: adds server and handler changes
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:17:02 +05:30
ahmadshaheer
69ed1b7d02 chore: remove the existing filters of a step on clicking replace button 2025-06-02 12:17:02 +05:30
ahmadshaheer
5b07705157 chore: temporarily hide latency pointer from funnel steps 2025-06-02 12:17:02 +05:30
ahmadshaheer
2fc8bb4585 fix: refetch funnel steps overview on clicking refresh 2025-06-02 12:17:02 +05:30
Shivanshu Raj Shrivastava
11d75940e8 chore: review comments
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
3caaa51e08 fix: update access control
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
cad3bf6883 chore: update unit test
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
5b7ce41d0d chore: update migration number
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
ee5120d4ed chore: restore routes
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
c249620e8f chore: fix unit tests
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
1c1811b216 chore: beautify
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
86d69f74f3 chore: remove save API endpoint
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
37b26a7116 chore: use errors package
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
26ad89ed70 chore: better error handling
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
94c7512a6a chore: beautify
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
333ff86a6b chore: update claims from context
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
a22d061ec1 fix: review comments and some changes
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:04 +05:30
Shivanshu Raj Shrivastava
22fdeb1381 fix: update funnel migration number
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
77b330cfe9 fix: minor fixes
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
78a5d7e39e fix: review comments
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
ea1f4e8253 fix: updated unit tests and mocks
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
5e0d6110b5 feat: trace funnel state management
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
14ce7f80e2 chore: fix error handling
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
f8341e8958 fix: optimize funnel creation by combining insert and update operations
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
7a7428d73e chore: added some improvements
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
77cd490e48 chore: update normalize funnel steps
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
9a96817a88 chore: fix naming convention
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
da9a2520a4 chore: fix package naming
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
c03bf9905c test: add more tests to utils
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
6676832c71 chore: add funnel validation while processing funnel steps
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
03e50d3bc3 chore: refactor handler and utils
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
95bc3987bb test: add trace funnel module tests
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
eb797edc53 test: add handler tests
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
c2d36480a2 test: add utility function tests
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
ab0d9918b2 chore: add utility functions
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
c364a3e695 feat: add db migrations
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
43337b6697 feat: db operations, module and handler implementation
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:03 +05:30
Shivanshu Raj Shrivastava
e258d70df5 feat: add required types for tracefunnels
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:02 +05:30
Shivanshu Raj Shrivastava
19ee5860cb feat: add tracefunnel module and handler
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:02 +05:30
Shivanshu Raj Shrivastava
235ea39d73 feat: adds server and handler changes
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 12:14:02 +05:30
843 changed files with 15049 additions and 106411 deletions

View File

@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.128.1
image: signoz/signoz-schema-migrator:v0.111.42
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.128.1
image: signoz/signoz-schema-migrator:v0.111.42
container_name: schema-migrator-async
command:
- async

.github/CODEOWNERS (vendored, 7 changed lines)
View File

@@ -11,10 +11,5 @@
/pkg/errors/ @grandwizard28
/pkg/factory/ @grandwizard28
/pkg/types/ @grandwizard28
.golangci.yml @grandwizard28
/pkg/zeus/ @vikrantgupta25
/pkg/licensing/ @vikrantgupta25
/pkg/sqlmigration/ @vikrantgupta25
/ee/zeus/ @vikrantgupta25
/ee/licensing/ @vikrantgupta25
/ee/sqlmigration/ @vikrantgupta25
.golangci.yml @grandwizard28

View File

@@ -74,8 +74,7 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.variant=community
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch

View File

@@ -108,8 +108,7 @@ jobs:
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch

View File

@@ -107,8 +107,7 @@ jobs:
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch

View File

@@ -22,7 +22,7 @@ jobs:
- 24.1.2-alpine
- 24.12-alpine
schema-migrator-version:
- v0.128.1
- v0.111.38
postgres-version:
- 15
if: |

View File

@@ -7,7 +7,6 @@ linters:
- sloglint
- depguard
- iface
- unparam
linters-settings:
sloglint:

View File

@@ -8,7 +8,7 @@
<p align="center">All your logs, metrics, and traces in one place. Monitor your application, spot issues before they occur and troubleshoot downtime quickly with rich context. SigNoz is a cost-effective open-source alternative to Datadog and New Relic. Visit <a href="https://signoz.io" target="_blank">signoz.io</a> for the full documentation, tutorials, and guide.</p>
<p align="center">
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/signoz.svg?label=Docker%20Downloads"> </a>
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/query-service?label=Docker Downloads"> </a>
<img alt="GitHub issues" src="https://img.shields.io/github/issues/signoz/signoz"> </a>
<a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNozHQ&hashtags=opensource,signoz,observability">
<img alt="tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"> </a>
@@ -231,8 +231,6 @@ Not sure how to get started? Just ping us on `#contributing` in our [slack commu
- [Shaheer Kochai](https://github.com/ahmadshaheer)
- [Amlan Kumar Nandy](https://github.com/amlannandy)
- [Sahil Khan](https://github.com/sawhil)
- [Aditya Singh](https://github.com/aks07)
- [Abhi Kumar](https://github.com/ahrefabhi)
#### DevOps

View File

@@ -90,15 +90,6 @@ apiserver:
- /api/v1/version
- /
##################### Querier #####################
querier:
# The TTL for cached query results.
cache_ttl: 168h
# The interval for recent data that should not be cached.
flux_interval: 5m
# The maximum number of concurrent queries for missing ranges.
max_concurrent_queries: 4
##################### TelemetryStore #####################
telemetrystore:
# Maximum number of idle connections in the connection pool.
@@ -112,15 +103,13 @@ telemetrystore:
clickhouse:
# The DSN to use for clickhouse.
dsn: tcp://localhost:9000
# The cluster name to use for clickhouse.
cluster: cluster
# The query settings for clickhouse.
settings:
max_execution_time: 0
max_execution_time_leaf: 0
timeout_before_checking_execution_speed: 0
max_bytes_to_read: 0
max_result_rows: 0
max_result_rows_for_ch_query: 0
##################### Prometheus #####################
prometheus:
@@ -176,6 +165,12 @@ alertmanager:
# Retention of the notification logs.
retention: 120h
##################### Analytics #####################
analytics:
# Whether to enable analytics.
enabled: false
##################### Emailing #####################
emailing:
# Whether to enable emailing.
@@ -220,27 +215,3 @@ sharder:
single:
# The org id to which this instance belongs to.
org_id: org_id
##################### Analytics #####################
analytics:
# Whether to enable analytics.
enabled: false
segment:
# The key to use for segment.
key: ""
##################### StatsReporter #####################
statsreporter:
# Whether to enable stats reporter. This is used to provide valuable insights to the SigNoz team. It does not collect any sensitive/PII data.
enabled: true
# The interval at which the stats are collected.
interval: 6h
collect:
# Whether to collect identities and traits (emails).
identities: true
##################### Gateway (License only) #####################
gateway:
# The URL of the gateway's api.
url: http://localhost:8080

View File

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.90.0
image: signoz/signoz:v0.85.3
command:
- --config=/root/config/prometheus.yml
ports:
@@ -194,7 +194,6 @@ services:
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- SIGNOZ_JWT_SECRET=secret
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -207,7 +206,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.128.0
image: signoz/signoz-otel-collector:v0.111.42
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -231,7 +230,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.128.1
image: signoz/signoz-schema-migrator:v0.111.42
deploy:
restart_policy:
condition: on-failure

View File

@@ -100,32 +100,26 @@ services:
# - "9000:9000"
# - "8123:8123"
# - "9181:9181"
configs:
- source: clickhouse-config
target: /etc/clickhouse-server/config.xml
- source: clickhouse-users
target: /etc/clickhouse-server/users.xml
- source: clickhouse-custom-function
target: /etc/clickhouse-server/custom-function.xml
- source: clickhouse-cluster
target: /etc/clickhouse-server/config.d/cluster.xml
volumes:
- ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
- ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
- ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
- ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
- ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
- clickhouse:/var/lib/clickhouse/
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.90.0
image: signoz/signoz:v0.85.3
command:
- --config=/root/config/prometheus.yml
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
volumes:
- ../common/signoz/prometheus.yml:/root/config/prometheus.yml
- ../common/dashboards:/root/config/dashboards
- sqlite:/var/lib/signoz/
configs:
- source: signoz-prometheus-config
target: /root/config/prometheus.yml
environment:
- SIGNOZ_ALERTMANAGER_PROVIDER=signoz
- SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
@@ -135,7 +129,6 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -148,17 +141,15 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.128.0
image: signoz/signoz-otel-collector:v0.111.42
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
- --copy-path=/var/tmp/collector-config.yaml
- --feature-gates=-pkg.translator.prometheus.NormalizeName
configs:
- source: otel-collector-config
target: /etc/otel-collector-config.yaml
- source: otel-manager-config
target: /etc/manager-config.yaml
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
- LOW_CARDINAL_EXCEPTION_GROUPING=false
@@ -174,7 +165,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.128.1
image: signoz/signoz-schema-migrator:v0.111.42
deploy:
restart_policy:
condition: on-failure
@@ -195,24 +186,3 @@ volumes:
name: signoz-sqlite
zookeeper-1:
name: signoz-zookeeper-1
configs:
clickhouse-config:
file: ../common/clickhouse/config.xml
clickhouse-users:
file: ../common/clickhouse/users.xml
clickhouse-custom-function:
file: ../common/clickhouse/custom-function.xml
clickhouse-cluster:
file: ../common/clickhouse/cluster.xml
signoz-prometheus-config:
file: ../common/signoz/prometheus.yml
# If you have multiple dashboard files, you can list them individually:
# dashboard-foo:
# file: ../common/dashboards/foo.json
# dashboard-bar:
# file: ../common/dashboards/bar.json
otel-collector-config:
file: ./otel-collector-config.yaml
otel-manager-config:
file: ../common/signoz/otel-collector-opamp-config.yaml

View File

@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: signozclickhousemetrics
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -60,16 +60,27 @@ exporters:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
enabled: true
disable_v2: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
@@ -81,11 +92,11 @@ service:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [clickhousemetricswrite, signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]

View File

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.90.0}
image: signoz/signoz:${VERSION:-v0.85.3}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -197,7 +197,6 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -211,7 +210,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -237,7 +236,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-sync
command:
- sync
@@ -248,7 +247,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.90.0}
image: signoz/signoz:${VERSION:-v0.85.3}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -130,7 +130,6 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -143,7 +142,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -165,7 +164,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-sync
command:
- sync
@@ -177,7 +176,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-async
command:
- async

View File

@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: signozclickhousemetrics
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -60,16 +60,27 @@ exporters:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
@@ -81,11 +92,11 @@ service:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [clickhousemetricswrite, signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]

View File

@@ -1,51 +0,0 @@
# Endpoint
This guide outlines the recommended approach for designing endpoints, with a focus on entity relationships, RESTful structure, and examples from the codebase.
## How do we design an endpoint?
### Understand the core entities and their relationships
Start with understanding the core entities and their relationships. For example:
- **Organization**: an organization can have multiple users
### Structure Endpoints RESTfully
Endpoints should reflect the resource hierarchy and follow RESTful conventions. Use clear, **pluralized resource names** and versioning. For example:
- `POST /v1/organizations` — Create an organization
- `GET /v1/organizations/:id` — Get an organization by id
- `DELETE /v1/organizations/:id` — Delete an organization by id
- `PUT /v1/organizations/:id` — Update an organization by id
- `GET /v1/organizations/:id/users` — Get all users in an organization
- `GET /v1/organizations/me/users` — Get all users in my organization
Think in terms of resource navigation in a file system. For example, to find your organization, you would navigate to the root of the file system and then to the `organizations` directory. To find a user in an organization, you would navigate to the `organizations` directory and then to the `id` directory.
```bash
v1/
├── organizations/
│   └── 123/
│       └── users/
```
`me` endpoints are special: they resolve the actual id via some auth/external mechanism. For `me` endpoints, think of the `me` directory as a symlink to your organization directory. For example, if you are part of the organization `123`, the `me` directory is symlinked to `/v1/organizations/123`:
```bash
v1/
├── organizations/
│   ├── me/ -> symlink to /v1/organizations/123
│   │   └── users/
│   └── 123/
│       └── users/
```
> 💡 **Note**: There are various ways to structure endpoints. Some prefer to use singular resource names instead of `me`. Others prefer to use singular resource names for all endpoints. We have, however, chosen to standardize our endpoints in the manner described above.
## What should I remember?
- Use clear, **plural resource names**
- Use `me` endpoints for determining the actual id via some auth mechanism
> 💡 **Note**: When in doubt, diagram the relationships and walk through the user flows as if navigating a file system. This will help you design endpoints that are both logical and user-friendly.
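To make these conventions concrete, here is a minimal sketch of how such routes could be registered with gorilla/mux (the router used elsewhere in this codebase). The `organizationAPI` type and its handler names are hypothetical, for illustration only; auth middleware and `me` resolution are omitted.
```go
package api

import (
	"net/http"

	"github.com/gorilla/mux"
)

// organizationAPI is a hypothetical handler set used only to illustrate
// versioned, plural, RESTful route naming.
type organizationAPI struct{}

func (a *organizationAPI) Create(w http.ResponseWriter, r *http.Request)    {}
func (a *organizationAPI) Get(w http.ResponseWriter, r *http.Request)       {}
func (a *organizationAPI) Update(w http.ResponseWriter, r *http.Request)    {}
func (a *organizationAPI) Delete(w http.ResponseWriter, r *http.Request)    {}
func (a *organizationAPI) ListUsers(w http.ResponseWriter, r *http.Request) {}

func registerOrganizationRoutes(router *mux.Router, api *organizationAPI) {
	// Plural resource name with a version prefix.
	router.HandleFunc("/v1/organizations", api.Create).Methods(http.MethodPost)
	router.HandleFunc("/v1/organizations/{id}", api.Get).Methods(http.MethodGet)
	router.HandleFunc("/v1/organizations/{id}", api.Update).Methods(http.MethodPut)
	router.HandleFunc("/v1/organizations/{id}", api.Delete).Methods(http.MethodDelete)
	// Register the "me" route before the "{id}" route: gorilla/mux matches
	// routes in registration order, so "me" would otherwise be captured by {id}.
	// The handler resolves the caller's organization id from auth claims.
	router.HandleFunc("/v1/organizations/me/users", api.ListUsers).Methods(http.MethodGet)
	router.HandleFunc("/v1/organizations/{id}/users", api.ListUsers).Methods(http.MethodGet)
}
```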

View File

@@ -1,106 +0,0 @@
# Provider
SigNoz is built on the provider pattern, a design approach where code is organized into providers that handle specific application responsibilities. Providers act as adapter components that integrate with external services and deliver required functionality to the application.
> 💡 **Note**: Coming from a DDD background? Providers are similar (not exactly the same) to adapter/infrastructure services.
## How to create a new provider?
To create a new provider, create a directory in the `pkg/` directory named after your provider. The provider package consists of four key components:
- **Interface** (`pkg/<name>/<name>.go`): Defines the provider's interface. Other packages should import this interface to use the provider.
- **Config** (`pkg/<name>/config.go`): Contains provider configuration, implementing the `factory.Config` interface from [factory/config.go](/pkg/factory/config.go).
- **Implementation** (`pkg/<name>/<implname><name>/provider.go`): Contains the provider implementation, including a `NewProvider` function that returns a `factory.Provider` interface from [factory/provider.go](/pkg/factory/provider.go).
- **Mock** (`pkg/<name>/<name>test.go`): Provides mocks for the provider, typically used by dependent packages for unit testing.
For example, the [prometheus](/pkg/prometheus) provider delivers a prometheus engine to the application:
- `pkg/prometheus/prometheus.go` - Interface definition
- `pkg/prometheus/config.go` - Configuration
- `pkg/prometheus/clickhouseprometheus/provider.go` - Clickhouse-powered implementation
- `pkg/prometheus/prometheustest/provider.go` - Mock implementation
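To make this layout concrete, here is a minimal sketch of the implementation file for a hypothetical `myprovider` package. The `myprovider` interface package does not exist in the codebase, and the `factory` signatures are inferred from the calls visible elsewhere in this compare (`factory.NewProviderFactory`, `factory.MustNewName`, `factory.NewScopedProviderSettings` in the licensing provider), so treat this as an illustration rather than a copy of the real API.
```go
// Hypothetical file: pkg/myprovider/noopmyprovider/provider.go
package noopmyprovider

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/myprovider" // hypothetical interface package
)

type provider struct {
	settings factory.ScopedProviderSettings
}

// NewFactory returns a named provider factory; the IoC container in
// pkg/signoz instantiates it based on configuration.
func NewFactory() factory.ProviderFactory[myprovider.MyProvider, myprovider.Config] {
	return factory.NewProviderFactory(
		factory.MustNewName("noop"),
		func(ctx context.Context, ps factory.ProviderSettings, cfg myprovider.Config) (myprovider.MyProvider, error) {
			return New(ctx, ps, cfg)
		},
	)
}

// New builds the provider; it assumes myprovider.MyProvider declares a single
// DoSomething(ctx) method.
func New(ctx context.Context, ps factory.ProviderSettings, _ myprovider.Config) (myprovider.MyProvider, error) {
	settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/pkg/myprovider/noopmyprovider")
	return &provider{settings: settings}, nil
}

func (p *provider) DoSomething(ctx context.Context) error { return nil }
```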
## How to wire it up?
The `pkg/signoz` package contains the inversion of control container responsible for wiring providers. It handles instantiation, configuration, and assembly of providers based on configuration metadata.
> 💡 **Note**: Coming from a Java background? Providers are similar to Spring beans.
Wiring up a provider involves three steps:
1. Wiring up the configuration
Add your config from `pkg/<name>/config.go` to the `pkg/signoz/config.Config` struct and register its config factory in `NewConfig`:
```go
type Config struct {
	...
	MyProvider myprovider.Config `mapstructure:"myprovider"`
	...
}

func NewConfig(ctx context.Context, resolverConfig config.ResolverConfig, ....) (Config, error) {
	...
	configFactories := []factory.ConfigFactory{
		myprovider.NewConfigFactory(),
	}
	...
}
```
2. Wiring up the provider
Add available provider implementations in `pkg/signoz/provider.go`:
```go
func NewMyProviderFactories() factory.NamedMap[factory.ProviderFactory[myprovider.MyProvider, myprovider.Config]] {
	return factory.MustNewNamedMap(
		myproviderone.NewFactory(),
		myprovidertwo.NewFactory(),
	)
}
```
3. Instantiate the provider by adding it to the `SigNoz` struct in `pkg/signoz/signoz.go`:
```go
type SigNoz struct {
	...
	MyProvider myprovider.MyProvider
	...
}

func New(...) (*SigNoz, error) {
	...
	myprovider, err := myproviderone.New(ctx, settings, config.MyProvider, "one/two")
	if err != nil {
		return nil, err
	}
	...
}
```
## How to use it?
To use a provider, import its interface. For example, to use the prometheus provider, import `pkg/prometheus/prometheus.go`:
```go
import "github.com/SigNoz/signoz/pkg/prometheus/prometheus"
func CreateSomething(ctx context.Context, prometheus prometheus.Prometheus) {
...
prometheus.DoSomething()
...
}
```
## Why do we need this?
Like any dependency injection framework, providers decouple the codebase from implementation details. This is especially valuable in SigNoz's large codebase, where we need to swap implementations without changing dependent code. The provider pattern offers several benefits apart from the obvious one of decoupling:
- Configuration is **defined with each provider and centralized in one place**, making it easier to understand and manage through various methods (environment variables, config files, etc.)
- Provider mocking is **straightforward for unit testing**, with a consistent pattern for locating mocks
- **Multiple implementations** of the same provider are **supported**, as demonstrated by our sqlstore provider
## What should I remember?
- Use the provider pattern wherever applicable.
- Always create a provider **irrespective of the number of implementations**. This makes it easier to add new implementations in the future.

View File

@@ -16,7 +16,7 @@ __Table of Contents__
- [Prerequisites](#prerequisites-1)
- [Install Helm Repo and Charts](#install-helm-repo-and-charts)
- [Start the OpenTelemetry Demo App](#start-the-opentelemetry-demo-app-1)
- [Monitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [Moniitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [What's next](#whats-next)

View File

@@ -3,16 +3,16 @@ package httplicensing
import (
"context"
"encoding/json"
"github.com/SigNoz/signoz/ee/query-service/constants"
"time"
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/analyticstypes"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
@@ -25,17 +25,16 @@ type provider struct {
config licensing.Config
settings factory.ScopedProviderSettings
orgGetter organization.Getter
analytics analytics.Analytics
stopChan chan struct{}
}
func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
return New(ctx, providerSettings, config, store, zeus, orgGetter, analytics)
return New(ctx, providerSettings, config, store, zeus, orgGetter)
})
}
func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) (licensing.Licensing, error) {
func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) (licensing.Licensing, error) {
settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing")
licensestore := sqllicensingstore.New(sqlstore)
return &provider{
@@ -45,7 +44,6 @@ func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Conf
settings: settings,
orgGetter: orgGetter,
stopChan: make(chan struct{}),
analytics: analytics,
}, nil
}
@@ -90,6 +88,13 @@ func (provider *provider) Validate(ctx context.Context) error {
}
}
if len(organizations) == 0 {
err = provider.InitFeatures(ctx, licensetypes.BasicPlan)
if err != nil {
return err
}
}
return nil
}
@@ -110,6 +115,11 @@ func (provider *provider) Activate(ctx context.Context, organizationID valuer.UU
return err
}
err = provider.InitFeatures(ctx, license.Features)
if err != nil {
return err
}
return nil
}
@@ -129,24 +139,28 @@ func (provider *provider) GetActive(ctx context.Context, organizationID valuer.U
func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUID) error {
activeLicense, err := provider.GetActive(ctx, organizationID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil
}
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue())
return err
}
if err != nil && errors.Ast(err, errors.TypeNotFound) {
provider.settings.Logger().DebugContext(ctx, "no active license found, defaulting to basic plan", "org_id", organizationID.StringValue())
err = provider.InitFeatures(ctx, licensetypes.BasicPlan)
if err != nil {
return err
}
return nil
}
data, err := provider.zeus.GetLicense(ctx, activeLicense.Key)
if err != nil {
if time.Since(activeLicense.LastValidatedAt) > time.Duration(provider.config.FailureThreshold)*provider.config.PollInterval {
activeLicense.UpdateFeatures(licensetypes.BasicPlan)
updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
provider.settings.Logger().ErrorContext(ctx, "license validation failed for consecutive poll intervals, defaulting to basic plan", "failure_threshold", provider.config.FailureThreshold, "license_id", activeLicense.ID.StringValue(), "org_id", organizationID.StringValue())
err = provider.InitFeatures(ctx, licensetypes.BasicPlan)
if err != nil {
return err
}
return nil
}
return err
@@ -163,25 +177,6 @@ func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUI
return err
}
stats := licensetypes.NewStatsFromLicense(activeLicense)
provider.analytics.Send(ctx,
analyticstypes.Track{
UserId: "stats_" + organizationID.String(),
Event: "License Updated",
Properties: analyticstypes.NewPropertiesFromMap(stats),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: organizationID.String(),
},
},
},
analyticstypes.Group{
UserId: "stats_" + organizationID.String(),
GroupId: organizationID.String(),
Traits: analyticstypes.NewTraitsFromMap(stats),
},
)
return nil
}
@@ -223,27 +218,80 @@ func (provider *provider) Portal(ctx context.Context, organizationID valuer.UUID
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}
func (provider *provider) GetFeatureFlags(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.Feature, error) {
license, err := provider.GetActive(ctx, organizationID)
// feature surrogate
func (provider *provider) CheckFeature(ctx context.Context, key string) error {
feature, err := provider.store.GetFeature(ctx, key)
if err != nil {
return err
}
if feature.Active {
return nil
}
return errors.Newf(errors.TypeUnsupported, licensing.ErrCodeFeatureUnavailable, "feature unavailable: %s", key)
}
func (provider *provider) GetFeatureFlag(ctx context.Context, key string) (*featuretypes.GettableFeature, error) {
featureStatus, err := provider.store.GetFeature(ctx, key)
if err != nil {
return nil, err
}
return &featuretypes.GettableFeature{
Name: featureStatus.Name,
Active: featureStatus.Active,
Usage: int64(featureStatus.Usage),
UsageLimit: int64(featureStatus.UsageLimit),
Route: featureStatus.Route,
}, nil
}
func (provider *provider) GetFeatureFlags(ctx context.Context) ([]*featuretypes.GettableFeature, error) {
storableFeatures, err := provider.store.GetAllFeatures(ctx)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return licensetypes.BasicPlan, nil
}
return nil, err
}
return license.Features, nil
}
func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
activeLicense, err := provider.GetActive(ctx, orgID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return map[string]any{}, nil
gettableFeatures := make([]*featuretypes.GettableFeature, len(storableFeatures))
for idx, gettableFeature := range storableFeatures {
gettableFeatures[idx] = &featuretypes.GettableFeature{
Name: gettableFeature.Name,
Active: gettableFeature.Active,
Usage: int64(gettableFeature.Usage),
UsageLimit: int64(gettableFeature.UsageLimit),
Route: gettableFeature.Route,
}
return nil, err
}
return licensetypes.NewStatsFromLicense(activeLicense), nil
if constants.IsDotMetricsEnabled {
gettableFeatures = append(gettableFeatures, &featuretypes.GettableFeature{
Name: featuretypes.DotMetricsEnabled,
Active: true,
})
}
return gettableFeatures, nil
}
func (provider *provider) InitFeatures(ctx context.Context, features []*featuretypes.GettableFeature) error {
featureStatus := make([]*featuretypes.StorableFeature, len(features))
for i, f := range features {
featureStatus[i] = &featuretypes.StorableFeature{
Name: f.Name,
Active: f.Active,
Usage: int(f.Usage),
UsageLimit: int(f.UsageLimit),
Route: f.Route,
}
}
return provider.store.InitFeatures(ctx, featureStatus)
}
func (provider *provider) UpdateFeatureFlag(ctx context.Context, feature *featuretypes.GettableFeature) error {
return provider.store.UpdateFeature(ctx, &featuretypes.StorableFeature{
Name: feature.Name,
Active: feature.Active,
Usage: int(feature.Usage),
UsageLimit: int(feature.UsageLimit),
Route: feature.Route,
})
}

View File

@@ -5,6 +5,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -79,3 +80,81 @@ func (store *store) Update(ctx context.Context, organizationID valuer.UUID, stor
return nil
}
func (store *store) CreateFeature(ctx context.Context, storableFeature *featuretypes.StorableFeature) error {
_, err := store.
sqlstore.
BunDB().
NewInsert().
Model(storableFeature).
Exec(ctx)
if err != nil {
return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "feature with name:%s already exists", storableFeature.Name)
}
return nil
}
func (store *store) GetFeature(ctx context.Context, key string) (*featuretypes.StorableFeature, error) {
storableFeature := new(featuretypes.StorableFeature)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(storableFeature).
Where("name = ?", key).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "feature with name:%s does not exist", key)
}
return storableFeature, nil
}
func (store *store) GetAllFeatures(ctx context.Context) ([]*featuretypes.StorableFeature, error) {
storableFeatures := make([]*featuretypes.StorableFeature, 0)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(&storableFeatures).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "features do not exist")
}
return storableFeatures, nil
}
func (store *store) InitFeatures(ctx context.Context, storableFeatures []*featuretypes.StorableFeature) error {
_, err := store.
sqlstore.
BunDB().
NewInsert().
Model(&storableFeatures).
On("CONFLICT (name) DO UPDATE").
Set("active = EXCLUDED.active").
Set("usage = EXCLUDED.usage").
Set("usage_limit = EXCLUDED.usage_limit").
Set("route = EXCLUDED.route").
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to initialise features")
}
return nil
}
func (store *store) UpdateFeature(ctx context.Context, storableFeature *featuretypes.StorableFeature) error {
_, err := store.
sqlstore.
BunDB().
NewUpdate().
Model(storableFeature).
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to update feature with key: %s", storableFeature.Name)
}
return nil
}

View File

@@ -39,7 +39,6 @@ builds:
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
- >-
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
mod_timestamp: "{{ .CommitTimestamp }}"

View File

@@ -11,9 +11,11 @@ RUN apk update && \
COPY ./target/${OS}-${TARGETARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["./signoz"]
ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]

View File

@@ -12,9 +12,11 @@ RUN apk update && \
rm -rf /var/cache/apk/*
COPY ./target/${OS}-${ARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]

View File

@@ -1,22 +1,22 @@
package api
import (
"context"
"net/http"
"net/http/httputil"
"time"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -26,7 +26,8 @@ import (
)
type APIHandlerOptions struct {
DataConnector interfaces.Reader
DataConnector interfaces.DataConnector
PreferSpanMetrics bool
RulesManager *rules.Manager
UsageManager *usage.Manager
IntegrationsController *integrations.Controller
@@ -50,6 +51,7 @@ type APIHandler struct {
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
PreferSpanMetrics: opts.PreferSpanMetrics,
RuleManager: opts.RulesManager,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
@@ -57,9 +59,8 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
FieldsAPI: fields.NewAPI(signoz.TelemetryStore, signoz.Instrumentation.Logger()),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier),
})
if err != nil {
@@ -85,12 +86,23 @@ func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
return ah.opts.Gateway
}
func (ah *APIHandler) CheckFeature(ctx context.Context, key string) bool {
err := ah.Signoz.Licensing.CheckFeature(ctx, key)
return err == nil
}
// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// note: add ee override methods first
// routes available only in ee version
router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/featureFlags", am.OpenAccess(ah.getFeatureFlags)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/loginPrecheck", am.OpenAccess(ah.Signoz.Handlers.User.LoginPrecheck)).Methods(http.MethodGet)
// invite
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.Signoz.Handlers.User.GetInvite)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/invite/accept", am.OpenAccess(ah.Signoz.Handlers.User.AcceptInvite)).Methods(http.MethodPost)
// paid plans specific routes
router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)
@@ -102,6 +114,9 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/dashboards/{uuid}/lock", am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut)
// v3
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)

View File

@@ -96,7 +96,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
return
}
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email)
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
if err != nil {
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)

View File

@@ -0,0 +1,62 @@
package api
import (
"net/http"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/gorilla/mux"
)
func (ah *APIHandler) lockDashboard(w http.ResponseWriter, r *http.Request) {
ah.lockUnlockDashboard(w, r, true)
}
func (ah *APIHandler) unlockDashboard(w http.ResponseWriter, r *http.Request) {
ah.lockUnlockDashboard(w, r, false)
}
func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request, lock bool) {
// Locking can only be done by the owner of the dashboard
// or an admin
// - Fetch the dashboard
// - Check if the user is the owner or an admin
// - If yes, lock/unlock the dashboard
// - If no, return 403
// Get the dashboard UUID from the request
uuid := mux.Vars(r)["uuid"]
if strings.HasPrefix(uuid, "integration") {
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "dashboards created by integrations cannot be modified"))
return
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated"))
return
}
dashboard, err := ah.Signoz.Modules.Dashboard.Get(r.Context(), claims.OrgID, uuid)
if err != nil {
render.Error(w, err)
return
}
if err := claims.IsAdmin(); err != nil && (dashboard.CreatedBy != claims.Email) {
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "You are not authorized to lock/unlock this dashboard"))
return
}
// Lock/Unlock the dashboard
err = ah.Signoz.Modules.Dashboard.LockUnlock(r.Context(), claims.OrgID, uuid, lock)
if err != nil {
render.Error(w, err)
return
}
ah.Respond(w, "Dashboard updated successfully")
}

View File

@@ -12,7 +12,7 @@ import (
pkgError "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
@@ -31,7 +31,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
return
}
featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context(), orgID)
featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context())
if err != nil {
ah.HandleError(w, err, http.StatusInternalServerError)
return
@@ -59,17 +59,9 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
}
}
if constants.IsPreferSpanMetrics {
if ah.opts.PreferSpanMetrics {
for idx, feature := range featureSet {
if feature.Name == licensetypes.UseSpanMetrics {
featureSet[idx].Active = true
}
}
}
if constants.IsDotMetricsEnabled {
for idx, feature := range featureSet {
if feature.Name == licensetypes.DotMetricsEnabled {
if feature.Name == featuretypes.UseSpanMetrics {
featureSet[idx].Active = true
}
}
@@ -80,7 +72,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
// fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint
// and returns the FeatureSet.
func fetchZeusFeatures(url, licenseKey string) ([]*licensetypes.Feature, error) {
func fetchZeusFeatures(url, licenseKey string) ([]*featuretypes.GettableFeature, error) {
// Check if the URL is empty
if url == "" {
return nil, fmt.Errorf("url is empty")
@@ -139,28 +131,28 @@ func fetchZeusFeatures(url, licenseKey string) ([]*licensetypes.Feature, error)
}
type ZeusFeaturesResponse struct {
Status string `json:"status"`
Data []*licensetypes.Feature `json:"data"`
Status string `json:"status"`
Data []*featuretypes.GettableFeature `json:"data"`
}
// MergeFeatureSets merges two FeatureSet arrays with precedence to zeusFeatures.
func MergeFeatureSets(zeusFeatures, internalFeatures []*licensetypes.Feature) []*licensetypes.Feature {
func MergeFeatureSets(zeusFeatures, internalFeatures []*featuretypes.GettableFeature) []*featuretypes.GettableFeature {
// Create a map to store the merged features
featureMap := make(map[string]*licensetypes.Feature)
featureMap := make(map[string]*featuretypes.GettableFeature)
// Add all features from the otherFeatures set to the map
for _, feature := range internalFeatures {
featureMap[feature.Name.StringValue()] = feature
featureMap[feature.Name] = feature
}
// Add all features from the zeusFeatures set to the map
// If a feature already exists (i.e., same name), the zeusFeature will overwrite it
for _, feature := range zeusFeatures {
featureMap[feature.Name.StringValue()] = feature
featureMap[feature.Name] = feature
}
// Convert the map back to a FeatureSet slice
var mergedFeatures []*licensetypes.Feature
var mergedFeatures []*featuretypes.GettableFeature
for _, feature := range featureMap {
mergedFeatures = append(mergedFeatures, feature)
}

View File

@@ -3,79 +3,78 @@ package api
import (
"testing"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/stretchr/testify/assert"
)
func TestMergeFeatureSets(t *testing.T) {
tests := []struct {
name string
zeusFeatures []*licensetypes.Feature
internalFeatures []*licensetypes.Feature
expected []*licensetypes.Feature
zeusFeatures []*featuretypes.GettableFeature
internalFeatures []*featuretypes.GettableFeature
expected []*featuretypes.GettableFeature
}{
{
name: "empty zeusFeatures and internalFeatures",
zeusFeatures: []*licensetypes.Feature{},
internalFeatures: []*licensetypes.Feature{},
expected: []*licensetypes.Feature{},
zeusFeatures: []*featuretypes.GettableFeature{},
internalFeatures: []*featuretypes.GettableFeature{},
expected: []*featuretypes.GettableFeature{},
},
{
name: "non-empty zeusFeatures and empty internalFeatures",
zeusFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
zeusFeatures: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
internalFeatures: []*licensetypes.Feature{},
expected: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
internalFeatures: []*featuretypes.GettableFeature{},
expected: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
},
{
name: "empty zeusFeatures and non-empty internalFeatures",
zeusFeatures: []*licensetypes.Feature{},
internalFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
zeusFeatures: []*featuretypes.GettableFeature{},
internalFeatures: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
expected: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
expected: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
},
{
name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts",
zeusFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature3"), Active: false},
zeusFeatures: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: true},
{Name: "Feature3", Active: false},
},
internalFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature2"), Active: true},
{Name: valuer.NewString("Feature4"), Active: false},
internalFeatures: []*featuretypes.GettableFeature{
{Name: "Feature2", Active: true},
{Name: "Feature4", Active: false},
},
expected: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: true},
{Name: valuer.NewString("Feature3"), Active: false},
{Name: valuer.NewString("Feature4"), Active: false},
expected: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: true},
{Name: "Feature3", Active: false},
{Name: "Feature4", Active: false},
},
},
{
name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts",
zeusFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
zeusFeatures: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
internalFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: false},
{Name: valuer.NewString("Feature3"), Active: true},
internalFeatures: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: false},
{Name: "Feature3", Active: true},
},
expected: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
{Name: valuer.NewString("Feature3"), Active: true},
expected: []*featuretypes.GettableFeature{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
{Name: "Feature3", Active: true},
},
},
}
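
The hunk ends before the execution loop. A conventional table-driven runner for these cases would look roughly like the sketch below; ElementsMatch is used because map iteration makes the output order unspecified, and the helper name mergeFeatureSets is an assumption taken from the test name.

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := mergeFeatureSets(tt.zeusFeatures, tt.internalFeatures)
			// Compare as sets: map iteration order is not deterministic.
			assert.ElementsMatch(t, tt.expected, got)
		})
	}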

View File

@@ -0,0 +1,39 @@
package db
import (
"time"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/prometheus"
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
)
type ClickhouseReader struct {
conn clickhouse.Conn
appdb sqlstore.SQLStore
*basechr.ClickHouseReader
}
func NewDataConnector(
sqlDB sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
cluster string,
fluxIntervalForTraceDetail time.Duration,
cache cache.Cache,
) *ClickhouseReader {
chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, fluxIntervalForTraceDetail, cache)
return &ClickhouseReader{
conn: telemetryStore.ClickhouseDB(),
appdb: sqlDB,
ClickHouseReader: chReader,
}
}
func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
return r.appdb
}

View File

@@ -6,10 +6,14 @@ import (
"net"
"net/http"
_ "net/http/pprof" // http profiler
"time"
"github.com/gorilla/handlers"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
@@ -28,7 +32,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
@@ -38,6 +41,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
@@ -55,55 +59,62 @@ type ServerOptions struct {
Jwt *authtypes.JWT
}
// Server runs HTTP, Mux and a grpc server
// Server runs HTTP api service
type Server struct {
config signoz.Config
signoz *signoz.SigNoz
jwt *authtypes.JWT
ruleManager *baserules.Manager
serverOptions *ServerOptions
ruleManager *baserules.Manager
// public http router
httpConn net.Listener
httpServer *http.Server
httpHostPort string
httpConn net.Listener
httpServer *http.Server
// private http
privateConn net.Listener
privateHTTP *http.Server
privateHostPort string
opampServer *opamp.Server
privateConn net.Listener
privateHTTP *http.Server
// Usage manager
usageManager *usage.Manager
opampServer *opamp.Server
unavailableChannel chan healthcheck.Status
}
// HealthCheckStatus returns health check status channel a client can subscribe to
func (s Server) HealthCheckStatus() chan healthcheck.Status {
return s.unavailableChannel
}
// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
func NewServer(serverOptions *ServerOptions) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil {
return nil, err
}
reader := clickhouseReader.NewReader(
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.TelemetryStore.Cluster(),
config.Querier.FluxInterval,
signoz.Cache,
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
if err != nil {
return nil, err
}
reader := db.NewDataConnector(
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
serverOptions.Cluster,
fluxIntervalForTraceDetail,
serverOptions.SigNoz.Cache,
)
rm, err := makeRulesManager(
serverOptions.SigNoz.SQLStore.SQLxDB(),
reader,
signoz.Cache,
signoz.Alertmanager,
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.Modules.OrgGetter,
serverOptions.SigNoz.Cache,
serverOptions.SigNoz.Alertmanager,
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
serverOptions.SigNoz.Modules.OrgGetter,
)
if err != nil {
@@ -111,16 +122,19 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
}
// initiate opamp
opAmpModel.Init(signoz.SQLStore, signoz.Instrumentation.Logger(), signoz.Modules.OrgGetter)
_, err = opAmpModel.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB())
if err != nil {
return nil, err
}
integrationsController, err := integrations.NewController(signoz.SQLStore)
integrationsController, err := integrations.NewController(serverOptions.SigNoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create integrations controller: %w", err,
)
}
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
cloudIntegrationsController, err := cloudintegrations.NewController(serverOptions.SigNoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create cloud provider integrations controller: %w", err,
@@ -129,8 +143,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
signoz.SQLStore,
integrationsController.GetPipelinesForInstalledIntegrations,
serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
)
if err != nil {
return nil, err
@@ -138,7 +151,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
// initiate agent config handler
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
Store: signoz.SQLStore,
DB: serverOptions.SigNoz.SQLStore.SQLxDB(),
AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController},
})
if err != nil {
@@ -146,7 +159,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
}
// start the usagemanager
usageManager, err := usage.New(signoz.Licensing, signoz.TelemetryStore.ClickhouseDB(), signoz.Zeus, signoz.Modules.OrgGetter)
usageManager, err := usage.New(serverOptions.SigNoz.Licensing, serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus, serverOptions.SigNoz.Modules.OrgGetter)
if err != nil {
return nil, err
}
@@ -155,36 +168,47 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
return nil, err
}
telemetry.GetInstance().SetReader(reader)
telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
if err != nil {
return nil, err
}
apiOpts := api.APIHandlerOptions{
DataConnector: reader,
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
RulesManager: rm,
UsageManager: usageManager,
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
FluxInterval: fluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
JWT: jwt,
GatewayUrl: serverOptions.GatewayUrl,
JWT: serverOptions.Jwt,
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
apiHandler, err := api.NewAPIHandler(apiOpts, serverOptions.SigNoz)
if err != nil {
return nil, err
}
s := &Server{
config: config,
signoz: signoz,
jwt: jwt,
ruleManager: rm,
httpHostPort: baseconst.HTTPHostPort,
privateHostPort: baseconst.PrivateHostPort,
serverOptions: serverOptions,
unavailableChannel: make(chan healthcheck.Status),
usageManager: usageManager,
}
httpServer, err := s.createPublicServer(apiHandler, signoz.Web)
httpServer, err := s.createPublicServer(apiHandler, serverOptions.SigNoz.Web)
if err != nil {
return nil, err
@@ -200,28 +224,35 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
s.privateHTTP = privateServer
s.opampServer = opamp.InitializeServer(
&opAmpModel.AllAgents, agentConfMgr, signoz.Instrumentation,
&opAmpModel.AllAgents, agentConfMgr,
)
return s, nil
}
orgs, err := apiHandler.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
if err != nil {
return nil, err
}
for _, org := range orgs {
errorList := reader.PreloadMetricsMetadata(context.Background(), org.ID)
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
}
// HealthCheckStatus returns health check status channel a client can subscribe to
func (s Server) HealthCheckStatus() chan healthcheck.Status {
return s.unavailableChannel
return s, nil
}
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter()
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewAnalytics().Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterPrivateRoutes(r)
@@ -243,16 +274,17 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger())
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewAnalytics().Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
@@ -262,7 +294,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)
apiHandler.RegisterQueryRangeV5Routes(r, am)
apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am)
@@ -293,7 +324,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
func (s *Server) initListeners() error {
// listen on public port
var err error
publicHostPort := s.httpHostPort
publicHostPort := s.serverOptions.HTTPHostPort
if publicHostPort == "" {
return fmt.Errorf("baseconst.HTTPHostPort is required")
}
@@ -303,10 +334,10 @@ func (s *Server) initListeners() error {
return err
}
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
// listen on private port to support internal services
privateHostPort := s.privateHostPort
privateHostPort := s.serverOptions.PrivateHostPort
if privateHostPort == "" {
return fmt.Errorf("baseconst.PrivateHostPort is required")
@@ -316,7 +347,7 @@ func (s *Server) initListeners() error {
if err != nil {
return err
}
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.privateHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
return nil
}
@@ -336,7 +367,7 @@ func (s *Server) Start(ctx context.Context) error {
}
go func() {
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.httpHostPort))
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
switch err := s.httpServer.Serve(s.httpConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
@@ -362,7 +393,7 @@ func (s *Server) Start(ctx context.Context) error {
}
go func() {
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.privateHostPort))
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
switch err := s.privateHTTP.Serve(s.privateConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
@@ -414,6 +445,7 @@ func (s *Server) Stop(ctx context.Context) error {
}
func makeRulesManager(
db *sqlx.DB,
ch baseint.Reader,
cache cache.Cache,
alertmanager alertmanager.Alertmanager,
@@ -426,6 +458,7 @@ func makeRulesManager(
managerOpts := &baserules.ManagerOptions{
TelemetryStore: telemetryStore,
Prometheus: prometheus,
DBConn: db,
Context: context.Background(),
Logger: zap.L(),
Reader: ch,

View File

@@ -37,14 +37,9 @@ func GetDefaultSiteURL() string {
const DotMetricsEnabled = "DOT_METRICS_ENABLED"
var IsDotMetricsEnabled = false
var IsPreferSpanMetrics = false
func init() {
if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
IsDotMetricsEnabled = true
}
if GetOrDefaultEnv("USE_SPAN_METRICS", "false") == "true" {
IsPreferSpanMetrics = true
}
}

View File

@@ -0,0 +1,11 @@
package interfaces
import (
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
)
// Connector defines methods for interaction
// with o11y data. for example - clickhouse
type DataConnector interface {
baseint.Reader
}
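
Since DataConnector simply embeds the base Reader interface, a cheap way to keep the EE ClickhouseReader above in sync with it is a compile-time assertion. A sketch only, assuming it can live alongside the ClickhouseReader definition without creating an import cycle.

package db

import (
	eeinterfaces "github.com/SigNoz/signoz/ee/query-service/interfaces"
)

// Compile-time check: ClickhouseReader must satisfy DataConnector, i.e. the
// full base Reader interface promoted through the embedded *basechr.ClickHouseReader.
var _ eeinterfaces.DataConnector = (*ClickhouseReader)(nil)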

View File

@@ -9,11 +9,9 @@ import (
"github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
@@ -22,7 +20,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -104,14 +101,10 @@ func main() {
fileprovider.NewFactory(),
},
}, signoz.DeprecatedFlags{
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
GatewayUrl: gatewayUrl,
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
})
if err != nil {
zap.L().Fatal("Failed to create config", zap.Error(err))
@@ -141,20 +134,12 @@ func main() {
zeus.Config(),
httpzeus.NewProviderFactory(),
licensing.Config(24*time.Hour, 3),
func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics)
func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter)
},
signoz.NewEmailingProviderFactories(),
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] {
existingFactories := signoz.NewSQLSchemaProviderFactories(sqlstore)
if err := existingFactories.Add(postgressqlschema.NewFactory(sqlstore)); err != nil {
zap.L().Fatal("Failed to add postgressqlschema factory", zap.Error(err))
}
return existingFactories
},
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
)
@@ -162,7 +147,20 @@ func main() {
zap.L().Fatal("Failed to create signoz", zap.Error(err))
}
server, err := app.NewServer(config, signoz, jwt)
serverOptions := &app.ServerOptions{
Config: config,
SigNoz: signoz,
HTTPHostPort: baseconst.HTTPHostPort,
PreferSpanMetrics: preferSpanMetrics,
PrivateHostPort: baseconst.PrivateHostPort,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
GatewayUrl: gatewayUrl,
Jwt: jwt,
}
server, err := app.NewServer(serverOptions)
if err != nil {
zap.L().Fatal("Failed to create server", zap.Error(err))
}
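
Piecing together the fields referenced across the server.go and main.go hunks, the expanded ServerOptions presumably looks roughly like the sketch below. Field names come from usage above; the types are inferred, e.g. the two flux intervals must be strings because they are fed through time.ParseDuration.

package app

import (
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
)

// Sketch only: field set and ordering inferred from the diff hunks.
type ServerOptions struct {
	Config                     signoz.Config
	SigNoz                     *signoz.SigNoz
	HTTPHostPort               string
	PrivateHostPort            string
	PreferSpanMetrics          bool
	FluxInterval               string
	FluxIntervalForTraceDetail string
	Cluster                    string
	GatewayUrl                 string
	Jwt                        *authtypes.JWT
}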

View File

@@ -1,36 +0,0 @@
package postgressqlschema
import (
"strings"
"github.com/SigNoz/signoz/pkg/sqlschema"
)
type Formatter struct {
sqlschema.Formatter
}
func (formatter Formatter) SQLDataTypeOf(dataType sqlschema.DataType) string {
if dataType == sqlschema.DataTypeTimestamp {
return "TIMESTAMPTZ"
}
return strings.ToUpper(dataType.String())
}
func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
switch strings.ToUpper(dataType) {
case "TIMESTAMPTZ", "TIMESTAMP", "TIMESTAMP WITHOUT TIME ZONE", "TIMESTAMP WITH TIME ZONE":
return sqlschema.DataTypeTimestamp
case "INT8":
return sqlschema.DataTypeBigInt
case "INT2", "INT4", "SMALLINT", "INTEGER":
return sqlschema.DataTypeInteger
case "BOOL", "BOOLEAN":
return sqlschema.DataTypeBoolean
case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
return sqlschema.DataTypeText
}
return formatter.Formatter.DataTypeOf(dataType)
}

View File

@@ -1,285 +0,0 @@
package postgressqlschema
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)
type provider struct {
settings factory.ScopedProviderSettings
fmter sqlschema.SQLFormatter
sqlstore sqlstore.SQLStore
operator sqlschema.SQLOperator
}
func NewFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config] {
return factory.NewProviderFactory(factory.MustNewName("postgres"), func(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config) (sqlschema.SQLSchema, error) {
return New(ctx, providerSettings, config, sqlstore)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config, sqlstore sqlstore.SQLStore) (sqlschema.SQLSchema, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/sqlschema/postgressqlschema")
fmter := Formatter{Formatter: sqlschema.NewFormatter(sqlstore.BunDB().Dialect())}
return &provider{
sqlstore: sqlstore,
fmter: fmter,
settings: settings,
operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
DropConstraint: true,
ColumnIfNotExistsExists: true,
AlterColumnSetNotNull: true,
}),
}, nil
}
func (provider *provider) Formatter() sqlschema.SQLFormatter {
return provider.fmter
}
func (provider *provider) Operator() sqlschema.SQLOperator {
return provider.operator
}
func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
rows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
c.column_name,
c.is_nullable = 'YES',
c.udt_name,
c.column_default
FROM
information_schema.columns AS c
WHERE
c.table_name = ?`, string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
columns := make([]*sqlschema.Column, 0)
for rows.Next() {
var (
name string
sqlDataType string
nullable bool
defaultVal *string
)
if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
return nil, nil, err
}
columnDefault := ""
if defaultVal != nil {
columnDefault = *defaultVal
}
columns = append(columns, &sqlschema.Column{
Name: sqlschema.ColumnName(name),
Nullable: nullable,
DataType: provider.fmter.DataTypeOf(sqlDataType),
Default: columnDefault,
})
}
constraintsRows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
c.column_name,
constraint_name,
constraint_type
FROM
information_schema.table_constraints tc
JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_catalog, table_name, constraint_name)
JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name
WHERE
c.table_name = ?`, string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := constraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
var primaryKeyConstraint *sqlschema.PrimaryKeyConstraint
uniqueConstraintsMap := make(map[string]*sqlschema.UniqueConstraint)
for constraintsRows.Next() {
var (
name string
constraintName string
constraintType string
)
if err := constraintsRows.Scan(&name, &constraintName, &constraintType); err != nil {
return nil, nil, err
}
if constraintType == "PRIMARY KEY" {
if primaryKeyConstraint == nil {
primaryKeyConstraint = (&sqlschema.PrimaryKeyConstraint{
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)},
}).Named(constraintName).(*sqlschema.PrimaryKeyConstraint)
} else {
primaryKeyConstraint.ColumnNames = append(primaryKeyConstraint.ColumnNames, sqlschema.ColumnName(name))
}
}
if constraintType == "UNIQUE" {
if _, ok := uniqueConstraintsMap[constraintName]; !ok {
uniqueConstraintsMap[constraintName] = (&sqlschema.UniqueConstraint{
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)},
}).Named(constraintName).(*sqlschema.UniqueConstraint)
} else {
uniqueConstraintsMap[constraintName].ColumnNames = append(uniqueConstraintsMap[constraintName].ColumnNames, sqlschema.ColumnName(name))
}
}
}
foreignKeyConstraintsRows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
tc.constraint_name,
kcu.table_name AS referencing_table,
kcu.column_name AS referencing_column,
ccu.table_name AS referenced_table,
ccu.column_name AS referenced_column
FROM
information_schema.key_column_usage kcu
JOIN information_schema.table_constraints tc ON kcu.constraint_name = tc.constraint_name AND kcu.table_schema = tc.table_schema
JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name AND ccu.table_schema = tc.table_schema
WHERE
tc.constraint_type = ?
AND kcu.table_name = ?`, "FOREIGN KEY", string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := foreignKeyConstraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
foreignKeyConstraints := make([]*sqlschema.ForeignKeyConstraint, 0)
for foreignKeyConstraintsRows.Next() {
var (
constraintName string
referencingTable string
referencingColumn string
referencedTable string
referencedColumn string
)
if err := foreignKeyConstraintsRows.Scan(&constraintName, &referencingTable, &referencingColumn, &referencedTable, &referencedColumn); err != nil {
return nil, nil, err
}
foreignKeyConstraints = append(foreignKeyConstraints, (&sqlschema.ForeignKeyConstraint{
ReferencingColumnName: sqlschema.ColumnName(referencingColumn),
ReferencedTableName: sqlschema.TableName(referencedTable),
ReferencedColumnName: sqlschema.ColumnName(referencedColumn),
}).Named(constraintName).(*sqlschema.ForeignKeyConstraint))
}
uniqueConstraints := make([]*sqlschema.UniqueConstraint, 0)
for _, uniqueConstraint := range uniqueConstraintsMap {
uniqueConstraints = append(uniqueConstraints, uniqueConstraint)
}
return &sqlschema.Table{
Name: tableName,
Columns: columns,
PrimaryKeyConstraint: primaryKeyConstraint,
ForeignKeyConstraints: foreignKeyConstraints,
}, uniqueConstraints, nil
}
func (provider *provider) GetIndices(ctx context.Context, name sqlschema.TableName) ([]sqlschema.Index, error) {
rows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
ct.relname AS table_name,
ci.relname AS index_name,
i.indisunique AS unique,
i.indisprimary AS primary,
a.attname AS column_name
FROM
pg_index i
LEFT JOIN pg_class ct ON ct.oid = i.indrelid
LEFT JOIN pg_class ci ON ci.oid = i.indexrelid
LEFT JOIN pg_attribute a ON a.attrelid = ct.oid
LEFT JOIN pg_constraint con ON con.conindid = i.indexrelid
WHERE
a.attnum = ANY(i.indkey)
AND con.oid IS NULL
AND ct.relkind = 'r'
AND ct.relname = ?`, string(name))
if err != nil {
return nil, err
}
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
uniqueIndicesMap := make(map[string]*sqlschema.UniqueIndex)
for rows.Next() {
var (
tableName string
indexName string
unique bool
primary bool
columnName string
)
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil {
return nil, err
}
if unique {
if _, ok := uniqueIndicesMap[indexName]; !ok {
uniqueIndicesMap[indexName] = &sqlschema.UniqueIndex{
TableName: name,
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
}
} else {
uniqueIndicesMap[indexName].ColumnNames = append(uniqueIndicesMap[indexName].ColumnNames, sqlschema.ColumnName(columnName))
}
}
}
indices := make([]sqlschema.Index, 0)
for _, index := range uniqueIndicesMap {
indices = append(indices, index)
}
return indices, nil
}
func (provider *provider) ToggleFKEnforcement(_ context.Context, _ bun.IDB, _ bool) error {
return nil
}

View File

@@ -17,21 +17,19 @@ var (
)
var (
Org = "org"
User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
AgentConfigVersion = "agent_config_version"
Org = "org"
User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
AgentConfigVersionReference = `("version_id") REFERENCES "agent_config_version" ("id")`
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
)
type dialect struct{}
@@ -276,8 +274,6 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, FactorPasswordReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
} else if reference == AgentConfigVersion && !slices.Contains(fkReferences, AgentConfigVersionReference) {
fkReferences = append(fkReferences, AgentConfigVersionReference)
}
}

View File

@@ -10,6 +10,7 @@ import (
"github.com/jackc/pgx/v5/pgconn"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/jackc/pgx/v5/stdlib"
"github.com/jmoiron/sqlx"
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect/pgdialect"
)
@@ -18,6 +19,7 @@ type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
sqlxdb *sqlx.DB
dialect *dialect
}
@@ -59,6 +61,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
sqlxdb: sqlx.NewDb(sqldb, "postgres"),
dialect: new(dialect),
}, nil
}
@@ -71,6 +74,10 @@ func (provider *provider) SQLDB() *sql.DB {
return provider.sqldb
}
func (provider *provider) SQLxDB() *sqlx.DB {
return provider.sqlxdb
}
func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}
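
For the new accessor to be reachable as serverOptions.SigNoz.SQLStore.SQLxDB() in server.go, the sqlstore.SQLStore interface presumably gains a matching method. A sketch with the pre-existing methods omitted.

package sqlstore

import "github.com/jmoiron/sqlx"

// Sketch only: existing methods (SQLDB, BunDB, Dialect, ...) are omitted here.
type SQLStore interface {
	SQLxDB() *sqlx.DB
}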

View File

@@ -78,7 +78,7 @@
"fontfaceobserver": "2.3.0",
"history": "4.10.1",
"html-webpack-plugin": "5.5.0",
"http-proxy-middleware": "3.0.5",
"http-proxy-middleware": "3.0.3",
"http-status-codes": "2.3.0",
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
@@ -134,7 +134,7 @@
"uuid": "^8.3.2",
"web-vitals": "^0.2.4",
"webpack": "5.94.0",
"webpack-dev-server": "^5.2.1",
"webpack-dev-server": "^4.15.2",
"webpack-retry-chunk-load-plugin": "3.1.1",
"xstate": "^4.31.0"
},
@@ -197,6 +197,7 @@
"babel-plugin-styled-components": "^1.12.0",
"compression-webpack-plugin": "9.0.0",
"copy-webpack-plugin": "^11.0.0",
"critters-webpack-plugin": "^3.0.1",
"eslint": "^7.32.0",
"eslint-config-airbnb": "^19.0.4",
"eslint-config-airbnb-typescript": "^16.1.4",
@@ -213,9 +214,7 @@
"eslint-plugin-simple-import-sort": "^7.0.0",
"eslint-plugin-sonarjs": "^0.12.0",
"husky": "^7.0.4",
"image-minimizer-webpack-plugin": "^4.0.0",
"imagemin": "^8.0.1",
"imagemin-svgo": "^10.0.1",
"image-webpack-loader": "8.1.0",
"is-ci": "^3.0.1",
"jest-styled-components": "^7.0.8",
"lint-staged": "^12.5.0",
@@ -232,12 +231,11 @@
"redux-mock-store": "1.5.4",
"sass": "1.66.1",
"sass-loader": "13.3.2",
"sharp": "^0.33.4",
"ts-jest": "^27.1.5",
"ts-node": "^10.2.1",
"typescript-plugin-css-modules": "5.0.1",
"webpack-bundle-analyzer": "^4.5.0",
"webpack-cli": "^5.1.4"
"webpack-cli": "^4.9.2"
},
"lint-staged": {
"*.(js|jsx|ts|tsx)": [
@@ -253,11 +251,10 @@
"xml2js": "0.5.0",
"phin": "^3.7.1",
"body-parser": "1.20.3",
"http-proxy-middleware": "3.0.5",
"http-proxy-middleware": "3.0.3",
"cross-spawn": "7.0.5",
"cookie": "^0.7.1",
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0",
"got": "11.8.5"
"prismjs": "1.30.0"
}
}

File diff suppressed because one or more lines are too long (image; before: 6.1 KiB)

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 18"><defs><linearGradient id="a" x1="2.59" y1="10.16" x2="15.41" y2="10.16" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#005ba1"/><stop offset=".07" stop-color="#0060a9"/><stop offset=".36" stop-color="#0071c8"/><stop offset=".52" stop-color="#0078d4"/><stop offset=".64" stop-color="#0074cd"/><stop offset=".82" stop-color="#006abb"/><stop offset="1" stop-color="#005ba1"/></linearGradient></defs><path d="M9 5.14c-3.54 0-6.41-1-6.41-2.32v12.36c0 1.27 2.82 2.3 6.32 2.32H9c3.54 0 6.41-1 6.41-2.32V2.82c0 1.29-2.87 2.32-6.41 2.32z" fill="url(#a)"/><path d="M15.41 2.82c0 1.29-2.87 2.32-6.41 2.32s-6.41-1-6.41-2.32S5.46.5 9 .5s6.41 1 6.41 2.32" fill="#e8e8e8"/><path d="M13.92 2.63c0 .82-2.21 1.48-4.92 1.48s-4.92-.66-4.92-1.48S6.29 1.16 9 1.16s4.92.66 4.92 1.47" fill="#50e6ff"/><path d="M9 3a11.55 11.55 0 00-3.89.57A11.42 11.42 0 009 4.11a11.15 11.15 0 003.89-.58A11.84 11.84 0 009 3z" fill="#198ab3"/><path d="M12.64 9v1.63h-1a.39.39 0 01-.29-.14V9H10v1.78a.92.92 0 001 .89h1.49l.26-.13s-.11.41-.26.43h-2.38v1h2.66A1.21 1.21 0 0014 11.7V9zM9.53 9v-.49a.7.7 0 00-.48-.77 1.74 1.74 0 00-.5-.08.94.94 0 00-.91.58l-.78 1.9-1-1.9A.93.93 0 005 7.66a1.44 1.44 0 00-.51.09c-.35.11-.43.34-.43.73v3.31h1.17V9.56l.63 1.57a1.08 1.08 0 001 .66c.44 0 .62-.26.8-.66l.67-1.51v2.15h1.18V9z" fill="#f2f2f2"/></svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32" width="64" height="64"><path d="M8.16 23h21.177v-5.86l-4.023-2.307-.694-.3-16.46.113z" fill="#fff"/><path d="M22.012 22.222c.197-.675.122-1.294-.206-1.754-.3-.422-.807-.666-1.416-.694l-11.545-.15c-.075 0-.14-.038-.178-.094s-.047-.13-.028-.206c.038-.113.15-.197.272-.206l11.648-.15c1.38-.066 2.88-1.182 3.404-2.55l.666-1.735a.38.38 0 0 0 .02-.225c-.75-3.395-3.78-5.927-7.4-5.927-3.34 0-6.17 2.157-7.184 5.15-.657-.488-1.5-.75-2.392-.666-1.604.16-2.9 1.444-3.048 3.048a3.58 3.58 0 0 0 .084 1.191A4.84 4.84 0 0 0 0 22.1c0 .234.02.47.047.703.02.113.113.197.225.197H21.58a.29.29 0 0 0 .272-.206l.16-.572z" fill="#f38020"/><path d="M25.688 14.803l-.32.01c-.075 0-.14.056-.17.13l-.45 1.566c-.197.675-.122 1.294.206 1.754.3.422.807.666 1.416.694l2.457.15c.075 0 .14.038.178.094s.047.14.028.206c-.038.113-.15.197-.272.206l-2.56.15c-1.388.066-2.88 1.182-3.404 2.55l-.188.478c-.038.094.028.188.13.188h8.797a.23.23 0 0 0 .225-.169A6.41 6.41 0 0 0 32 21.106a6.32 6.32 0 0 0-6.312-6.302" fill="#faae40"/></svg>


File diff suppressed because one or more lines are too long (image; before: 6.2 KiB)

View File

@@ -1,2 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 256 256" xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMinYMin meet"><path d="M255.96 134.393c0-21.521-13.373-40.117-33.223-47.43a75.239 75.239 0 0 0 1.253-13.791c0-39.909-32.386-72.295-72.295-72.295-23.193 0-44.923 11.074-58.505 30.088-6.686-5.224-14.835-7.94-23.402-7.94-21.104 0-38.446 17.133-38.446 38.446 0 4.597.836 9.194 2.298 13.373C13.582 81.739 0 100.962 0 122.274c0 21.522 13.373 40.327 33.431 47.64-.835 4.388-1.253 8.985-1.253 13.79 0 39.7 32.386 72.087 72.086 72.087 23.402 0 44.924-11.283 58.505-30.088 6.686 5.223 15.044 8.149 23.611 8.149 21.104 0 38.446-17.134 38.446-38.446 0-4.597-.836-9.194-2.298-13.373 19.64-7.104 33.431-26.327 33.431-47.64z" fill="#FFF"/><path d="M100.085 110.364l57.043 26.119 57.669-50.565a64.312 64.312 0 0 0 1.253-12.746c0-35.52-28.834-64.355-64.355-64.355-21.313 0-41.162 10.447-53.072 27.998l-9.612 49.73 11.074 23.82z" fill="#F4BD19"/><path d="M40.953 170.75c-.835 4.179-1.253 8.567-1.253 12.955 0 35.52 29.043 64.564 64.564 64.564 21.522 0 41.372-10.656 53.49-28.208l9.403-49.729-12.746-24.238-57.251-26.118-56.207 50.774z" fill="#3CBEB1"/><path d="M40.536 71.918l39.073 9.194 8.775-44.506c-5.432-4.179-11.91-6.268-18.805-6.268-16.925 0-30.924 13.79-30.924 30.924 0 3.552.627 7.313 1.88 10.656z" fill="#E9478C"/><path d="M37.192 81.32c-17.551 5.642-29.67 22.567-29.67 40.954 0 17.97 11.074 34.059 27.79 40.327l54.953-49.73-10.03-21.52-43.043-10.03z" fill="#2C458F"/><path d="M167.784 219.852c5.432 4.18 11.91 6.478 18.596 6.478 16.925 0 30.924-13.79 30.924-30.924 0-3.761-.627-7.314-1.88-10.657l-39.073-9.193-8.567 44.296z" fill="#95C63D"/><path d="M175.724 165.317l43.043 10.03c17.551-5.85 29.67-22.566 29.67-40.954 0-17.97-11.074-33.849-27.79-40.326l-56.415 49.311 11.492 21.94z" fill="#176655"/></svg>


View File

@@ -1,19 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg version="1.1" baseProfile="tiny" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
width="800px" height="800px" viewBox="0 0 24 24" overflow="visible" xml:space="preserve">
<g >
<rect y="0" fill="none" width="24" height="24"/>
<g transform="translate(1.000000, 8.000000)">
<path fill-rule="evenodd" fill="#5C85DE" d="M2-1.9c-1.1,0-2.3,1.1-2.3,2.2V10H2V5.5h2.2V10h2.2V0.3c0-1.1-1.1-2.2-2.3-2.2H2
L2-1.9z M2,3.2v-3h2.2v3H2L2,3.2z"/>
<path fill-rule="evenodd" fill="#5C85DE" d="M10.3-2C9.1-2,8-0.9,8,0.2V10l2.2,0V5.5h2.2c1.1,0,2.3-1.1,2.3-2.2l0-3
c0-1.1-1.1-2.2-2.3-2.2H10.3L10.3-2z M10.2,3.2v-3h2.2v3H10.2L10.2,3.2z"/>
<polygon fill-rule="evenodd" fill="#5C85DE" points="18.5,0.3 18.5,7.8 16.2,7.8 16.2,10 23,10 23,7.8 20.8,7.8 20.8,0.3 23,0.3
23,-1.9 16.2,-1.9 16.2,0.3 "/>
<polygon fill-rule="evenodd" fill="#3367D6" points="2,5.5 2,3.2 3.5,3.2 "/>
<polygon fill-rule="evenodd" fill="#3367D6" points="10.2,5.5 10.2,3.2 11.5,3.2 "/>
<polygon fill-rule="evenodd" fill="#3367D6" points="18.5,1.8 18.5,1.8 18.5,0.3 20.8,0.3 "/>
</g>
</g>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#2088ff" d="M26.666 0C11.97 0 0 11.97 0 26.666c0 12.87 9.181 23.651 21.334 26.13v37.87c0 11.77 9.68 21.334 21.332 21.334h.195c1.302 9.023 9.1 16 18.473 16C71.612 128 80 119.612 80 109.334s-8.388-18.668-18.666-18.668c-9.372 0-17.17 6.977-18.473 16h-.195c-8.737 0-16-7.152-16-16V63.779a18.514 18.514 0 0 0 13.24 5.555h2.955c1.303 9.023 9.1 16 18.473 16 9.372 0 17.169-6.977 18.47-16h11.057c1.303 9.023 9.1 16 18.473 16 10.278 0 18.666-8.39 18.666-18.668C128 56.388 119.612 48 109.334 48c-9.373 0-17.171 6.977-18.473 16H79.805c-1.301-9.023-9.098-16-18.471-16s-17.171 6.977-18.473 16h-2.955c-6.433 0-11.793-4.589-12.988-10.672 14.58-.136 26.416-12.05 26.416-26.662C53.334 11.97 41.362 0 26.666 0zm0 5.334A21.292 21.292 0 0 1 48 26.666 21.294 21.294 0 0 1 26.666 48 21.292 21.292 0 0 1 5.334 26.666 21.29 21.29 0 0 1 26.666 5.334zm-5.215 7.541C18.67 12.889 16 15.123 16 18.166v17.043c0 4.043 4.709 6.663 8.145 4.533l13.634-8.455c3.257-2.02 3.274-7.002.032-9.045l-13.635-8.59a5.024 5.024 0 0 0-2.725-.777zm-.117 5.291 13.635 8.588-13.635 8.455V18.166zm40 35.168a13.29 13.29 0 0 1 13.332 13.332A13.293 13.293 0 0 1 61.334 80 13.294 13.294 0 0 1 48 66.666a13.293 13.293 0 0 1 13.334-13.332zm48 0a13.29 13.29 0 0 1 13.332 13.332A13.293 13.293 0 0 1 109.334 80 13.294 13.294 0 0 1 96 66.666a13.293 13.293 0 0 1 13.334-13.332zm-42.568 6.951a2.667 2.667 0 0 0-1.887.78l-6.3 6.294-2.093-2.084a2.667 2.667 0 0 0-3.771.006 2.667 2.667 0 0 0 .008 3.772l3.974 3.96a2.667 2.667 0 0 0 3.766-.001l8.185-8.174a2.667 2.667 0 0 0 .002-3.772 2.667 2.667 0 0 0-1.884-.78zm48 0a2.667 2.667 0 0 0-1.887.78l-6.3 6.294-2.093-2.084a2.667 2.667 0 0 0-3.771.006 2.667 2.667 0 0 0 .008 3.772l3.974 3.96a2.667 2.667 0 0 0 3.766-.001l8.185-8.174a2.667 2.667 0 0 0 .002-3.772 2.667 2.667 0 0 0-1.884-.78zM61.334 96a13.293 13.293 0 0 1 13.332 13.334 13.29 13.29 0 0 1-13.332 13.332A13.293 13.293 0 0 1 48 109.334 13.294 13.294 0 0 1 61.334 96zM56 105.334c-2.193 0-4 1.807-4 4 0 2.195 1.808 4 4 4s4-1.805 4-4c0-2.193-1.807-4-4-4zm10.666 0c-2.193 0-4 1.807-4 4 0 2.195 1.808 4 4 4s4-1.805 4-4c0-2.193-1.807-4-4-4zM56 108c.75 0 1.334.585 1.334 1.334 0 .753-.583 1.332-1.334 1.332-.75 0-1.334-.58-1.334-1.332 0-.75.585-1.334 1.334-1.334zm10.666 0c.75 0 1.334.585 1.334 1.334 0 .753-.583 1.332-1.334 1.332-.75 0-1.332-.58-1.332-1.332 0-.75.583-1.334 1.332-1.334z"/><path fill="#79b8ff" d="M109.334 90.666c-9.383 0-17.188 6.993-18.477 16.031a2.667 2.667 0 0 0-.265-.011l-2.7.09a2.667 2.667 0 0 0-2.578 2.751 2.667 2.667 0 0 0 2.752 2.578l2.7-.087a2.667 2.667 0 0 0 .097-.006C92.17 121.029 99.965 128 109.334 128c10.278 0 18.666-8.388 18.666-18.666s-8.388-18.668-18.666-18.668zm0 5.334a13.293 13.293 0 0 1 13.332 13.334 13.29 13.29 0 0 1-13.332 13.332A13.293 13.293 0 0 1 96 109.334 13.294 13.294 0 0 1 109.334 96z"/></svg>


View File

@@ -1,5 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="lucide/github">
<path id="Vector" d="M15 22V18C15.1391 16.7473 14.7799 15.4901 14 14.5C17 14.5 20 12.5 20 9C20.08 7.75 19.73 6.52 19 5.5C19.28 4.35 19.28 3.15 19 2C19 2 18 2 16 3.5C13.36 3 10.64 3 8 3.5C6 2 5 2 5 2C4.7 3.15 4.7 4.35 5 5.5C4.27187 6.51588 3.91847 7.75279 4 9C4 12.5 7 14.5 10 14.5C9.61 14.99 9.32 15.55 9.15 16.15C8.98 16.75 8.93 17.38 9 18M9 18V22M9 18C4.49 20 4 16 2 16" stroke="#C0C1C3" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</g>
</svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 373.71 200"><defs><style type="text/css">.cls-1{fill:#008ec7;}.cls-2{fill:#005b9b;}.cls-3{fill:#fff;}</style></defs><title>IETF-Badge-HTTP</title><g id="Layer_2"><path class="cls-1" d="M326,0H47.73L0,100,47.73,200H326l47.73-100ZM310.05,183.36H58.22L18.43,100,58.22,16.64H310.05L349.84,100Z"/><polygon class="cls-2" points="349.84 100.01 310.05 183.37 58.22 183.37 18.43 100.01 58.22 16.64 310.05 16.64 349.84 100.01"/><path class="cls-3" d="M128.05,71.89v59.53H114.27V107h-27v24.41H73.46V71.89H87.23V95.36h27V71.89Z"/><path class="cls-3" d="M154.5,83.12H135.45V71.89h51.87V83.12h-19v48.3H154.5Z"/><path class="cls-3" d="M207.9,83.12H188.85V71.89h51.87V83.12H221.67v48.3H207.9Z"/><path class="cls-3" d="M287.62,74.53a20.45,20.45,0,0,1,9,7.48,20.67,20.67,0,0,1,3.14,11.48,20.73,20.73,0,0,1-3.14,11.44,20.06,20.06,0,0,1-9,7.48A33.55,33.55,0,0,1,273.88,115h-12v16.42H248.12V71.89h25.76A33.05,33.05,0,0,1,287.62,74.53Zm-5.06,26.57a9.33,9.33,0,0,0,3.23-7.61c0-3.34-1.08-5.91-3.23-7.69s-5.3-2.68-9.44-2.68H261.89v20.66h11.23Q279.33,103.78,282.56,101.1Z"/></g></svg>


File diff suppressed because one or more lines are too long (image; before: 13 KiB)

View File

@@ -1 +0,0 @@
<svg viewBox="0 0 832.8 959.8" xmlns="http://www.w3.org/2000/svg" width="2169" height="2500"><path d="M672.6 332.3l160.2-92.4v480L416.4 959.8V775.2l256.2-147.6z" fill="#00ac69"/><path d="M416.4 184.6L160.2 332.3 0 239.9 416.4 0l416.4 239.9-160.2 92.4z" fill="#1ce783"/><path d="M256.2 572.3L0 424.6V239.9l416.4 240v479.9l-160.2-92.2z" fill="#1d252c"/></svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 48 48"><path fill="#fff" d="M44.559 19.646a11.957 11.957 0 0 0-1.028-9.822 12.094 12.094 0 0 0-13.026-5.802A11.962 11.962 0 0 0 21.485 0 12.097 12.097 0 0 0 9.95 8.373a11.964 11.964 0 0 0-7.997 5.8A12.097 12.097 0 0 0 3.44 28.356a11.957 11.957 0 0 0 1.028 9.822 12.094 12.094 0 0 0 13.026 5.802 11.953 11.953 0 0 0 9.02 4.02 12.096 12.096 0 0 0 11.54-8.379 11.964 11.964 0 0 0 7.997-5.8 12.099 12.099 0 0 0-1.491-14.177zM26.517 44.863a8.966 8.966 0 0 1-5.759-2.082 6.85 6.85 0 0 0 .284-.16L30.6 37.1c.49-.278.79-.799.786-1.361V22.265l4.04 2.332a.141.141 0 0 1 .078.111v11.16a9.006 9.006 0 0 1-8.987 8.995zM7.191 36.608a8.957 8.957 0 0 1-1.073-6.027c.071.042.195.119.284.17l9.558 5.52a1.556 1.556 0 0 0 1.57 0l11.67-6.738v4.665a.15.15 0 0 1-.057.124l-9.662 5.579a9.006 9.006 0 0 1-12.288-3.293zM4.675 15.744a8.966 8.966 0 0 1 4.682-3.943c0 .082-.005.228-.005.33v11.042a1.555 1.555 0 0 0 .785 1.359l11.669 6.736-4.04 2.333a.143.143 0 0 1-.136.012L7.967 28.03a9.006 9.006 0 0 1-3.293-12.284zm33.19 7.724L26.196 16.73l4.04-2.331a.143.143 0 0 1 .136-.012l9.664 5.579c4.302 2.485 5.776 7.989 3.29 12.29a8.991 8.991 0 0 1-4.68 3.943V24.827a1.553 1.553 0 0 0-.78-1.36zm4.02-6.051c-.07-.044-.195-.119-.283-.17l-9.558-5.52a1.556 1.556 0 0 0-1.57 0l-11.67 6.738V13.8a.15.15 0 0 1 .057-.124l9.662-5.574a8.995 8.995 0 0 1 13.36 9.315zm-25.277 8.315-4.04-2.333a.141.141 0 0 1-.079-.11v-11.16a8.997 8.997 0 0 1 14.753-6.91c-.073.04-.2.11-.283.161L17.4 10.9a1.552 1.552 0 0 0-.786 1.36l-.006 13.469zM18.803 21l5.198-3.002 5.197 3V27l-5.197 3-5.198-3z"/></svg>


View File

@@ -1 +0,0 @@
<svg height="2500" width="2500" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 80 80"><linearGradient id="a" x1="0%" y1="100%" y2="0%"><stop offset="0" stop-color="#1b660f"/><stop offset="1" stop-color="#6cae3e"/></linearGradient><g fill="none" fill-rule="evenodd"><path d="M0 0h80v80H0z" fill="url(#a)"/><path d="M60.836 42.893l.384-2.704c3.54 2.12 3.587 2.997 3.586 3.02-.006.006-.61.51-3.97-.316zm-1.943-.54C52.773 40.5 44.25 36.59 40.8 34.96c0-.014.004-.027.004-.041a2.406 2.406 0 0 0-2.404-2.403c-1.324 0-2.402 1.078-2.402 2.403s1.078 2.403 2.402 2.403c.582 0 1.11-.217 1.527-.562 4.058 1.92 12.515 5.774 18.68 7.594L56.17 61.56a.955.955 0 0 0-.01.14c0 1.516-6.707 4.299-17.666 4.299-11.075 0-17.853-2.783-17.853-4.298 0-.046-.003-.091-.01-.136l-5.093-37.207c4.409 3.035 13.892 4.64 22.962 4.64 9.056 0 18.523-1.6 22.94-4.625zM15 20.478C15.072 19.162 22.634 14 38.5 14c15.864 0 23.427 5.16 23.5 6.478v.449C61.13 23.877 51.33 27 38.5 27c-12.852 0-22.657-3.132-23.5-6.087zm49 .022c0-3.465-9.934-8.5-25.5-8.5S13 17.035 13 20.5l.094.754 5.548 40.524C18.775 66.31 30.86 68 38.494 68c9.472 0 19.535-2.178 19.665-6.22l2.396-16.896c1.333.319 2.43.482 3.31.482 1.184 0 1.984-.29 2.469-.867a1.95 1.95 0 0 0 .436-1.66c-.26-1.383-1.902-2.875-5.248-4.784l2.376-16.762z" fill="#fff"/></g></svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" fill="#29b5e8"><path d="M9.86 15.298l13.008 7.8a3.72 3.72 0 0 0 4.589-.601 4.01 4.01 0 0 0 1.227-2.908V3.956a3.81 3.81 0 0 0-1.861-3.42 3.81 3.81 0 0 0-3.893 0 3.81 3.81 0 0 0-1.861 3.42v8.896l-7.387-4.43a3.79 3.79 0 0 0-2.922-.4c-.986.265-1.818.94-2.3 1.844-1.057 1.9-.44 4.28 1.4 5.422m31.27 7.8l13.008-7.8c1.84-1.143 2.458-3.533 1.4-5.424a3.75 3.75 0 0 0-5.22-1.452l-7.3 4.37v-8.84a3.81 3.81 0 1 0-7.615 0v15.323a4.08 4.08 0 0 0 .494 2.367c.482.903 1.314 1.57 2.3 1.844a3.71 3.71 0 0 0 2.922-.4M29.552 31.97c.013-.25.108-.5.272-.68l1.52-1.58a1.06 1.06 0 0 1 .658-.282h.057a1.05 1.05 0 0 1 .656.282l1.52 1.58a1.12 1.12 0 0 1 .272.681v.06a1.13 1.13 0 0 1-.272.683l-1.52 1.58a1.04 1.04 0 0 1-.656.284h-.057c-.246-.014-.48-.115-.658-.284l-1.52-1.58a1.13 1.13 0 0 1-.272-.683zm-4.604-.65v1.364a1.54 1.54 0 0 0 .372.93l5.16 5.357a1.42 1.42 0 0 0 .895.386h1.312a1.42 1.42 0 0 0 .895-.386l5.16-5.357a1.54 1.54 0 0 0 .372-.93V31.32a1.54 1.54 0 0 0-.372-.93l-5.16-5.357a1.42 1.42 0 0 0-.895-.386h-1.312a1.42 1.42 0 0 0-.895.386L25.32 30.4a1.55 1.55 0 0 0-.372.93M3.13 27.62l7.365 4.417L3.13 36.45a4.06 4.06 0 0 0-1.399 5.424 3.75 3.75 0 0 0 2.3 1.844c.986.274 2.042.133 2.922-.392l13.008-7.8c1.2-.762 1.9-2.078 1.9-3.492a4.16 4.16 0 0 0-1.9-3.492l-13.008-7.8a3.79 3.79 0 0 0-2.922-.4c-.986.265-1.818.94-2.3 1.844-1.057 1.9-.44 4.278 1.4 5.422m38.995 4.442a4 4 0 0 0 1.91 3.477l13 7.8c.88.524 1.934.666 2.92.392s1.817-.94 2.3-1.843a4.05 4.05 0 0 0-1.4-5.424L53.5 32.038l7.365-4.417c1.84-1.143 2.457-3.53 1.4-5.422a3.74 3.74 0 0 0-2.3-1.844c-.987-.274-2.042-.134-2.92.4l-13 7.8a4 4 0 0 0-1.91 3.507M25.48 40.508a3.7 3.7 0 0 0-2.611.464l-13.008 7.8c-1.84 1.143-2.456 3.53-1.4 5.422.483.903 1.314 1.57 2.3 1.843a3.75 3.75 0 0 0 2.922-.392l7.387-4.43v8.83a3.81 3.81 0 1 0 7.614 0V44.4a3.91 3.91 0 0 0-3.205-3.903m28.66 8.276l-13.008-7.8a3.75 3.75 0 0 0-2.922-.392 3.74 3.74 0 0 0-2.3 1.843 4.09 4.09 0 0 0-.494 2.37v15.25a3.81 3.81 0 1 0 7.614 0V51.28l7.287 4.37a3.79 3.79 0 0 0 2.922.4c.986-.265 1.818-.94 2.3-1.844 1.057-1.9.44-4.28-1.4-5.422"/></svg>


View File

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="80px" height="80px" viewBox="0 0 80 80" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<!-- Generator: Sketch 64 (93537) - https://sketch.com -->
<title>Icon-Architecture/64/Arch_AWS-Simple-Notification-Service_64</title>
<desc>Created with Sketch.</desc>
<defs>
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
<stop stop-color="#B0084D" offset="0%"></stop>
<stop stop-color="#FF4F8B" offset="100%"></stop>
</linearGradient>
</defs>
<g id="Icon-Architecture/64/Arch_AWS-Simple-Notification-Service_64" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="Icon-Architecture-BG/64/Application-Integration" fill="url(#linearGradient-1)">
<rect id="Rectangle" x="0" y="0" width="80" height="80"></rect>
</g>
<path d="M17,38 C18.103,38 19,38.897 19,40 C19,41.103 18.103,42 17,42 C15.897,42 15,41.103 15,40 C15,38.897 15.897,38 17,38 L17,38 Z M41,64 C29.314,64 19.289,55.466 17.194,43.98 C18.965,43.894 20.427,42.659 20.857,41 L27,41 L27,39 L20.857,39 C20.427,37.342 18.966,36.107 17.195,36.02 C19.285,24.71 29.511,16 41,16 C45.313,16 49.832,17.622 54.429,20.821 L55.571,19.179 C50.633,15.743 45.73,14 41,14 C28.27,14 16.949,23.865 15.063,36.521 C13.839,37.207 13,38.5 13,40 C13,41.5 13.839,42.793 15.063,43.478 C16.97,56.341 28.056,66 41,66 C46.407,66 51.942,64.157 56.585,60.811 L55.415,59.189 C51.11,62.292 45.991,64 41,64 L41,64 Z M30.101,36.442 C31.955,36.895 34.275,37 36,37 C37.642,37 39.823,36.905 41.629,36.506 L37.105,45.553 C37.036,45.691 37,45.845 37,46 L37,50.453 C36.199,50.964 34.833,51.812 34,51.986 L34,46 C34,45.868 33.974,45.737 33.923,45.615 L30.101,36.442 Z M36,33 C40.025,33 42.174,33.604 42.841,34 C42.174,34.396 40.025,35 36,35 C31.975,35 29.826,34.396 29.159,34 C29.826,33.604 31.975,33 36,33 L36,33 Z M33,54 L34,54 C34.043,54 34.086,53.997 34.128,53.992 C35.352,53.833 36.909,52.887 38.272,52.013 L38.535,51.845 C38.824,51.661 39,51.342 39,51 L39,46.236 L44.559,35.12 C44.833,34.801 45,34.434 45,34 C45,31.39 39.361,31 36,31 C32.639,31 27,31.39 27,34 C27,34.366 27.12,34.684 27.32,34.967 L32,46.2 L32,53 C32,53.552 32.447,54 33,54 L33,54 Z M62,53 C63.103,53 64,53.897 64,55 C64,56.103 63.103,57 62,57 C60.897,57 60,56.103 60,55 C60,53.897 60.897,53 62,53 L62,53 Z M62,23 C63.103,23 64,23.897 64,25 C64,26.103 63.103,27 62,27 C60.897,27 60,26.103 60,25 C60,23.897 60.897,23 62,23 L62,23 Z M64,38 C65.103,38 66,38.897 66,40 C66,41.103 65.103,42 64,42 C62.897,42 62,41.103 62,40 C62,38.897 62.897,38 64,38 L64,38 Z M54,41 L60.143,41 C60.589,42.72 62.142,44 64,44 C66.206,44 68,42.206 68,40 C68,37.794 66.206,36 64,36 C62.142,36 60.589,37.28 60.143,39 L54,39 L54,26 L58.143,26 C58.589,27.72 60.142,29 62,29 C64.206,29 66,27.206 66,25 C66,22.794 64.206,21 62,21 C60.142,21 58.589,22.28 58.143,24 L53,24 C52.447,24 52,24.448 52,25 L52,39 L45,39 L45,41 L52,41 L52,55 C52,55.552 52.447,56 53,56 L58.143,56 C58.589,57.72 60.142,59 62,59 C64.206,59 66,57.206 66,55 C66,52.794 64.206,51 62,51 C60.142,51 58.589,52.28 58.143,54 L54,54 L54,41 Z" id="AWS-Simple-Notification-Service_Icon_64_Squid" fill="#FFFFFF"></path>
</g>
</svg>


File diff suppressed because one or more lines are too long (image; before: 5.7 KiB)

View File

@@ -1,8 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="300" height="200">
<path fill="#fff" d="M0 0h300v200H0z"/>
<g transform="translate(30.667 -1141.475) scale(1.33333)">
<path d="M25 911.61v39h15v-6h-9v-27h9v-6zm114 0v6h9v27h-9v6h15v-39z" style="line-height:normal;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000;text-transform:none;text-orientation:mixed;white-space:normal;shape-padding:0;isolation:auto;mix-blend-mode:normal;solid-color:#000;solid-opacity:1" color="#000" font-weight="400" font-family="sans-serif" overflow="visible" fill="#201a26"/>
<path d="M92.5 931.11l27-15v30z" fill="#30d475"/>
<circle cx="70.002" cy="931.111" r="13.5" fill="#30d475"/>
</g>
</svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32" width="64" height="64" fill="#00749a"><path d="M2.26 16c0 5.45 3.13 10.145 7.7 12.348L3.478 10.435C2.725 12.174 2.26 14.03 2.26 16zm23.015-.696c0-1.68-.638-2.9-1.16-3.768-.696-1.16-1.333-2.087-1.333-3.246 0-1.275.986-2.435 2.32-2.435h.174C22.84 3.594 19.594 2.26 16 2.26A13.95 13.95 0 0 0 4.522 8.463h.87c1.45 0 3.652-.174 3.652-.174.754-.058.812 1.043.116 1.16 0 0-.754.116-1.565.116l4.986 14.84 3.014-8.986-2.145-5.855L12 9.45c-.754-.058-.638-1.16.058-1.16 0 0 2.26.174 3.594.174 1.45 0 3.652-.174 3.652-.174.754-.058.812 1.043.116 1.16 0 0-.754.116-1.565.116L22.84 24.35l1.4-4.58c.58-1.913 1.043-3.246 1.043-4.464zm-9.043 1.913L12.116 29.16c1.217.348 2.55.58 3.884.58 1.623 0 3.13-.3 4.58-.754-.058-.058-.058-.116-.116-.174zM28.058 9.45l.116 1.4c0 1.4-.232 2.957-1.043 4.928l-4.174 12.116c4.058-2.377 6.84-6.783 6.84-11.884-.058-2.377-.696-4.58-1.74-6.55zM16 0C7.188 0 0 7.188 0 16s7.188 16 16 16 16-7.188 16-16S24.812 0 16 0zm0 31.304C7.594 31.304.754 24.464.754 16A15.27 15.27 0 0 1 16 .754 15.27 15.27 0 0 1 31.246 16c0 8.464-6.84 15.304-15.246 15.304z"/></svg>


View File

@@ -14,8 +14,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -9,8 +9,8 @@
"tooltip_notification_channels": "More details on how to setting notification channels",
"sending_channels_note": "The alerts will be sent to all the configured channels.",
"loading_channels_message": "Loading Channels..",
"page_title_create": "New Notification Channel",
"page_title_edit": "Edit Notification Channel",
"page_title_create": "New Notification Channels",
"page_title_edit": "Edit Notification Channels",
"button_save_channel": "Save",
"button_test_channel": "Test",
"button_return": "Back",
@@ -62,8 +62,5 @@
"channel_test_failed": "Failed to send a test message to this channel, please confirm that the parameters are set correctly",
"channel_test_unexpected": "An unexpected error occurred while sending a message to this channel, please try again",
"webhook_url_required": "Webhook URL is mandatory",
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)",
"api_key_required": "API Key is mandatory",
"to_required": "To field is mandatory",
"channel_name_required": "Channel name is mandatory"
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)"
}

View File

@@ -7,8 +7,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -129,6 +129,5 @@
"text_num_points": "data points in each result group",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query",
"alert_rule_not_found": "Alert Rule not found"
"selected_query_placeholder": "Select query"
}

View File

@@ -9,8 +9,8 @@
"tooltip_notification_channels": "More details on how to setting notification channels",
"sending_channels_note": "The alerts will be sent to all the configured channels.",
"loading_channels_message": "Loading Channels..",
"page_title_create": "New Notification Channel",
"page_title_edit": "Edit Notification Channel",
"page_title_create": "New Notification Channels",
"page_title_edit": "Edit Notification Channels",
"button_save_channel": "Save",
"button_test_channel": "Test",
"button_return": "Back",
@@ -77,8 +77,5 @@
"channel_test_failed": "Failed to send a test message to this channel, please confirm that the parameters are set correctly",
"channel_test_unexpected": "An unexpected error occurred while sending a message to this channel, please try again",
"webhook_url_required": "Webhook URL is mandatory",
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)",
"api_key_required": "API Key is mandatory",
"to_required": "To field is mandatory",
"channel_name_required": "Channel name is mandatory"
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)"
}

View File

@@ -7,8 +7,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -3,7 +3,6 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import getAll from 'api/v1/user/get';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ORG_PREFERENCES } from 'constants/orgPreferences';
import ROUTES from 'constants/routes';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import history from 'lib/history';
@@ -15,7 +14,6 @@ import { matchPath, useLocation } from 'react-router-dom';
import { SuccessResponseV2 } from 'types/api';
import APIError from 'types/api/error';
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
import { OrgPreference } from 'types/api/preferences/preference';
import { Organization } from 'types/api/user/getOrganization';
import { UserResponse } from 'types/api/user/getUser';
import { USER_ROLES } from 'types/roles';
@@ -80,7 +78,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const checkFirstTimeUser = useCallback((): boolean => {
const users = usersData?.data || [];
const remainingUsers = (Array.isArray(users) ? users : []).filter(
const remainingUsers = users.filter(
(user) => user.email !== 'admin@signoz.cloud',
);
@@ -97,8 +95,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
usersData.data
) {
const isOnboardingComplete = orgPreferences?.find(
(preference: OrgPreference) =>
preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
)?.value;
const isFirstUser = checkFirstTimeUser();
@@ -126,8 +123,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const isRouteEnabledForWorkspaceBlockedState =
isAdmin &&
(path === ROUTES.SETTINGS ||
path === ROUTES.ORG_SETTINGS ||
(path === ROUTES.ORG_SETTINGS ||
path === ROUTES.BILLING ||
path === ROUTES.MY_SETTINGS);
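
The hunk above re-reads the onboarding flag straight off the org preferences list. A minimal sketch of that lookup, with the preference shape reduced to the two fields visible in the diff (one side of the hunk keys on name, the other on key):

interface OrgPreferenceLike {
  name: string; // the other side of the hunk uses `key` for the same purpose
  value: boolean | string;
}

function isOnboardingComplete(preferences?: OrgPreferenceLike[]): boolean {
  // find() yields undefined when the preference is absent, so coerce to false
  return Boolean(
    preferences?.find((preference) => preference.name === 'ORG_ONBOARDING')
      ?.value,
  );
}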

View File

@@ -28,7 +28,6 @@ import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { LicenseStatus } from 'types/api/licensesV3/getActive';
import { Userpilot } from 'userpilot';
import { extractDomain } from 'utils/app';
@@ -71,7 +70,7 @@ function App(): JSX.Element {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].displayName : '';
const { displayName, email, role, id, orgId } = user;
const { displayName, email, role } = user;
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
@@ -105,7 +104,7 @@ function App(): JSX.Element {
logEvent('Domain Identified', groupTraits, 'group');
}
if (window && window.Appcues) {
window.Appcues.identify(id, {
window.Appcues.identify(email, {
name: displayName,
tenant_id: hostNameParts[0],
@@ -131,7 +130,7 @@ function App(): JSX.Element {
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.identify(id, {
posthog?.identify(email, {
email,
name: displayName,
orgName,
@@ -143,7 +142,7 @@ function App(): JSX.Element {
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.group('company', orgId, {
posthog?.group('company', domain, {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -172,13 +171,11 @@ function App(): JSX.Element {
user &&
!!user.email
) {
// either the active API returns error with 404 or 501 and if it returns a terminated license means it's on basic plan
const isOnBasicPlan =
(activeLicenseFetchError &&
[StatusCodes.NOT_FOUND, StatusCodes.NOT_IMPLEMENTED].includes(
activeLicenseFetchError?.getHttpStatusCode(),
)) ||
(activeLicense?.status && activeLicense.status === LicenseStatus.INVALID);
activeLicenseFetchError &&
[StatusCodes.NOT_FOUND, StatusCodes.NOT_IMPLEMENTED].includes(
activeLicenseFetchError?.getHttpStatusCode(),
);
const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER);
if (isLoggedInState && user && user.id && user.email && !isIdentifiedUser) {
@@ -191,22 +188,15 @@ function App(): JSX.Element {
// if the user is on basic plan then remove billing
if (isOnBasicPlan) {
updatedRoutes = updatedRoutes.filter(
(route) =>
route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
(route) => route?.path !== ROUTES.BILLING,
);
}
if (isEnterpriseSelfHostedUser) {
updatedRoutes.push(LIST_LICENSES);
}
// always add support route for cloud users
updatedRoutes = [...updatedRoutes, SUPPORT_ROUTE];
} else {
// if not a cloud user then remove billing and add list licenses route
updatedRoutes = updatedRoutes.filter(
(route) =>
route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
(route) => route?.path !== ROUTES.BILLING,
);
updatedRoutes = [...updatedRoutes, LIST_LICENSES];
}
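
Taken together, the branches in the hunk above gate routes by plan: basic-plan cloud users lose billing, cloud users always gain support, and self-hosted installs swap billing for a licenses page. A minimal sketch of that decision as a pure function (route paths are placeholders, not the project's ROUTES constants, and the enterprise self-hosted branch is omitted):

interface AppRouteLike {
  path: string;
}

const PATHS = {
  BILLING: '/billing',
  SUPPORT: '/support',
  LIST_LICENSES: '/licenses',
};

function resolveRoutes(
  baseRoutes: AppRouteLike[],
  flags: { isCloudUser: boolean; isOnBasicPlan: boolean },
): AppRouteLike[] {
  let routes = [...baseRoutes];
  if (flags.isCloudUser) {
    if (flags.isOnBasicPlan) {
      // basic-plan cloud users never see the billing page
      routes = routes.filter((route) => route.path !== PATHS.BILLING);
    }
    // every cloud user gets the support route
    routes = [...routes, { path: PATHS.SUPPORT }];
  } else {
    // self-hosted installs drop billing and list their licenses instead
    routes = routes.filter((route) => route.path !== PATHS.BILLING);
    routes = [...routes, { path: PATHS.LIST_LICENSES }];
  }
  return routes;
}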

View File

@@ -128,7 +128,12 @@ export const AlertOverview = Loadable(
);
export const CreateAlertChannelAlerts = Loadable(
() => import(/* webpackChunkName: "Create Channels" */ 'pages/Settings'),
() =>
import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'),
);
export const EditAlertChannelsAlerts = Loadable(
() => import(/* webpackChunkName: "Edit Channels" */ 'pages/ChannelsEdit'),
);
export const AllAlertChannels = Loadable(
@@ -160,7 +165,7 @@ export const APIKeys = Loadable(
);
export const MySettings = Loadable(
() => import(/* webpackChunkName: "All MySettings" */ 'pages/Settings'),
() => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'),
);
export const CustomDomainSettings = Loadable(
@@ -217,7 +222,7 @@ export const LogsIndexToFields = Loadable(
);
export const BillingPage = Loadable(
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Settings'),
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Billing'),
);
export const SupportPage = Loadable(
@@ -244,7 +249,7 @@ export const WorkspaceAccessRestricted = Loadable(
);
export const ShortcutsPage = Loadable(
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Settings'),
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'),
);
export const InstalledIntegrations = Loadable(
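
The hunks above only retarget lazy imports, but the pattern is worth spelling out: each route component is a code-split chunk, and the webpackChunkName comment names the emitted bundle. A minimal sketch, assuming Loadable wraps React.lazy (the project's actual helper is not part of this diff, and the import path below is illustrative):

import { ComponentType, lazy, LazyExoticComponent } from 'react';

function Loadable<T extends ComponentType<any>>(
  factory: () => Promise<{ default: T }>,
): LazyExoticComponent<T> {
  return lazy(factory);
}

// Pointing the import at the right page module is what fixes the bug above:
// the chunk name is cosmetic, the module path decides what actually renders.
export const MySettings = Loadable(
  () => import(/* webpackChunkName: "MySettings" */ 'pages/MySettings'),
);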

View File

@@ -7,15 +7,20 @@ import {
AlertOverview,
AllAlertChannels,
AllErrors,
APIKeys,
ApiMonitoring,
BillingPage,
CreateAlertChannelAlerts,
CreateNewAlerts,
CustomDomainSettings,
DashboardPage,
DashboardWidget,
EditAlertChannelsAlerts,
EditRulesPage,
ErrorDetails,
Home,
InfrastructureMonitoring,
IngestionSettings,
InstalledIntegrations,
LicensePage,
ListAllALertsPage,
@@ -26,10 +31,12 @@ import {
LogsIndexToFields,
LogsSaveViews,
MetricsExplorer,
MySettings,
NewDashboardPage,
OldLogsExplorer,
Onboarding,
OnboardingV2,
OrganizationSettings,
OrgOnboarding,
PasswordReset,
PipelinePage,
@@ -38,6 +45,7 @@ import {
ServicesTablePage,
ServiceTopLevelOperationsPage,
SettingsPage,
ShortcutsPage,
SignupPage,
SomethingWentWrong,
StatusPage,
@@ -142,7 +150,7 @@ const routes: AppRoutes[] = [
},
{
path: ROUTES.SETTINGS,
exact: false,
exact: true,
component: SettingsPage,
isPrivate: true,
key: 'SETTINGS',
@@ -252,6 +260,13 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'CHANNELS_NEW',
},
{
path: ROUTES.CHANNELS_EDIT,
exact: true,
component: EditAlertChannelsAlerts,
isPrivate: true,
key: 'CHANNELS_EDIT',
},
{
path: ROUTES.ALL_CHANNELS,
exact: true,
@@ -280,6 +295,41 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'VERSION',
},
{
path: ROUTES.ORG_SETTINGS,
exact: true,
component: OrganizationSettings,
isPrivate: true,
key: 'ORG_SETTINGS',
},
{
path: ROUTES.INGESTION_SETTINGS,
exact: true,
component: IngestionSettings,
isPrivate: true,
key: 'INGESTION_SETTINGS',
},
{
path: ROUTES.API_KEYS,
exact: true,
component: APIKeys,
isPrivate: true,
key: 'API_KEYS',
},
{
path: ROUTES.MY_SETTINGS,
exact: true,
component: MySettings,
isPrivate: true,
key: 'MY_SETTINGS',
},
{
path: ROUTES.CUSTOM_DOMAIN_SETTINGS,
exact: true,
component: CustomDomainSettings,
isPrivate: true,
key: 'CUSTOM_DOMAIN_SETTINGS',
},
{
path: ROUTES.LOGS,
exact: true,
@@ -343,6 +393,13 @@ const routes: AppRoutes[] = [
key: 'SOMETHING_WENT_WRONG',
isPrivate: false,
},
{
path: ROUTES.BILLING,
exact: true,
component: BillingPage,
key: 'BILLING',
isPrivate: true,
},
{
path: ROUTES.WORKSPACE_LOCKED,
exact: true,
@@ -364,6 +421,13 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'WORKSPACE_ACCESS_RESTRICTED',
},
{
path: ROUTES.SHORTCUTS,
exact: true,
component: ShortcutsPage,
isPrivate: true,
key: 'SHORTCUTS',
},
{
path: ROUTES.INTEGRATIONS,
exact: true,

View File

@@ -1,47 +0,0 @@
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import axios, { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
ChangelogSchema,
DeploymentType,
} from 'types/api/changelog/getChangelogByVersion';
const getChangelogByVersion = async (
versionId: string,
deployment_type?: DeploymentType,
): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> => {
try {
let queryParams = `filters[version][$eq]=${versionId}&populate[features][sort]=sort_order:asc&populate[features][populate][media][fields]=id,ext,url,mime,alternativeText`;
if (
deployment_type &&
Object.values(DeploymentType).includes(deployment_type)
) {
const excludedDeploymentType =
deployment_type === DeploymentType.CLOUD_ONLY
? DeploymentType.OSS_ONLY
: DeploymentType.CLOUD_ONLY;
queryParams = `${queryParams}&populate[features][filters][deployment_type][$notIn]=${excludedDeploymentType}`;
}
const response = await axios.get(`
https://cms.signoz.cloud/api/release-changelogs?${queryParams}
`);
if (!Array.isArray(response.data.data) || response.data.data.length === 0) {
throw new Error('No changelog found!');
}
return {
statusCode: 200,
error: null,
message: response.statusText,
payload: response.data.data[0],
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default getChangelogByVersion;
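
The deleted fetcher above assembles the CMS filter string by hand. For reference, the same parameters (taken verbatim from the hunk) expressed with URLSearchParams, which handles the encoding; this is an illustrative rewrite, not code from the repository:

function buildChangelogQuery(
  versionId: string,
  excludedDeploymentType?: string,
): string {
  const params = new URLSearchParams({
    'filters[version][$eq]': versionId,
    'populate[features][sort]': 'sort_order:asc',
    'populate[features][populate][media][fields]':
      'id,ext,url,mime,alternativeText',
  });
  if (excludedDeploymentType) {
    // only added when one deployment type has to be filtered out
    params.set(
      'populate[features][filters][deployment_type][$notIn]',
      excludedDeploymentType,
    );
  }
  return params.toString();
}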

View File

@@ -0,0 +1,27 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/dashboard/create';
const createDashboard = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const url = props.uploadedGrafana ? '/dashboards/grafana' : '/dashboards';
try {
const response = await axios.post(url, {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createDashboard;

View File

@@ -0,0 +1,9 @@
import axios from 'api';
import { PayloadProps, Props } from 'types/api/dashboard/delete';
const deleteDashboard = (props: Props): Promise<PayloadProps> =>
axios
.delete<PayloadProps>(`/dashboards/${props.uuid}`)
.then((response) => response.data);
export default deleteDashboard;

View File

@@ -0,0 +1,11 @@
import axios from 'api';
import { ApiResponse } from 'types/api';
import { Props } from 'types/api/dashboard/get';
import { Dashboard } from 'types/api/dashboard/getAll';
const getDashboard = (props: Props): Promise<Dashboard> =>
axios
.get<ApiResponse<Dashboard>>(`/dashboards/${props.uuid}`)
.then((res) => res.data.data);
export default getDashboard;

View File

@@ -0,0 +1,8 @@
import axios from 'api';
import { ApiResponse } from 'types/api';
import { Dashboard } from 'types/api/dashboard/getAll';
export const getAllDashboardList = (): Promise<Dashboard[]> =>
axios
.get<ApiResponse<Dashboard[]>>('/dashboards')
.then((res) => res.data.data);

View File

@@ -0,0 +1,11 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
interface LockDashboardProps {
uuid: string;
}
const lockDashboard = (props: LockDashboardProps): Promise<AxiosResponse> =>
axios.put(`/dashboards/${props.uuid}/lock`);
export default lockDashboard;

View File

@@ -0,0 +1,11 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
interface UnlockDashboardProps {
uuid: string;
}
const unlockDashboard = (props: UnlockDashboardProps): Promise<AxiosResponse> =>
axios.put(`/dashboards/${props.uuid}/unlock`);
export default unlockDashboard;

View File

@@ -0,0 +1,20 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/dashboard/update';
const updateDashboard = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.put(`/dashboards/${props.uuid}`, {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default updateDashboard;

View File

@@ -0,0 +1,10 @@
import axios from 'api';
import { ApiResponse } from 'types/api';
import { FeatureFlagProps } from 'types/api/features/getFeaturesFlags';
const getFeaturesFlags = (): Promise<FeatureFlagProps[]> =>
axios
.get<ApiResponse<FeatureFlagProps[]>>(`/featureFlags`)
.then((response) => response.data.data);
export default getFeaturesFlags;

View File

@@ -1,20 +1,24 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { Pipeline } from 'types/api/pipeline/def';
import { Props } from 'types/api/pipeline/post';
const post = async (props: Props): Promise<SuccessResponseV2<Pipeline>> => {
const post = async (
props: Props,
): Promise<SuccessResponse<Pipeline> | ErrorResponse> => {
try {
const response = await axios.post('/logs/pipelines', props.data);
return {
httpStatusCode: response.status,
data: response.data.data,
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
return ErrorResponseHandler(error as AxiosError);
}
};
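
This hunk is the clearest view of the two response envelopes the compare keeps toggling between: the V2 shape carries the raw HTTP status plus data and lets ErrorResponseHandlerV2 deal with failures, while the V1 shape returns a flat success/error object built from the axios response. The shapes below are reconstructed from the fields visible in the diffs (local stand-ins, not the project's exported types):

interface SuccessResponse<T> {
  statusCode: number; // always 200 on the success path
  error: null;
  message: string; // filled from response.data.status
  payload: T; // filled from response.data.data
}

interface SuccessResponseV2<T> {
  httpStatusCode: number; // the raw HTTP status code
  data: T;
}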

View File

@@ -0,0 +1,18 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { GetAllOrgPreferencesResponseProps } from 'types/api/preferences/userOrgPreferences';
const getAllOrgPreferences = async (): Promise<
SuccessResponse<GetAllOrgPreferencesResponseProps> | ErrorResponse
> => {
const response = await axios.get(`/org/preferences`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
};
export default getAllOrgPreferences;

View File

@@ -0,0 +1,18 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { GetAllUserPreferencesResponseProps } from 'types/api/preferences/userOrgPreferences';
const getAllUserPreferences = async (): Promise<
SuccessResponse<GetAllUserPreferencesResponseProps> | ErrorResponse
> => {
const response = await axios.get(`/user/preferences`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
};
export default getAllUserPreferences;

View File

@@ -0,0 +1,20 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { GetOrgPreferenceResponseProps } from 'types/api/preferences/userOrgPreferences';
const getOrgPreference = async ({
preferenceID,
}: {
preferenceID: string;
}): Promise<SuccessResponse<GetOrgPreferenceResponseProps> | ErrorResponse> => {
const response = await axios.get(`/org/preferences/${preferenceID}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
};
export default getOrgPreference;

View File

@@ -0,0 +1,22 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { GetUserPreferenceResponseProps } from 'types/api/preferences/userOrgPreferences';
const getUserPreference = async ({
preferenceID,
}: {
preferenceID: string;
}): Promise<
SuccessResponse<GetUserPreferenceResponseProps> | ErrorResponse
> => {
const response = await axios.get(`/user/preferences/${preferenceID}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
};
export default getUserPreference;

View File

@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
UpdateOrgPreferenceProps,
UpdateOrgPreferenceResponseProps,
} from 'types/api/preferences/userOrgPreferences';
const updateOrgPreference = async (
preferencePayload: UpdateOrgPreferenceProps,
): Promise<
SuccessResponse<UpdateOrgPreferenceResponseProps> | ErrorResponse
> => {
const response = await axios.put(
`/org/preferences/${preferencePayload.preferenceID}`,
{
preference_value: preferencePayload.value,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default updateOrgPreference;

View File

@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
UpdateUserPreferenceProps,
UpdateUserPreferenceResponseProps,
} from 'types/api/preferences/userOrgPreferences';
const updateUserPreference = async (
preferencePayload: UpdateUserPreferenceProps,
): Promise<
SuccessResponse<UpdateUserPreferenceResponseProps> | ErrorResponse
> => {
const response = await axios.put(
`/user/preferences/${preferencePayload.preferenceID}`,
{
preference_value: preferencePayload.value,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default updateUserPreference;
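
Both preference updaters above PUT a preference_value keyed by preferenceID. A minimal sketch of driving that endpoint from react-query, in the same useMutation style the deleted ChangelogModal further down uses (the hook and variable names here are illustrative):

import axios from 'axios';
import { useMutation } from 'react-query';

const updatePreference = (vars: { preferenceID: string; value: unknown }) =>
  axios.put(`/user/preferences/${vars.preferenceID}`, {
    preference_value: vars.value,
  });

export function useUpdatePreference() {
  // useMutation hands its variables straight to updatePreference and exposes mutate()
  return useMutation(updatePreference);
}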

View File

@@ -119,7 +119,6 @@ export const updateFunnelSteps = async (
export interface ValidateFunnelPayload {
start_time: number;
end_time: number;
steps: FunnelStepData[];
}
export interface ValidateFunnelResponse {
@@ -133,11 +132,12 @@ export interface ValidateFunnelResponse {
}
export const validateFunnelSteps = async (
funnelId: string,
payload: ValidateFunnelPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<ValidateFunnelResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/validate`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/validate`,
payload,
{ signal },
);
@@ -185,7 +185,6 @@ export interface FunnelOverviewPayload {
end_time: number;
step_start?: number;
step_end?: number;
steps: FunnelStepData[];
}
export interface FunnelOverviewResponse {
@@ -197,17 +196,20 @@ export interface FunnelOverviewResponse {
avg_rate: number;
conversion_rate: number | null;
errors: number;
// TODO(shaheer): remove p99_latency once we have support for latency
p99_latency: number;
latency: number;
};
}>;
}
export const getFunnelOverview = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelOverviewResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/overview`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/overview`,
payload,
{
signal,
@@ -235,11 +237,12 @@ export interface SlowTraceData {
}
export const getFunnelSlowTraces = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<SlowTraceData> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/slow-traces`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/slow-traces`,
payload,
{
signal,
@@ -272,7 +275,7 @@ export const getFunnelErrorTraces = async (
signal?: AbortSignal,
): Promise<SuccessResponse<ErrorTraceData> | ErrorResponse> => {
const response: AxiosResponse = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/error-traces`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/error-traces`,
payload,
{
signal,
@@ -290,7 +293,6 @@ export const getFunnelErrorTraces = async (
export interface FunnelStepsPayload {
start_time: number;
end_time: number;
steps: FunnelStepData[];
}
export interface FunnelStepGraphMetrics {
@@ -307,11 +309,12 @@ export interface FunnelStepsResponse {
}
export const getFunnelSteps = async (
funnelId: string,
payload: FunnelStepsPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelStepsResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/steps`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps`,
payload,
{ signal },
);
@@ -329,7 +332,6 @@ export interface FunnelStepsOverviewPayload {
end_time: number;
step_start?: number;
step_end?: number;
steps: FunnelStepData[];
}
export interface FunnelStepsOverviewResponse {
@@ -341,11 +343,12 @@ export interface FunnelStepsOverviewResponse {
}
export const getFunnelStepsOverview = async (
funnelId: string,
payload: FunnelStepsOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelStepsOverviewResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/steps/overview`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps/overview`,
payload,
{ signal },
);
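
Across the funnel analytics hunks above, the funnel id moves out of the request body and into the URL, and the step definitions drop out of the payloads. A minimal sketch of the resulting call shape (the base path constant and the timestamp unit are assumptions; the field names come from the diff):

import axios from 'axios';

const FUNNELS_BASE_PATH = '/api/v1/trace-funnels'; // assumed value

interface FunnelOverviewPayload {
  start_time: number;
  end_time: number;
  step_start?: number;
  step_end?: number;
}

async function fetchFunnelOverview(
  funnelId: string,
  payload: FunnelOverviewPayload,
  signal?: AbortSignal,
): Promise<unknown> {
  const response = await axios.post(
    `${FUNNELS_BASE_PATH}/${funnelId}/analytics/overview`,
    payload,
    { signal },
  );
  return response.data;
}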

View File

@@ -0,0 +1,21 @@
import axios from 'api';
import { Props } from 'types/api/userFeedback/sendResponse';
const sendFeedback = async (props: Props): Promise<number> => {
const response = await axios.post(
'/feedback',
{
email: props.email,
message: props.message,
},
{
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
},
},
);
return response.status;
};
export default sendFeedback;

View File

@@ -1,23 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/dashboard/create';
import { Dashboard } from 'types/api/dashboard/getAll';
const create = async (props: Props): Promise<SuccessResponseV2<Dashboard>> => {
try {
const response = await axios.post<PayloadProps>('/dashboards', {
...props,
});
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default create;

View File

@@ -1,19 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Dashboard, PayloadProps } from 'types/api/dashboard/getAll';
const getAll = async (): Promise<SuccessResponseV2<Dashboard[]>> => {
try {
const response = await axios.get<PayloadProps>('/dashboards');
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default getAll;

View File

@@ -1,21 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/dashboard/delete';
const deleteDashboard = async (
props: Props,
): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete<PayloadProps>(`/dashboards/${props.id}`);
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default deleteDashboard;

View File

@@ -1,20 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/dashboard/get';
import { Dashboard } from 'types/api/dashboard/getAll';
const get = async (props: Props): Promise<SuccessResponseV2<Dashboard>> => {
try {
const response = await axios.get<PayloadProps>(`/dashboards/${props.id}`);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default get;

View File

@@ -1,23 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/dashboard/lockUnlock';
const lock = async (props: Props): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.put<PayloadProps>(
`/dashboards/${props.id}/lock`,
{ lock: props.lock },
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default lock;

View File

@@ -1,23 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Dashboard } from 'types/api/dashboard/getAll';
import { PayloadProps, Props } from 'types/api/dashboard/update';
const update = async (props: Props): Promise<SuccessResponseV2<Dashboard>> => {
try {
const response = await axios.put<PayloadProps>(`/dashboards/${props.id}`, {
...props.data,
});
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default update;

View File

@@ -1,23 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
FeatureFlagProps,
PayloadProps,
} from 'types/api/features/getFeaturesFlags';
const list = async (): Promise<SuccessResponseV2<FeatureFlagProps[]>> => {
try {
const response = await axios.get<PayloadProps>(`/features`);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default list;

View File

@@ -1,23 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps } from 'types/api/preferences/list';
import { OrgPreference } from 'types/api/preferences/preference';
const listPreference = async (): Promise<
SuccessResponseV2<OrgPreference[]>
> => {
try {
const response = await axios.get<PayloadProps>(`/org/preferences`);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default listPreference;

View File

@@ -1,25 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/preferences/get';
import { OrgPreference } from 'types/api/preferences/preference';
const getPreference = async (
props: Props,
): Promise<SuccessResponseV2<OrgPreference>> => {
try {
const response = await axios.get<PayloadProps>(
`/org/preferences/${props.name}`,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default getPreference;

View File

@@ -1,22 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Props } from 'types/api/preferences/update';
const update = async (props: Props): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.put(`/org/preferences/${props.name}`, {
value: props.value,
});
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default update;

View File

@@ -0,0 +1,24 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps } from 'types/api/user/getUserPreference';
const getPreference = async (): Promise<
SuccessResponse<PayloadProps> | ErrorResponse
> => {
try {
const response = await axios.get(`/user/preferences`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default getPreference;

View File

@@ -1,21 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps } from 'types/api/preferences/list';
import { UserPreference } from 'types/api/preferences/preference';
const list = async (): Promise<SuccessResponseV2<UserPreference[]>> => {
try {
const response = await axios.get<PayloadProps>(`/user/preferences`);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default list;

View File

@@ -1,25 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/preferences/get';
import { UserPreference } from 'types/api/preferences/preference';
const get = async (
props: Props,
): Promise<SuccessResponseV2<UserPreference>> => {
try {
const response = await axios.get<PayloadProps>(
`/user/preferences/${props.name}`,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default get;

View File

@@ -1,22 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Props } from 'types/api/preferences/update';
const update = async (props: Props): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.put(`/user/preferences/${props.name}`, {
value: props.value,
});
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default update;

View File

@@ -1,162 +0,0 @@
.changelog-modal {
.ant-modal-content {
padding: unset;
background-color: var(--bg-ink-400, #121317);
.ant-modal-header {
margin-bottom: unset;
}
.ant-modal-footer {
margin-top: unset;
}
}
&-title {
display: flex;
align-items: center;
gap: 8px;
background-color: var(--bg-ink-400, #121317);
padding: 16px;
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-100, #fff);
border-bottom: 1px solid var(--bg-slate-500, #161922);
}
&-footer.scroll-available {
.scroll-btn-container {
display: block;
}
}
&-footer {
position: relative;
border: 1px solid var(--bg-slate-500, #161922);
padding: 12px;
display: flex;
align-items: center;
justify-content: space-between;
&-label {
color: var(--text-robin-400, #7190f9);
font-size: 14px;
line-height: 24px;
position: relative;
padding-left: 14px;
&::before {
content: '';
position: absolute;
top: 50%;
left: 0;
transform: translateY(-50%);
width: 6px;
height: 6px;
border-radius: 100%;
background-color: var(--bg-robin-500, #7190f9);
}
}
&-ctas {
display: flex;
margin-left: auto;
& svg {
font-size: 14px;
}
}
.scroll-btn-container {
display: none;
position: absolute;
top: -40px;
left: 50%;
transform: translateX(-50%);
.scroll-btn {
all: unset;
padding: 4px 12px 4px 10px;
background-color: var(--bg-slate-400, #1d212d);
border-radius: 20px;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
gap: 4px;
transition: background-color 0.1s;
&:hover {
background-color: var(--bg-slate-200, #2c3140);
}
&:active {
background-color: var(--bg-slate-600, #1c1f2a);
}
span {
font-size: 12px;
line-height: 18px;
color: var(--text-vanilla-400, #c0c1c3);
}
// add animation to the chevrons down icon
svg {
animation: pulse 1s infinite;
}
}
}
}
&-content {
max-height: calc(100vh - 300px);
overflow-y: auto;
padding: 16px 16px 18px 16px;
border: 1px solid var(--bg-slate-500, #161922);
border-top-width: 0;
border-bottom-width: 0;
}
}
// pulse for the scroll for more icon
@keyframes pulse {
0% {
opacity: 1;
}
50% {
opacity: 0.5;
}
}
.lightMode {
.changelog-modal {
.ant-modal-content {
background-color: var(--bg-vanilla-100);
border-color: var(--bg-vanilla-300);
}
&-title {
background: var(--bg-vanilla-100);
color: var(--bg-ink-500);
border-color: var(--bg-vanilla-300);
}
&-content {
border-color: var(--bg-vanilla-300);
}
&-footer {
border-color: var(--bg-vanilla-300);
.scroll-btn-container {
.scroll-btn {
background-color: var(--bg-vanilla-300);
span {
color: var(--text-ink-500);
}
}
}
}
}
}

View File

@@ -1,168 +0,0 @@
import './ChangelogModal.styles.scss';
import { CheckOutlined, CloseOutlined } from '@ant-design/icons';
import { Button, Modal } from 'antd';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import cx from 'classnames';
import { USER_PREFERENCES } from 'constants/userPreferences';
import dayjs from 'dayjs';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { ChevronsDown, ScrollText } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useCallback, useEffect, useRef, useState } from 'react';
import { useMutation } from 'react-query';
import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
import { UserPreference } from 'types/api/preferences/preference';
import ChangelogRenderer from './components/ChangelogRenderer';
interface Props {
changelog: ChangelogSchema;
onClose: () => void;
}
function ChangelogModal({ changelog, onClose }: Props): JSX.Element {
const [hasScroll, setHasScroll] = useState(false);
const changelogContentSectionRef = useRef<HTMLDivElement>(null);
const { userPreferences, updateUserPreferenceInContext } = useAppContext();
const formattedReleaseDate = dayjs(changelog?.release_date).format(
'MMMM D, YYYY',
);
const { isCloudUser } = useGetTenantLicense();
const seenChangelogVersion = userPreferences?.find(
(preference) =>
preference.name === USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION,
)?.value as string;
const { mutate: updateUserPreferenceMutation } = useMutation(
updateUserPreference,
);
useEffect(() => {
// Update the seen version
if (seenChangelogVersion !== changelog.version) {
const version = {
name: USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION,
value: changelog.version,
};
updateUserPreferenceInContext(version as UserPreference);
updateUserPreferenceMutation(version);
}
}, [
seenChangelogVersion,
changelog.version,
updateUserPreferenceMutation,
updateUserPreferenceInContext,
]);
const checkScroll = useCallback((): void => {
if (changelogContentSectionRef.current) {
const {
scrollHeight,
clientHeight,
scrollTop,
} = changelogContentSectionRef.current;
const isAtBottom = scrollHeight - clientHeight - scrollTop <= 8;
setHasScroll(scrollHeight > clientHeight + 24 && !isAtBottom); // 24px - buffer height to show show more
}
}, []);
useEffect(() => {
checkScroll();
const changelogContentSection = changelogContentSectionRef.current;
if (changelogContentSection) {
changelogContentSection.addEventListener('scroll', checkScroll);
}
return (): void => {
if (changelogContentSection) {
changelogContentSection.removeEventListener('scroll', checkScroll);
}
};
}, [checkScroll]);
const onClickUpdateWorkspace = (): void => {
window.open(
'https://github.com/SigNoz/signoz/releases',
'_blank',
'noopener,noreferrer',
);
};
const onClickScrollForMore = (): void => {
if (changelogContentSectionRef.current) {
changelogContentSectionRef.current.scrollTo({
top: changelogContentSectionRef.current.scrollTop + 600, // Scroll 600px from the current position
behavior: 'smooth',
});
}
};
return (
<Modal
className={cx('changelog-modal')}
title={
<div className="changelog-modal-title">
<ScrollText size={16} />
Whats New Changelog : {formattedReleaseDate}
</div>
}
width={820}
open
onCancel={onClose}
footer={
<div
className={cx('changelog-modal-footer', hasScroll && 'scroll-available')}
>
{changelog?.features && changelog.features.length > 0 && (
<span className="changelog-modal-footer-label">
{changelog.features.length} new&nbsp;
{changelog.features.length > 1 ? 'features' : 'feature'}
</span>
)}
{!isCloudUser && (
<div className="changelog-modal-footer-ctas">
<Button type="default" icon={<CloseOutlined />} onClick={onClose}>
Skip for now
</Button>
<Button
type="primary"
icon={<CheckOutlined />}
onClick={onClickUpdateWorkspace}
>
Update my workspace
</Button>
</div>
)}
{changelog && (
<div className="scroll-btn-container">
<button
data-testid="scroll-more-btn"
type="button"
className="scroll-btn"
onClick={onClickScrollForMore}
>
<ChevronsDown size={14} />
<span>Scroll for more</span>
</button>
</div>
)}
</div>
}
>
<div
className="changelog-modal-content"
data-testid="changelog-content"
ref={changelogContentSectionRef}
>
{changelog && <ChangelogRenderer changelog={changelog} />}
</div>
</Modal>
);
}
export default ChangelogModal;

Some files were not shown because too many files have changed in this diff.