Compare commits

..

79 Commits

Author SHA1 Message Date
grandwizard28
4af0503a89 chore(openfeature): integrate with openfeature 2025-04-07 01:35:16 +05:30
grandwizard28
fa2f63bc0d Merge branch 'remove-ff-unused' into licensing
# Conflicts:
#	ee/query-service/model/license.go
#	pkg/query-service/app/clickhouseReader/reader.go
#	pkg/query-service/app/querier/querier.go
#	pkg/query-service/constants/constants.go
#	pkg/query-service/featureManager/manager.go
#	pkg/query-service/model/featureSet.go
#	pkg/query-service/rules/manager.go
2025-04-06 21:11:24 +05:30
grandwizard28
084def0ba9 chore(ff): remove AWS_INTEGRATION 2025-04-06 17:03:42 +05:30
grandwizard28
b77bca7b71 Merge branch 'main' into remove-ff-unused 2025-04-05 23:38:34 +05:30
Vibhu Pandey
2330420c0d fix(querier): remove ff (#7531) 2025-04-05 18:08:06 +00:00
grandwizard28
a6bd5f9e33 Merge branch 'remove-ff-querier' into remove-ff-unused 2025-04-05 23:31:47 +05:30
Vibhu Pandey
2ae34cbcae Merge branch 'main' into remove-ff-querier 2025-04-05 23:30:15 +05:30
Vibhu Pandey
65ac277074 fix(duration|timestamp): remove duration/timestamp sort (#7530) 2025-04-05 23:26:50 +05:30
grandwizard28
46e8182ab1 Merge branch 'remove-ff-querier' into remove-ff-unused 2025-04-05 18:48:58 +05:30
grandwizard28
7bc2a614f9 Merge branch 'remove-ff-sort' into remove-ff-querier 2025-04-05 18:48:53 +05:30
grandwizard28
88227c6992 Merge branch 'main' into remove-ff-sort 2025-04-05 18:48:46 +05:30
Vibhu Pandey
b7982ca348 fix(ff): remove feature interface from ruler (#7529)
### Summary

remove feature interface from ruler
2025-04-05 12:52:26 +00:00
dependabot[bot]
2748b49a44 chore(deps): bump github.com/golang-jwt/jwt/v5 from 5.2.1 to 5.2.2 (#7401)
Bumps [github.com/golang-jwt/jwt/v5](https://github.com/golang-jwt/jwt) from 5.2.1 to 5.2.2.
- [Release notes](https://github.com/golang-jwt/jwt/releases)
- [Changelog](https://github.com/golang-jwt/jwt/blob/main/VERSION_HISTORY.md)
- [Commits](https://github.com/golang-jwt/jwt/compare/v5.2.1...v5.2.2)

---
updated-dependencies:
- dependency-name: github.com/golang-jwt/jwt/v5
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-04-05 01:43:43 +00:00
grandwizard28
6aa8f18018 chore(ff): remove all unused ffs 2025-04-05 01:38:06 +05:30
grandwizard28
a389901b8d chore(ff): remove infra_metrics and custom_metrics 2025-04-05 00:34:49 +05:30
grandwizard28
30e2581bbc fix(querier): remove ff 2025-04-05 00:02:37 +05:30
grandwizard28
ff1e46766f Merge branch 'remove-ff-ruler' into remove-ff-sort 2025-04-04 23:47:15 +05:30
grandwizard28
8e48e58f9b Merge branch 'main' into remove-ff-ruler 2025-04-04 23:47:13 +05:30
grandwizard28
bb7301bc9f fix(duration|timestamp): remove duration/timestamp sort 2025-04-04 23:46:51 +05:30
Vibhu Pandey
7345027762 fix(ff): remove prefer rpm (#7528) 2025-04-04 23:38:16 +05:30
grandwizard28
7b5eae84d5 feat(ruler): remove ff 2025-04-04 22:21:51 +05:30
grandwizard28
0aeb1009c6 Merge branch 'remove-prefer-rpm' into remove-ff-ruler 2025-04-04 22:18:56 +05:30
grandwizard28
ab3b250629 fix(ff): fix test cases 2025-04-04 20:44:24 +05:30
grandwizard28
765354b1ee Merge branch 'main' into remove-prefer-rpm 2025-04-04 20:29:16 +05:30
Vibhu Pandey
68f874e433 chore(ff): remove unused SMART_TRACE_DETAIL feature flag (#7527) 2025-04-04 20:28:54 +05:30
grandwizard28
a3daf43186 fix(ff): remove prefer rpm 2025-04-04 20:28:09 +05:30
grandwizard28
1649c0e26f chore(ff): remove from ruler 2025-04-04 19:55:47 +05:30
grandwizard28
142ad8adc4 Merge branch 'remove-ff-smart-detail' into remove-ff-ruler
# Conflicts:
#	pkg/query-service/rules/threshold_rule_test.go
2025-04-04 19:45:42 +05:30
grandwizard28
cda94d0325 chore(ff): remove unused SMART_TRACE_DETAIL feature flag 2025-04-04 19:34:20 +05:30
grandwizard28
51ae2df8d5 feat(ruler): remove ff 2025-04-04 19:17:58 +05:30
Vibhu Pandey
54a82b1664 fix(dashboards): remove ff interface (#7526) 2025-04-04 19:12:31 +05:30
grandwizard28
ea4e9988a5 feat(licensing): second commit 2025-04-04 18:55:11 +05:30
Yunus M
93dc585145 fix: disable sidenav items for cloud users whose license has expired (#7524) 2025-04-04 18:33:55 +05:30
Vikrant Gupta
6a143efd2c feat(sqlmigration): update the user related tables according to new schema (#7518)
* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): make the preference package multi tenant

* feat(preference): address nit pick comments

* feat(preference): added the cascade delete for preferences

* feat(sqlmigration): update apdex and TTL status tables  (#7481)

* feat(sqlmigration): update the apdex and ttl tables

* feat(sqlmigration): register the new migration and rename table

* feat(sqlmigration): fix the ttl queries

* feat(sqlmigration): update the TTL and apdex tables

* feat(sqlmigration): update the TTL and apdex tables

* feat(sqlmigration): fix the reset password and pat tables (#7482)

* feat(sqlmigration): fix the reset password and pat tables

* feat(sqlmigration): revert PAT changes

* feat(sqlmigration): register and rename the new migration

* feat(sqlmigration): handle updates for user tables

* feat(sqlmigration): remove unwanted changes
2025-04-04 01:46:28 +05:30
Vikrant Gupta
0116eb20ab feat(sqlmigration): update apdex and TTL status tables (#7517)
* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): make the preference package multi tenant

* feat(preference): address nit pick comments

* feat(preference): added the cascade delete for preferences

* feat(sqlmigration): update apdex and TTL status tables  (#7481)

* feat(sqlmigration): update the apdex and ttl tables

* feat(sqlmigration): register the new migration and rename table

* feat(sqlmigration): fix the ttl queries

* feat(sqlmigration): update the TTL and apdex tables

* feat(sqlmigration): update the TTL and apdex tables
2025-04-04 01:36:47 +05:30
Vikrant Gupta
79e9d1b357 feat(preference): multi tenant preference module (#7516)
* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): update the alertmanager tables

* feat(sqlmigration): make the preference package multi tenant

* feat(preference): address nit pick comments

* feat(preference): added the cascade delete for preferences
2025-04-04 01:25:24 +05:30
Vikrant Gupta
b89ce82e25 feat(sqlmigration): update the alertmanager tables (#7513)
* feat(sqlmigration): update the alertmanager tables
2025-04-03 17:56:49 +00:00
dependabot[bot]
b43a198fd8 chore(deps): bump github.com/expr-lang/expr from 1.16.9 to 1.17.0 (#7342)
Bumps [github.com/expr-lang/expr](https://github.com/expr-lang/expr) from 1.16.9 to 1.17.0.
- [Release notes](https://github.com/expr-lang/expr/releases)
- [Commits](https://github.com/expr-lang/expr/compare/v1.16.9...v1.17.0)

---
updated-dependencies:
- dependency-name: github.com/expr-lang/expr
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-04-03 08:17:40 +00:00
Srikanth Chekuri
b40ca4baf3 fix: do not crash service on panic during rule eval (#7514) 2025-04-03 13:13:58 +05:30
SagarRajput-7
8df77c9221 fix: fixed trace funnel - header style overriding other pages (#7512)
* fix: fixed trace funnel - header style overriding other pages

* fix: fixed trace funnel - header style overriding other pages

* fix: handled nesting
2025-04-03 07:29:39 +00:00
Srikanth Chekuri
f67555576f chore: add info icon tool tips for webhook/routing/integration key (#7405) 2025-04-03 09:40:41 +05:30
Srikanth Chekuri
f0a4c37073 fix: handle maintenance windows that cross day boundaries (#7494) 2025-04-02 20:48:01 +00:00
Vikrant Gupta
7972261237 Revert "fix: use search v2 component for traces data source & minor improveme…" (#7511)
This reverts commit d7a6607a25.
2025-04-03 01:15:20 +05:30
grandwizard28
74489efeef feat(featuretypes): add feature types 2025-04-02 19:36:40 +05:30
grandwizard28
834d75fbbd feat(licensing): first commit 2025-04-02 19:35:06 +05:30
primus-bot[bot]
3b4a8e5e0f chore(release): bump to v0.77.0 (#7502)
#### Summary
 - Release SigNoz v0.77.0
 - Bump SigNoz OTel Collector to v0.111.37

 Created by [Primus-Bot](https://github.com/apps/primus-bot)
2025-04-02 12:19:03 +05:30
Yunus M
5ef3b8ee3f feat: support request data source and improve layout (#7485)
* feat: support request data source and improve layout

* feat: update config

* feat: update config with related keywords

* update config

---------

Co-authored-by: makeavish <makeavish786@gmail.com>
2025-04-02 11:59:53 +05:30
Yunus M
597752a4bc fix: licenses in community edition & improve messaging (#7456)
Enhance platform to handle cloud, self-hosted, community, and enterprise user types with tailored routing, error handling, and feature access.
2025-04-02 01:12:42 +05:30
Nityananda Gohain
07a244f569 chore: add migration for pat to add default values (#7492)
* chore: add migration for pat to add default values

* fix: minor changes

* fix: don't panic in GetClickhouseColumnName

* fix: use new function for pat

* fix: address minor comments

* fix: address comments

* fix: remove generatepat

* fix: minor changes

* fix: remove extra check

---------

Co-authored-by: Vibhu Pandey <vibhupandey28@gmail.com>
2025-04-01 23:49:37 +05:30
sawhil
eb9385840f fix: minor error handler message 2025-04-01 22:47:48 +05:30
sawhil
30b689037a fix: minor fix 2025-04-01 22:47:48 +05:30
sawhil
ba33c885d5 fix: added auth token refresh logic in event source provider error handler 2025-04-01 22:47:48 +05:30
Amlan Kumar Nandy
a4ed9e4d47 feat: base setup for inspect metrics feature (#7490) 2025-04-01 11:47:38 +00:00
Nityananda Gohain
df5767198c fix: don't panic in GetClickhouseColumnName (#7493) 2025-03-31 22:26:37 +05:30
Vibhu Pandey
81c7f3221a feat(prometheus): create a dedicated prometheus package (#7397) 2025-03-31 14:11:11 +00:00
Amlan Kumar Nandy
2cbd8733a1 chore: remove new banner from infra monitoring tab in side nav (#7483) 2025-03-31 13:22:23 +00:00
Nityananda Gohain
71d1dfe9bd chore: use new uuid in pipelines (#7487) 2025-03-31 16:45:00 +05:30
aniketio-ctrl
459712d25c fix(nil-pointer): wrong error passed | 2262 (#7463)
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-03-31 09:47:37 +00:00
Aditya Singh
61de2d414d fix: handle 404 redirection on root route (#7454)
* fix: handle 404 redirection on root route

* fix: add home component for root route

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-03-31 14:35:58 +05:30
Sahil Khan
0b7cd4c1a7 feat: api monitoring feedback - 2 (#7432)
* feat: new dropdown styles

* fix: added new tag

* feat: added endpoint name and port in endpoint details

* feat: endpoint details feedback

* feat: analytics added

* fix: title fixed

* fix: domain list breaking for non available data

* feat: added third party api feature flag

* fix: console removed

* feat: added traces correlation in api monitoring charts

* feat: added customondragselect in grid card full view to handle breaking flow

* fix: minor failsafes added:

* fix: minor ux fix

* feat: incorporated pr comments - 0
2025-03-30 03:10:43 +05:30
Shaheer Kochai
62c033ccf8 chore: trace funnels feature flag changes (#7478)
* chore: trace funnels feature flag
2025-03-29 19:33:15 +05:30
Nageshbansal
e637487984 Fix the hyperlink for otel-demo-docs in contributing guide (#7462)
Co-authored-by: CheetoDa <31571545+Calm-Rock@users.noreply.github.com>
Co-authored-by: Vibhu Pandey <vibhupandey28@gmail.com>
2025-03-28 14:58:11 +00:00
Nityananda Gohain
8fc43a00f8 fix: collector connection to opamp without orgID (#7474) 2025-03-28 20:19:37 +05:30
Srikanth Chekuri
031d62ca44 Revert "fix: added default value to time,space aggregation to fix query_range…" (#7464)
This reverts commit 8c4c357351.
2025-03-28 12:43:31 +05:30
SagarRajput-7
8c4c357351 fix: added default value to time,space aggregation to fix query_range getting 500 for metric (#7414)
* fix: added default value to time,space aggregation to fix query_range getting 500 for metric

* fix: added all available operators as default when no attribute type is present

* fix: changed operator, time and space values to avg when empty attribute type
2025-03-28 06:36:09 +00:00
SagarRajput-7
d8d8191a32 feat: allow width customisation and persist it across users and view (#7273)
* feat: removed ellipsis prop

* feat: prevent unnecessary save calls

* feat: fix dashboard detail resize icon

* feat: adjusted resizable header - set minConstraint

* feat: fixed dashboard vanishing issue

* feat: removed dependency causing maximum callstack warning

* feat: corrected the list edit view render issue and resize handler fix

* feat: style fix

* feat: removed comments

* fix: updated test cases

* feat: updated the test cases

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-03-28 06:06:40 +00:00
SagarRajput-7
a876c0a744 chore: added doc title to messaging queues (#7460) 2025-03-28 11:25:41 +05:30
Vishal Sharma
c36f913a90 fix: telemetry version function call (#7453) 2025-03-27 20:07:09 +05:30
SagarRajput-7
ed597f00c0 fix: fixed copy to clipboard popup getting flooded on every click (#7448) 2025-03-27 05:54:34 +00:00
Vibhu Pandey
4957d3ae93 feat(sqlstore): move postgres to enterprise codebase (#7445) 2025-03-27 11:16:43 +05:30
Srikanth Chekuri
8835e3493d chore: skip logfield/spanfield type in the suggestions (#7433) 2025-03-27 10:36:27 +05:30
Vibhu Pandey
027a1631ef feat(httpclient): add an extensible http client (#7446) 2025-03-26 19:33:52 +00:00
Shaheer Kochai
d7a6607a25 fix: use search v2 component for traces data source & minor improvements to search v2 component (#7404) 2025-03-26 18:00:54 +00:00
Sahil Khan
7a58bc58c9 fix: stage and run query button same url navigation enabled (#7415) 2025-03-26 23:25:01 +05:30
Srikanth Chekuri
88be23c3e3 chore: pass through substitutions for CH query (#7389) 2025-03-26 12:58:55 +00:00
Srikanth Chekuri
8f095dfbc9 fix: handle expected value less than zero (#7410) 2025-03-26 12:50:46 +00:00
aniketio-ctrl
72207691a3 fix(metrics-explorer): added time filter in inner sub queries of list and samples (#7436) 2025-03-26 09:57:21 +00:00
Raj Kamal Singh
8998ca652e chore: aws integration: bump recommended agent version (#7434) 2025-03-26 09:14:05 +00:00
Piyush Singariya
f4ae5f19ff feat: AWS Managed Streaming Kafka service integration (#7350)
* feat: msk integration

* feat: logs not available in msk

* fix: minor suggestions made by ellipsis

* fix: changes based on review, added Variables, Units, Legends, SVG

* fix: update in global variables, and query operators

* fix: update in rx tx panel, region variable query update

---------

Co-authored-by: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com>
2025-03-26 12:57:39 +05:30
285 changed files with 13170 additions and 4551 deletions

.gitignore
View File

@@ -54,6 +54,7 @@ ee/query-service/tests/test-deploy/data/
bin/
.local/
*/query-service/queries.active
ee/query-service/db
# e2e
@@ -82,3 +83,4 @@ queries.active
# .devenv tmp files
.devenv/**/tmp/**
.qodo

View File

@@ -77,4 +77,4 @@ Need assistance? Join our Slack community:
## Where do I go from here?
- Set up your [development environment](docs/contributing/development.md)
-- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo/otel-demo-docs.md)
+- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md)

View File

@@ -74,6 +74,10 @@ go-run-enterprise: ## Runs the enterprise go backend server
--use-logs-new-schema true \
--use-trace-new-schema true
.PHONY: go-test
go-test: ## Runs go unit tests
@go test -race ./...
.PHONY: go-run-community
go-run-community: ## Runs the community go backend server
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \

View File

@@ -72,7 +72,6 @@ sqlstore:
# The path to the SQLite database file.
path: /var/lib/signoz/signoz.db
##################### APIServer #####################
apiserver:
timeout:
@@ -91,20 +90,29 @@ apiserver:
- /api/v1/version
- /
##################### TelemetryStore #####################
telemetrystore:
# Specifies the telemetrystore provider to use.
provider: clickhouse
# Maximum number of idle connections in the connection pool.
max_idle_conns: 50
# Maximum number of open connections to the database.
max_open_conns: 100
# Maximum time to wait for a connection to be established.
dial_timeout: 5s
# Specifies the telemetrystore provider to use.
provider: clickhouse
clickhouse:
-# The DSN to use for ClickHouse.
-dsn: http://localhost:9000
+# The DSN to use for clickhouse.
+dsn: tcp://localhost:9000
##################### Prometheus #####################
prometheus:
active_query_tracker:
# Whether to enable the active query tracker.
enabled: true
# The path to use for the active query tracker.
path: ""
# The maximum number of concurrent queries.
max_concurrent: 20
##################### Alertmanager #####################
alertmanager:
@@ -117,7 +125,7 @@ alertmanager:
# The poll interval for periodically syncing the alertmanager with the config in the store.
poll_interval: 1m
# The URL under which Alertmanager is externally reachable (for example, if Alertmanager is served via a reverse proxy). Used for generating relative and absolute links back to Alertmanager itself.
-external_url: http://localhost:9093
+external_url: http://localhost:8080
# The global configuration for the alertmanager. All the exhaustive fields can be found in the upstream: https://github.com/prometheus/alertmanager/blob/efa05feffd644ba4accb526e98a8c6545d26a783/config/config.go#L833
global:
# ResolveTimeout is the time after which an alert is declared resolved if it has not been updated.

View File

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
-image: signoz/signoz:v0.76.2
+image: signoz/signoz:v0.77.0
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
@@ -208,7 +208,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
-image: signoz/signoz-otel-collector:v0.111.34
+image: signoz/signoz-otel-collector:v0.111.37
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -232,7 +232,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
-image: signoz/signoz-schema-migrator:v0.111.34
+image: signoz/signoz-schema-migrator:v0.111.37
deploy:
restart_policy:
condition: on-failure

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
-image: signoz/signoz:v0.76.2
+image: signoz/signoz:v0.77.0
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
@@ -143,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
-image: signoz/signoz-otel-collector:v0.111.34
+image: signoz/signoz-otel-collector:v0.111.37
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -167,7 +167,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
-image: signoz/signoz-schema-migrator:v0.111.34
+image: signoz/signoz-schema-migrator:v0.111.37
deploy:
restart_policy:
condition: on-failure

View File

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
-image: signoz/signoz:${VERSION:-v0.76.2}
+image: signoz/signoz:${VERSION:-v0.77.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -212,7 +212,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.37}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -238,7 +238,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
container_name: schema-migrator-sync
command:
- sync
@@ -249,7 +249,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
-image: signoz/signoz:${VERSION:-v0.76.2}
+image: signoz/signoz:${VERSION:-v0.77.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -146,7 +146,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.37}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -168,7 +168,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
container_name: schema-migrator-sync
command:
- sync
@@ -180,7 +180,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
-image: signoz/signoz:${VERSION:-v0.76.2}
+image: signoz/signoz:${VERSION:-v0.77.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.37}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.34}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.37}
container_name: schema-migrator-async
command:
- async

View File

@@ -4,6 +4,7 @@ import (
"net/http"
"time"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -24,7 +25,7 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
var values []string
var patToken string
-var pat types.StorablePersonalAccessToken
+var pat eeTypes.StorablePersonalAccessToken
for _, header := range p.headers {
values = append(values, r.Header.Get(header))

View File

@@ -0,0 +1,31 @@
package licensingserver
import (
"time"
"github.com/SigNoz/signoz/pkg/licensing"
)
type Config struct {
PollingConfig PollingConfig `mapstructure:"polling"`
}
type PollingConfig struct {
Interval time.Duration `mapstructure:"interval"`
}
func NewConfig() Config {
return Config{
PollingConfig: PollingConfig{
Interval: 24 * time.Hour,
},
}
}
func NewConfigFromLicensingConfig(config licensing.Config) Config {
return Config{
PollingConfig: PollingConfig{
Interval: config.PollingConfig.Interval,
},
}
}

View File

@@ -0,0 +1,72 @@
package licensingserver
import (
"context"
"log/slog"
"sync"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
)
type Server struct {
logger *slog.Logger
cfg Config
orgID valuer.UUID
zeus zeus.Zeus
store licensetypes.Store
license licensetypes.License
mtx sync.RWMutex
}
func NewServer(logger *slog.Logger, config Config, orgID valuer.UUID, zeus zeus.Zeus, store licensetypes.Store) *Server {
return &Server{
logger: logger,
cfg: config,
orgID: orgID,
zeus: zeus,
store: store,
license: licensetypes.NewNoop(),
}
}
func (server *Server) Fetch(ctx context.Context) error {
license, err := server.store.GetLatest(ctx, server.orgID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil
}
return err
}
fetchedLicense, err := server.zeus.GetLicense(ctx, license.Key())
if err != nil {
return err
}
return server.SetLicense(ctx, fetchedLicense)
}
func (server *Server) SetLicense(ctx context.Context, license licensetypes.License) error {
server.mtx.Lock()
defer server.mtx.Unlock()
server.license = license
return nil
}
func (server *Server) GetLicense(ctx context.Context) licensetypes.License {
server.mtx.RLock()
defer server.mtx.RUnlock()
return server.license
}
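The server above keeps one license in memory per organization behind an RWMutex, defaults to a noop license, and deliberately treats a missing stored license as a non-error so the default stays in place. A minimal self-contained sketch of that shape, with strings and function fields standing in for the SigNoz license, store, and zeus types:

```go
package main

import (
	"errors"
	"fmt"
	"sync"
)

// errNotFound stands in for errors.TypeNotFound in the real code.
var errNotFound = errors.New("not found")

// server mirrors the Fetch/SetLicense/GetLicense shape above: a missing
// stored license key is not an error (the default license is kept), any
// other failure propagates, and reads/writes go through an RWMutex.
type server struct {
	mtx     sync.RWMutex
	license string
	store   func() (string, error)           // stand-in for store.GetLatest
	remote  func(key string) (string, error) // stand-in for zeus.GetLicense
}

func (s *server) Fetch() error {
	key, err := s.store()
	if err != nil {
		if errors.Is(err, errNotFound) {
			return nil // no license yet; keep the default, as Fetch does
		}
		return err
	}
	lic, err := s.remote(key)
	if err != nil {
		return err
	}
	s.mtx.Lock()
	defer s.mtx.Unlock()
	s.license = lic
	return nil
}

func (s *server) GetLicense() string {
	s.mtx.RLock()
	defer s.mtx.RUnlock()
	return s.license
}

func main() {
	s := &server{
		license: "noop", // mirrors licensetypes.NewNoop() as the default
		store:   func() (string, error) { return "", errNotFound },
		remote:  func(string) (string, error) { return "enterprise", nil },
	}
	fmt.Println(s.Fetch(), s.GetLicense()) // <nil> noop
}
```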

View File

@@ -0,0 +1,35 @@
package sqllicensingstore
import (
"context"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type store struct {
sqlstore sqlstore.SQLStore
}
func NewStore(sqlstore sqlstore.SQLStore) licensetypes.Store {
return &store{
sqlstore: sqlstore,
}
}
func (store *store) Set(ctx context.Context, license licensetypes.License) error {
return nil
}
func (store *store) Get(ctx context.Context, orgID valuer.UUID) ([]licensetypes.License, error) {
return nil, nil
}
func (store *store) GetLatest(ctx context.Context, orgID valuer.UUID) (licensetypes.License, error) {
return nil, nil
}
func (store *store) ListOrgs(ctx context.Context) ([]valuer.UUID, error) {
return nil, nil
}

View File

@@ -0,0 +1,109 @@
package pollinglicensing
import (
"context"
"time"
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
)
type provider struct {
config licensing.Config
settings factory.ScopedProviderSettings
zeus zeus.Zeus
service *Service
store licensetypes.Store
stopC chan struct{}
}
func NewFactory(zeus zeus.Zeus, sqlstore sqlstore.SQLStore) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
return factory.NewProviderFactory(factory.MustNewName("sql"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
return New(ctx, providerSettings, config, zeus, sqlstore)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config, zeus zeus.Zeus, sqlstore sqlstore.SQLStore) (licensing.Licensing, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/licensing/pollinglicensing")
store := sqllicensingstore.NewStore(sqlstore)
return &provider{
config: config,
settings: settings,
zeus: zeus,
service: NewService(ctx, settings, config, store, zeus),
stopC: make(chan struct{}),
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
if err := provider.service.SyncServers(ctx); err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to sync licensing servers", "error", err)
return err
}
ticker := time.NewTicker(provider.config.PollingConfig.Interval)
defer ticker.Stop()
for {
select {
case <-provider.stopC:
return nil
case <-ticker.C:
if err := provider.service.SyncServers(ctx); err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to sync licensing servers", "error", err)
}
}
}
}
func (provider *provider) GetLatestLicense(ctx context.Context, orgID valuer.UUID) (licensetypes.License, error) {
server, err := provider.service.getServer(orgID)
if err != nil {
return nil, err
}
return server.GetLicense(ctx), nil
}
func (provider *provider) GetLicenses(ctx context.Context, orgID valuer.UUID, params licensetypes.GettableLicenseParams) (licensetypes.GettableLicenses, error) {
if params.Active != nil {
if *params.Active {
license, err := provider.GetLatestLicense(ctx, orgID)
if err != nil {
return nil, err
}
return licensetypes.GettableLicenses{license}, nil
}
}
licenses, err := provider.store.Get(ctx, orgID)
if err != nil {
return nil, err
}
return licenses, nil
}
func (provider *provider) SetLicense(ctx context.Context, orgID valuer.UUID, key string) error {
license, err := provider.zeus.GetLicense(ctx, key)
if err != nil {
return err
}
if err := provider.store.Set(ctx, license); err != nil {
return err
}
return provider.service.SyncOrgServer(ctx, orgID)
}
func (provider *provider) Stop(ctx context.Context) error {
close(provider.stopC)
return nil
}
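The provider's Start above follows a standard poll-loop shape: one synchronous sync up front that fails fast, then a ticker that re-syncs at the configured interval until Stop closes the stop channel. A self-contained sketch of the same pattern; the interval, syncFn, and logging here are placeholders rather than the SigNoz API:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// poller reproduces the Start/Stop shape of the polling provider.
type poller struct {
	interval time.Duration
	stopC    chan struct{}
	syncFn   func(context.Context) error
}

func (p *poller) Start(ctx context.Context) error {
	// The provider returns early if the initial sync fails.
	if err := p.syncFn(ctx); err != nil {
		return err
	}
	ticker := time.NewTicker(p.interval)
	defer ticker.Stop()
	for {
		select {
		case <-p.stopC:
			return nil
		case <-ticker.C:
			// On later failures the provider only logs and keeps polling.
			if err := p.syncFn(ctx); err != nil {
				fmt.Println("sync failed:", err)
			}
		}
	}
}

func (p *poller) Stop() { close(p.stopC) }

func main() {
	p := &poller{
		interval: 50 * time.Millisecond,
		stopC:    make(chan struct{}),
		syncFn:   func(context.Context) error { fmt.Println("synced"); return nil },
	}
	go p.Start(context.Background())
	time.Sleep(120 * time.Millisecond)
	p.Stop()
	time.Sleep(10 * time.Millisecond)
}
```

With the 24-hour default interval from NewConfig, a newly applied key would otherwise wait up to a day to take effect, which is presumably why SetLicense calls SyncOrgServer directly.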

View File

@@ -0,0 +1,103 @@
package pollinglicensing
import (
"context"
"sync"
"github.com/SigNoz/signoz/ee/licensing/licensingserver"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
)
type Service struct {
// config is the config for the licensing service
config licensing.Config
// store is the store for the licensing service
store licensetypes.Store
// zeus
zeus zeus.Zeus
// settings is the settings for the licensing service
settings factory.ScopedProviderSettings
// Map of organization id to alertmanager server
servers map[valuer.UUID]*licensingserver.Server
// Mutex to protect the servers map
serversMtx sync.RWMutex
}
func NewService(ctx context.Context, settings factory.ScopedProviderSettings, config licensing.Config, store licensetypes.Store, zeus zeus.Zeus) *Service {
service := &Service{
config: config,
store: store,
zeus: zeus,
settings: settings,
servers: make(map[valuer.UUID]*licensingserver.Server),
serversMtx: sync.RWMutex{},
}
return service
}
func (service *Service) SyncServers(ctx context.Context) error {
orgIDs, err := service.store.ListOrgs(ctx)
if err != nil {
return err
}
service.serversMtx.Lock()
for _, orgID := range orgIDs {
// If the server is not present, create it and sync the config
if _, ok := service.servers[orgID]; !ok {
server := licensingserver.NewServer(service.settings.Logger(), licensingserver.NewConfigFromLicensingConfig(service.config), orgID, service.zeus, service.store)
service.servers[orgID] = server
}
err = service.servers[orgID].Fetch(ctx)
if err != nil {
service.settings.Logger().Error("failed to fetch license for licensing server", "orgID", orgID, "error", err)
continue
}
}
service.serversMtx.Unlock()
return nil
}
func (service *Service) SyncOrgServer(ctx context.Context, orgID valuer.UUID) error {
service.serversMtx.Lock()
defer service.serversMtx.Unlock()
_, ok := service.servers[orgID]
if !ok {
server := licensingserver.NewServer(service.settings.Logger(), licensingserver.NewConfigFromLicensingConfig(service.config), orgID, service.zeus, service.store)
service.servers[orgID] = server
}
err := service.servers[orgID].Fetch(ctx)
if err != nil {
service.settings.Logger().Error("failed to fetch license for licensing server", "orgID", orgID, "error", err)
return err
}
return nil
}
func (service *Service) getServer(orgID valuer.UUID) (*licensingserver.Server, error) {
service.serversMtx.RLock()
defer service.serversMtx.RUnlock()
server, ok := service.servers[orgID]
if !ok {
return nil, errors.Newf(errors.TypeNotFound, licensing.ErrCodeLicensingServerNotFound, "server not found for %s", orgID.StringValue())
}
return server, nil
}

View File

@@ -28,11 +28,10 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
}
dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-Reader: dp.reader,
-Cache: dp.cache,
-KeyGenerator: queryBuilder.NewKeyGenerator(),
-FluxInterval: dp.fluxInterval,
-FeatureLookup: dp.ff,
+Reader: dp.reader,
+Cache: dp.cache,
+KeyGenerator: queryBuilder.NewKeyGenerator(),
+FluxInterval: dp.fluxInterval,
})
return dp

View File

@@ -28,11 +28,10 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
}
hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-Reader: hp.reader,
-Cache: hp.cache,
-KeyGenerator: queryBuilder.NewKeyGenerator(),
-FluxInterval: hp.fluxInterval,
-FeatureLookup: hp.ff,
+Reader: hp.reader,
+Cache: hp.cache,
+KeyGenerator: queryBuilder.NewKeyGenerator(),
+FluxInterval: hp.fluxInterval,
})
return hp

View File

@@ -38,12 +38,6 @@ func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericPr
}
}
-func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
-return func(p T) {
-p.GetBaseSeasonalProvider().ff = ff
-}
-}
func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().reader = reader
@@ -56,7 +50,6 @@ type BaseSeasonalProvider struct {
fluxInterval time.Duration
cache cache.Cache
keyGenerator cache.KeyGenerator
-ff interfaces.FeatureLookup
}
func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
@@ -313,6 +306,9 @@ func (p *BaseSeasonalProvider) getScore(
series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
) float64 {
expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
+if expectedValue < 0 {
+expectedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
+}
return (value - expectedValue) / p.getStdDev(weekSeries)
}
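The three added lines guard the scoring: when the seasonal expectation comes out negative, the provider substitutes a moving average of the previous period before computing the z-score against the weekly spread. A minimal self-contained sketch of that scoring step, with plain float64 slices instead of v3.Series and simplified stand-ins for the provider's getMovingAvg and getStdDev helpers:

```go
package main

import (
	"fmt"
	"math"
)

// movingAvg averages the window of values ending just before idx,
// a simplified stand-in for the provider's getMovingAvg.
func movingAvg(series []float64, window, idx int) float64 {
	start := idx - window
	if start < 0 {
		start = 0
	}
	if idx > len(series) {
		idx = len(series)
	}
	if idx <= start {
		return 0
	}
	sum := 0.0
	for _, v := range series[start:idx] {
		sum += v
	}
	return sum / float64(idx-start)
}

// stdDev is a plain population standard deviation, standing in for getStdDev.
func stdDev(series []float64) float64 {
	mean := 0.0
	for _, v := range series {
		mean += v
	}
	mean /= float64(len(series))
	var sq float64
	for _, v := range series {
		sq += (v - mean) * (v - mean)
	}
	return math.Sqrt(sq / float64(len(series)))
}

// score mirrors the hunk above: a negative expectation falls back to the
// moving average before the z-score is taken.
func score(value, expected float64, prev, week []float64, window, idx int) float64 {
	if expected < 0 {
		expected = movingAvg(prev, window, idx)
	}
	return (value - expected) / stdDev(week)
}

func main() {
	prev := []float64{10, 12, 11, 13, 12}
	week := []float64{10, 11, 12, 13, 14}
	// expected < 0 triggers the fallback: avg of {11, 13, 12} = 12.
	fmt.Printf("%.2f\n", score(15, -1, prev, week, 3, 5))
}
```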

View File

@@ -27,11 +27,10 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
}
wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
-Reader: wp.reader,
-Cache: wp.cache,
-KeyGenerator: queryBuilder.NewKeyGenerator(),
-FluxInterval: wp.fluxInterval,
-FeatureLookup: wp.ff,
+Reader: wp.reader,
+Cache: wp.cache,
+KeyGenerator: queryBuilder.NewKeyGenerator(),
+FluxInterval: wp.fluxInterval,
})
return wp

View File

@@ -7,10 +7,11 @@ import (
"github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/modules/preference"
preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -21,12 +22,13 @@ import (
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
type APIHandlerOptions struct {
-DataConnector interfaces.DataConnector
+DataConnector baseint.Reader
SkipConfig *basemodel.SkipConfig
PreferSpanMetrics bool
AppDao dao.ModelDao
@@ -54,6 +56,7 @@ type APIHandler struct {
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
+preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
@@ -71,6 +74,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
UseTraceNewSchema: opts.UseTraceNewSchema,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
Signoz: signoz,
+Preference: preference,
})
if err != nil {
@@ -157,7 +161,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
// PAT APIs
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)

View File

@@ -11,7 +11,7 @@ import (
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
@@ -135,19 +135,12 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
zap.String("cloudProvider", cloudProvider),
)
-newPAT := model.PAT{
-StorablePersonalAccessToken: types.StorablePersonalAccessToken{
-Token: generatePATToken(),
-UserID: integrationUser.ID,
-Name: integrationPATName,
-Role: baseconstants.ViewerGroup,
-ExpiresAt: 0,
-TimeAuditable: types.TimeAuditable{
-CreatedAt: time.Now(),
-UpdatedAt: time.Now(),
-},
-},
-}
+newPAT := eeTypes.NewGettablePAT(
+integrationPATName,
+baseconstants.ViewerGroup,
+integrationUser.ID,
+0,
+)
integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(

View File

@@ -2,31 +2,24 @@ package api
import (
"context"
"crypto/rand"
"encoding/base64"
"encoding/json"
"fmt"
"net/http"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
-func generatePATToken() string {
-// Generate a 32-byte random token.
-token := make([]byte, 32)
-rand.Read(token)
-// Encode the token in base64.
-encodedToken := base64.StdEncoding.EncodeToString(token)
-return encodedToken
-}
func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
@@ -43,31 +36,18 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
}, nil)
return
}
-pat := model.PAT{
-StorablePersonalAccessToken: types.StorablePersonalAccessToken{
-Name: req.Name,
-Role: req.Role,
-ExpiresAt: req.ExpiresInDays,
-},
-}
+pat := eeTypes.NewGettablePAT(
+req.Name,
+req.Role,
+user.ID,
+req.ExpiresInDays,
+)
err = validatePATRequest(pat)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
-// All the PATs are associated with the user creating the PAT.
-pat.UserID = user.ID
-pat.CreatedAt = time.Now()
-pat.UpdatedAt = time.Now()
-pat.LastUsed = 0
-pat.Token = generatePATToken()
-if pat.ExpiresAt != 0 {
-// convert expiresAt to unix timestamp from days
-pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
-}
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
var apierr basemodel.BaseApiError
if pat, apierr = ah.AppDao().CreatePAT(ctx, user.OrgID, pat); apierr != nil {
@@ -78,7 +58,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
ah.Respond(w, &pat)
}
-func validatePATRequest(req model.PAT) error {
+func validatePATRequest(req types.GettablePAT) error {
if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
return fmt.Errorf("valid role is required")
}
@@ -94,7 +74,7 @@ func validatePATRequest(req model.PAT) error {
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
-req := model.PAT{}
+req := types.GettablePAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
@@ -116,7 +96,12 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
}
req.UpdatedByUserID = user.ID
id := mux.Vars(r)["id"]
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
req.UpdatedAt = time.Now()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
@@ -149,7 +134,12 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
id := mux.Vars(r)["id"]
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
@@ -159,7 +149,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
return
}
zap.L().Info("Revoke PAT with id", zap.String("id", id))
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
RespondError(w, apierr, nil)
return
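For reference, the two pieces of logic removed above were small: generatePATToken produced 32 random bytes, base64-encoded, and the deleted inline block converted a non-zero expires-in-days value into an absolute unix timestamp. A sketch of both as they read before removal; whether eeTypes.NewGettablePAT reproduces them exactly is not shown in this diff:

```go
package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
	"time"
)

// generateToken mirrors the removed generatePATToken: a 32-byte random
// token, base64-encoded. The error from rand.Read is ignored, as it was
// in the original.
func generateToken() string {
	token := make([]byte, 32)
	rand.Read(token)
	return base64.StdEncoding.EncodeToString(token)
}

// expiryFromDays mirrors the removed inline conversion in createPAT:
// zero means "never expires"; anything else becomes now + N days.
func expiryFromDays(days int64) int64 {
	if days == 0 {
		return 0
	}
	return time.Now().Unix() + days*24*60*60
}

func main() {
	fmt.Println(generateToken())
	fmt.Println(expiryFromDays(30))
}
```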

View File

@@ -88,28 +88,24 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
-anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](aH.opts.FeatureFlags),
)
case anomaly.SeasonalityDaily:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
-anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
)
case anomaly.SeasonalityHourly:
provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
-anomaly.WithFeatureLookup[*anomaly.HourlyProvider](aH.opts.FeatureFlags),
)
default:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
-anomaly.WithFeatureLookup[*anomaly.DailyProvider](aH.opts.FeatureFlags),
)
}
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})

View File

@@ -1,33 +0,0 @@
package api
import (
"net/http"
"github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/model"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.SmartTraceDetail) {
zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
ah.APIHandler.SearchTraces(w, r)
return
}
searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
if ah.HandleError(w, err, http.StatusBadRequest) {
return
}
ah.WriteJSON(w, r, result)
}

View File

@@ -5,38 +5,33 @@ import (
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/prometheus"
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
)
type ClickhouseReader struct {
conn clickhouse.Conn
-appdb *sqlx.DB
+appdb sqlstore.SQLStore
*basechr.ClickHouseReader
}
func NewDataConnector(
-localDB *sqlx.DB,
-ch clickhouse.Conn,
-promConfigPath string,
-lm interfaces.FeatureLookup,
+sqlDB sqlstore.SQLStore,
+telemetryStore telemetrystore.TelemetryStore,
+prometheus prometheus.Prometheus,
cluster string,
useLogsNewSchema bool,
useTraceNewSchema bool,
fluxIntervalForTraceDetail time.Duration,
cache cache.Cache,
) *ClickhouseReader {
-chReader := basechr.NewReader(localDB, ch, promConfigPath, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
+chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
return &ClickhouseReader{
-conn: ch,
-appdb: localDB,
+conn: telemetryStore.ClickhouseDB(),
+appdb: sqlDB,
ClickHouseReader: chReader,
}
}
func (r *ClickhouseReader) Start(readerReady chan bool) {
r.ClickHouseReader.Start(readerReady)
}

View File

@@ -18,13 +18,14 @@ import (
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/web"
@@ -43,13 +44,11 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
pqle "github.com/SigNoz/signoz/pkg/query-service/pqlEngine"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/query-service/utils"
@@ -116,10 +115,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
return nil, err
}
-if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
-return nil, err
-}
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
@@ -137,27 +132,22 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
// set license manager as feature flag provider in dao
modelDao.SetFlagProvider(lm)
readerReady := make(chan bool)
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
if err != nil {
return nil, err
}
-var reader interfaces.DataConnector
-qb := db.NewDataConnector(
-serverOptions.SigNoz.SQLStore.SQLxDB(),
-serverOptions.SigNoz.TelemetryStore.ClickHouseDB(),
-serverOptions.PromConfigPath,
-lm,
+reader := db.NewDataConnector(
+serverOptions.SigNoz.SQLStore,
+serverOptions.SigNoz.TelemetryStore,
+serverOptions.SigNoz.Prometheus,
serverOptions.Cluster,
serverOptions.UseLogsNewSchema,
serverOptions.UseTraceNewSchema,
fluxIntervalForTraceDetail,
serverOptions.SigNoz.Cache,
)
-go qb.Start(readerReady)
-reader = qb
skipConfig := &basemodel.SkipConfig{}
if serverOptions.SkipTopLvlOpsPath != "" {
@@ -176,19 +166,18 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
c = cache.NewCache(cacheOpts)
}
<-readerReady
rm, err := makeRulesManager(
serverOptions.PromConfigPath,
serverOptions.RuleRepoURL,
-serverOptions.SigNoz.SQLStore.SQLxDB(),
reader,
c,
serverOptions.DisableRules,
-lm,
serverOptions.UseLogsNewSchema,
serverOptions.UseTraceNewSchema,
serverOptions.SigNoz.Alertmanager,
+serverOptions.SigNoz.SQLStore,
+serverOptions.SigNoz.TelemetryStore,
+serverOptions.SigNoz.Prometheus,
)
if err != nil {
@@ -233,7 +222,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
// start the usagemanager
-usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickHouseDB(), serverOptions.Config.TelemetryStore.ClickHouse.DSN)
+usageManager, err := usage.New(lm.GetRepo(), serverOptions.SigNoz.TelemetryStore, serverOptions.SigNoz.Zeus)
if err != nil {
return nil, err
}
@@ -304,7 +293,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
&opAmpModel.AllAgents, agentConfMgr,
)
-errorList := qb.PreloadMetricsMetadata(context.Background())
+errorList := reader.PreloadMetricsMetadata(context.Background())
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
@@ -537,33 +526,27 @@ func (s *Server) Stop() error {
}
func makeRulesManager(
promConfigPath,
ruleRepoURL string,
-db *sqlx.DB,
ch baseint.Reader,
cache cache.Cache,
disableRules bool,
-fm baseint.FeatureLookup,
useLogsNewSchema bool,
useTraceNewSchema bool,
alertmanager alertmanager.Alertmanager,
+sqlstore sqlstore.SQLStore,
+telemetryStore telemetrystore.TelemetryStore,
+prometheus prometheus.Prometheus,
) (*baserules.Manager, error) {
-// create engine
-pqle, err := pqle.FromConfigPath(promConfigPath)
-if err != nil {
-return nil, fmt.Errorf("failed to create pql engine : %v", err)
-}
// create manager opts
managerOpts := &baserules.ManagerOptions{
-PqlEngine: pqle,
+TelemetryStore: telemetryStore,
+Prometheus: prometheus,
RepoURL: ruleRepoURL,
-DBConn: db,
Context: context.Background(),
Logger: zap.L(),
DisableRules: disableRules,
-FeatureFlags: fm,
Reader: ch,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),

View File

@@ -4,13 +4,13 @@ import (
"context"
"net/url"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/uptrace/bun"
)
@@ -36,11 +36,11 @@ type ModelDao interface {
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)
-CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError)
-UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError
-GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
-GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError)
+CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
+UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
+GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
+GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
-ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError)
-RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError
+ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
+RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
}

View File

@@ -6,45 +6,53 @@ import (
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
-func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p model.PAT) (model.PAT, basemodel.BaseApiError) {
+func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
p.StorablePersonalAccessToken.OrgID = orgID
p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
_, err := m.DB().NewInsert().
Model(&p.StorablePersonalAccessToken).
Returning("id").
Exec(ctx)
if err != nil {
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
-return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
+return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
createdByUser, _ := m.GetUser(ctx, p.UserID)
if createdByUser == nil {
-p.CreatedByUser = model.User{
+p.CreatedByUser = types.PatUser{
NotFound: true,
}
} else {
-p.CreatedByUser = model.User{
-Id: createdByUser.ID,
-Name: createdByUser.Name,
-Email: createdByUser.Email,
-CreatedAt: createdByUser.CreatedAt.Unix(),
-ProfilePictureURL: createdByUser.ProfilePictureURL,
-NotFound: false,
+p.CreatedByUser = types.PatUser{
+User: ossTypes.User{
+ID: createdByUser.ID,
+Name: createdByUser.Name,
+Email: createdByUser.Email,
+TimeAuditable: ossTypes.TimeAuditable{
+CreatedAt: createdByUser.CreatedAt,
+UpdatedAt: createdByUser.UpdatedAt,
+},
+ProfilePictureURL: createdByUser.ProfilePictureURL,
+},
+NotFound: false,
}
}
return p, nil
}
-func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p model.PAT, id string) basemodel.BaseApiError {
+func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
_, err := m.DB().NewUpdate().
Model(&p.StorablePersonalAccessToken).
Column("role", "name", "updated_at", "updated_by_user_id").
Where("id = ?", id).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Where("revoked = false").
Exec(ctx)
@@ -55,7 +63,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p model.PAT, id
return nil
}
-func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, basemodel.BaseApiError) {
+func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
@@ -68,41 +76,51 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, bas
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}
-patsWithUsers := []model.PAT{}
+patsWithUsers := []types.GettablePAT{}
for i := range pats {
-patWithUser := model.PAT{
+patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[i],
}
createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
if createdByUser == nil {
-patWithUser.CreatedByUser = model.User{
+patWithUser.CreatedByUser = types.PatUser{
NotFound: true,
}
} else {
-patWithUser.CreatedByUser = model.User{
-Id: createdByUser.ID,
-Name: createdByUser.Name,
-Email: createdByUser.Email,
-CreatedAt: createdByUser.CreatedAt.Unix(),
-ProfilePictureURL: createdByUser.ProfilePictureURL,
-NotFound: false,
+patWithUser.CreatedByUser = types.PatUser{
+User: ossTypes.User{
+ID: createdByUser.ID,
+Name: createdByUser.Name,
+Email: createdByUser.Email,
+TimeAuditable: ossTypes.TimeAuditable{
+CreatedAt: createdByUser.CreatedAt,
+UpdatedAt: createdByUser.UpdatedAt,
+},
+ProfilePictureURL: createdByUser.ProfilePictureURL,
+},
+NotFound: false,
}
}
updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
if updatedByUser == nil {
-patWithUser.UpdatedByUser = model.User{
+patWithUser.UpdatedByUser = types.PatUser{
NotFound: true,
}
} else {
-patWithUser.UpdatedByUser = model.User{
-Id: updatedByUser.ID,
-Name: updatedByUser.Name,
-Email: updatedByUser.Email,
-CreatedAt: updatedByUser.CreatedAt.Unix(),
-ProfilePictureURL: updatedByUser.ProfilePictureURL,
-NotFound: false,
+patWithUser.UpdatedByUser = types.PatUser{
+User: ossTypes.User{
+ID: updatedByUser.ID,
+Name: updatedByUser.Name,
+Email: updatedByUser.Email,
+TimeAuditable: ossTypes.TimeAuditable{
+CreatedAt: updatedByUser.CreatedAt,
+UpdatedAt: updatedByUser.UpdatedAt,
+},
+ProfilePictureURL: updatedByUser.ProfilePictureURL,
+},
+NotFound: false,
}
}
@@ -111,14 +129,14 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]model.PAT, bas
return patsWithUsers, nil
}
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userID string) basemodel.BaseApiError {
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().NewUpdate().
Model(&types.StorablePersonalAccessToken{}).
Set("revoked = ?", true).
Set("updated_by_user_id = ?", userID).
Set("updated_at = ?", updatedAt).
Where("id = ?", id).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Exec(ctx)
if err != nil {
@@ -128,7 +146,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id string, userI
return nil
}
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
@@ -146,19 +164,19 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemo
}
}
patWithUser := model.PAT{
patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[0],
}
return &patWithUser, nil
}
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*model.PAT, basemodel.BaseApiError) {
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
Model(&pats).
Where("id = ?", id).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Where("revoked = false").
Scan(ctx); err != nil {
@@ -172,7 +190,7 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*mo
}
}
patWithUser := model.PAT{
patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[0],
}
@@ -180,8 +198,8 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id string) (*mo
}
// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*types.GettableUser, basemodel.BaseApiError) {
users := []types.GettableUser{}
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError) {
users := []ossTypes.GettableUser{}
if err := m.DB().NewSelect().
Model(&users).


@@ -1,12 +0,0 @@
package interfaces
import (
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
)
// Connector defines methods for interaction
// with o11y data. for example - clickhouse
type DataConnector interface {
Start(readerReady chan bool)
baseint.Reader
}


@@ -14,8 +14,8 @@ import (
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/zeus"
validate "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/model"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
@@ -29,6 +29,7 @@ var validationFrequency = 24 * 60 * time.Minute
type Manager struct {
repo *Repo
zeus zeus.Zeus
mutex sync.Mutex
validatorRunning bool
// end the license validation, this is important to gracefully
@@ -45,7 +46,7 @@ type Manager struct {
activeFeatures basemodel.FeatureSet
}
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, features ...basemodel.Feature) (*Manager, error) {
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, zeus zeus.Zeus, features ...basemodel.Feature) (*Manager, error) {
if LM != nil {
return LM, nil
}
@@ -53,6 +54,7 @@ func StartManager(db *sqlx.DB, store sqlstore.SQLStore, features ...basemodel.Fe
repo := NewLicenseRepo(db, store)
m := &Manager{
repo: &repo,
zeus: zeus,
}
if err := m.start(features...); err != nil {
return m, err
@@ -173,14 +175,12 @@ func (lm *Manager) ValidatorV3(ctx context.Context) {
}
func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {
license, apiError := validate.ValidateLicenseV3(lm.activeLicenseV3.Key)
if apiError != nil {
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError
license, err := lm.zeus.GetLicense(ctx, lm.activeLicenseV3.Key)
if err != nil {
return model.BadRequest(errors.Wrap(err, "failed to get license"))
}
err := lm.repo.UpdateLicenseV3(ctx, license)
err = lm.repo.UpdateLicenseV3(ctx, license)
if err != nil {
return model.BadRequest(errors.Wrap(err, "failed to update the new license"))
}
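RefreshLicense (and ActivateV3 below) now resolve licenses through the injected zeus client instead of the signozio validator. The interface itself is not shown in this compare; the shape below is reconstructed from the implzeus provider further down and should be treated as an inference, not the authoritative definition in pkg/zeus.
// Inferred from the implzeus.Provider methods later in this compare.
type Zeus interface {
	GetLicense(ctx context.Context, key string) (*licensetypes.License, error)
	GetCheckoutURL(ctx context.Context, key string) (string, error)
	GetPortalURL(ctx context.Context, key string) (string, error)
	GetDeployment(ctx context.Context, key string) ([]byte, error)
	PutMeters(ctx context.Context, key string, meters metertypes.Meters) error
}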
@@ -247,10 +247,9 @@ func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseRe
}
}()
license, apiError := validate.ValidateLicenseV3(licenseKey)
if apiError != nil {
zap.L().Error("failed to get the license", zap.Error(apiError.Err))
return nil, apiError
license, errv2 := lm.zeus.GetLicense(ctx, licenseKey)
if errv2 != nil {
return nil, model.BadRequest(errors.Wrap(errv2, "failed to get license"))
}
// insert the new license to the sqlite db


@@ -7,17 +7,17 @@ import (
"time"
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
prommodel "github.com/prometheus/common/model"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
@@ -30,10 +30,6 @@ func initZapLog() *zap.Logger {
return logger
}
func init() {
prommodel.NameValidationScheme = prommodel.UTF8Validation
}
func main() {
var promConfigPath, skipTopLvlOpsPath string
@@ -87,6 +83,7 @@ func main() {
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
})
if err != nil {
zap.L().Fatal("Failed to create config", zap.Error(err))
@@ -94,16 +91,21 @@ func main() {
version.Info.PrettyPrint(config.Version)
sqlStoreFactories := signoz.NewSQLStoreProviderFactories()
if err := sqlStoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
}
signoz, err := signoz.New(
context.Background(),
config,
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
signoz.NewSQLStoreProviderFactories(),
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
)
if err != nil {
zap.L().Fatal("Failed to create signoz struct", zap.Error(err))
zap.L().Fatal("Failed to create signoz", zap.Error(err))
}
jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")


@@ -1,246 +0,0 @@
package model
import (
"encoding/json"
"fmt"
"reflect"
"time"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/pkg/errors"
)
type License struct {
Key string `json:"key" db:"key"`
ActivationId string `json:"activationId" db:"activationId"`
CreatedAt time.Time `db:"created_at"`
// PlanDetails contains the encrypted plan info
PlanDetails string `json:"planDetails" db:"planDetails"`
// stores parsed license details
LicensePlan
FeatureSet basemodel.FeatureSet
// populated in case license has any errors
ValidationMessage string `db:"validationMessage"`
// used only for sending details to front-end
IsCurrent bool `json:"isCurrent"`
}
func (l *License) MarshalJSON() ([]byte, error) {
return json.Marshal(&struct {
Key string `json:"key" db:"key"`
ActivationId string `json:"activationId" db:"activationId"`
ValidationMessage string `db:"validationMessage"`
IsCurrent bool `json:"isCurrent"`
PlanKey string `json:"planKey"`
ValidFrom time.Time `json:"ValidFrom"`
ValidUntil time.Time `json:"ValidUntil"`
Status string `json:"status"`
}{
Key: l.Key,
ActivationId: l.ActivationId,
IsCurrent: l.IsCurrent,
PlanKey: l.PlanKey,
ValidFrom: time.Unix(l.ValidFrom, 0),
ValidUntil: time.Unix(l.ValidUntil, 0),
Status: l.Status,
ValidationMessage: l.ValidationMessage,
})
}
type LicensePlan struct {
PlanKey string `json:"planKey"`
ValidFrom int64 `json:"validFrom"`
ValidUntil int64 `json:"validUntil"`
Status string `json:"status"`
}
type Licenses struct {
TrialStart int64 `json:"trialStart"`
TrialEnd int64 `json:"trialEnd"`
OnTrial bool `json:"onTrial"`
WorkSpaceBlock bool `json:"workSpaceBlock"`
TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"`
GracePeriodEnd int64 `json:"gracePeriodEnd"`
Licenses []License `json:"licenses"`
}
type SubscriptionServerResp struct {
Status string `json:"status"`
Data Licenses `json:"data"`
}
type Plan struct {
Name string `json:"name"`
}
type LicenseDB struct {
ID string `json:"id"`
Key string `json:"key"`
Data string `json:"data"`
}
type LicenseV3 struct {
ID string
Key string
Data map[string]interface{}
PlanName string
Features basemodel.FeatureSet
Status string
IsCurrent bool
ValidFrom int64
ValidUntil int64
}
func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) {
var zeroValue T
if val, ok := data[key]; ok {
if value, ok := val.(T); ok {
return value, nil
}
return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue))
}
return zeroValue, fmt.Errorf("%s key is missing", key)
}
func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
var features basemodel.FeatureSet
// extract id from data
licenseID, err := extractKeyFromMapStringInterface[string](data, "id")
if err != nil {
return nil, err
}
delete(data, "id")
// extract key from data
licenseKey, err := extractKeyFromMapStringInterface[string](data, "key")
if err != nil {
return nil, err
}
delete(data, "key")
// extract status from data
status, err := extractKeyFromMapStringInterface[string](data, "status")
if err != nil {
return nil, err
}
planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan")
if err != nil {
return nil, err
}
planName, err := extractKeyFromMapStringInterface[string](planMap, "name")
if err != nil {
return nil, err
}
// if license status is invalid then default it to basic
if status == LicenseStatusInvalid {
planName = PlanNameBasic
}
featuresFromZeus := basemodel.FeatureSet{}
if _features, ok := data["features"]; ok {
featuresData, err := json.Marshal(_features)
if err != nil {
return nil, errors.Wrap(err, "failed to marshal features data")
}
if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil {
return nil, errors.Wrap(err, "failed to unmarshal features data")
}
}
switch planName {
case PlanNameTeams:
features = append(features, ProPlan...)
case PlanNameEnterprise:
features = append(features, EnterprisePlan...)
case PlanNameBasic:
features = append(features, BasicPlan...)
default:
features = append(features, BasicPlan...)
}
if len(featuresFromZeus) > 0 {
for _, feature := range featuresFromZeus {
exists := false
for i, existingFeature := range features {
if existingFeature.Name == feature.Name {
features[i] = feature // Replace existing feature
exists = true
break
}
}
if !exists {
features = append(features, feature) // Append if it doesn't exist
}
}
}
data["features"] = features
_validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from")
if err != nil {
_validFrom = 0
}
validFrom := int64(_validFrom)
_validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until")
if err != nil {
_validUntil = 0
}
validUntil := int64(_validUntil)
return &LicenseV3{
ID: licenseID,
Key: licenseKey,
Data: data,
PlanName: planName,
Features: features,
ValidFrom: validFrom,
ValidUntil: validUntil,
Status: status,
}, nil
}
func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) {
licenseDataWithIdAndKey := data
licenseDataWithIdAndKey["id"] = id
licenseDataWithIdAndKey["key"] = key
return NewLicenseV3(licenseDataWithIdAndKey)
}
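Before this file is deleted, a worked example of the parser's behaviour, mirroring the payloads in the test file below; the values are illustrative.
// Hedged example: parsing a TEAMS license payload.
payload := map[string]interface{}{
	"id":          "lid-1",
	"key":         "lkey-1",
	"status":      "ACTIVE",
	"plan":        map[string]any{"name": "TEAMS"},
	"valid_from":  float64(1730899309),
	"valid_until": float64(-1),
}
license, err := NewLicenseV3(payload)
// err == nil; license.PlanName == PlanNameTeams and license.Features starts from ProPlan,
// with any "features" entries in the payload overriding the plan defaults by name.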
func ConvertLicenseV3ToLicenseV2(l *LicenseV3) *License {
planKeyFromPlanName, ok := MapOldPlanKeyToNewPlanName[l.PlanName]
if !ok {
planKeyFromPlanName = Basic
}
return &License{
Key: l.Key,
ActivationId: "",
PlanDetails: "",
FeatureSet: l.Features,
ValidationMessage: "",
IsCurrent: l.IsCurrent,
LicensePlan: LicensePlan{
PlanKey: planKeyFromPlanName,
ValidFrom: l.ValidFrom,
ValidUntil: l.ValidUntil,
Status: l.Status},
}
}
type CheckoutRequest struct {
SuccessURL string `json:"url"`
}
type PortalRequest struct {
SuccessURL string `json:"url"`
}


@@ -1,170 +0,0 @@
package model
import (
"encoding/json"
"testing"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewLicenseV3(t *testing.T) {
testCases := []struct {
name string
data []byte
pass bool
expected *LicenseV3
error error
}{
{
name: "Error for missing license id",
data: []byte(`{}`),
pass: false,
error: errors.New("id key is missing"),
},
{
name: "Error for license id not being a valid string",
data: []byte(`{"id": 10}`),
pass: false,
error: errors.New("id key is not a valid string"),
},
{
name: "Error for missing license key",
data: []byte(`{"id":"does-not-matter"}`),
pass: false,
error: errors.New("key key is missing"),
},
{
name: "Error for invalid string license key",
data: []byte(`{"id":"does-not-matter","key":10}`),
pass: false,
error: errors.New("key key is not a valid string"),
},
{
name: "Error for missing license status",
data: []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`),
pass: false,
error: errors.New("status key is missing"),
},
{
name: "Error for invalid string license status",
data: []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`),
pass: false,
error: errors.New("status key is not a valid string"),
},
{
name: "Error for missing license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`),
pass: false,
error: errors.New("plan key is missing"),
},
{
name: "Error for invalid json license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`),
pass: false,
error: errors.New("plan key is not a valid map[string]interface {}"),
},
{
name: "Error for invalid license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`),
pass: false,
error: errors.New("name key is missing"),
},
{
name: "Parse the entire license properly",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
},
"category": "FREE",
"status": "ACTIVE",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameTeams,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "ACTIVE",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
{
name: "Fallback to basic plan if license status is invalid",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
},
"category": "FREE",
"status": "INVALID",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameBasic,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "INVALID",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
{
name: "fallback states for validFrom and validUntil",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "TEAMS",
},
"valid_from": 1234.456,
"valid_until": 5678.567,
"category": "FREE",
"status": "ACTIVE",
},
PlanName: PlanNameTeams,
ValidFrom: 1234,
ValidUntil: 5678,
Status: "ACTIVE",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
}
for _, tc := range testCases {
var licensePayload map[string]interface{}
err := json.Unmarshal(tc.data, &licensePayload)
require.NoError(t, err)
license, err := NewLicenseV3(licensePayload)
if license != nil {
license.Features = make(model.FeatureSet, 0)
delete(license.Data, "features")
}
if tc.pass {
require.NoError(t, err)
require.NotNil(t, license)
assert.Equal(t, tc.expected, license)
} else {
require.Error(t, err)
assert.EqualError(t, err, tc.error.Error())
require.Nil(t, license)
}
}
}


@@ -1,25 +1,7 @@
package model
import "github.com/SigNoz/signoz/pkg/types"
type User struct {
Id string `json:"id" db:"id"`
Name string `json:"name" db:"name"`
Email string `json:"email" db:"email"`
CreatedAt int64 `json:"createdAt" db:"created_at"`
ProfilePictureURL string `json:"profilePictureURL" db:"profile_picture_url"`
NotFound bool `json:"notFound"`
}
type CreatePATRequestBody struct {
Name string `json:"name"`
Role string `json:"role"`
ExpiresInDays int64 `json:"expiresInDays"`
}
type PAT struct {
CreatedByUser User `json:"createdByUser"`
UpdatedByUser User `json:"updatedByUser"`
types.StorablePersonalAccessToken
}


@@ -1,7 +1,6 @@
package model
import (
"github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
@@ -24,7 +23,6 @@ var (
LicenseStatusInvalid = "INVALID"
)
const DisableUpsell = "DISABLE_UPSELL"
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
@@ -38,90 +36,6 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: DisableUpsell,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
@@ -151,134 +65,12 @@ var BasicPlan = basemodel.FeatureSet{
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var ProPlan = basemodel.FeatureSet{
basemodel.Feature{
Name: SSO,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var EnterprisePlan = basemodel.FeatureSet{
@@ -289,83 +81,6 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.OSS,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.SmartTraceDetail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.CustomMetricsFunction,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelSlack,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelWebhook,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelPagerduty,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelOpsgenie,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
@@ -409,8 +124,8 @@ var EnterprisePlan = basemodel.FeatureSet{
Route: "",
},
basemodel.Feature{
Name: basemodel.HostsInfraMonitoring,
Active: constants.EnableHostsInfraMonitoring(),
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",


@@ -53,7 +53,6 @@ type AnomalyRule struct {
func NewAnomalyRule(
id string,
p *baserules.PostableRule,
featureFlags interfaces.FeatureLookup,
reader interfaces.Reader,
cache cache.Cache,
opts ...baserules.RuleOption,
@@ -89,10 +88,9 @@ func NewAnomalyRule(
zap.L().Info("using seasonality", zap.String("seasonality", t.seasonality.String()))
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
FeatureLookup: featureFlags,
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
@@ -102,21 +100,18 @@ func NewAnomalyRule(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.HourlyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.DailyProvider](featureFlags),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
anomaly.WithFeatureLookup[*anomaly.WeeklyProvider](featureFlags),
)
}
return &t, nil


@@ -23,7 +23,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
tr, err := baserules.NewThresholdRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
opts.UseTraceNewSchema,
@@ -48,7 +47,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
opts.Rule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.PqlEngine,
opts.ManagerOpts.Prometheus,
baserules.WithSQLStore(opts.SQLStore),
)
@@ -66,7 +65,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
ar, err := NewAnomalyRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
@@ -123,7 +121,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
rule, err = baserules.NewThresholdRule(
alertname,
parsedRule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
opts.UseTraceNewSchema,
@@ -145,7 +142,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
parsedRule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.PqlEngine,
opts.ManagerOpts.Prometheus,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
baserules.WithSQLStore(opts.SQLStore),
@@ -160,7 +157,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
rule, err = NewAnomalyRule(
alertname,
parsedRule,
opts.FF,
opts.Reader,
opts.Cache,
baserules.WithSendAlways(),


@@ -4,22 +4,21 @@ import (
"context"
"encoding/json"
"fmt"
"regexp"
"strings"
"sync/atomic"
"time"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/go-co-op/gocron"
"github.com/google/uuid"
"go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/dao"
licenseserver "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/zeus"
)
const (
@@ -34,35 +33,20 @@ var (
)
type Manager struct {
clickhouseConn clickhouse.Conn
licenseRepo *license.Repo
scheduler *gocron.Scheduler
modelDao dao.ModelDao
tenantID string
telemetryStore telemetrystore.TelemetryStore
licenseRepo *license.Repo
scheduler *gocron.Scheduler
zeus zeus.Zeus
}
func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, chUrl string) (*Manager, error) {
hostNameRegex := regexp.MustCompile(`tcp://(?P<hostname>.*):`)
hostNameRegexMatches := hostNameRegex.FindStringSubmatch(chUrl)
tenantID := ""
if len(hostNameRegexMatches) == 2 {
tenantID = hostNameRegexMatches[1]
tenantID = strings.TrimSuffix(tenantID, "-clickhouse")
}
func New(licenseRepo *license.Repo, telemetryStore telemetrystore.TelemetryStore, zeus zeus.Zeus) (*Manager, error) {
m := &Manager{
// repository: repo,
clickhouseConn: clickhouseConn,
licenseRepo: licenseRepo,
telemetryStore: telemetryStore,
zeus: zeus,
scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every day at 00:00 UTC
modelDao: modelDao,
tenantID: tenantID,
}
return m, nil
}
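The constructor no longer derives a tenant ID from the ClickHouse URL; all dependencies are injected. Construction would look roughly like this, with the variables assumed to be built upstream.
// Sketch: wiring the usage manager with injected dependencies.
usageManager, err := New(licenseRepo, telemetryStore, zeusClient)
if err != nil {
	zap.L().Fatal("failed to create usage manager", zap.Error(err))
}
_ = usageManager // the gocron scheduler uploads usage once a day at 00:00 UTC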
@@ -120,7 +104,7 @@ func (lm *Manager) UploadUsage() {
for _, db := range dbs {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
err := lm.telemetryStore.ClickhouseDB().Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
return
@@ -136,17 +120,6 @@ func (lm *Manager) UploadUsage() {
return
}
zap.L().Info("uploading usage data")
orgName := ""
orgNames, orgError := lm.modelDao.GetOrgs(ctx)
if orgError != nil {
zap.L().Error("failed to get org data: %v", zap.Error(orgError))
}
if len(orgNames) == 1 {
orgName = orgNames[0].Name
}
usagesPayload := []model.Usage{}
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
@@ -166,8 +139,8 @@ func (lm *Manager) UploadUsage() {
usageData.ExporterID = usage.ExporterID
usageData.Type = usage.Type
usageData.Tenant = "default"
usageData.OrgName = orgName
usageData.TenantId = lm.tenantID
usageData.OrgName = "default"
usageData.TenantId = "default"
usagesPayload = append(usagesPayload, usageData)
}
@@ -176,6 +149,7 @@ func (lm *Manager) UploadUsage() {
LicenseKey: key,
Usage: usagesPayload,
}
lm.UploadUsageWithExponentalBackOff(ctx, payload)
}


@@ -2,11 +2,30 @@ package postgressqlstore
import (
"context"
"fmt"
"reflect"
"slices"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/uptrace/bun"
)
var (
Identity = "id"
Integer = "bigint"
Text = "text"
)
var (
Org = "org"
User = "user"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
)
type dialect struct {
}
@@ -174,7 +193,10 @@ func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table inte
return true, nil
}
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, references []string, cb func(context.Context) error) error {
if len(references) == 0 {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
exists, err := dialect.TableExists(ctx, bun, newModel)
if err != nil {
return err
@@ -183,12 +205,25 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
return nil
}
_, err = bun.
var fkReferences []string
for _, reference := range references {
if reference == Org && !slices.Contains(fkReferences, OrgReference) {
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
}
}
createTable := bun.
NewCreateTable().
IfNotExists().
Model(newModel).
Exec(ctx)
Model(newModel)
for _, fk := range fkReferences {
createTable = createTable.ForeignKey(fk)
}
_, err = createTable.Exec(ctx)
if err != nil {
return err
}
@@ -209,3 +244,123 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
return nil
}
func (dialect *dialect) AddNotNullDefaultToColumn(ctx context.Context, bun bun.IDB, table string, column, columnType, defaultValue string) error {
query := fmt.Sprintf("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s, ALTER COLUMN %s SET NOT NULL", table, column, defaultValue, column)
if _, err := bun.ExecContext(ctx, query); err != nil {
return err
}
return nil
}
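To make the helper concrete, this is the statement it assembles for an illustrative table/column pair (values hypothetical; note that the columnType parameter is not referenced in the query).
// Illustrative expansion of the Sprintf above.
query := fmt.Sprintf(
	"ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s, ALTER COLUMN %s SET NOT NULL",
	"users", "role", "'ADMIN'", "role",
)
// query == "ALTER TABLE users ALTER COLUMN role SET DEFAULT 'ADMIN', ALTER COLUMN role SET NOT NULL"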
func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
if reference == "" {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
if err != nil {
return err
}
if columnType == Text {
return nil
}
fkReference := ""
if reference == Org {
fkReference = OrgReference
} else if reference == User {
fkReference = UserReference
}
_, err = bun.
NewCreateTable().
IfNotExists().
Model(newModel).
ForeignKey(fkReference).
Exec(ctx)
if err != nil {
return err
}
err = cb(ctx)
if err != nil {
return err
}
_, err = bun.
NewDropTable().
IfExists().
Model(oldModel).
Exec(ctx)
if err != nil {
return err
}
_, err = bun.
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
if err != nil {
return err
}
return nil
}
func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
if reference == "" {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
}
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
if err != nil {
return err
}
if identityExists {
return nil
}
fkReference := ""
if reference == Org {
fkReference = OrgReference
} else if reference == User {
fkReference = UserReference
}
_, err = bun.
NewCreateTable().
IfNotExists().
Model(newModel).
ForeignKey(fkReference).
Exec(ctx)
if err != nil {
return err
}
err = cb(ctx)
if err != nil {
return err
}
_, err = bun.
NewDropTable().
IfExists().
Model(oldModel).
Exec(ctx)
if err != nil {
return err
}
_, err = bun.
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
if err != nil {
return err
}
return nil
}
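A hedged sketch of a migration calling the new signatures; oldModel and newModel are hypothetical bun models, not types from this compare.
// Hypothetical migration step: rename a table while attaching both FK references.
err := dialect.RenameTableAndModifyModel(ctx, db, (*oldModel)(nil), (*newModel)(nil),
	[]string{Org, User}, // mapped to OrgReference and UserReference, deduplicated
	func(ctx context.Context) error {
		return nil // copy rows from the old table into the new one here
	},
)
if err != nil {
	return err
}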


@@ -0,0 +1,33 @@
package featuretypes
import "github.com/SigNoz/signoz/pkg/types/featuretypes"
var (
SingleSignOn = featuretypes.MustNewName("SingleSignOn")
)
func NewEnterpriseRegistry() (featuretypes.Registry, error) {
enterpriseRegistry, err := featuretypes.NewRegistry(
&featuretypes.Feature{
Name: SingleSignOn,
Kind: featuretypes.KindBoolean,
Description: "Enable single sign on.",
Stage: featuretypes.StageStable,
Default: true,
},
)
if err != nil {
return nil, err
}
return enterpriseRegistry.MergeOrOverride(featuretypes.MustNewCommunityRegistry()), nil
}
func MustNewEnterpriseRegistry() featuretypes.Registry {
enterpriseRegistry, err := NewEnterpriseRegistry()
if err != nil {
panic(err)
}
return enterpriseRegistry
}
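Registering a further enterprise flag would follow the same shape; the entry below is purely illustrative and not part of this change.
// Hypothetical additional flag, shown only to illustrate the registry API used above.
var Gateway = featuretypes.MustNewName("Gateway")

gatewayRegistry, err := featuretypes.NewRegistry(
	&featuretypes.Feature{
		Name:        Gateway,
		Kind:        featuretypes.KindBoolean,
		Description: "Enable the gateway integration.",
		Stage:       featuretypes.StageStable,
		Default:     false,
	},
)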


@@ -0,0 +1 @@
package licensetypes


@@ -0,0 +1,245 @@
package licensetypes
import "github.com/SigNoz/signoz/pkg/types/featuretypes"
type License struct {
ID string
Key string
Contents map[string]any
OrgFeatures []*featuretypes.StorableOrgFeature
}
// type License struct {
// Key string `json:"key" db:"key"`
// ActivationId string `json:"activationId" db:"activationId"`
// CreatedAt time.Time `db:"created_at"`
// // PlanDetails contains the encrypted plan info
// PlanDetails string `json:"planDetails" db:"planDetails"`
// // stores parsed license details
// LicensePlan
// FeatureSet basemodel.FeatureSet
// // populated in case license has any errors
// ValidationMessage string `db:"validationMessage"`
// // used only for sending details to front-end
// IsCurrent bool `json:"isCurrent"`
// }
// func (l *License) MarshalJSON() ([]byte, error) {
// return json.Marshal(&struct {
// Key string `json:"key" db:"key"`
// ActivationId string `json:"activationId" db:"activationId"`
// ValidationMessage string `db:"validationMessage"`
// IsCurrent bool `json:"isCurrent"`
// PlanKey string `json:"planKey"`
// ValidFrom time.Time `json:"ValidFrom"`
// ValidUntil time.Time `json:"ValidUntil"`
// Status string `json:"status"`
// }{
// Key: l.Key,
// ActivationId: l.ActivationId,
// IsCurrent: l.IsCurrent,
// PlanKey: l.PlanKey,
// ValidFrom: time.Unix(l.ValidFrom, 0),
// ValidUntil: time.Unix(l.ValidUntil, 0),
// Status: l.Status,
// ValidationMessage: l.ValidationMessage,
// })
// }
// type LicensePlan struct {
// PlanKey string `json:"planKey"`
// ValidFrom int64 `json:"validFrom"`
// ValidUntil int64 `json:"validUntil"`
// Status string `json:"status"`
// }
// type Licenses struct {
// TrialStart int64 `json:"trialStart"`
// TrialEnd int64 `json:"trialEnd"`
// OnTrial bool `json:"onTrial"`
// WorkSpaceBlock bool `json:"workSpaceBlock"`
// TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"`
// GracePeriodEnd int64 `json:"gracePeriodEnd"`
// Licenses []License `json:"licenses"`
// }
// type SubscriptionServerResp struct {
// Status string `json:"status"`
// Data Licenses `json:"data"`
// }
// type Plan struct {
// Name string `json:"name"`
// }
// type LicenseDB struct {
// ID string `json:"id"`
// Key string `json:"key"`
// Data string `json:"data"`
// }
// type LicenseV3 struct {
// ID string
// Key string
// Data map[string]interface{}
// PlanName string
// Features basemodel.FeatureSet
// Status string
// IsCurrent bool
// ValidFrom int64
// ValidUntil int64
// }
// func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) {
// var zeroValue T
// if val, ok := data[key]; ok {
// if value, ok := val.(T); ok {
// return value, nil
// }
// return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue))
// }
// return zeroValue, fmt.Errorf("%s key is missing", key)
// }
// func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
// var features basemodel.FeatureSet
// // extract id from data
// licenseID, err := extractKeyFromMapStringInterface[string](data, "id")
// if err != nil {
// return nil, err
// }
// delete(data, "id")
// // extract key from data
// licenseKey, err := extractKeyFromMapStringInterface[string](data, "key")
// if err != nil {
// return nil, err
// }
// delete(data, "key")
// // extract status from data
// status, err := extractKeyFromMapStringInterface[string](data, "status")
// if err != nil {
// return nil, err
// }
// planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan")
// if err != nil {
// return nil, err
// }
// planName, err := extractKeyFromMapStringInterface[string](planMap, "name")
// if err != nil {
// return nil, err
// }
// // if license status is invalid then default it to basic
// if status == LicenseStatusInvalid {
// planName = PlanNameBasic
// }
// featuresFromZeus := basemodel.FeatureSet{}
// if _features, ok := data["features"]; ok {
// featuresData, err := json.Marshal(_features)
// if err != nil {
// return nil, errors.Wrap(err, "failed to marshal features data")
// }
// if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil {
// return nil, errors.Wrap(err, "failed to unmarshal features data")
// }
// }
// switch planName {
// case PlanNameTeams:
// features = append(features, ProPlan...)
// case PlanNameEnterprise:
// features = append(features, EnterprisePlan...)
// case PlanNameBasic:
// features = append(features, BasicPlan...)
// default:
// features = append(features, BasicPlan...)
// }
// if len(featuresFromZeus) > 0 {
// for _, feature := range featuresFromZeus {
// exists := false
// for i, existingFeature := range features {
// if existingFeature.Name == feature.Name {
// features[i] = feature // Replace existing feature
// exists = true
// break
// }
// }
// if !exists {
// features = append(features, feature) // Append if it doesn't exist
// }
// }
// }
// data["features"] = features
// _validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from")
// if err != nil {
// _validFrom = 0
// }
// validFrom := int64(_validFrom)
// _validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until")
// if err != nil {
// _validUntil = 0
// }
// validUntil := int64(_validUntil)
// return &LicenseV3{
// ID: licenseID,
// Key: licenseKey,
// Data: data,
// PlanName: planName,
// Features: features,
// ValidFrom: validFrom,
// ValidUntil: validUntil,
// Status: status,
// }, nil
// }
// func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) {
// licenseDataWithIdAndKey := data
// licenseDataWithIdAndKey["id"] = id
// licenseDataWithIdAndKey["key"] = key
// return NewLicenseV3(licenseDataWithIdAndKey)
// }
// func ConvertLicenseV3ToLicenseV2(l *LicenseV3) *License {
// planKeyFromPlanName, ok := MapOldPlanKeyToNewPlanName[l.PlanName]
// if !ok {
// planKeyFromPlanName = Basic
// }
// return &License{
// Key: l.Key,
// ActivationId: "",
// PlanDetails: "",
// FeatureSet: l.Features,
// ValidationMessage: "",
// IsCurrent: l.IsCurrent,
// LicensePlan: LicensePlan{
// PlanKey: planKeyFromPlanName,
// ValidFrom: l.ValidFrom,
// ValidUntil: l.ValidUntil,
// Status: l.Status},
// }
// }
// type CheckoutRequest struct {
// SuccessURL string `json:"url"`
// }
// type PortalRequest struct {
// SuccessURL string `json:"url"`
// }


@@ -0,0 +1,160 @@
package licensetypes
// func TestNewLicenseV3(t *testing.T) {
// testCases := []struct {
// name string
// data []byte
// pass bool
// expected *LicenseV3
// error error
// }{
// {
// name: "Error for missing license id",
// data: []byte(`{}`),
// pass: false,
// error: errors.New("id key is missing"),
// },
// {
// name: "Error for license id not being a valid string",
// data: []byte(`{"id": 10}`),
// pass: false,
// error: errors.New("id key is not a valid string"),
// },
// {
// name: "Error for missing license key",
// data: []byte(`{"id":"does-not-matter"}`),
// pass: false,
// error: errors.New("key key is missing"),
// },
// {
// name: "Error for invalid string license key",
// data: []byte(`{"id":"does-not-matter","key":10}`),
// pass: false,
// error: errors.New("key key is not a valid string"),
// },
// {
// name: "Error for missing license status",
// data: []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`),
// pass: false,
// error: errors.New("status key is missing"),
// },
// {
// name: "Error for invalid string license status",
// data: []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`),
// pass: false,
// error: errors.New("status key is not a valid string"),
// },
// {
// name: "Error for missing license plan",
// data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`),
// pass: false,
// error: errors.New("plan key is missing"),
// },
// {
// name: "Error for invalid json license plan",
// data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`),
// pass: false,
// error: errors.New("plan key is not a valid map[string]interface {}"),
// },
// {
// name: "Error for invalid license plan",
// data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`),
// pass: false,
// error: errors.New("name key is missing"),
// },
// {
// name: "Parse the entire license properly",
// data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
// pass: true,
// expected: &LicenseV3{
// ID: "does-not-matter",
// Key: "does-not-matter-key",
// Data: map[string]interface{}{
// "plan": map[string]interface{}{
// "name": "TEAMS",
// },
// "category": "FREE",
// "status": "ACTIVE",
// "valid_from": float64(1730899309),
// "valid_until": float64(-1),
// },
// PlanName: PlanNameTeams,
// ValidFrom: 1730899309,
// ValidUntil: -1,
// Status: "ACTIVE",
// IsCurrent: false,
// Features: model.FeatureSet{},
// },
// },
// {
// name: "Fallback to basic plan if license status is invalid",
// data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
// pass: true,
// expected: &LicenseV3{
// ID: "does-not-matter",
// Key: "does-not-matter-key",
// Data: map[string]interface{}{
// "plan": map[string]interface{}{
// "name": "TEAMS",
// },
// "category": "FREE",
// "status": "INVALID",
// "valid_from": float64(1730899309),
// "valid_until": float64(-1),
// },
// PlanName: PlanNameBasic,
// ValidFrom: 1730899309,
// ValidUntil: -1,
// Status: "INVALID",
// IsCurrent: false,
// Features: model.FeatureSet{},
// },
// },
// {
// name: "fallback states for validFrom and validUntil",
// data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
// pass: true,
// expected: &LicenseV3{
// ID: "does-not-matter",
// Key: "does-not-matter-key",
// Data: map[string]interface{}{
// "plan": map[string]interface{}{
// "name": "TEAMS",
// },
// "valid_from": 1234.456,
// "valid_until": 5678.567,
// "category": "FREE",
// "status": "ACTIVE",
// },
// PlanName: PlanNameTeams,
// ValidFrom: 1234,
// ValidUntil: 5678,
// Status: "ACTIVE",
// IsCurrent: false,
// Features: model.FeatureSet{},
// },
// },
// }
// for _, tc := range testCases {
// var licensePayload map[string]interface{}
// err := json.Unmarshal(tc.data, &licensePayload)
// require.NoError(t, err)
// license, err := NewLicenseV3(licensePayload)
// if license != nil {
// license.Features = make(model.FeatureSet, 0)
// delete(license.Data, "features")
// }
// if tc.pass {
// require.NoError(t, err)
// require.NotNil(t, license)
// assert.Equal(t, tc.expected, license)
// } else {
// require.Error(t, err)
// assert.EqualError(t, err, tc.error.Error())
// require.Nil(t, license)
// }
// }
// }


@@ -0,0 +1,76 @@
package types
import (
"crypto/rand"
"encoding/base64"
"time"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
type GettablePAT struct {
CreatedByUser PatUser `json:"createdByUser"`
UpdatedByUser PatUser `json:"updatedByUser"`
StorablePersonalAccessToken
}
type PatUser struct {
types.User
NotFound bool `json:"notFound"`
}
func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
return GettablePAT{
StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
}
}
type StorablePersonalAccessToken struct {
bun.BaseModel `bun:"table:personal_access_token"`
types.Identifiable
types.TimeAuditable
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
Token string `json:"token" bun:"token,type:text,notnull,unique"`
Name string `json:"name" bun:"name,type:text,notnull"`
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
}
func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
now := time.Now()
if expiresAt != 0 {
// convert expiresAt to unix timestamp from days
expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
}
// Generate a 32-byte random token.
token := make([]byte, 32)
rand.Read(token)
// Encode the token in base64.
encodedToken := base64.StdEncoding.EncodeToString(token)
return StorablePersonalAccessToken{
Token: encodedToken,
Name: name,
Role: role,
UserID: userID,
ExpiresAt: expiresAt,
LastUsed: 0,
Revoked: false,
UpdatedByUserID: "",
TimeAuditable: types.TimeAuditable{
CreatedAt: now,
UpdatedAt: now,
},
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
}
}
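expiresAt is interpreted as a number of days, with zero meaning no expiry. A quick hedged example:
// Sketch: a token valid for 7 days.
pat := NewStorablePersonalAccessToken("ci-token", "ADMIN", "user-id", 7)
// pat.ExpiresAt == time.Now().Unix() + 7*24*60*60 (computed at call time)
// pat.Token is the 44-character base64 encoding of 32 random bytes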

ee/zeus/config.go

@@ -0,0 +1,31 @@
package zeus
import (
"fmt"
neturl "net/url"
"sync"
"github.com/SigNoz/signoz/pkg/zeus"
)
// This will be set via ldflags at build time.
var (
url string = "<unset>"
once sync.Once
GlobalConfig zeus.Config
)
// init initializes and validates the Zeus configuration
func init() {
once.Do(func() {
parsedURL, err := neturl.Parse(url)
if err != nil {
panic(fmt.Errorf("invalid zeus URL: %w", err))
}
GlobalConfig = zeus.Config{URL: parsedURL}
if err := GlobalConfig.Validate(); err != nil {
panic(fmt.Errorf("invalid zeus config: %w", err))
}
})
}
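Since url is meant to be injected at build time, a build would pass an -X flag along these lines; the URL is a placeholder and the package path is inferred from this file's location (ee/zeus):
go build -ldflags "-X github.com/SigNoz/signoz/ee/zeus.url=https://example-zeus.invalid" ./...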


@@ -0,0 +1,61 @@
package implzeus
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/types/metertypes"
"github.com/SigNoz/signoz/pkg/zeus"
)
type Provider struct {
settings factory.ScopedProviderSettings
config zeus.Config
client *client.Client
}
func NewProviderFactory() factory.ProviderFactory[zeus.Zeus, zeus.Config] {
return factory.NewProviderFactory(factory.MustNewName("impl"), func(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
return New(ctx, providerSettings, config)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/zeus/implzeus")
httpClient := client.New(
settings.Logger(),
providerSettings.TracerProvider,
providerSettings.MeterProvider,
client.WithRequestResponseLog(true),
client.WithRetryCount(3),
)
return &Provider{
settings: settings,
config: config,
client: httpClient,
}, nil
}
func (provider *Provider) GetLicense(ctx context.Context, key string) (*licensetypes.License, error) {
return nil, nil
}
func (provider *Provider) GetCheckoutURL(ctx context.Context, key string) (string, error) {
return "", nil
}
func (provider *Provider) GetPortalURL(ctx context.Context, key string) (string, error) {
return "", nil
}
func (provider *Provider) GetDeployment(ctx context.Context, key string) ([]byte, error) {
return nil, nil
}
func (provider *Provider) PutMeters(ctx context.Context, key string, meters metertypes.Meters) error {
return nil
}
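Every method is a stub at this point in the branch: the provider compiles but returns zero values. Construction can go through New directly; the alias below assumes the ee/zeus package shown earlier (both packages are named zeus, so an import alias would be needed).
// Sketch: building the provider; providerSettings comes from the caller's wiring.
zeusClient, err := implzeus.New(ctx, providerSettings, eezeus.GlobalConfig) // eezeus = alias for ee/zeus
if err != nil {
	zap.L().Fatal("failed to create zeus client", zap.Error(err))
}
_ = zeusClient // all methods currently return zero values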


@@ -18,6 +18,13 @@
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
"tooltip_email_to": "Enter email addresses separated by commas.",
"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
"field_slack_recipient": "Recipient",
"field_slack_title": "Title",
"field_slack_description": "Description",


@@ -18,6 +18,12 @@
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"tooltip_webhook_url": "The URL of the webhook to send alerts to. Learn more about webhook integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/webhook/). Integrates with [Incident.io](https://signoz.io/docs/alerts-management/notification-channel/incident-io/), [Rootly](https://signoz.io/docs/alerts-management/notification-channel/rootly/), [Zenduty](https://signoz.io/docs/alerts-management/notification-channel/zenduty/) and [more](https://signoz.io/docs/alerts-management/notification-channel/webhook/#my-incident-management-tool-is-not-listed-can-i-still-integrate).",
"tooltip_slack_url": "The URL of the slack [incoming webhook](https://docs.slack.dev/messaging/sending-messages-using-incoming-webhooks/) to send alerts to. Learn more about slack integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/slack/).",
"tooltip_pager_routing_key": "Learn how to obtain the routing key from your PagerDuty account [here](https://signoz.io/docs/alerts-management/notification-channel/pagerduty/#obtaining-integration-or-routing-key).",
"tooltip_opsgenie_api_key": "Learn how to obtain the API key from your OpsGenie account [here](https://support.atlassian.com/opsgenie/docs/integrate-opsgenie-with-prometheus/).",
"tooltip_email_to": "Enter email addresses separated by commas.",
"tooltip_ms_teams_url": "The URL of the Microsoft Teams [webhook](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498) to send alerts to. Learn more about Microsoft Teams integration in the docs [here](https://signoz.io/docs/alerts-management/notification-channel/ms-teams/).",
"field_slack_recipient": "Recipient",
"field_slack_title": "Title",
"field_slack_description": "Description",


@@ -60,10 +60,14 @@
"INTEGRATIONS": "SigNoz | Integrations",
"ALERT_HISTORY": "SigNoz | Alert Rule History",
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
"MESSAGING_QUEUES": "SigNoz | Messaging Queues",
"MESSAGING_QUEUES_OVERVIEW": "SigNoz | Messaging Queues",
"MESSAGING_QUEUES_KAFKA": "SigNoz | Messaging Queues | Kafka",
"MESSAGING_QUEUES_KAFKA_DETAIL": "SigNoz | Messaging Queues | Kafka",
"MESSAGING_QUEUES_CELERY_TASK": "SigNoz | Messaging Queues | Celery",
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer"
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
"API_MONITORING": "SigNoz | API Monitoring"
}


@@ -1,3 +1,4 @@
import * as Sentry from '@sentry/react';
import { ConfigProvider } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
@@ -15,6 +16,7 @@ import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
@@ -26,6 +28,7 @@ import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { extractDomain } from 'utils/app';
import { Home } from './pageComponents';
import PrivateRoute from './Private';
import defaultRoutes, {
AppRoutes,
@@ -45,7 +48,6 @@ function App(): JSX.Element {
activeLicenseV3,
isFetchingActiveLicenseV3,
userFetchError,
licensesFetchError,
featureFlagsFetchError,
isLoggedIn: isLoggedInState,
featureFlags,
@@ -55,10 +57,7 @@ function App(): JSX.Element {
const { hostname, pathname } = window.location;
const {
isCloudUser: isCloudUserVal,
isEECloudUser: isEECloudUserVal,
} = useGetTenantLicense();
const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
const enableAnalytics = useCallback(
(user: IUser): void => {
@@ -168,7 +167,7 @@ function App(): JSX.Element {
let updatedRoutes = defaultRoutes;
// if the user is a cloud user
if (isCloudUserVal || isEECloudUserVal) {
if (isCloudUser || isEnterpriseSelfHostedUser) {
// if the user is on basic plan then remove billing
if (isOnBasicPlan) {
updatedRoutes = updatedRoutes.filter(
@@ -190,10 +189,10 @@ function App(): JSX.Element {
isLoggedInState,
user,
licenses,
isCloudUserVal,
isCloudUser,
isEnterpriseSelfHostedUser,
isFetchingLicenses,
isFetchingUser,
isEECloudUserVal,
]);
useEffect(() => {
@@ -208,6 +207,7 @@ function App(): JSX.Element {
}
}, [pathname]);
// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
// feature flags shouldn't still be loading, and one of featureFlags or fetchError should be set, indicating that the request is complete
// licenses should also be present. there is no loading or error check for licenses as they are mandatory; if not present then routing
@@ -233,7 +233,12 @@ function App(): JSX.Element {
const showAddCreditCardModal =
!isPremiumSupportEnabled && !trialInfo?.trialConvertedToSubscription;
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
if (
isLoggedInState &&
isChatSupportEnabled &&
!showAddCreditCardModal &&
(isCloudUser || isEnterpriseSelfHostedUser)
) {
window.Intercom('boot', {
app_id: process.env.INTERCOM_APP_ID,
email: user?.email || '',
@@ -252,13 +257,53 @@ function App(): JSX.Element {
licenses,
activeLicenseV3,
trialInfo,
isCloudUser,
isEnterpriseSelfHostedUser,
]);
useEffect(() => {
if (!isFetchingUser && isCloudUserVal && user && user.email) {
if (!isFetchingUser && isCloudUser && user && user.email) {
enableAnalytics(user);
}
}, [user, isFetchingUser, isCloudUserVal, enableAnalytics]);
}, [user, isFetchingUser, isCloudUser, enableAnalytics]);
useEffect(() => {
if (isCloudUser || isEnterpriseSelfHostedUser) {
if (process.env.POSTHOG_KEY) {
posthog.init(process.env.POSTHOG_KEY, {
api_host: 'https://us.i.posthog.com',
person_profiles: 'identified_only', // or 'always' to create profiles for anonymous users as well
});
}
Sentry.init({
dsn: process.env.SENTRY_DSN,
tunnel: process.env.TUNNEL_URL,
environment: 'production',
integrations: [
Sentry.browserTracingIntegration(),
Sentry.replayIntegration({
maskAllText: false,
blockAllMedia: false,
}),
],
// Performance Monitoring
tracesSampleRate: 1.0, // Capture 100% of the transactions
// Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
tracePropagationTargets: [],
// Session Replay
replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
});
} else {
posthog.reset();
Sentry.close();
if (window.cioanalytics && typeof window.cioanalytics.reset === 'function') {
window.cioanalytics.reset();
}
}
}, [isCloudUser, isEnterpriseSelfHostedUser]);
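// A minimal aside, sketching how the hard-coded sampling knobs above could be
// driven by environment variables instead; SENTRY_TRACES_RATE and
// SENTRY_REPLAY_RATE are hypothetical names, not part of this change:
//
//   const tracesSampleRate = Number(process.env.SENTRY_TRACES_RATE ?? '1.0');
//   const replaysSessionSampleRate = Number(process.env.SENTRY_REPLAY_RATE ?? '0.1');
//   Sentry.init({ dsn: process.env.SENTRY_DSN, tracesSampleRate, replaysSessionSampleRate });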
// if the user is in logged in state
if (isLoggedInState) {
@@ -270,60 +315,55 @@ function App(): JSX.Element {
// if the required calls fail, route to the something-went-wrong error page
// this needs to take precedence over the data-missing check because if there is an error, data will never be loaded and it will
// get stuck in indefinite loading
if (
(userFetchError || licensesFetchError) &&
pathname !== ROUTES.SOMETHING_WENT_WRONG
) {
if (userFetchError && pathname !== ROUTES.SOMETHING_WENT_WRONG) {
history.replace(ROUTES.SOMETHING_WENT_WRONG);
}
// if all of the data is not yet set, return a spinner; this is required because there is a gap between the loading states and the data being set
if (
(!licenses || !user.email || !featureFlags) &&
!userFetchError &&
!licensesFetchError
) {
if ((!licenses || !user.email || !featureFlags) && !userFetchError) {
return <Spinner tip="Loading..." />;
}
}
return (
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</NotificationProvider>
</CompatRouter>
</Router>
</ConfigProvider>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</NotificationProvider>
</CompatRouter>
</Router>
</ConfigProvider>
</Sentry.ErrorBoundary>
);
}

View File

@@ -521,7 +521,7 @@ export default function CeleryOverviewTable({
locale={{
emptyText: isLoading ? null : <Typography.Text>No data</Typography.Text>,
}}
scroll={{ x: true }}
scroll={{ x: 'max-content' }}
showSorterTooltip
onDragColumn={handleDragColumn}
onRow={(record): { onClick: () => void; className: string } => ({

View File

@@ -18,6 +18,7 @@ function CopyClipboardHOC({
notifications.success({
message: notificationMessage,
key: notificationMessage,
});
}
}, [value, notifications, entityKey]);

View File

@@ -1,3 +1,5 @@
import './ResizeTable.styles.scss';
import { SyntheticEvent, useMemo } from 'react';
import { Resizable, ResizeCallbackData } from 'react-resizable';
@@ -10,8 +12,8 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
const handle = useMemo(
() => (
<SpanStyle
className="react-resizable-handle"
onClick={(e): void => e.stopPropagation()}
className="resize-handle"
/>
),
[],
@@ -19,7 +21,7 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
if (!width) {
// eslint-disable-next-line react/jsx-props-no-spreading
return <th {...restProps} />;
return <th {...restProps} className="resizable-header" />;
}
return (
@@ -29,9 +31,10 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
handle={handle}
onResize={onResize}
draggableOpts={enableUserSelectHack}
minConstraints={[150, 0]}
>
{/* eslint-disable-next-line react/jsx-props-no-spreading */}
<th {...restProps} />
<th {...restProps} className="resizable-header" />
</Resizable>
);
}

View File

@@ -0,0 +1,53 @@
.resizable-header {
user-select: none;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
position: relative;
.ant-table-column-title {
white-space: normal;
overflow: hidden;
text-overflow: ellipsis;
}
}
.resize-main-table {
.ant-table-body {
.ant-table-tbody {
.ant-table-row {
.ant-table-cell {
.ant-typography {
white-space: unset;
}
}
}
}
}
}
.logs-table,
.traces-table {
.resize-table {
.resize-handle {
position: absolute;
top: 0;
bottom: 0;
inset-inline-end: -5px;
width: 10px;
cursor: col-resize;
&::after {
content: '';
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
width: 1px;
height: 1.6em;
background-color: var(--bg-slate-200);
transition: background-color 0.2s;
}
}
}
}

View File

@@ -2,35 +2,63 @@
import { Table } from 'antd';
import { ColumnsType } from 'antd/lib/table';
import cx from 'classnames';
import { dragColumnParams } from 'hooks/useDragColumns/configs';
import { set } from 'lodash-es';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { debounce, set } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import {
SyntheticEvent,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from 'react';
import ReactDragListView from 'react-drag-listview';
import { ResizeCallbackData } from 'react-resizable';
import { Widgets } from 'types/api/dashboard/getAll';
import ResizableHeader from './ResizableHeader';
import { DragSpanStyle } from './styles';
import { ResizeTableProps } from './types';
// eslint-disable-next-line sonarjs/cognitive-complexity
function ResizeTable({
columns,
onDragColumn,
pagination,
widgetId,
shouldPersistColumnWidths = false,
...restProps
}: ResizeTableProps): JSX.Element {
const [columnsData, setColumns] = useState<ColumnsType>([]);
const { setColumnWidths, selectedDashboard } = useDashboard();
const columnWidths = shouldPersistColumnWidths
? (selectedDashboard?.data?.widgets?.find(
(widget) => widget.id === widgetId,
) as Widgets)?.columnWidths
: undefined;
const updateAllColumnWidths = useRef(
debounce((widthsConfig: Record<string, number>) => {
if (!widgetId || !shouldPersistColumnWidths) return;
setColumnWidths?.((prev) => ({
...prev,
[widgetId]: widthsConfig,
}));
}, 1000),
).current;
const handleResize = useCallback(
(index: number) => (
_e: SyntheticEvent<Element>,
e: SyntheticEvent<Element>,
{ size }: ResizeCallbackData,
): void => {
e.preventDefault();
e.stopPropagation();
const newColumns = [...columnsData];
newColumns[index] = {
...newColumns[index],
@@ -65,6 +93,7 @@ function ResizeTable({
...restProps,
components: { header: { cell: ResizableHeader } },
columns: mergedColumns,
className: cx('resize-main-table', restProps.className),
};
set(
@@ -78,9 +107,39 @@ function ResizeTable({
useEffect(() => {
if (columns) {
setColumns(columns);
// Apply stored column widths from widget configuration
const columnsWithStoredWidths = columns.map((col) => {
const dataIndex = (col as RowData).dataIndex as string;
if (dataIndex && columnWidths && columnWidths[dataIndex]) {
return {
...col,
width: columnWidths[dataIndex], // Apply stored width
};
}
return col;
});
setColumns(columnsWithStoredWidths);
}
}, [columns]);
}, [columns, columnWidths]);
useEffect(() => {
if (!shouldPersistColumnWidths) return;
// Collect all column widths in a single object
const newColumnWidths: Record<string, number> = {};
mergedColumns.forEach((col) => {
if (col.width && (col as RowData).dataIndex) {
const dataIndex = (col as RowData).dataIndex as string;
newColumnWidths[dataIndex] = col.width as number;
}
});
// Only update if there are actual widths to set
if (Object.keys(newColumnWidths).length > 0) {
updateAllColumnWidths(newColumnWidths);
}
}, [mergedColumns, updateAllColumnWidths, shouldPersistColumnWidths]);
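// A minimal standalone sketch of the debounce-persistence pattern used above:
// resizes update local state immediately, while writes to the dashboard are
// coalesced into one call per second. persistWidths and savePreferences are
// hypothetical names, not part of this change:
//
//   import { debounce } from 'lodash-es';
//
//   const persistWidths = debounce((widths: Record<string, number>): void => {
//     savePreferences(widths); // assumed persistence call
//   }, 1000);
//
//   // on every resize: setLocalWidths(next) for instant feedback, then
//   // persistWidths(next) so storage sees only the settled value.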
return onDragColumn ? (
<ReactDragListView.DragColumn {...dragColumnParams} onDragEnd={onDragColumn}>

View File

@@ -8,6 +8,8 @@ export const SpanStyle = styled.span`
width: 0.625rem;
height: 100%;
cursor: col-resize;
margin-left: 4px;
margin-right: 4px;
`;
export const DragSpanStyle = styled.span`

View File

@@ -9,6 +9,8 @@ import { TableDataSource } from './contants';
export interface ResizeTableProps extends TableProps<any> {
onDragColumn?: (fromIndex: number, toIndex: number) => void;
widgetId?: string;
shouldPersistColumnWidths?: boolean;
}
export interface DynamicColumnTableProps extends TableProps<any> {
tablesource: typeof TableDataSource[keyof typeof TableDataSource];

View File

@@ -1,28 +1,13 @@
// keep this consistent with backend constants.go
export enum FeatureKeys {
SSO = 'SSO',
ENTERPRISE_PLAN = 'ENTERPRISE_PLAN',
BASIC_PLAN = 'BASIC_PLAN',
ALERT_CHANNEL_SLACK = 'ALERT_CHANNEL_SLACK',
ALERT_CHANNEL_WEBHOOK = 'ALERT_CHANNEL_WEBHOOK',
ALERT_CHANNEL_PAGERDUTY = 'ALERT_CHANNEL_PAGERDUTY',
ALERT_CHANNEL_OPSGENIE = 'ALERT_CHANNEL_OPSGENIE',
ALERT_CHANNEL_MSTEAMS = 'ALERT_CHANNEL_MSTEAMS',
DurationSort = 'DurationSort',
TimestampSort = 'TimestampSort',
SMART_TRACE_DETAIL = 'SMART_TRACE_DETAIL',
CUSTOM_METRICS_FUNCTION = 'CUSTOM_METRICS_FUNCTION',
QUERY_BUILDER_PANELS = 'QUERY_BUILDER_PANELS',
QUERY_BUILDER_ALERTS = 'QUERY_BUILDER_ALERTS',
DISABLE_UPSELL = 'DISABLE_UPSELL',
USE_SPAN_METRICS = 'USE_SPAN_METRICS',
OSS = 'OSS',
ONBOARDING = 'ONBOARDING',
CHAT_SUPPORT = 'CHAT_SUPPORT',
GATEWAY = 'GATEWAY',
PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
QUERY_BUILDER_SEARCH_V2 = 'QUERY_BUILDER_SEARCH_V2',
ANOMALY_DETECTION = 'ANOMALY_DETECTION',
AWS_INTEGRATION = 'AWS_INTEGRATION',
ONBOARDING_V3 = 'ONBOARDING_V3',
THIRD_PARTY_API = 'THIRD_PARTY_API',
TRACE_FUNNELS = 'TRACE_FUNNELS',
}

View File

@@ -1,3 +1,4 @@
import ROUTES from 'constants/routes';
import AlertChannels from 'container/AllAlertChannels';
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -20,6 +21,13 @@ jest.mock('hooks/useNotifications', () => ({
})),
}));
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string } => ({
pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
}),
}));
describe('Alert Channels Settings List page', () => {
beforeEach(() => {
render(<AlertChannels />);

View File

@@ -1,3 +1,4 @@
import ROUTES from 'constants/routes';
import AlertChannels from 'container/AllAlertChannels';
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -25,6 +26,13 @@ jest.mock('hooks/useComponentPermission', () => ({
default: jest.fn().mockImplementation(() => [false]),
}));
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string } => ({
pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
}),
}));
describe('Alert Channels Settings List page (Normal User)', () => {
beforeEach(() => {
render(<AlertChannels />);

View File

@@ -31,6 +31,10 @@ jest.mock('hooks/useNotifications', () => ({
})),
}));
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));
describe('Create Alert Channel', () => {
afterEach(() => {
jest.clearAllMocks();

View File

@@ -18,6 +18,10 @@ import { render, screen } from 'tests/test-utils';
import { testLabelInputAndHelpValue } from './testUtils';
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));
describe('Create Alert Channel (Normal User)', () => {
afterEach(() => {
jest.clearAllMocks();

View File

@@ -20,6 +20,10 @@ jest.mock('hooks/useNotifications', () => ({
})),
}));
jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
MarkdownRenderer: jest.fn(() => <div>Mocked MarkdownRenderer</div>),
}));
describe('Should check if the edit alert channel is properly displayed ', () => {
beforeEach(() => {
render(<EditAlertChannels initialValue={editAlertChannelInitialValue} />);

View File

@@ -1,5 +1,6 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Select, Spin, Table, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
@@ -151,6 +152,7 @@ function AllEndPoints({
if (groupBy.length === 0) {
setSelectedEndPointName(record.endpointName); // this will open up the endpoint details tab
setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
logEvent('API Monitoring: Endpoint name row clicked', {});
} else {
handleGroupByRowClick(record); // this will prepare the nested query payload
}

View File

@@ -392,6 +392,39 @@
gap: 20px;
padding-top: 20px;
.endpoint-meta-data {
display: flex;
gap: 8px;
.endpoint-meta-data-pill {
display: flex;
align-items: flex-start;
border-radius: 4px;
border: 1px solid var(--bg-slate-300);
width: fit-content;
.endpoint-meta-data-label {
display: flex;
padding: 6px 8px;
align-items: center;
gap: 4px;
border-right: 1px solid var(--bg-slate-300);
color: var(--text-vanilla-100);
background: var(--bg-slate-500);
height: calc(100% - 12px);
}
.endpoint-meta-data-value {
display: flex;
padding: 6px 8px;
justify-content: center;
align-items: center;
gap: 10px;
color: var(--text-vanilla-400);
background: var(--bg-slate-400);
height: calc(100% - 12px);
}
}
}
.endpoint-details-filters-container {
display: flex;
flex-direction: row;
@@ -405,6 +438,13 @@
}
}
.ant-select-item,
.ant-select-item-option-content {
flex: auto;
white-space: normal;
overflow-wrap: break-word;
}
.status-code-table-container {
border-radius: 3px;
border: 1px solid var(--bg-slate-500);
@@ -809,6 +849,13 @@
width: 100%;
}
}
.ant-select-item,
.ant-select-item-option-content {
flex: auto;
white-space: normal;
overflow-wrap: break-word;
}
}
.lightMode {
@@ -917,6 +964,20 @@
}
}
.endpoint-meta-data {
.endpoint-meta-data-pill {
.endpoint-meta-data-label {
color: var(--text-ink-300);
background: var(--bg-vanilla-100);
}
.endpoint-meta-data-value {
color: var(--text-ink-300);
background: var(--bg-vanilla-100);
}
}
}
.status-code-table-container {
.ant-table {
.ant-table-thead > tr > th {

View File

@@ -19,12 +19,14 @@ function DomainDetails({
selectedDomainIndex,
setSelectedDomainIndex,
domainListLength,
domainListFilters,
}: {
domainData: any;
handleClose: () => void;
selectedDomainIndex: number;
setSelectedDomainIndex: (index: number) => void;
domainListLength: number;
domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
const [selectedView, setSelectedView] = useState<VIEWS>(VIEWS.ALL_ENDPOINTS);
const [selectedEndPointName, setSelectedEndPointName] = useState<string>('');
@@ -132,6 +134,7 @@ function DomainDetails({
domainName={domainData.domainName}
endPointName={selectedEndPointName}
setSelectedEndPointName={setSelectedEndPointName}
domainListFilters={domainListFilters}
/>
)}
</>

View File

@@ -2,7 +2,10 @@ import { ENTITY_VERSION_V4 } from 'constants/app';
import { initialQueriesMap } from 'constants/queryBuilder';
import {
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
extractPortAndEndpoint,
getEndPointDetailsQueryPayload,
getLatencyOverTimeWidgetData,
getRateOverTimeWidgetData,
} from 'container/ApiMonitoring/utils';
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
@@ -27,10 +30,12 @@ function EndPointDetails({
domainName,
endPointName,
setSelectedEndPointName,
domainListFilters,
}: {
domainName: string;
endPointName: string;
setSelectedEndPointName: (value: string) => void;
domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
@@ -101,8 +106,6 @@ function EndPointDetails({
const [
endPointMetricsDataQuery,
endPointStatusCodeDataQuery,
endPointRateOverTimeDataQuery,
endPointLatencyOverTimeDataQuery,
endPointDropDownDataQuery,
endPointDependentServicesDataQuery,
endPointStatusCodeBarChartsDataQuery,
@@ -115,12 +118,29 @@ function EndPointDetails({
endPointDetailsDataQueries[3],
endPointDetailsDataQueries[4],
endPointDetailsDataQueries[5],
endPointDetailsDataQueries[6],
endPointDetailsDataQueries[7],
],
[endPointDetailsDataQueries],
);
const { endpoint, port } = useMemo(
() => extractPortAndEndpoint(endPointName),
[endPointName],
);
const [rateOverTimeWidget, latencyOverTimeWidget] = useMemo(
() => [
getRateOverTimeWidgetData(domainName, endPointName, {
items: [...domainListFilters.items, ...filters.items],
op: filters.op,
}),
getLatencyOverTimeWidgetData(domainName, endPointName, {
items: [...domainListFilters.items, ...filters.items],
op: filters.op,
}),
],
[domainName, endPointName, filters, domainListFilters],
);
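// The spread pattern above recurs wherever parent-level and local filters are
// combined; a small helper could name it. mergeFilters is a hypothetical
// sketch, not part of this change:
//
//   const mergeFilters = (
//     parent: IBuilderQuery['filters'],
//     local: IBuilderQuery['filters'],
//   ): IBuilderQuery['filters'] => ({
//     items: [...parent.items, ...local.items],
//     op: local.op,
//   });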
return (
<div className="endpoint-details-container">
<div className="endpoint-details-filters-container">
@@ -129,6 +149,8 @@ function EndPointDetails({
selectedEndPointName={endPointName}
setSelectedEndPointName={setSelectedEndPointName}
endPointDropDownDataQuery={endPointDropDownDataQuery}
parentContainerDiv=".endpoint-details-filters-container"
dropdownStyle={{ width: 'calc(100% - 36px)' }}
/>
</div>
<div className="endpoint-details-filters-container-search">
@@ -141,6 +163,16 @@ function EndPointDetails({
/>
</div>
</div>
<div className="endpoint-meta-data">
<div className="endpoint-meta-data-pill">
<div className="endpoint-meta-data-label">Endpoint</div>
<div className="endpoint-meta-data-value">{endpoint || '-'}</div>
</div>
<div className="endpoint-meta-data-pill">
<div className="endpoint-meta-data-label">Port</div>
<div className="endpoint-meta-data-value">{port || '-'}</div>
</div>
</div>
<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
{!isServicesFilterApplied && (
<DependentServices
@@ -152,18 +184,14 @@ function EndPointDetails({
endPointStatusCodeLatencyBarChartsDataQuery={
endPointStatusCodeLatencyBarChartsDataQuery
}
domainName={domainName}
endPointName={endPointName}
domainListFilters={domainListFilters}
filters={filters}
/>
<StatusCodeTable endPointStatusCodeDataQuery={endPointStatusCodeDataQuery} />
<MetricOverTimeGraph
metricOverTimeDataQuery={endPointRateOverTimeDataQuery}
widgetInfoIndex={0}
endPointName={endPointName}
/>
<MetricOverTimeGraph
metricOverTimeDataQuery={endPointLatencyOverTimeDataQuery}
widgetInfoIndex={1}
endPointName={endPointName}
/>
<MetricOverTimeGraph widget={rateOverTimeWidget} />
<MetricOverTimeGraph widget={latencyOverTimeWidget} />
</div>
);
}

View File

@@ -8,6 +8,7 @@ import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';
import EndPointDetailsZeroState from './components/EndPointDetailsZeroState';
@@ -17,10 +18,12 @@ function EndPointDetailsWrapper({
domainName,
endPointName,
setSelectedEndPointName,
domainListFilters,
}: {
domainName: string;
endPointName: string;
setSelectedEndPointName: (value: string) => void;
domainListFilters: IBuilderQuery['filters'];
}): JSX.Element {
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
@@ -69,6 +72,7 @@ function EndPointDetailsWrapper({
domainName={domainName}
endPointName={endPointName}
setSelectedEndPointName={setSelectedEndPointName}
domainListFilters={domainListFilters}
/>
);
}

View File

@@ -28,6 +28,8 @@ function EndPointDetailsZeroState({
<EndPointsDropDown
setSelectedEndPointName={setSelectedEndPointName}
endPointDropDownDataQuery={endPointDropDownDataQuery}
parentContainerDiv=".end-point-details-zero-state-wrapper"
dropdownStyle={{ width: '60%' }}
/>
</div>
</div>

View File

@@ -70,7 +70,7 @@ function EndPointMetrics({
<Skeleton.Button active size="small" />
) : (
<Tooltip title={metricsData?.rate}>
<span className="round-metric-tag">{metricsData?.rate}/sec</span>
<span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
</Tooltip>
)}
</Typography.Text>

View File

@@ -8,16 +8,22 @@ interface EndPointsDropDownProps {
selectedEndPointName?: string;
setSelectedEndPointName: (value: string) => void;
endPointDropDownDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
parentContainerDiv?: string;
dropdownStyle?: React.CSSProperties;
}
const defaultProps = {
selectedEndPointName: '',
parentContainerDiv: '',
dropdownStyle: {},
};
function EndPointsDropDown({
selectedEndPointName,
setSelectedEndPointName,
endPointDropDownDataQuery,
parentContainerDiv,
dropdownStyle,
}: EndPointsDropDownProps): JSX.Element {
const { data, isLoading, isFetching } = endPointDropDownDataQuery;
@@ -39,6 +45,13 @@ function EndPointsDropDown({
style={{ width: '100%' }}
onChange={handleChange}
options={formattedData}
getPopupContainer={
parentContainerDiv
? (): HTMLElement =>
document.querySelector(parentContainerDiv) as HTMLElement
: (triggerNode): HTMLElement => triggerNode.parentNode as HTMLElement
}
dropdownStyle={dropdownStyle}
/>
);
}
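// Context for getPopupContainer above: antd mounts dropdown popups on
// document.body by default, so they can detach visually from scrolling or
// overflow-hidden ancestors. Scoping the popup to a parent container keeps it
// anchored. A minimal sketch with an assumed selector:
//
//   <Select
//     getPopupContainer={(): HTMLElement =>
//       document.querySelector('.endpoint-details-filters-container') as HTMLElement
//     }
//   />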

View File

@@ -1,6 +1,7 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table } from 'antd';
import { ColumnType } from 'antd/lib/table';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
@@ -114,6 +115,7 @@ function ExpandedRow({
onClick: (): void => {
setSelectedEndPointName(record.endpointName);
setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
logEvent('API Monitoring: Endpoint name row clicked', {});
},
className: 'expanded-clickable-row',
})}

View File

@@ -1,110 +1,18 @@
import { Card, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import Uplot from 'components/Uplot';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
apiWidgetInfo,
extractPortAndEndpoint,
getFormattedChartData,
} from 'container/ApiMonitoring/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useMemo, useRef } from 'react';
import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { GlobalReducer } from 'types/reducer/globalTime';
import { Options } from 'uplot';
import ErrorState from './ErrorState';
function MetricOverTimeGraph({
metricOverTimeDataQuery,
widgetInfoIndex,
endPointName,
}: {
metricOverTimeDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
widgetInfoIndex: number;
endPointName: string;
}): JSX.Element {
const { data } = metricOverTimeDataQuery;
const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const graphRef = useRef<HTMLDivElement>(null);
const dimensions = useResizeObserver(graphRef);
const { endpoint } = extractPortAndEndpoint(endPointName);
const formattedChartData = useMemo(
() => getFormattedChartData(data?.payload, [endpoint]),
[data?.payload, endpoint],
);
const chartData = useMemo(() => getUPlotChartData(formattedChartData), [
formattedChartData,
]);
const isDarkMode = useIsDarkMode();
const options = useMemo(
() =>
getUPlotChartOptions({
apiResponse: formattedChartData,
isDarkMode,
dimensions,
yAxisUnit: apiWidgetInfo[widgetInfoIndex].yAxisUnit,
softMax: null,
softMin: null,
minTimeScale: Math.floor(minTime / 1e9),
maxTimeScale: Math.floor(maxTime / 1e9),
panelType: PANEL_TYPES.TIME_SERIES,
}),
[
formattedChartData,
minTime,
maxTime,
widgetInfoIndex,
dimensions,
isDarkMode,
],
);
const renderCardContent = useCallback(
(query: UseQueryResult<SuccessResponse<any>, unknown>): JSX.Element => {
if (query.isLoading) {
return <Skeleton />;
}
if (query.error) {
return <ErrorState refetch={query.refetch} />;
}
return (
<div
className={cx('chart-container', {
'no-data-container':
!query.isLoading && !query?.data?.payload?.data?.result?.length,
})}
>
<Uplot options={options as Options} data={chartData} />
</div>
);
},
[options, chartData],
);
import { Card } from 'antd';
import GridCard from 'container/GridCardLayout/GridCard';
import { Widgets } from 'types/api/dashboard/getAll';
function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
return (
<div>
<Card bordered className="endpoint-details-card">
<Typography.Text>{apiWidgetInfo[widgetInfoIndex].title}</Typography.Text>
<div className="graph-container" ref={graphRef}>
{renderCardContent(metricOverTimeDataQuery)}
<div className="graph-container">
<GridCard
widget={widget}
isQueryEnabled
onDragSelect={(): void => {}}
customOnDragSelect={(): void => {}}
/>
</div>
</Card>
</div>

View File

@@ -1,13 +1,22 @@
import { Color } from '@signozhq/design-tokens';
import { Button, Card, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import { useGetGraphCustomSeries } from 'components/CeleryTask/useGetGraphCustomSeries';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import Uplot from 'components/Uplot';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
getCustomFiltersForBarChart,
getFormattedEndPointStatusCodeChartData,
getStatusCodeBarChartWidgetData,
statusCodeWidgetInfo,
} from 'container/ApiMonitoring/utils';
import { handleGraphClick } from 'container/GridCardLayout/GridCard/utils';
import { useGraphClickToShowButton } from 'container/GridCardLayout/useGraphClickToShowButton';
import useNavigateToExplorerPages from 'container/GridCardLayout/useNavigateToExplorerPages';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useNotifications } from 'hooks/useNotifications';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useMemo, useRef, useState } from 'react';
@@ -15,6 +24,8 @@ import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';
import { Options } from 'uplot';
@@ -23,6 +34,10 @@ import ErrorState from './ErrorState';
function StatusCodeBarCharts({
endPointStatusCodeBarChartsDataQuery,
endPointStatusCodeLatencyBarChartsDataQuery,
domainName,
endPointName,
domainListFilters,
filters,
}: {
endPointStatusCodeBarChartsDataQuery: UseQueryResult<
SuccessResponse<any>,
@@ -32,6 +47,10 @@ function StatusCodeBarCharts({
SuccessResponse<any>,
unknown
>;
domainName: string;
endPointName: string;
domainListFilters: IBuilderQuery['filters'];
filters: IBuilderQuery['filters'];
}): JSX.Element {
// 0 : Status Code Count
// 1 : Status Code Latency
@@ -85,6 +104,72 @@ function StatusCodeBarCharts({
const isDarkMode = useIsDarkMode();
const graphClick = useGraphClickToShowButton({
graphRef,
isButtonEnabled: true,
buttonClassName: 'view-onclick-show-button',
});
const navigateToExplorer = useNavigateToExplorer();
const navigateToExplorerPages = useNavigateToExplorerPages();
const { notifications } = useNotifications();
const { getCustomSeries } = useGetGraphCustomSeries({
isDarkMode,
drawStyle: 'bars',
colorMapping: {
'200-299': Color.BG_FOREST_500,
'300-399': Color.BG_AMBER_400,
'400-499': Color.BG_CHERRY_500,
'500-599': Color.BG_ROBIN_500,
Other: Color.BG_SIENNA_500,
},
});
const widget = useMemo<Widgets>(
() =>
getStatusCodeBarChartWidgetData(domainName, endPointName, {
items: [...domainListFilters.items, ...filters.items],
op: filters.op,
}),
[domainName, endPointName, domainListFilters, filters],
);
const graphClickHandler = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
metric?: { [key: string]: string },
queryData?: { queryName: string; inFocusOrNot: boolean },
): void => {
const customFilters = getCustomFiltersForBarChart(metric);
handleGraphClick({
xValue,
yValue,
mouseX,
mouseY,
metric,
queryData,
widget,
navigateToExplorerPages,
navigateToExplorer,
notifications,
graphClick,
customFilters,
});
},
[
widget,
navigateToExplorerPages,
navigateToExplorer,
notifications,
graphClick,
],
);
const options = useMemo(
() =>
getUPlotChartOptions({
@@ -100,6 +185,8 @@ function StatusCodeBarCharts({
minTimeScale: Math.floor(minTime / 1e9),
maxTimeScale: Math.floor(maxTime / 1e9),
panelType: PANEL_TYPES.BAR,
onClickHandler: graphClickHandler,
customSeries: getCustomSeries,
}),
[
minTime,
@@ -109,6 +196,8 @@ function StatusCodeBarCharts({
formattedEndPointStatusCodeBarChartsDataPayload,
formattedEndPointStatusCodeLatencyBarChartsDataPayload,
isDarkMode,
graphClickHandler,
getCustomSeries,
],
);

View File

@@ -3,6 +3,7 @@ import '../Explorer.styles.scss';
import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table, Typography } from 'antd';
import axios from 'api';
import logEvent from 'api/common/logEvent';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import cx from 'classnames';
@@ -130,6 +131,7 @@ function DomainList({
(item) => item.key === record.key,
);
setSelectedDomainIndex(dataIndex);
logEvent('API Monitoring: Domain name row clicked', {});
}
},
className: 'expanded-clickable-row',
@@ -147,6 +149,7 @@ function DomainList({
handleClose={(): void => {
setSelectedDomainIndex(-1);
}}
domainListFilters={query?.filters}
/>
)}
</section>

View File

@@ -3,13 +3,14 @@ import './Explorer.styles.scss';
import { FilterOutlined } from '@ant-design/icons';
import * as Sentry from '@sentry/react';
import { Switch, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import QuickFilters from 'components/QuickFilters/QuickFilters';
import { QuickFiltersSource } from 'components/QuickFilters/types';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useMemo, useState } from 'react';
import { useEffect, useMemo, useState } from 'react';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
@@ -21,6 +22,10 @@ function Explorer(): JSX.Element {
const { currentQuery } = useQueryBuilder();
useEffect(() => {
logEvent('API Monitoring: Landing page visited', {});
}, []);
const { handleChangeQueryData } = useQueryOperations({
index: 0,
query: currentQuery.builder.queryData[0],
@@ -64,7 +69,12 @@ function Explorer(): JSX.Element {
style={{ marginLeft: 'auto' }}
checked={showIP}
onClick={(): void => {
setShowIP((showIP) => !showIP);
setShowIP((showIP): boolean => {
logEvent('API Monitoring: Show IP addresses clicked', {
showIP: !showIP,
});
return !showIP;
});
}}
/>
</div>

View File

@@ -8,16 +8,23 @@ import {
} from 'components/QuickFilters/types';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { GraphClickMetaData } from 'container/GridCardLayout/useNavigateToExplorerPages';
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
import dayjs from 'dayjs';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { cloneDeep } from 'lodash-es';
import { ArrowUpDown, ChevronDown, ChevronRight } from 'lucide-react';
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import {
BaseAutocompleteData,
DataTypes,
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import {
IBuilderQuery,
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
import { QueryData } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
@@ -128,12 +135,15 @@ export const columnsConfig: ColumnType<APIDomainsRowData>[] = [
sorter: false,
align: 'right',
className: `column`,
render: (lastUsed: number): string => getLastUsedRelativeTime(lastUsed),
render: (lastUsed: number | string): string =>
lastUsed === 'n/a' || lastUsed === '-'
? '-'
: getLastUsedRelativeTime(lastUsed as number),
},
{
title: (
<div>
Rate <span className="round-metric-tag">/s</span>
Rate <span className="round-metric-tag">ops/s</span>
</div>
),
dataIndex: 'rate',
@@ -155,21 +165,26 @@ export const columnsConfig: ColumnType<APIDomainsRowData>[] = [
sorter: false,
align: 'right',
className: `column`,
render: (errorRate: number): React.ReactNode => (
<Progress
status="active"
percent={Number((errorRate * 100).toFixed(1))}
strokeLinecap="butt"
size="small"
strokeColor={((): string => {
const errorRatePercent = Number((errorRate * 100).toFixed(1));
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
return Color.BG_FOREST_500;
})()}
className="progress-bar error-rate"
/>
),
render: (errorRate: number | string): React.ReactNode => {
if (errorRate === 'n/a' || errorRate === '-') {
return '-';
}
return (
<Progress
status="active"
percent={Number(((errorRate as number) * 100).toFixed(1))}
strokeLinecap="butt"
size="small"
strokeColor={((): string => {
const errorRatePercent = Number(((errorRate as number) * 100).toFixed(1));
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
return Color.BG_FOREST_500;
})()}
className="progress-bar error-rate"
/>
);
},
},
{
title: (
@@ -217,9 +232,9 @@ interface APIMonitoringResponseRow {
data: {
endpoints: number;
error_rate: number;
lastseen: number;
lastseen: number | string;
[domainNameKey]: string;
p99: number;
p99: number | string;
rps: number;
};
}
@@ -232,12 +247,12 @@ interface EndPointsResponseRow {
export interface APIDomainsRowData {
key: string;
domainName: React.ReactNode;
endpointCount: React.ReactNode;
rate: React.ReactNode;
errorRate: React.ReactNode;
latency: React.ReactNode;
lastUsed: React.ReactNode;
domainName: string;
endpointCount: number | string;
rate: number | string;
errorRate: number | string;
latency: number | string;
lastUsed: string;
}
// Rename this to a proper name
@@ -246,12 +261,20 @@ export const formatDataForTable = (
): APIDomainsRowData[] =>
data?.map((domain) => ({
key: v4(),
domainName: domain.data[domainNameKey] || '',
endpointCount: domain.data.endpoints,
rate: domain.data.rps,
errorRate: domain.data.error_rate,
latency: Math.round(domain.data.p99 / 1000000), // Convert from nanoseconds to milliseconds
lastUsed: new Date(Math.floor(domain.data.lastseen / 1000000)).toISOString(), // Convert from nanoseconds to milliseconds
domainName: domain?.data[domainNameKey] || '-',
endpointCount: domain?.data?.endpoints || '-',
rate: domain.data.rps || '-',
errorRate: domain.data.error_rate || '-',
latency:
domain.data.p99 === 'n/a'
? '-'
: Math.round(Number(domain.data.p99) / 1000000), // Convert from nanoseconds to milliseconds
lastUsed:
domain.data.lastseen === 'n/a'
? '-'
: new Date(
Math.floor(Number(domain.data.lastseen) / 1000000),
).toISOString(), // Convert from nanoseconds to milliseconds
}));
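// The 'n/a' guards and nanosecond-to-millisecond conversions above could be
// folded into one helper; a sketch under that assumption (nanosToMillis is a
// hypothetical name, not part of this change):
//
//   const nanosToMillis = (value: number | string): number | string =>
//     value === 'n/a' || value === '-' ? '-' : Math.round(Number(value) / 1e6);
//
//   // e.g. nanosToMillis(1_500_000_000) === 1500, nanosToMillis('n/a') === '-'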
// Rename this to a proper name
@@ -468,7 +491,6 @@ export const extractPortAndEndpoint = (
}
};
// Add icons in the below column headers
export const getEndPointsColumnsConfig = (
isGroupedByAttribute: boolean,
expandedRowKeys: React.Key[],
@@ -576,7 +598,7 @@ export const formatEndPointsDataForTable = (
);
return {
key: v4(),
endpointName: (endpoint.data['http.url'] as string) || '',
endpointName: (endpoint.data['http.url'] as string) || '-',
port,
callCount: endpoint.data.A || '-',
latency:
@@ -593,7 +615,6 @@ export const formatEndPointsDataForTable = (
const groupedByAttributeData = groupBy.map((attribute) => attribute.key);
// TODO: Use tags to show the concatenated attribute values
return data?.map((endpoint) => {
const newEndpointName = groupedByAttributeData
.map((attribute) => endpoint.data[attribute])
@@ -639,7 +660,7 @@ export const createFiltersForSelectedRowData = (
type: null,
},
op: '=',
value: groupedByMeta[key],
value: groupedByMeta[key] || '',
id: key,
})),
);
@@ -649,12 +670,10 @@ export const createFiltersForSelectedRowData = (
// First query payload for endpoint metrics
// Second query payload for endpoint status code
// Third query payload for endpoint rate over time graph
// Fourth query payload for endpoint latency over time graph
// Fifth query payload for endpoint dropdown selection
// Sixth query payload for endpoint dependant services
// Seventh query payload for endpoint response status count bar chart
// Eighth query payload for endpoint response status code latency bar chart
// Third query payload for endpoint dropdown selection
// Fourth query payload for endpoint dependant services
// Fifth query payload for endpoint response status count bar chart
// Sixth query payload for endpoint response status code latency bar chart
export const getEndPointDetailsQueryPayload = (
domainName: string,
endPointName: string,
@@ -1101,205 +1120,6 @@ export const getEndPointDetailsQueryPayload = (
end,
step: 60,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [
{
aggregateAttribute: {
dataType: DataTypes.String,
id: '------false',
isColumn: false,
key: '',
type: '',
},
aggregateOperator: 'rate',
dataSource: DataSource.TRACES,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: '3c76fe0b',
key: {
dataType: DataTypes.String,
id: 'net.peer.name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'net.peer.name',
type: 'tag',
},
op: '=',
value: domainName,
},
{
id: '30710f04',
key: {
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
op: '=',
value: endPointName,
},
...filters.items,
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
],
having: [],
legend: '',
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'rate',
},
],
queryFormulas: [],
},
clickhouse_sql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
],
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
promql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
start,
end,
step: 60,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
aggregateOperator: 'p99',
dataSource: DataSource.TRACES,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: '63adb3ff',
key: {
dataType: DataTypes.String,
id: 'net.peer.name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'net.peer.name',
type: 'tag',
},
op: '=',
value: domainName,
},
{
id: '50142500',
key: {
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
op: '=',
value: endPointName,
},
...filters.items,
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
],
having: [],
legend: '',
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'p99',
},
],
queryFormulas: [],
},
clickhouse_sql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
],
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
promql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
start,
end,
step: 60,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TABLE,
@@ -1801,7 +1621,7 @@ interface EndPointMetricsData {
interface EndPointStatusCodeData {
key: string;
statusCode: string;
count: number;
count: number | string;
p99Latency: number | string;
}
@@ -1824,8 +1644,8 @@ export const getFormattedEndPointStatusCodeData = (
): EndPointStatusCodeData[] =>
data?.map((row) => ({
key: v4(),
statusCode: row.data.response_status_code,
count: row.data.A,
statusCode: row.data.response_status_code || '-',
count: row.data.A || '-',
p99Latency:
row.data.B === 'n/a' ? '-' : Math.round(Number(row.data.B) / 1000000), // Convert from nanoseconds to milliseconds
}));
@@ -1857,11 +1677,6 @@ export const endPointStatusCodeColumns: ColumnType<EndPointStatusCodeData>[] = [
},
];
export const apiWidgetInfo = [
{ title: 'Rate over time', yAxisUnit: 'ops/s' },
{ title: 'Latency over time', yAxisUnit: 'ns' },
];
export const statusCodeWidgetInfo = [
{ yAxisUnit: 'calls' },
{ yAxisUnit: 'ns' },
@@ -1885,8 +1700,8 @@ export const getFormattedEndPointDropDownData = (
): EndPointDropDownData[] =>
data?.map((row) => ({
key: v4(),
label: row.data['http.url'],
value: row.data['http.url'],
label: row.data['http.url'] || '-',
value: row.data['http.url'] || '-',
}));
interface DependentServicesResponseRow {
@@ -1903,6 +1718,7 @@ interface DependentServicesData {
percentage: number;
}
// TODO: Discuss the type safety of this function
export const getFormattedDependentServicesData = (
data: DependentServicesResponseRow[],
): DependentServicesData[] => {
@@ -1983,7 +1799,7 @@ export const groupStatusCodes = (
// Track all timestamps
series.values.forEach((value) => {
allTimestamps.add(value[0]);
allTimestamps.add(Number(value[0]));
});
// Initialize or update the grouped series
@@ -2049,8 +1865,114 @@ export const groupStatusCodes = (
});
});
return Object.values(groupedSeries);
// Define the order of status code ranges
const statusCodeOrder = ['200-299', '300-399', '400-499', '500-599', 'Other'];
// Return the grouped series in the specified order
return statusCodeOrder
.filter((code) => groupedSeries[code]) // Only include codes that exist in the data
.map((code) => groupedSeries[code]);
};
export const getStatusCodeBarChartWidgetData = (
domainName: string,
endPointName: string,
filters: IBuilderQuery['filters'],
): Widgets => ({
query: {
builder: {
queryData: [
{
aggregateAttribute: {
dataType: DataTypes.String,
id: '------false',
isColumn: false,
key: '',
type: '',
},
aggregateOperator: 'count',
dataSource: DataSource.TRACES,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'c6724407',
key: {
dataType: DataTypes.String,
id: 'net.peer.name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'net.peer.name',
type: 'tag',
},
op: '=',
value: domainName,
},
{
id: '8b1be6f0',
key: {
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
op: '=',
value: endPointName,
},
...filters.items,
],
op: 'AND',
},
functions: [],
groupBy: [],
having: [],
legend: '',
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'rate',
},
],
queryFormulas: [],
},
clickhouse_sql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
],
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
promql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},
description: '',
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
isStacked: false,
panelTypes: PANEL_TYPES.BAR,
title: '',
opacity: '',
nullZeroValues: '',
timePreferance: 'GLOBAL_TIME',
softMin: null,
softMax: null,
selectedLogFields: null,
selectedTracesFields: null,
});
interface EndPointStatusCodePayloadData {
data: {
result: QueryData[];
@@ -2085,3 +2007,277 @@ export const END_POINT_DETAILS_QUERY_KEYS_ARRAY = [
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA,
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA,
];
export const getRateOverTimeWidgetData = (
domainName: string,
endPointName: string,
filters: IBuilderQuery['filters'],
): Widgets => {
const { endpoint, port } = extractPortAndEndpoint(endPointName);
const legend = `${
port !== '-' && port !== 'n/a' ? `${port}:` : ''
}${endpoint}`;
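// Assuming extractPortAndEndpoint splits an http.url into its port and path
// (as its name suggests), an endpoint on port 8080 would yield a legend like
// '8080:/v1/users', while a missing port ('-' or 'n/a') yields just '/v1/users'.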
return getWidgetQueryBuilder(
getWidgetQuery({
title: 'Rate Over Time',
description: 'Rate over time.',
queryData: [
{
aggregateAttribute: {
dataType: DataTypes.String,
id: '------false',
isColumn: false,
key: '',
type: '',
},
aggregateOperator: 'rate',
dataSource: DataSource.TRACES,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: '3c76fe0b',
key: {
dataType: DataTypes.String,
id: 'net.peer.name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'net.peer.name',
type: 'tag',
},
op: '=',
value: domainName,
},
{
id: '30710f04',
key: {
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
op: '=',
value: endPointName,
},
...filters.items,
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
],
having: [],
legend,
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'rate',
},
],
yAxisUnit: 'ops/s',
}),
);
};
export const getLatencyOverTimeWidgetData = (
domainName: string,
endPointName: string,
filters: IBuilderQuery['filters'],
): Widgets => {
const { endpoint, port } = extractPortAndEndpoint(endPointName);
const legend = `${
port !== '-' && port !== 'n/a' ? `${port}:` : ''
}${endpoint}`;
return getWidgetQueryBuilder(
getWidgetQuery({
title: 'Latency Over Time',
description: 'Latency over time.',
queryData: [
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
aggregateOperator: 'p99',
dataSource: DataSource.TRACES,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: '63adb3ff',
key: {
dataType: DataTypes.String,
id: 'net.peer.name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'net.peer.name',
type: 'tag',
},
op: '=',
value: domainName,
},
{
id: '50142500',
key: {
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
op: '=',
value: endPointName,
},
...filters.items,
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'http.url--string--tag--false',
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'tag',
},
],
having: [],
legend,
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'p99',
},
],
yAxisUnit: 'ns',
}),
);
};
/**
* Helper function to get the start and end status codes from a status code range string
* @param value Status code range string (e.g. '200-299') or boolean
* @returns Tuple of [startStatusCode, endStatusCode] as strings
*/
const getStartAndEndStatusCode = (
value: string | boolean,
): [string, string] => {
if (!value) {
return ['', ''];
}
switch (value) {
case '100-199':
return ['100', '199'];
case '200-299':
return ['200', '299'];
case '300-399':
return ['300', '399'];
case '400-499':
return ['400', '499'];
case '500-599':
return ['500', '599'];
default:
return ['', ''];
}
};
/**
* Creates filter items for bar chart based on group by fields and request data
* Used specifically for filtering status code ranges in bar charts
* @param groupBy Array of group by fields to create filters for
* @param requestData Data from graph click containing values to filter on
* @returns Array of TagFilterItems with >= and < operators for status code ranges
*/
export const createGroupByFiltersForBarChart = (
groupBy: BaseAutocompleteData[],
requestData: GraphClickMetaData,
): TagFilterItem[] =>
groupBy
.map((gb) => {
const value = requestData[gb.key];
const [startStatusCode, endStatusCode] = getStartAndEndStatusCode(value);
return value
? [
{
id: v4(),
key: gb,
op: '>=',
value: startStatusCode,
},
{
id: v4(),
key: gb,
op: '<=',
value: endStatusCode,
},
]
: [];
})
.flat();
export const getCustomFiltersForBarChart = (
metric:
| {
[key: string]: string;
}
| undefined,
): TagFilterItem[] => {
if (!metric?.response_status_code) {
return [];
}
const [startStatusCode, endStatusCode] = getStartAndEndStatusCode(
metric.response_status_code,
);
return [
{
id: v4(),
key: {
dataType: DataTypes.String,
id: 'response_status_code--string--tag--false',
isColumn: false,
isJSON: false,
key: 'response_status_code',
type: 'tag',
},
op: '>=',
value: startStatusCode,
},
{
id: v4(),
key: {
dataType: DataTypes.String,
id: 'response_status_code--string--tag--false',
isColumn: false,
isJSON: false,
key: 'response_status_code',
type: 'tag',
},
op: '<=',
value: endStatusCode,
},
];
};
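// A worked example of the two helpers above: clicking the '400-499' bar makes
// metric.response_status_code === '400-499', so
//
//   getStartAndEndStatusCode('400-499'); // → ['400', '499']
//   getCustomFiltersForBarChart({ response_status_code: '400-499' });
//   // → two response_status_code filters: op '>=' value '400', op '<=' value '499'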

View File

@@ -61,7 +61,7 @@ export default function CustomDomainSettings(): JSX.Element {
isLoading: isLoadingDeploymentsData,
isFetching: isFetchingDeploymentsData,
refetch: refetchDeploymentsData,
} = useGetDeploymentsData();
} = useGetDeploymentsData(true);
const {
mutate: updateSubDomain,

View File

@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import React from 'react';
import { useTranslation } from 'react-i18next';
@@ -9,7 +10,20 @@ function MsTeams({ setSelectedConfig }: MsTeamsProps): JSX.Element {
return (
<>
<Form.Item name="webhook_url" label={t('field_webhook_url')}>
<Form.Item
name="webhook_url"
label={t('field_webhook_url')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_ms_teams_url')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
>
<Input
onChange={(event): void => {
setSelectedConfig((value) => ({

View File

@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { useTranslation } from 'react-i18next';
import { OpsgenieChannel } from '../../CreateAlertChannels/config';
@@ -19,7 +20,21 @@ function OpsgenieForm({ setSelectedConfig }: OpsgenieFormProps): JSX.Element {
return (
<>
<Form.Item name="api_key" label={t('field_opsgenie_api_key')} required>
<Form.Item
name="api_key"
label={t('field_opsgenie_api_key')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_opsgenie_api_key')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
required
>
<Input
onChange={handleInputChange('api_key')}
data-testid="opsgenie-api-key-textbox"

View File

@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { Dispatch, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
@@ -10,7 +11,20 @@ function PagerForm({ setSelectedConfig }: PagerFormProps): JSX.Element {
const { t } = useTranslation('channels');
return (
<>
<Form.Item name="routing_key" label={t('field_pager_routing_key')} required>
<Form.Item
name="routing_key"
label={t('field_pager_routing_key')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_pager_routing_key')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
>
<Input
onChange={(event): void => {
setSelectedConfig((value) => ({

View File

@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { Dispatch, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
@@ -11,7 +12,20 @@ function Slack({ setSelectedConfig }: SlackProps): JSX.Element {
return (
<>
<Form.Item name="api_url" label={t('field_webhook_url')}>
<Form.Item
name="api_url"
label={t('field_webhook_url')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_slack_url')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
>
<Input
onChange={(event): void => {
setSelectedConfig((value) => ({

View File

@@ -1,4 +1,5 @@
import { Form, Input } from 'antd';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { Dispatch, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
@@ -9,7 +10,20 @@ function WebhookSettings({ setSelectedConfig }: WebhookProps): JSX.Element {
return (
<>
<Form.Item name="api_url" label={t('field_webhook_url')}>
<Form.Item
name="api_url"
label={t('field_webhook_url')}
tooltip={{
title: (
<MarkdownRenderer
markdownContent={t('tooltip_webhook_url')}
variables={{}}
/>
),
overlayInnerStyle: { maxWidth: 400 },
placement: 'right',
}}
>
<Input
onChange={(event): void => {
setSelectedConfig((value) => ({

View File

@@ -1,6 +1,5 @@
import { Form, FormInstance, Input, Select, Switch, Typography } from 'antd';
import { Store } from 'antd/lib/form/interface';
import { FeatureKeys } from 'constants/features';
import ROUTES from 'constants/routes';
import {
ChannelType,
@@ -11,11 +10,8 @@ import {
WebhookChannel,
} from 'container/CreateAlertChannels/config';
import history from 'lib/history';
import { useAppContext } from 'providers/App/App';
import { Dispatch, ReactElement, SetStateAction } from 'react';
import { useTranslation } from 'react-i18next';
import { FeatureFlagProps } from 'types/api/features/getFeaturesFlags';
import { isFeatureKeys } from 'utils/app';
import EmailSettings from './Settings/Email';
import MsTeamsSettings from './Settings/MsTeams';
@@ -39,17 +35,6 @@ function FormAlertChannels({
editing = false,
}: FormAlertChannelsProps): JSX.Element {
const { t } = useTranslation('channels');
const { featureFlags } = useAppContext();
const feature = `ALERT_CHANNEL_${type.toUpperCase()}`;
const featureKey = isFeatureKeys(feature)
? feature
: FeatureKeys.ALERT_CHANNEL_SLACK;
const hasFeature = featureFlags?.find(
(flag: FeatureFlagProps) => flag.name === featureKey,
);
const renderSettings = (): ReactElement | null => {
switch (type) {
@@ -146,7 +131,7 @@ function FormAlertChannels({
<Form.Item>
<Button
disabled={savingState || !hasFeature}
disabled={savingState}
loading={savingState}
type="primary"
onClick={(): void => onSaveHandler(type)}
@@ -154,7 +139,7 @@ function FormAlertChannels({
{t('button_save_channel')}
</Button>
<Button
disabled={testingState || !hasFeature}
disabled={testingState}
loading={testingState}
onClick={(): void => onTestHandler(type)}
>
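The deleted block gated the Save and Test buttons on a per-channel feature flag built from the channel type. One detail worth spelling out: `feature` is a template-literal string, so TypeScript types it as plain `string`, and the `isFeatureKeys` guard is what narrowed it to the `FeatureKeys` union (with Slack as the fallback). A hypothetical minimal reproduction of that narrowing, not project code:

type FeatureKey = 'ALERT_CHANNEL_SLACK' | 'ALERT_CHANNEL_MSTEAMS';

const KNOWN_KEYS: readonly FeatureKey[] = [
	'ALERT_CHANNEL_SLACK',
	'ALERT_CHANNEL_MSTEAMS',
];

// Type guard: narrows an arbitrary string to the FeatureKey union.
function isFeatureKey(key: string): key is FeatureKey {
	return (KNOWN_KEYS as readonly string[]).includes(key);
}

const feature = `ALERT_CHANNEL_${'slack'.toUpperCase()}`;
const featureKey: FeatureKey = isFeatureKey(feature)
	? feature
	: 'ALERT_CHANNEL_SLACK';

After this change the buttons depend only on `savingState` / `testingState`, so the guard and lookup go away with the flag.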

View File

@@ -467,10 +467,6 @@ function FormAlertRules({
panelType,
]);
const isAlertAvailable =
!featureFlags?.find((flag) => flag.name === FeatureKeys.QUERY_BUILDER_ALERTS)
?.active || false;
const saveRule = useCallback(async () => {
if (!isFormValid()) {
return;
@@ -689,7 +685,6 @@ function FormAlertRules({
const isAlertNameMissing = !formInstance.getFieldValue('alert');
const isAlertAvailableToSave =
isAlertAvailable &&
currentQuery.queryType === EQueryType.QUERY_BUILDER &&
alertType !== AlertTypes.METRICS_BASED_ALERT;

View File

@@ -49,6 +49,7 @@ function FullView({
isDependedDataLoaded = false,
onToggleModelHandler,
onClickHandler,
customOnDragSelect,
setCurrentGraphRef,
}: FullViewProps): JSX.Element {
const { safeNavigate } = useSafeNavigate();
@@ -252,7 +253,7 @@ function FullView({
onToggleModelHandler={onToggleModelHandler}
setGraphVisibility={setGraphsVisibilityStates}
graphVisibility={graphsVisibilityStates}
onDragSelect={onDragSelect}
onDragSelect={customOnDragSelect ?? onDragSelect}
tableProcessedDataRef={tableProcessedDataRef}
searchTerm={searchTerm}
onClickHandler={onClickHandler}

View File

@@ -50,6 +50,7 @@ export interface FullViewProps {
widget: Widgets;
fullViewOptions?: boolean;
onClickHandler?: OnClickPluginOpts['onClick'];
customOnDragSelect?: (start: number, end: number) => void;
name: string;
tableProcessedDataRef: MutableRefObject<RowData[]>;
version?: string;

View File

@@ -50,6 +50,7 @@ function WidgetGraphComponent({
setRequestData,
onClickHandler,
onDragSelect,
customOnDragSelect,
customTooltipElement,
openTracesButton,
onOpenTraceBtnClick,
@@ -327,6 +328,7 @@ function WidgetGraphComponent({
onToggleModelHandler={onToggleModelHandler}
tableProcessedDataRef={tableProcessedDataRef}
onClickHandler={onClickHandler ?? graphClickHandler}
customOnDragSelect={customOnDragSelect}
setCurrentGraphRef={setCurrentGraphRef}
/>
</Modal>

View File

@@ -36,6 +36,7 @@ function GridCardGraph({
version,
onClickHandler,
onDragSelect,
customOnDragSelect,
customTooltipElement,
dataAvailable,
getGraphData,
@@ -272,6 +273,7 @@ function GridCardGraph({
setRequestData={setRequestData}
onClickHandler={onClickHandler}
onDragSelect={onDragSelect}
customOnDragSelect={customOnDragSelect}
customTooltipElement={customTooltipElement}
openTracesButton={openTracesButton}
onOpenTraceBtnClick={onOpenTraceBtnClick}

View File

@@ -33,6 +33,7 @@ export interface WidgetGraphComponentProps {
setRequestData?: Dispatch<SetStateAction<GetQueryResultsProps>>;
onClickHandler?: OnClickPluginOpts['onClick'];
onDragSelect: (start: number, end: number) => void;
customOnDragSelect?: (start: number, end: number) => void;
customTooltipElement?: HTMLDivElement;
openTracesButton?: boolean;
onOpenTraceBtnClick?: (record: RowData) => void;
@@ -49,6 +50,7 @@ export interface GridCardGraphProps {
variables?: Dashboard['data']['variables'];
version?: string;
onDragSelect: (start: number, end: number) => void;
customOnDragSelect?: (start: number, end: number) => void;
customTooltipElement?: HTMLDivElement;
dataAvailable?: (isDataAvailable: boolean) => void;
getGraphData?: (graphData?: MetricRangePayloadProps['data']) => void;
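Taken together, the FullView, WidgetGraphComponent, and GridCardGraph hunks thread an optional `customOnDragSelect` through to the full-view modal, overriding the default `onDragSelect` via `??` only when a caller supplies one. The fallback pattern in isolation, with hypothetical names:

type DragSelect = (start: number, end: number) => void;

interface GraphProps {
	onDragSelect: DragSelect; // required default handler
	customOnDragSelect?: DragSelect; // optional per-panel override
}

// `??` keeps the default when the override is undefined, without
// accidentally discarding other falsy values the way `||` would.
function resolveDragSelect(props: GraphProps): DragSelect {
	return props.customOnDragSelect ?? props.onDragSelect;
}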

View File

@@ -178,6 +178,7 @@ interface HandleGraphClickParams {
navigateToExplorer: (props: NavigateToExplorerProps) => void;
notifications: NotificationInstance;
graphClick: (props: GraphClickProps) => void;
customFilters?: TagFilterItem[];
}
export const handleGraphClick = async ({
@@ -192,6 +193,7 @@ export const handleGraphClick = async ({
navigateToExplorer,
notifications,
graphClick,
customFilters,
}: HandleGraphClickParams): Promise<void> => {
const { stepInterval } = widget?.query?.builder?.queryData?.[0] ?? {};
@@ -221,7 +223,7 @@ export const handleGraphClick = async ({
}: ${key}`,
onClick: (): void =>
navigateToExplorer({
filters: result[key].filters,
filters: [...result[key].filters, ...(customFilters || [])],
dataSource: result[key].dataSource as DataSource,
startTime: xValue,
endTime: xValue + (stepInterval ?? 60),
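`navigateToExplorer` now receives the series-derived filters plus any caller-supplied `customFilters`; defaulting the optional array before spreading keeps the merge safe when it is omitted. A small sketch under an assumed minimal `TagFilterItem` shape (the real type lives in the query-builder types):

// Assumed minimal shape for illustration only.
interface TagFilterItem {
	id: string;
	op: string;
	value: string | string[];
}

function mergeFilters(
	derived: TagFilterItem[],
	customFilters?: TagFilterItem[],
): TagFilterItem[] {
	// Spreading `undefined` throws at runtime, hence the default.
	return [...derived, ...(customFilters ?? [])];
}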

View File

@@ -44,7 +44,10 @@ import { EditMenuAction, ViewMenuAction } from './config';
import DashboardEmptyState from './DashboardEmptyState/DashboardEmptyState';
import GridCard from './GridCard';
import { Card, CardContainer, ReactGridLayout } from './styles';
import { removeUndefinedValuesFromLayout } from './utils';
import {
hasColumnWidthsChanged,
removeUndefinedValuesFromLayout,
} from './utils';
import { MenuItemKeys } from './WidgetHeader/contants';
import { WidgetRowHeader } from './WidgetRow';
@@ -68,6 +71,7 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
setDashboardQueryRangeCalled,
setSelectedRowWidgetId,
isDashboardFetching,
columnWidths,
} = useDashboard();
const { data } = selectedDashboard || {};
const { pathname } = useLocation();
@@ -162,6 +166,7 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
logEventCalledRef.current = true;
}
}, [data]);
const onSaveHandler = (): void => {
if (!selectedDashboard) return;
@@ -171,6 +176,15 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
...selectedDashboard.data,
panelMap: { ...currentPanelMap },
layout: dashboardLayout.filter((e) => e.i !== PANEL_TYPES.EMPTY_WIDGET),
widgets: selectedDashboard?.data?.widgets?.map((widget) => {
if (columnWidths?.[widget.id]) {
return {
...widget,
columnWidths: columnWidths[widget.id],
};
}
return widget;
}),
},
uuid: selectedDashboard.uuid,
};
@@ -227,20 +241,31 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
useEffect(() => {
if (
isDashboardLocked ||
!saveLayoutPermission ||
updateDashboardMutation.isLoading ||
isDashboardFetching
) {
return;
}
const shouldSaveLayout =
dashboardLayout &&
Array.isArray(dashboardLayout) &&
dashboardLayout.length > 0 &&
!isEqual(layouts, dashboardLayout) &&
!isDashboardLocked &&
saveLayoutPermission &&
!updateDashboardMutation.isLoading &&
!isDashboardFetching
) {
!isEqual(layouts, dashboardLayout);
const shouldSaveColumnWidths =
dashboardLayout &&
Array.isArray(dashboardLayout) &&
dashboardLayout.length > 0 &&
hasColumnWidthsChanged(columnWidths, selectedDashboard);
if (shouldSaveLayout || shouldSaveColumnWidths) {
onSaveHandler();
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dashboardLayout]);
}, [dashboardLayout, columnWidths]);
const onSettingsModalSubmit = (): void => {
const newTitle = form.getFieldValue('title');
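The save path now persists per-widget table column widths alongside the layout: widgets whose id appears in the `columnWidths` map are rewritten with a `columnWidths` field, and the auto-save effect fires when either the layout or the widths drift from what the dashboard has stored. A condensed sketch of the widget merge, assuming `columnWidths` maps widget id to column-key/width pairs:

type ColumnWidths = Record<string, Record<string, number>>;

interface WidgetLike {
	id: string;
	columnWidths?: Record<string, number>;
}

// Widgets with pending width changes get them attached; others pass through untouched.
function mergeColumnWidths(
	widgets: WidgetLike[],
	columnWidths: ColumnWidths,
): WidgetLike[] {
	return widgets.map((widget) =>
		columnWidths[widget.id]
			? { ...widget, columnWidths: columnWidths[widget.id] }
			: widget,
	);
}

Note the refactored effect also inverts the old combined condition into an early-return guard, so the lock, permission, and in-flight checks short-circuit before any comparison work.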

View File

@@ -12,7 +12,7 @@ import { v4 } from 'uuid';
import { extractQueryNamesFromExpression } from './utils';
type GraphClickMetaData = {
export type GraphClickMetaData = {
[key: string]: string | boolean;
queryName: string;
inFocusOrNot: boolean;

View File

@@ -1,5 +1,7 @@
import { FORMULA_REGEXP } from 'constants/regExp';
import { isEmpty, isEqual } from 'lodash-es';
import { Layout } from 'react-grid-layout';
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
export const removeUndefinedValuesFromLayout = (layout: Layout[]): Layout[] =>
layout.map((obj) =>
@@ -25,3 +27,27 @@ export function extractQueryNamesFromExpression(expression: string): string[] {
// Extract matches and deduplicate
return [...new Set(expression.match(queryNameRegex) || [])];
}
export const hasColumnWidthsChanged = (
columnWidths: Record<string, Record<string, number>>,
selectedDashboard?: Dashboard,
): boolean => {
// If no column widths stored, no changes
if (isEmpty(columnWidths) || !selectedDashboard) return false;
// Check each widget's column widths
return Object.keys(columnWidths).some((widgetId) => {
const dashboardWidget = selectedDashboard?.data?.widgets?.find(
(widget) => widget.id === widgetId,
) as Widgets;
const newWidths = columnWidths[widgetId];
const existingWidths = dashboardWidget?.columnWidths;
// If either side is empty/undefined, treat as no change
if (isEmpty(newWidths) || isEmpty(existingWidths)) return false;
// Compare stored column widths with dashboard widget's column widths
return !isEqual(newWidths, existingWidths);
});
};
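A quick usage sketch of the new helper with made-up values — it reports a change only when a widget has both pending and saved widths and they differ:

import { hasColumnWidthsChanged } from './utils';

const pending = { w1: { timestamp: 180, body: 420 } };
// Dashboard sketched loosely; the real type has more required fields,
// so the cast just keeps the example short.
const dashboard = {
	data: { widgets: [{ id: 'w1', columnWidths: { timestamp: 150, body: 420 } }] },
};

// true: w1's pending timestamp width (180) differs from the saved one (150).
hasColumnWidthsChanged(pending, dashboard as never);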

View File

@@ -43,6 +43,7 @@ function GridTableComponent({
sticky,
openTracesButton,
onOpenTraceBtnClick,
widgetId,
...props
}: GridTableComponentProps): JSX.Element {
const { t } = useTranslation(['valueGraph']);
@@ -229,6 +230,7 @@ function GridTableComponent({
columns={openTracesButton ? columnDataWithOpenTracesButton : newColumnData}
dataSource={dataSource}
sticky={sticky}
widgetId={widgetId}
onRow={
openTracesButton
? (record): React.HTMLAttributes<HTMLElement> => ({

View File

@@ -17,6 +17,7 @@ export type GridTableComponentProps = {
searchTerm?: string;
openTracesButton?: boolean;
onOpenTraceBtnClick?: (record: RowData) => void;
widgetId?: string;
} & Pick<LogsExplorerTableProps, 'data'> &
Omit<TableProps<RowData>, 'columns' | 'dataSource'>;

View File

@@ -23,10 +23,13 @@ function DataSourceInfo({
const notSendingData = !dataSentToSigNoz;
const isEnabled =
activeLicenseV3 && activeLicenseV3.platform === LicensePlatform.CLOUD;
const {
data: deploymentsData,
isError: isErrorDeploymentsData,
} = useGetDeploymentsData();
} = useGetDeploymentsData(isEnabled || false);
const [region, setRegion] = useState<string>('');
const [url, setUrl] = useState<string>('');
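Both call sites of `useGetDeploymentsData` now pass an explicit enabled flag — here only cloud-platform licenses trigger the fetch, while MultiIngestionSettings (next hunk) always fetches. The hook's new signature is not shown in this diff; assuming it wraps react-query's `useQuery`, the change presumably looks something like:

import { useQuery, UseQueryResult } from 'react-query';

declare function fetchDeployments(): Promise<unknown>; // hypothetical fetcher

// Assumed shape: `enabled: false` makes react-query skip the request entirely,
// so non-cloud users never hit the deployments endpoint.
function useGetDeploymentsData(enabled: boolean): UseQueryResult<unknown> {
	return useQuery({
		queryKey: ['deploymentsData'],
		queryFn: fetchDeployments,
		enabled,
	});
}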

View File

@@ -293,7 +293,7 @@ function MultiIngestionSettings(): JSX.Element {
isLoading: isLoadingDeploymentsData,
isFetching: isFetchingDeploymentsData,
isError: isErrorDeploymentsData,
} = useGetDeploymentsData();
} = useGetDeploymentsData(true);
const {
mutate: createIngestionKey,

View File

@@ -10,7 +10,6 @@ import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQue
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { useEventSourceEvent } from 'hooks/useEventSourceEvent';
import { useNotifications } from 'hooks/useNotifications';
import { prepareQueryRangePayload } from 'lib/dashboard/prepareQueryRangePayload';
import { useEventSource } from 'providers/EventSource';
import { useCallback, useEffect, useRef, useState } from 'react';
@@ -38,8 +37,6 @@ function LiveLogsContainer(): JSX.Element {
const batchedEventsRef = useRef<ILog[]>([]);
const { notifications } = useNotifications();
const { selectedTime: globalSelectedTime } = useSelector<
AppState,
GlobalReducer
@@ -50,6 +47,8 @@ function LiveLogsContainer(): JSX.Element {
handleCloseConnection,
initialLoading,
isConnectionLoading,
isConnectionError,
reconnectDueToError,
} = useEventSource();
const compositeQuery = useGetCompositeQueryParam();
@@ -86,8 +85,8 @@ function LiveLogsContainer(): JSX.Element {
);
const handleError = useCallback(() => {
notifications.error({ message: 'Sorry, something went wrong' });
}, [notifications]);
console.error('Sorry, something went wrong');
}, []);
useEventSourceEvent('message', handleGetLiveLogs);
useEventSourceEvent('error', handleError);
@@ -153,6 +152,23 @@ function LiveLogsContainer(): JSX.Element {
handleStartNewConnection,
]);
useEffect((): (() => void) | undefined => {
if (isConnectionError && reconnectDueToError && compositeQuery) {
// Small delay to prevent immediate reconnection attempts
const reconnectTimer = setTimeout(() => {
handleStartNewConnection(compositeQuery);
}, 1000);
return (): void => clearTimeout(reconnectTimer);
}
return undefined;
}, [
isConnectionError,
reconnectDueToError,
compositeQuery,
handleStartNewConnection,
]);
useEffect(() => {
const prefetchedList = queryLocationState?.listQueryPayload[0]?.list;
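The new effect retries the live-logs connection after an error, and returning the cleanup means a re-render or unmount clears the pending timer so a stale reconnect can never fire. The same debounced-retry shape in isolation, with hypothetical names:

import { useEffect } from 'react';

function useDelayedRetry(shouldRetry: boolean, retry: () => void): void {
	useEffect((): (() => void) | undefined => {
		if (!shouldRetry) return undefined;
		// The 1s delay prevents a tight reconnect loop when errors repeat.
		const timer = setTimeout(retry, 1000);
		return (): void => clearTimeout(timer);
	}, [shouldRetry, retry]);
}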

Some files were not shown because too many files have changed in this diff.