Compare commits

..

117 Commits

Author SHA1 Message Date
nityanandagohain
99edf96910 fix: use new error in pipelines handler 2025-07-11 17:38:27 +05:30
Nityananda Gohain
552d44d208 chore: send email on role update (#8489)
* chore: send email on role update

* fix: minor changes

* fix: update template

* fix: minor changes

* fix: return updated user
2025-07-10 15:17:04 +00:00
SagarRajput-7
497315579f chore: added got at 11.8.5 patch to fix image-webpack-loader vulnerability (#8500) 2025-07-10 20:22:39 +05:30
SagarRajput-7
bfaac15ccb chore: replace image-webpack-loader (deprecated) with image-minimizer-webpack-plugin (#8498)
* chore: replace image-webpack-loader (deprecated) with image-minimizer-webpack-plugin

* chore: used sharp

* chore: remove got resolution
2025-07-10 19:17:02 +05:30
SagarRajput-7
5e18be6a23 chore: added got at 11.8.5 patch to fix image-webpack-loader vulnerability (#8493) 2025-07-10 16:07:10 +05:30
Yunus M
1793706f87 feat: show ingestion keys to self hosted users (#8490) 2025-07-10 14:51:53 +05:30
aniketio-ctrl
da2a3c738a fix(aws-elastic-cache): corrected variable query for elastic cache (#8487)
* fix(aws-elastic-cache): corrected variable query for elastic cache overview.json

* fix(aws-elastic-cache): corrected variable query for elastic cache overview.json

---------

Co-authored-by: Piyush Singariya <piyushsingariya@gmail.com>
2025-07-09 10:21:15 +00:00
primus-bot[bot]
d17dab9a1d chore(release): bump to v0.89.0 (#8482) 2025-07-09 12:06:47 +05:30
Srikanth Chekuri
88b75d4e72 fix(apdex): use right metric name for metadata (#8463) 2025-07-09 09:08:40 +05:30
Sahil Khan
6327ab5ec6 fix: allowed user to select text in json body field in log details (#8450) 2025-07-08 21:28:05 +05:30
Sahil Khan
5b09490ad7 fix: trace details v2 ui bugs (#8448) 2025-07-08 13:51:40 +00:00
Nageshbansal
b50127b567 feat(statsreporter): add railway platform detection (#8467) 2025-07-08 13:01:21 +00:00
Vishal Sharma
ba2ed3ad22 chore: only log telemetry query for explorer, rule and dashboard pages (#8464)
* chore: only log telemetry query for explorer, rule and dashboard pages

* chore: add dashboard and rule properties for no telemetry result
2025-07-08 11:32:46 +00:00
0xflotus
eb3dfbf63b docs: fixed small typo error (#8458)
Co-authored-by: Vibhu Pandey <vibhupandey28@gmail.com>
2025-07-08 16:46:43 +05:30
Nageshbansal
c3e048470d fix: add DOT_METRICS_ENABLED and remove clickhousemetricswrite (#8461) 2025-07-08 15:36:32 +05:30
Vibhu Pandey
4563ff0e62 fix(users): skip sending email if frontend base url is not set (#8459)
skip sending email if frontend base url is not set
2025-07-08 01:47:37 +05:30
Vibhu Pandey
c9e48b6de9 feat(sqlschema): add sqlschema (#8384)
## 📄 Summary

- add sqlschema package
- add unique index on email,org_id in users and user_invite
2025-07-08 00:21:26 +05:30
Amlan Kumar Nandy
06ef9ff384 fix: resolve ui full reload on auto-refresh (#8383) 2025-07-07 16:51:06 +00:00
Amlan Kumar Nandy
26d55875f5 chore: fix metrics explorer events (#8411) 2025-07-07 16:34:17 +00:00
Srikanth Chekuri
b1864ee328 chore: use {k8s.pod/k8s.node/container}.cpu.usage metric for metadata and CPU usage charts (#8398) 2025-07-07 11:25:20 +00:00
Amlan Kumar Nandy
8b62c8dced chore: fix regex issue in route tab (#8440) 2025-07-07 16:55:23 +07:00
aniketio-ctrl
273452352d chore(2354): added preloaded metrics metadata at first api call (#8229)
* chore(2354): added preloaded metrics metadata at first api call
2025-07-06 17:09:29 +05:30
Vibhu Pandey
8274ebfe37 fix(memorycache): add a cloneable interface (#8414) 2025-07-05 19:08:23 +05:30
Abhi kumar
7d5e14abb6 fix: simplify changelog fetching logic and enhance version display interactivity (#8432) 2025-07-05 13:49:09 +05:30
primus-bot[bot]
7c17ac42b1 chore(release): bump to v0.88.1 (#8436) 2025-07-04 16:54:57 +05:30
Aditya Singh
74ee7bb2c7 fix: fix setting nav items from multiple places (#8435)
Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-07-04 15:44:53 +05:30
Sahil Khan
2f5640b2e6 fix: used a new classname for banner container; earlier one was in ad block list (#8433)
* fix: used a new classname for banner container; earlier one was in ad block list

* chore: minor comment added

* chore: minor comment added
2025-07-04 08:20:32 +00:00
Aditya Singh
121debcecc Fix drag handle obstruction on dashboard and service details page (#8428)
* fix: fix overlapping pylon support btn on dashboard and services details page

* fix: minor refactor

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-07-04 12:12:30 +05:30
Sahil Khan
ff13504a74 fix: performance optimizations in log details view json field rendering (#8324)
* fix: log details filters use data types from log data response as primary data type

* chore: added test cases

* test: add comprehensive unit tests for chooseAutocompleteFromCustomValue function

* fix: added datatypes to util and test cases

* fix: added new tests

* fix: performance optimizations in log details view json field rendering

* fix: fixed import failing tests

* fix: added default html rendering field in body field in log details

* fix: fixed eslint errors

* chore: moved hook to a new file and renamed a state
2025-07-04 04:58:26 +00:00
dependabot[bot]
d4e373443b chore(deps): bump github.com/go-viper/mapstructure/v2 (#8379)
Bumps [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure) from 2.2.1 to 2.3.0.
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.2.1...v2.3.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.3.0
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-07-04 02:40:14 +00:00
Vibhu Pandey
3ccf822d67 fix(statsreporter): add unix timestamps for last observed time (#8426) 2025-07-03 14:25:12 +00:00
Abhi kumar
0e270e6f51 fix: update media styling for changelog renderer and adjust video class (#8425) 2025-07-03 18:35:11 +05:30
Vishal Sharma
749df2a979 fix: nil pointer exception when result[0].Table is nil (#8424)
* fix: nil pointer exception when result[0].Table is nil

* fix: decrease complexity

---------

Co-authored-by: grandwizard28 <vibhupandey28@gmail.com>
2025-07-03 12:13:51 +00:00
Sahil Khan
9ee5d5d599 fix: no logs in list view issue - added logs datasource for formatting and column options in the useoptionsmenu consumption (#8421)
* fix: added logs datasource for formatting and column options in the useoptionsmenu consumption

* fix: changed data source to logs in context log renderer from metrics
2025-07-03 14:32:19 +05:30
Amlan Kumar Nandy
4940dfd46f Revert "chore: fix regex issue (#8393)" (#8418)
This reverts commit 53c58b9983.
2025-07-03 06:04:51 +00:00
Aditya Singh
79a31cc205 Sentry issues (#8264)
* fix: fix breaking json parsing

* fix: sentry fix

* fix: fix sentry edge case

* test: update test for useUrlQueryData

* test: minor fix

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-07-03 05:33:11 +00:00
Srikanth Chekuri
5102cf2b7b fix: remove deprecated telemetry::metrics::address from config (#8412) 2025-07-02 11:58:40 +00:00
Vishal Sharma
9ec5594648 fix: telemetry query events (#8388)
* fix: telemetry query events

* chore: reduced cyclomatic complexity

* chore: nit
2025-07-02 08:22:54 +00:00
Shaheer Kochai
b6c2ebd6d7 feat: trace to logs custom empty state UI (#8381)
* feat: display custom empty message if no logs on navigating from trace to logs

* chore: write tests for logs explorer normal and custom empty state

* feat: build the custom empty logs UI based on the updated designs

* feat: clear the filters and run stage query on clicking clear filters in logs custom empty state

* fix: update the failing test to match the logs custom empty state

* chore: remove the unnecessary onClick for documentation links

* refactor: overall improvements

* refactor: move the empty logs list config to util

* chore: update the documentation links + remove the explicit height from resources card

* refactor: reuse the EmptyLogsListConfig type in EmptyLogsSearch

* test: update LogsExplorerList tests to reflect changes in documentation links

---------

Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2025-07-02 07:30:17 +00:00
primus-bot[bot]
9a3a8c8305 chore(release): bump SigNoz to v0.88.0, OTel Collector to v0.128.0 (#8410)
* chore(release): bump to v0.88.0

* chore: bump to v0.111.28

* chore: bump to v0.111.28

---------

Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
Co-authored-by: grandwizard28 <vibhupandey28@gmail.com>
2025-07-02 12:19:54 +05:30
Nageshbansal
2ac45b0174 feat: adds support for Hetzner and coolify deployment platform in statsreporter (#8409) 2025-07-02 06:30:32 +00:00
Srikanth Chekuri
2a53918ebd chore: make queries compatible with 24.1 and fix string json query (#8391) 2025-07-02 05:09:16 +00:00
Shaheer Kochai
9daefeb881 fix: override the stagedQuery orderBy and send order by timestamp in traces view of traces explorer (#8390)
* fix: override the stagedQuery orderBy and send order by timestamp in traces view of traces explorer

* chore: write test for sending order by timestamp in the traces view of traces explorer

* refactor: refactor the query transformer to accept partial query object and override fields
2025-07-01 14:14:07 +00:00
Shaheer Kochai
526cf01cb7 fix: fix the issue of traces filters getting duplicated on switching between the span scopes (#8389)
* fix: fix the issue of changing span scope duplicating filters

* chore: write test for duplicate filters issue on changing the span scope
2025-07-01 07:53:33 +00:00
Amlan Kumar Nandy
cd4766ec2b fix: correct step numbering for non-metric based alerts (#8367) 2025-07-01 05:29:24 +00:00
Amlan Kumar Nandy
2196b58d36 fix: correct query data for cluster details metric view in infra monitoring (#8380) 2025-07-01 05:12:11 +00:00
Amlan Kumar Nandy
53c58b9983 chore: fix regex issue (#8393) 2025-07-01 11:32:07 +07:00
Piyush Singariya
d174038dce fix: panic after connecting to collector (#8344) 2025-06-26 16:34:49 +05:30
Srikanth Chekuri
78d09e2940 chore: log the request and expected response payload (#8341) 2025-06-26 09:40:31 +00:00
Srikanth Chekuri
6cb7f152e1 chore: bump opamp-go version (#8310) 2025-06-26 15:01:17 +05:30
Amlan Kumar Nandy
f6730d3d09 chore: update memory usage field in hosts list to exclude cached memory (#8173) 2025-06-26 13:36:16 +07:00
Nityananda Gohain
899a6ab70a fix: fetch only required traces fields (#8351)
* fix: fetch only required traces fields

* fix: remove only logs case in field name

* fix: add extra if condition for logs json field names

* fix: tests
2025-06-25 18:29:22 +05:30
Amlan Kumar Nandy
a4b852bb99 chore: fix environment filter in infra monitoring (#8357) 2025-06-25 10:46:05 +00:00
Vishal Sharma
92cd108c0d doc: update docker metrics doc link (#8358) 2025-06-25 15:55:12 +05:30
SagarRajput-7
34c116fc7e fix: fixed stepInterval not getting updated in the request payload for Bar (#8350)
* fix: fixed stepInterval not getting updated in the request payload for Bar

* fix: added test case
2025-06-25 11:47:59 +05:30
Vibhu Pandey
250646a354 feat(telemetry): remove telemetry (#8326) 2025-06-24 15:59:23 +00:00
Shaheer Kochai
00191d5774 fix: show status message in trace details v2 drawer (#8346) 2025-06-24 15:36:36 +00:00
Sahil Khan
525a0d7a1a fix: back button issue in trace details page (#8347) 2025-06-24 15:27:24 +00:00
Sahil Khan
564edc7430 fix: added network call on search in explorercolumnsrenderer with debounce (#8325) 2025-06-24 15:16:53 +00:00
Amlan Kumar Nandy
78f396b94a chore: add environment filter in infra monitoring (#8309) 2025-06-24 13:25:34 +00:00
Sahil Khan
9e53c150b8 fix: added missing context provider (#8342) 2025-06-24 18:00:47 +05:30
Shaheer Kochai
f80a6c3014 feat: add support for expandable popover for stack message and body in trace details page (#8330)
* feat: add support for expandable popover for stack message and body in trace details page

* refactor: overall improvements + refactor ExpandableAttribute
2025-06-24 11:52:33 +00:00
Vishal Sharma
1eff6d82c9 fix: color code expiry dates in ingestion key settings (#8323) 2025-06-24 13:00:30 +05:30
Shaheer Kochai
f138eff26c fix: fix the flickering in logs explorer table view (#8304) 2025-06-24 08:31:13 +04:30
Vishal Sharma
50f3fc0ff9 Chore/update request texts and integrations (#8305)
* chore: update request dashboard and integrations text

* chore: hide tab/table when data is not available

* chore: add new template text

* fix: test case
2025-06-23 14:24:47 +00:00
Yunus M
ebcb172614 feat(user-profile): update organisation onboarding questions (#8206)
* feat: update org onboarding questions

* feat: type updates

* chore(user-profile): update the onboarding questions

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-06-23 13:27:05 +00:00
Shaheer Kochai
133c0deaa8 fix: prevent sending order by id with traces query (#8250)
* fix: prevent sending order by id with traces query

* test: write tests for preventing sending order by id with traces query
2025-06-23 12:34:59 +00:00
Shaheer Kochai
35e8165463 fix: recalculate the query_range start and end timestamps for logs explorer chart and list queries (#8277)
* fix: recalculate start and end time only for relative time ranges

* fix: don't dispatch UpdateTimeInterval on initial load

* fix: change list query to state instead of memo

* fix: fix the failing test

* chore: improvement to the test

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-06-23 09:06:46 +00:00
Srikanth Chekuri
6d009c6607 chore: recognize variable in expression (#8328) 2025-06-23 08:30:50 +00:00
Amlan Kumar Nandy
f0994e52c0 chore: alerts fixes and improvements (#8327) 2025-06-23 07:08:17 +00:00
Srikanth Chekuri
7f5b388722 chore: add time range optimization for trace id search (#8317) 2025-06-23 04:09:19 +00:00
Sahil Khan
b11a4c0c21 fix: log details filters use data types from log data response as primary data type (#8278)
* fix: log details filters use data types from log data response as primary data type

* chore: added test cases

* test: add comprehensive unit tests for chooseAutocompleteFromCustomValue function

* fix: added datatypes to util and test cases

* fix: added new tests
2025-06-22 10:58:43 +00:00
Srikanth Chekuri
bbb21f608f chore: more validation, zero values and enforce max step interval (#8319) 2025-06-21 17:49:33 +05:30
Nityananda Gohain
50a5b88708 fix: fetch only required log fields (#8299)
* fix: fetch only required log fields

* fix: update old endpoints

* fix: remove old code
2025-06-21 04:37:57 +00:00
Vibhu Pandey
5601c0886d chore(signoz): deprecate all flags (#8308)
Deprecate all flags

- Use querier.config.fluxInterval in lieu of passing `--flux-interval` and `--flux-interval-for-trace-detail`
- Remove `--gateway-url`
- Use telemetrystore.clickhouse.cluster in lieu of passing `--cluster` or `--cluster-name`
- Add an `unparam` check in the linter. Updated some functions across the querier codebase to be compatible with this linter.
- Remove prometheus config from docker builds.
2025-06-21 00:55:38 +05:30
Srikanth Chekuri
5b342b9b5d chore: handle nan/inf in response (#8318) 2025-06-20 22:26:25 +05:30
Vishal Sharma
7ec59c3c77 chore: move posthog and appcues ids to userId and orgId (#8316) 2025-06-20 13:21:59 +00:00
Nityananda Gohain
a12990f0bd fix: update trace panel query (#8315) 2025-06-20 11:18:17 +00:00
Yunus M
1ee1ca7951 fix: update app layout height based on banners visible (#8307)
* fix: update app layout height based on banners visible

* fix: show banners only in logged in state

---------

Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2025-06-20 11:08:30 +05:30
Abhi kumar
3b1bf34d3e feat(changelog): show changelogs for newer versions available (#8270)
* feat(changelog): add getChangelogByVersion API and related types

* feat(changelog): implement ChangelogModal and ChangelogRenderer components with styles

* test(dateUtils): add unit tests for formatDate utility

* chore(changelog): fixed pr review changes

* style(ChangelogRenderer): format SCSS for improved readability

* feat(SideNav): integrate ChangelogModal and manage its visibility state

* feat(changelog): refactor changelog handling and integrate into app state

* test(ChangelogModal): add unit tests for scroll functionality and data rendering

* test(ChangelogRenderer): add unit tests for rendering changelog details

* test(ChangelogModal, ChangelogRenderer): refactor tests

* fix(applayout): not fetching changelog for cloud users

* fix(ChangelogModal): update footer to display feature count dynamically

* fix(ChangelogModal): update link for workspace migration to point to releases page

* feat(ChangelogModal): enhance footer layout and update link behavior

* test(ChangelogModal): update link for workspace migration to point to releases page

* refactor(AppContext): migrate changelog state management to context and update related components

* feat(test-utils): add changelog state and updateChangelog mock to app context

* test(changelogModal): fixed test by adding mock for useAppContext

* fix: added PR review fixes

* Fixed css variable name in ChangelogModal.styles.scss

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

* fix(style): added light mode support for changelog modal

* Fixed heading color token

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

* fix: remove debug log for isLatestVersion in AppLayout

---------

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
2025-06-20 10:55:52 +05:30
Vibhu Pandey
fbcff29fae chore(sqlstore): remove sqlx (#8306)
## 📄 Summary

remove sqlx
2025-06-20 00:34:54 +05:30
Yunus M
81fcca3bd3 fix: use pathname to get channel id while saving (#8303) 2025-06-19 14:57:32 +00:00
Yunus M
4f7d84aa37 fix: use pathname to get channel id (#8298) 2025-06-19 19:28:47 +05:30
Abhi kumar
8f8dedb8b3 fix(sidebar): added fix routes not highlighting, minor gitter fix (#8297)
* fix(sidebar): added fix routes not highlighting, minor gitter fix

* chore(routes): tsc fix

* fix(private): added check in private route for routes with no role

* fix(private): minor fix in condition

* chore: added roles in empty routes
2025-06-19 16:17:54 +05:30
Ankit Nayan
3f65229506 fix: tracefunnel analytics duration fixes + 2-step funnel fixes (#8294) 2025-06-19 06:19:31 +00:00
Srikanth Chekuri
f006260719 chore: find contradictory condition keys in expression (#8238) 2025-06-19 05:40:50 +00:00
Piyush Singariya
3fc8f6c353 fix: JSON Query parse string int value (#8292)
* fix: json query parse string int

* chore: minor

* Update pkg/query-service/app/logs/v3/enrich_query_test.go

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

---------

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
2025-06-18 16:14:23 +00:00
Yunus M
e02ae9a5c4 feat: show billing, settings to admin when workspace is blocked (#8291)
* feat: show billing, settings to admin when workspace is blocked

* feat: enable keyboard shortcuts page for all

* feat: remove duplicated option
2025-06-18 20:43:30 +05:30
Nityananda Gohain
1989d07e52 fix: delete existing agents in migration (#8289) 2025-06-18 18:06:36 +05:30
Shaheer Kochai
78194ae955 chore: remove dev env check (#7994)
Co-authored-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-18 07:45:54 +00:00
Shivanshu Raj Shrivastava
da1b6d1ed0 feat: adds a final part of trace funnel feature (analytics APIs, and analytics queries) implementation (#8129)
* feat: trace funnel queries

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: update access

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: fix queries

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: minor fix in handler

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: update clauses

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: update step overview queries

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: add new api endpoints for analytics (#8253)

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fixing steps and funnel (#8283)

* add todo: remove identical function

---------

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
Co-authored-by: Ankit Nayan <ankit@signoz.io>
2025-06-18 07:40:20 +00:00
Amlan Kumar Nandy
d3c76ae8be chore: update alert details error state (#8246) 2025-06-18 07:20:07 +00:00
Shaheer Kochai
bed3dbc698 chore: funnel run and save flow changes (#8231)
* feat: while the funnel steps are invalid, handle auto save in local storage

* chore: handle lightmode style in 'add span to funnel' modal

* fix: don't save incomplete steps state in local storage if last saved configuration has valid steps

* chore: close the 'Add span to funnel' modal on clicking save or discard

* chore: deprecate the run funnel flow for unexecuted funnel

* feat: change the funnel configuration save logic, and deprecate auto save

* refactor: send all steps in the payload of analytics/overview

* refactor: send all steps in the payload of analytics/steps (graph API)

* chore: send all steps in the payload of analytics/steps/overview API

* chore: send funnel steps with slow and error traces + deprecate the refetch on latency type change

* chore: overall improvements

* chore: change the save funnel icon + increase the width of funnel steps

* fix: make the changes w.r.t. the updated funnel steps validation API + bugfixes

* fix: remove funnelId from funnel results APIs

* fix: handle edge case i.e. refetch funnel results on deleting a funnel step

* chore: remove funnel steps configuration cache on removing funnel

* chore: don't refetch the results on changing the latency type

* fix: fix the edge cases of save funnel button being enabled even after saving the funnel steps

* chore: remove the span count column from top traces tables

* fix: fix the failing CI check by removing unnecessary props / fixing the types
2025-06-18 06:08:41 +00:00
Amlan Kumar Nandy
66affb0ece chore: add unit tests for hosts list in infra monitoring (#8230) 2025-06-18 05:53:42 +00:00
Vibhu Pandey
75f62372ae feat(analytics): move frontend event to group_id (#8279)
* chore(api_key): add api key analytics

* feat(analytics): move frontend events

* feat(analytics): add collect config

* feat(analytics): add collect config

* feat(analytics): fix traits

* feat(analytics): fix traits

* feat(analytics): fix traits

* feat(analytics): fix traits

* feat(analytics): fix traits

* feat(analytics): fix factor api key

* fix(analytics): fix org stats

* fix(analytics): fix org stats
2025-06-18 01:54:55 +05:30
Sahil Khan
a3ac307b4e fix: sentry issues SIGNOZ-UI-Q9 SIGNOZ-UI-QA (#8281) 2025-06-17 23:44:21 +05:30
Vikrant Gupta
7672d2f636 chore(user): return user resource on register user request (#8271) 2025-06-17 17:26:06 +05:30
aniketio-ctrl
e3018d9529 fix(8232): added fix for error graph in services tab (#8263) 2025-06-17 08:08:38 +00:00
Nityananda Gohain
385ee268e3 fix: use first org in agent migration (#8269)
* fix: exit gracefully if there is more than one org

* fix: use first org
2025-06-17 06:25:12 +00:00
Piyush Singariya
01036a8a2f fix: top level keys EXIST and NOTEXIST filter simulation (#8255)
* fix: top level keys EXIST and NOTEXIST filter simulation

* test: fix tests

* test: temporarily change collector version

* test: updating go.mod

* fix: tests

* chore: revert changes

* chore: update collector's reference to stable version
2025-06-17 11:28:40 +05:30
Srikanth Chekuri
1542b9d6e9 chore: disallow unknown fields and address gaps (#8237) 2025-06-16 23:11:28 +05:30
Nityananda Gohain
8455349459 fix: support orgId and postgres in agents (#7327)
* fix: initial commit for agents

* fix: remove frontend package manger commit

* fix: use sqlstore

* fix: opamp server changes

* fix: tests

* fix: tests

* fix: minor changes

* fix: migrations

* fix: use uuid7

* fix: use default orgID for single tenant

* fix: pipelines tests fixed

* fix: use correct agentId

* fix: use orgID in coordinator

* fix: fix tests

* fix: remove redundant hash check

* fix: migration

* fix: migration

* fix: address comments

* fix: rename migration file

* fix: remove unwanted orgid code

* fix: use orggetter

* fix: comment

* fix: schema cleanup

* fix: minor changes

* chore: addresses changes

* fix: add back agentID as it used ulid

* fix: keep only 50 agents for an orgId

* chore: explicitly specify text type

* chore: use valuer.uuid for orgid

* fix: linting complain

* chore: final fixes

* chore: minor changes

* fix: add not null

* fix: fe tests

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-06-16 20:07:16 +05:30
aniketio-ctrl
c488a24d09 fix(prom-aggr): fix prom aggregation queries using utf-8 charset (#8262)
* fix(prom-aggr): added fix for prom aggregation

* fix(prom-aggr): added fix for prom aggregation
2025-06-16 19:42:17 +05:30
Vikrant Gupta
9091cf61fd chore(github): fix codeowners file (#8261) 2025-06-16 11:37:07 +00:00
Ekansh Gupta
eeb2ab3212 feat: added support for trace_operators in query range v5 (#8165)
* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-06-16 16:43:51 +05:30
Nageshbansal
3f128f0f1d fix: configs in multi-node docker-swarm cluster (#8239) 2025-06-16 11:42:02 +05:30
Vibhu Pandey
59ff7ed1e1 feat(licensing): add analytics (#8252) 2025-06-16 01:09:41 +05:30
Vibhu Pandey
d236b6ce1e feat(statsreporter): add stats for telemetry.*.last_observed.time (#8251)
## 📄 Summary

- add stats for telemetry.*.last_observed.time
2025-06-16 00:02:17 +05:30
Dimitris Mavrommatis
44b118a212 fix: span links tab to span details drawer (#7888)
* fix: span links tab to span details drawer

* Update LinkedSpans.styles.scss

---------

Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2025-06-15 16:22:36 +04:30
Dimitris Mavrommatis
3fc6f7ee63 feat(trace): add visuals for events on span waterfall and flamegraph (#7889)
* fix: add visuals on waterfall and flamegraph for span events

* fix: correct offsets for events

* fix: addressed comments

* chore: update the name of the import

* fix: interface change

* chore: formatting

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-06-15 10:39:39 +00:00
Sahil Khan
f1016baf03 chore: updated http-proxy-middleware to 3.0.5 from 3.0.3 (#8245) 2025-06-13 21:35:48 +05:30
Amlan Kumar Nandy
e5c0d9e44a chore: allow url as label value in alerts (#8244) 2025-06-13 17:47:01 +07:00
Yunus M
e51056c804 fix: use preference.name rather than preference.key (#8234) 2025-06-13 11:44:42 +05:30
Nityananda Gohain
7d8dad4550 Revert "fix: remove whitespace from sso cert (#8141)" (#8233)
This reverts commit 44ea237039.
2025-06-12 22:39:24 +05:30
Yunus M
c477e0ef16 feat: sidebar revamp (#8087)
* feat: sidebar revamp - initial commit

* feat: move billing and other isolated routes to settings

* feat: handle channel related routes

* feat: update account settings page

* feat: show dropdown for secondary items

* feat: handle reordering of pinned nav items

* feat: improve font load performance

* feat: update font reference

* feat: update page content styles

* feat: handle external links in sidebar

* feat: handle secondary nav item clicks

* feat: handle pinned nav items reordering

* feat: handle sidenav pinned state using preference, handle light mode

* feat: show sidenav items conditionally

* feat: show version diff indicator only to self hosted users

* feat: show billing to admins only and integrations to cloud and enterprise users

* feat: update fallback link

* feat: handle settings menu items

* fix: settings page reload on nav change

* feat: intercom to pylon

* feat: show invite user to admin only

* feat: handle review comments

* chore: remove react query dev tools

* feat: minor ui updates

* feat: update changes based on preference store changes

* feat: handle sidenav shortcut state

* feat: handle scroll for more

* feat: maintain shortcuts order

* feat: manage license ui updates

* feat: manage settings options based on license and roles

* feat: update types

* chore: add logEvents

* feat: update types

* chore: fix type errors

* chore: remove unused variable

* feat: update my settings page test cases

---------

Co-authored-by: makeavish <makeavish786@gmail.com>
2025-06-12 19:55:32 +05:30
Shaheer Kochai
fff7f8fc76 feat: add span scope filter to trace details page (#8005)
* feat: add span scope filter to trace details page

* chore: add tests for the span scope selector flows when onchange and query are provided

* refactor: remove the unnecessary queryName prop and infer it from query

* fix: fix the failing span scope selector tests
2025-06-12 11:03:28 +00:00
Shaheer Kochai
8cfeef4521 fix: fix sentries (#8003)
* fix: handle potential undefined values in groupBy calculation in TracesExplorer

* fix: add optional chaining for aggregateAttribute key check in Query component

* fix: add optional chaining for filters in SpanScopeSelector to handle potential undefined values

* fix: fix the warning in logs chart by adding the missing date-time format option

* fix: improve trace graph allDataPoints null check

* chore: remove the keys.length from null check
2025-06-12 15:28:06 +04:30
522 changed files with 30993 additions and 11105 deletions


@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.0
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.0
container_name: schema-migrator-async
command:
- async

.github/CODEOWNERS

@@ -12,4 +12,9 @@
/pkg/factory/ @grandwizard28
/pkg/types/ @grandwizard28
.golangci.yml @grandwizard28
**/(zeus|licensing|sqlmigration)/ @vikrantgupta25
/pkg/zeus/ @vikrantgupta25
/pkg/licensing/ @vikrantgupta25
/pkg/sqlmigration/ @vikrantgupta25
/ee/zeus/ @vikrantgupta25
/ee/licensing/ @vikrantgupta25
/ee/sqlmigration/ @vikrantgupta25


@@ -22,7 +22,7 @@ jobs:
- 24.1.2-alpine
- 24.12-alpine
schema-migrator-version:
- v0.111.38
- v0.128.0
postgres-version:
- 15
if: |


@@ -7,6 +7,7 @@ linters:
- sloglint
- depguard
- iface
- unparam
linters-settings:
sloglint:


@@ -90,6 +90,15 @@ apiserver:
- /api/v1/version
- /
##################### Querier #####################
querier:
# The TTL for cached query results.
cache_ttl: 168h
# The interval for recent data that should not be cached.
flux_interval: 5m
# The maximum number of concurrent queries for missing ranges.
max_concurrent_queries: 4
##################### TelemetryStore #####################
telemetrystore:
# Maximum number of idle connections in the connection pool.
@@ -103,13 +112,15 @@ telemetrystore:
clickhouse:
# The DSN to use for clickhouse.
dsn: tcp://localhost:9000
# The cluster name to use for clickhouse.
cluster: cluster
# The query settings for clickhouse.
settings:
max_execution_time: 0
max_execution_time_leaf: 0
timeout_before_checking_execution_speed: 0
max_bytes_to_read: 0
max_result_rows_for_ch_query: 0
max_result_rows: 0
##################### Prometheus #####################
prometheus:
@@ -224,3 +235,12 @@ statsreporter:
enabled: true
# The interval at which the stats are collected.
interval: 6h
collect:
# Whether to collect identities and traits (emails).
identities: true
##################### Gateway (License only) #####################
gateway:
# The URL of the gateway's api.
url: http://localhost:8080


@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.87.0
image: signoz/signoz:v0.89.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -194,6 +194,7 @@ services:
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- SIGNOZ_JWT_SECRET=secret
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -206,7 +207,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.42
image: signoz/signoz-otel-collector:v0.128.0
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -230,7 +231,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.0
deploy:
restart_policy:
condition: on-failure


@@ -100,26 +100,32 @@ services:
# - "9000:9000"
# - "8123:8123"
# - "9181:9181"
configs:
- source: clickhouse-config
target: /etc/clickhouse-server/config.xml
- source: clickhouse-users
target: /etc/clickhouse-server/users.xml
- source: clickhouse-custom-function
target: /etc/clickhouse-server/custom-function.xml
- source: clickhouse-cluster
target: /etc/clickhouse-server/config.d/cluster.xml
volumes:
- ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
- ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
- ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
- ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
- ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
- clickhouse:/var/lib/clickhouse/
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.87.0
image: signoz/signoz:v0.89.0
command:
- --config=/root/config/prometheus.yml
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
volumes:
- ../common/signoz/prometheus.yml:/root/config/prometheus.yml
- ../common/dashboards:/root/config/dashboards
- sqlite:/var/lib/signoz/
configs:
- source: signoz-prometheus-config
target: /root/config/prometheus.yml
environment:
- SIGNOZ_ALERTMANAGER_PROVIDER=signoz
- SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
@@ -129,6 +135,7 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -141,15 +148,17 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.42
image: signoz/signoz-otel-collector:v0.128.0
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
- --copy-path=/var/tmp/collector-config.yaml
- --feature-gates=-pkg.translator.prometheus.NormalizeName
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
configs:
- source: otel-collector-config
target: /etc/otel-collector-config.yaml
- source: otel-manager-config
target: /etc/manager-config.yaml
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
- LOW_CARDINAL_EXCEPTION_GROUPING=false
@@ -165,7 +174,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.0
deploy:
restart_policy:
condition: on-failure
@@ -186,3 +195,24 @@ volumes:
name: signoz-sqlite
zookeeper-1:
name: signoz-zookeeper-1
configs:
clickhouse-config:
file: ../common/clickhouse/config.xml
clickhouse-users:
file: ../common/clickhouse/users.xml
clickhouse-custom-function:
file: ../common/clickhouse/custom-function.xml
clickhouse-cluster:
file: ../common/clickhouse/cluster.xml
signoz-prometheus-config:
file: ../common/signoz/prometheus.yml
# If you have multiple dashboard files, you can list them individually:
# dashboard-foo:
# file: ../common/dashboards/foo.json
# dashboard-bar:
# file: ../common/dashboards/bar.json
otel-collector-config:
file: ./otel-collector-config.yaml
otel-manager-config:
file: ../common/signoz/otel-collector-opamp-config.yaml


@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_exporter: signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -60,27 +60,16 @@ exporters:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
enabled: true
disable_v2: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
@@ -92,11 +81,11 @@ service:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [clickhousemetricswrite, signozclickhousemetrics]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]


@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.87.0}
image: signoz/signoz:${VERSION:-v0.89.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -197,6 +197,7 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -210,7 +211,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -236,7 +237,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
container_name: schema-migrator-sync
command:
- sync
@@ -247,7 +248,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
container_name: schema-migrator-async
command:
- async


@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.87.0}
image: signoz/signoz:${VERSION:-v0.89.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -130,6 +130,7 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -142,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -164,7 +165,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
container_name: schema-migrator-sync
command:
- sync
@@ -176,7 +177,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
container_name: schema-migrator-async
command:
- async


@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_exporter: signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -60,27 +60,16 @@ exporters:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
@@ -92,11 +81,11 @@ service:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [clickhousemetricswrite, signozclickhousemetrics]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]


@@ -16,7 +16,7 @@ __Table of Contents__
- [Prerequisites](#prerequisites-1)
- [Install Helm Repo and Charts](#install-helm-repo-and-charts)
- [Start the OpenTelemetry Demo App](#start-the-opentelemetry-demo-app-1)
- [Moniitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [Monitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [What's next](#whats-next)


@@ -6,11 +6,13 @@ import (
"time"
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/analyticstypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
@@ -23,16 +25,17 @@ type provider struct {
config licensing.Config
settings factory.ScopedProviderSettings
orgGetter organization.Getter
analytics analytics.Analytics
stopChan chan struct{}
}
func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
return New(ctx, providerSettings, config, store, zeus, orgGetter)
return New(ctx, providerSettings, config, store, zeus, orgGetter, analytics)
})
}
func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) (licensing.Licensing, error) {
func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) (licensing.Licensing, error) {
settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing")
licensestore := sqllicensingstore.New(sqlstore)
return &provider{
@@ -42,6 +45,7 @@ func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Conf
settings: settings,
orgGetter: orgGetter,
stopChan: make(chan struct{}),
analytics: analytics,
}, nil
}
@@ -159,6 +163,25 @@ func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUI
return err
}
stats := licensetypes.NewStatsFromLicense(activeLicense)
provider.analytics.Send(ctx,
analyticstypes.Track{
UserId: "stats_" + organizationID.String(),
Event: "License Updated",
Properties: analyticstypes.NewPropertiesFromMap(stats),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: organizationID.String(),
},
},
},
analyticstypes.Group{
UserId: "stats_" + organizationID.String(),
GroupId: organizationID.String(),
Traits: analyticstypes.NewTraitsFromMap(stats),
},
)
return nil
}


@@ -11,11 +11,9 @@ RUN apk update && \
COPY ./target/${OS}-${TARGETARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]
ENTRYPOINT ["./signoz"]


@@ -12,11 +12,9 @@ RUN apk update && \
rm -rf /var/cache/apk/*
COPY ./target/${OS}-${ARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]


@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
@@ -17,6 +16,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -26,8 +26,7 @@ import (
)
type APIHandlerOptions struct {
DataConnector interfaces.DataConnector
PreferSpanMetrics bool
DataConnector interfaces.Reader
RulesManager *rules.Manager
UsageManager *usage.Manager
IntegrationsController *integrations.Controller
@@ -51,7 +50,6 @@ type APIHandler struct {
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
PreferSpanMetrics: opts.PreferSpanMetrics,
RuleManager: opts.RulesManager,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
@@ -61,7 +59,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Querier),
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier),
})
if err != nil {


@@ -96,7 +96,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
return
}
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email)
if err != nil {
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)


@@ -59,7 +59,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
}
}
if ah.opts.PreferSpanMetrics {
if constants.IsPreferSpanMetrics {
for idx, feature := range featureSet {
if feature.Name == licensetypes.UseSpanMetrics {
featureSet[idx].Active = true


@@ -1,39 +0,0 @@
package db
import (
"time"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/prometheus"
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
)
type ClickhouseReader struct {
conn clickhouse.Conn
appdb sqlstore.SQLStore
*basechr.ClickHouseReader
}
func NewDataConnector(
sqlDB sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
cluster string,
fluxIntervalForTraceDetail time.Duration,
cache cache.Cache,
) *ClickhouseReader {
chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, fluxIntervalForTraceDetail, cache)
return &ClickhouseReader{
conn: telemetryStore.ClickhouseDB(),
appdb: sqlDB,
ClickHouseReader: chReader,
}
}
func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
return r.appdb
}


@@ -6,14 +6,10 @@ import (
"net"
"net/http"
_ "net/http/pprof" // http profiler
"time"
"github.com/gorilla/handlers"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
@@ -32,6 +28,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
@@ -41,7 +38,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
@@ -59,62 +55,55 @@ type ServerOptions struct {
Jwt *authtypes.JWT
}
// Server runs HTTP api service
// Server runs HTTP, Mux and a grpc server
type Server struct {
serverOptions *ServerOptions
ruleManager *baserules.Manager
config signoz.Config
signoz *signoz.SigNoz
jwt *authtypes.JWT
ruleManager *baserules.Manager
// public http router
httpConn net.Listener
httpServer *http.Server
httpConn net.Listener
httpServer *http.Server
httpHostPort string
// private http
privateConn net.Listener
privateHTTP *http.Server
privateConn net.Listener
privateHTTP *http.Server
privateHostPort string
opampServer *opamp.Server
// Usage manager
usageManager *usage.Manager
opampServer *opamp.Server
unavailableChannel chan healthcheck.Status
}
// HealthCheckStatus returns health check status channel a client can subscribe to
func (s Server) HealthCheckStatus() chan healthcheck.Status {
return s.unavailableChannel
}
// NewServer creates and initializes Server
func NewServer(serverOptions *ServerOptions) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
if err != nil {
return nil, err
}
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
if err != nil {
return nil, err
}
reader := db.NewDataConnector(
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
serverOptions.Cluster,
fluxIntervalForTraceDetail,
serverOptions.SigNoz.Cache,
reader := clickhouseReader.NewReader(
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.TelemetryStore.Cluster(),
config.Querier.FluxInterval,
signoz.Cache,
)
rm, err := makeRulesManager(
serverOptions.SigNoz.SQLStore.SQLxDB(),
reader,
serverOptions.SigNoz.Cache,
serverOptions.SigNoz.Alertmanager,
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
serverOptions.SigNoz.Modules.OrgGetter,
signoz.Cache,
signoz.Alertmanager,
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.Modules.OrgGetter,
)
if err != nil {
@@ -122,19 +111,16 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
// initiate opamp
_, err = opAmpModel.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB())
if err != nil {
return nil, err
}
opAmpModel.Init(signoz.SQLStore, signoz.Instrumentation.Logger(), signoz.Modules.OrgGetter)
integrationsController, err := integrations.NewController(serverOptions.SigNoz.SQLStore)
integrationsController, err := integrations.NewController(signoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create integrations controller: %w", err,
)
}
cloudIntegrationsController, err := cloudintegrations.NewController(serverOptions.SigNoz.SQLStore)
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create cloud provider integrations controller: %w", err,
@@ -143,7 +129,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
signoz.SQLStore,
integrationsController.GetPipelinesForInstalledIntegrations,
)
if err != nil {
return nil, err
@@ -151,7 +138,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
// initiate agent config handler
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
DB: serverOptions.SigNoz.SQLStore.SQLxDB(),
Store: signoz.SQLStore,
AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController},
})
if err != nil {
@@ -159,7 +146,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
// start the usagemanager
usageManager, err := usage.New(serverOptions.SigNoz.Licensing, serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus, serverOptions.SigNoz.Modules.OrgGetter)
usageManager, err := usage.New(signoz.Licensing, signoz.TelemetryStore.ClickhouseDB(), signoz.Zeus, signoz.Modules.OrgGetter)
if err != nil {
return nil, err
}
@@ -168,47 +155,36 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
return nil, err
}
telemetry.GetInstance().SetReader(reader)
telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
if err != nil {
return nil, err
}
apiOpts := api.APIHandlerOptions{
DataConnector: reader,
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
RulesManager: rm,
UsageManager: usageManager,
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: fluxInterval,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: serverOptions.GatewayUrl,
JWT: serverOptions.Jwt,
GatewayUrl: config.Gateway.URL.String(),
JWT: jwt,
}
apiHandler, err := api.NewAPIHandler(apiOpts, serverOptions.SigNoz)
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
if err != nil {
return nil, err
}
s := &Server{
config: config,
signoz: signoz,
jwt: jwt,
ruleManager: rm,
serverOptions: serverOptions,
httpHostPort: baseconst.HTTPHostPort,
privateHostPort: baseconst.PrivateHostPort,
unavailableChannel: make(chan healthcheck.Status),
usageManager: usageManager,
}
httpServer, err := s.createPublicServer(apiHandler, serverOptions.SigNoz.Web)
httpServer, err := s.createPublicServer(apiHandler, signoz.Web)
if err != nil {
return nil, err
@@ -224,35 +200,28 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
s.privateHTTP = privateServer
s.opampServer = opamp.InitializeServer(
&opAmpModel.AllAgents, agentConfMgr,
&opAmpModel.AllAgents, agentConfMgr, signoz.Instrumentation,
)
orgs, err := apiHandler.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
if err != nil {
return nil, err
}
for _, org := range orgs {
errorList := reader.PreloadMetricsMetadata(context.Background(), org.ID)
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
}
return s, nil
}
// HealthCheckStatus returns health check status channel a client can subscribe to
func (s Server) HealthCheckStatus() chan healthcheck.Status {
return s.unavailableChannel
}
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter()
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max,
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewAnalytics().Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterPrivateRoutes(r)
@@ -274,17 +243,16 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger())
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max,
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewAnalytics().Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
@@ -325,7 +293,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
func (s *Server) initListeners() error {
// listen on public port
var err error
publicHostPort := s.serverOptions.HTTPHostPort
publicHostPort := s.httpHostPort
if publicHostPort == "" {
return fmt.Errorf("baseconst.HTTPHostPort is required")
}
@@ -335,10 +303,10 @@ func (s *Server) initListeners() error {
return err
}
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
// listen on private port to support internal services
privateHostPort := s.serverOptions.PrivateHostPort
privateHostPort := s.privateHostPort
if privateHostPort == "" {
return fmt.Errorf("baseconst.PrivateHostPort is required")
@@ -348,7 +316,7 @@ func (s *Server) initListeners() error {
if err != nil {
return err
}
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.privateHostPort))
return nil
}
@@ -368,7 +336,7 @@ func (s *Server) Start(ctx context.Context) error {
}
go func() {
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.httpHostPort))
switch err := s.httpServer.Serve(s.httpConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
@@ -394,7 +362,7 @@ func (s *Server) Start(ctx context.Context) error {
}
go func() {
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.privateHostPort))
switch err := s.privateHTTP.Serve(s.privateConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
@@ -446,7 +414,6 @@ func (s *Server) Stop(ctx context.Context) error {
}
func makeRulesManager(
db *sqlx.DB,
ch baseint.Reader,
cache cache.Cache,
alertmanager alertmanager.Alertmanager,
@@ -459,7 +426,6 @@ func makeRulesManager(
managerOpts := &baserules.ManagerOptions{
TelemetryStore: telemetryStore,
Prometheus: prometheus,
DBConn: db,
Context: context.Background(),
Logger: zap.L(),
Reader: ch,

View File

@@ -37,9 +37,14 @@ func GetDefaultSiteURL() string {
const DotMetricsEnabled = "DOT_METRICS_ENABLED"
var IsDotMetricsEnabled = false
var IsPreferSpanMetrics = false
func init() {
if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
IsDotMetricsEnabled = true
}
if GetOrDefaultEnv("USE_SPAN_METRICS", "false") == "true" {
IsPreferSpanMetrics = true
}
}
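The init block above resolves DOT_METRICS_ENABLED and USE_SPAN_METRICS from the environment once at process start. A minimal Go sketch of a hypothetical consumer follows; the call site and the underscore-style metric name are illustrative assumptions (only the dot-separated form appears elsewhere in this changeset).

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/query-service/constants"
)

func main() {
	// Illustrative only: branch on the flag that init() set from DOT_METRICS_ENABLED.
	metricName := "k8s_pod_cpu_usage" // assumed legacy underscore form
	if constants.IsDotMetricsEnabled {
		metricName = "k8s.pod.cpu.usage"
	}
	fmt.Println("using metric:", metricName)
}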

View File

@@ -1,11 +0,0 @@
package interfaces
import (
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
)
// Connector defines methods for interaction
// with o11y data. for example - clickhouse
type DataConnector interface {
baseint.Reader
}

View File

@@ -9,9 +9,11 @@ import (
"github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
@@ -20,6 +22,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -101,10 +104,14 @@ func main() {
fileprovider.NewFactory(),
},
}, signoz.DeprecatedFlags{
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
GatewayUrl: gatewayUrl,
})
if err != nil {
zap.L().Fatal("Failed to create config", zap.Error(err))
@@ -134,12 +141,20 @@ func main() {
zeus.Config(),
httpzeus.NewProviderFactory(),
licensing.Config(24*time.Hour, 3),
func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter)
func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics)
},
signoz.NewEmailingProviderFactories(),
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] {
existingFactories := signoz.NewSQLSchemaProviderFactories(sqlstore)
if err := existingFactories.Add(postgressqlschema.NewFactory(sqlstore)); err != nil {
zap.L().Fatal("Failed to add postgressqlschema factory", zap.Error(err))
}
return existingFactories
},
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
)
@@ -147,20 +162,7 @@ func main() {
zap.L().Fatal("Failed to create signoz", zap.Error(err))
}
serverOptions := &app.ServerOptions{
Config: config,
SigNoz: signoz,
HTTPHostPort: baseconst.HTTPHostPort,
PreferSpanMetrics: preferSpanMetrics,
PrivateHostPort: baseconst.PrivateHostPort,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
GatewayUrl: gatewayUrl,
Jwt: jwt,
}
server, err := app.NewServer(serverOptions)
server, err := app.NewServer(config, signoz, jwt)
if err != nil {
zap.L().Fatal("Failed to create server", zap.Error(err))
}

View File

@@ -0,0 +1,36 @@
package postgressqlschema
import (
"strings"
"github.com/SigNoz/signoz/pkg/sqlschema"
)
type Formatter struct {
sqlschema.Formatter
}
func (formatter Formatter) SQLDataTypeOf(dataType sqlschema.DataType) string {
if dataType == sqlschema.DataTypeTimestamp {
return "TIMESTAMPTZ"
}
return strings.ToUpper(dataType.String())
}
func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
switch strings.ToUpper(dataType) {
case "TIMESTAMPTZ", "TIMESTAMP", "TIMESTAMP WITHOUT TIME ZONE", "TIMESTAMP WITH TIME ZONE":
return sqlschema.DataTypeTimestamp
case "INT8":
return sqlschema.DataTypeBigInt
case "INT2", "INT4", "SMALLINT", "INTEGER":
return sqlschema.DataTypeInteger
case "BOOL", "BOOLEAN":
return sqlschema.DataTypeBoolean
case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
return sqlschema.DataTypeText
}
return formatter.Formatter.DataTypeOf(dataType)
}
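The formatter above only overrides the timestamp mapping and the reverse lookup of Postgres catalog type names; everything else falls through to the embedded base formatter. A minimal sketch of the round trip, assuming sqlschema.NewFormatter accepts a bun dialect (the provider below passes sqlstore.BunDB().Dialect(); pgdialect.New() is substituted here only to keep the sketch self-contained):

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
	"github.com/SigNoz/signoz/pkg/sqlschema"
	"github.com/uptrace/bun/dialect/pgdialect"
)

func main() {
	// Built the same way as in the provider's New: the Postgres formatter wraps
	// the base formatter and overrides only what differs on Postgres.
	fmter := postgressqlschema.Formatter{Formatter: sqlschema.NewFormatter(pgdialect.New())}

	// Timestamps are emitted as TIMESTAMPTZ rather than the generic TIMESTAMP.
	fmt.Println(fmter.SQLDataTypeOf(sqlschema.DataTypeTimestamp))

	// Catalog type names read back from information_schema are normalised to
	// the shared sqlschema data types (INT8 -> bigint, BOOL -> boolean).
	fmt.Println(fmter.DataTypeOf("INT8"))
	fmt.Println(fmter.DataTypeOf("BOOL"))
}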

View File

@@ -0,0 +1,285 @@
package postgressqlschema
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)
type provider struct {
settings factory.ScopedProviderSettings
fmter sqlschema.SQLFormatter
sqlstore sqlstore.SQLStore
operator sqlschema.SQLOperator
}
func NewFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config] {
return factory.NewProviderFactory(factory.MustNewName("postgres"), func(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config) (sqlschema.SQLSchema, error) {
return New(ctx, providerSettings, config, sqlstore)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config, sqlstore sqlstore.SQLStore) (sqlschema.SQLSchema, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/sqlschema/postgressqlschema")
fmter := Formatter{Formatter: sqlschema.NewFormatter(sqlstore.BunDB().Dialect())}
return &provider{
sqlstore: sqlstore,
fmter: fmter,
settings: settings,
operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
DropConstraint: true,
ColumnIfNotExistsExists: true,
AlterColumnSetNotNull: true,
}),
}, nil
}
func (provider *provider) Formatter() sqlschema.SQLFormatter {
return provider.fmter
}
func (provider *provider) Operator() sqlschema.SQLOperator {
return provider.operator
}
func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
rows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
c.column_name,
c.is_nullable = 'YES',
c.udt_name,
c.column_default
FROM
information_schema.columns AS c
WHERE
c.table_name = ?`, string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
columns := make([]*sqlschema.Column, 0)
for rows.Next() {
var (
name string
sqlDataType string
nullable bool
defaultVal *string
)
if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
return nil, nil, err
}
columnDefault := ""
if defaultVal != nil {
columnDefault = *defaultVal
}
columns = append(columns, &sqlschema.Column{
Name: sqlschema.ColumnName(name),
Nullable: nullable,
DataType: provider.fmter.DataTypeOf(sqlDataType),
Default: columnDefault,
})
}
constraintsRows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
c.column_name,
constraint_name,
constraint_type
FROM
information_schema.table_constraints tc
JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_catalog, table_name, constraint_name)
JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name
WHERE
c.table_name = ?`, string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := constraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
var primaryKeyConstraint *sqlschema.PrimaryKeyConstraint
uniqueConstraintsMap := make(map[string]*sqlschema.UniqueConstraint)
for constraintsRows.Next() {
var (
name string
constraintName string
constraintType string
)
if err := constraintsRows.Scan(&name, &constraintName, &constraintType); err != nil {
return nil, nil, err
}
if constraintType == "PRIMARY KEY" {
if primaryKeyConstraint == nil {
primaryKeyConstraint = (&sqlschema.PrimaryKeyConstraint{
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)},
}).Named(constraintName).(*sqlschema.PrimaryKeyConstraint)
} else {
primaryKeyConstraint.ColumnNames = append(primaryKeyConstraint.ColumnNames, sqlschema.ColumnName(name))
}
}
if constraintType == "UNIQUE" {
if _, ok := uniqueConstraintsMap[constraintName]; !ok {
uniqueConstraintsMap[constraintName] = (&sqlschema.UniqueConstraint{
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)},
}).Named(constraintName).(*sqlschema.UniqueConstraint)
} else {
uniqueConstraintsMap[constraintName].ColumnNames = append(uniqueConstraintsMap[constraintName].ColumnNames, sqlschema.ColumnName(name))
}
}
}
foreignKeyConstraintsRows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
tc.constraint_name,
kcu.table_name AS referencing_table,
kcu.column_name AS referencing_column,
ccu.table_name AS referenced_table,
ccu.column_name AS referenced_column
FROM
information_schema.key_column_usage kcu
JOIN information_schema.table_constraints tc ON kcu.constraint_name = tc.constraint_name AND kcu.table_schema = tc.table_schema
JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name AND ccu.table_schema = tc.table_schema
WHERE
tc.constraint_type = ?
AND kcu.table_name = ?`, "FOREIGN KEY", string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := foreignKeyConstraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
foreignKeyConstraints := make([]*sqlschema.ForeignKeyConstraint, 0)
for foreignKeyConstraintsRows.Next() {
var (
constraintName string
referencingTable string
referencingColumn string
referencedTable string
referencedColumn string
)
if err := foreignKeyConstraintsRows.Scan(&constraintName, &referencingTable, &referencingColumn, &referencedTable, &referencedColumn); err != nil {
return nil, nil, err
}
foreignKeyConstraints = append(foreignKeyConstraints, (&sqlschema.ForeignKeyConstraint{
ReferencingColumnName: sqlschema.ColumnName(referencingColumn),
ReferencedTableName: sqlschema.TableName(referencedTable),
ReferencedColumnName: sqlschema.ColumnName(referencedColumn),
}).Named(constraintName).(*sqlschema.ForeignKeyConstraint))
}
uniqueConstraints := make([]*sqlschema.UniqueConstraint, 0)
for _, uniqueConstraint := range uniqueConstraintsMap {
uniqueConstraints = append(uniqueConstraints, uniqueConstraint)
}
return &sqlschema.Table{
Name: tableName,
Columns: columns,
PrimaryKeyConstraint: primaryKeyConstraint,
ForeignKeyConstraints: foreignKeyConstraints,
}, uniqueConstraints, nil
}
func (provider *provider) GetIndices(ctx context.Context, name sqlschema.TableName) ([]sqlschema.Index, error) {
rows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
ct.relname AS table_name,
ci.relname AS index_name,
i.indisunique AS unique,
i.indisprimary AS primary,
a.attname AS column_name
FROM
pg_index i
LEFT JOIN pg_class ct ON ct.oid = i.indrelid
LEFT JOIN pg_class ci ON ci.oid = i.indexrelid
LEFT JOIN pg_attribute a ON a.attrelid = ct.oid
LEFT JOIN pg_constraint con ON con.conindid = i.indexrelid
WHERE
a.attnum = ANY(i.indkey)
AND con.oid IS NULL
AND ct.relkind = 'r'
AND ct.relname = ?`, string(name))
if err != nil {
return nil, err
}
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
uniqueIndicesMap := make(map[string]*sqlschema.UniqueIndex)
for rows.Next() {
var (
tableName string
indexName string
unique bool
primary bool
columnName string
)
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil {
return nil, err
}
if unique {
if _, ok := uniqueIndicesMap[indexName]; !ok {
uniqueIndicesMap[indexName] = &sqlschema.UniqueIndex{
TableName: name,
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
}
} else {
uniqueIndicesMap[indexName].ColumnNames = append(uniqueIndicesMap[indexName].ColumnNames, sqlschema.ColumnName(columnName))
}
}
}
indices := make([]sqlschema.Index, 0)
for _, index := range uniqueIndicesMap {
indices = append(indices, index)
}
return indices, nil
}
func (provider *provider) ToggleFKEnforcement(_ context.Context, _ bun.IDB, _ bool) error {
return nil
}
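Taken together, GetTable and GetIndices reconstruct a table definition from information_schema and pg_index. The sketch below is a hypothetical direct use of the provider; in the server it is wired up via signoz.NewSQLSchemaProviderFactories plus postgressqlschema.NewFactory (see the ee/query-service main.go diff above), and the table name and zero-value Config here are illustrative assumptions.

package example

import (
	"context"
	"fmt"

	"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlschema"
	"github.com/SigNoz/signoz/pkg/sqlstore"
)

// inspectTable is a hypothetical helper: it builds the Postgres schema provider
// directly and prints what it discovers about one table.
func inspectTable(ctx context.Context, settings factory.ProviderSettings, store sqlstore.SQLStore) error {
	schema, err := postgressqlschema.New(ctx, settings, sqlschema.Config{}, store)
	if err != nil {
		return err
	}

	// Columns, primary/foreign keys, and (separately) the UNIQUE constraints.
	table, uniques, err := schema.GetTable(ctx, sqlschema.TableName("users"))
	if err != nil {
		return err
	}
	fmt.Printf("table %s: %d columns, %d unique constraints\n", table.Name, len(table.Columns), len(uniques))

	// Only unique indices that are not backed by a constraint are reported here.
	indices, err := schema.GetIndices(ctx, sqlschema.TableName("users"))
	if err != nil {
		return err
	}
	fmt.Printf("table %s: %d unique indices\n", table.Name, len(indices))
	return nil
}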

View File

@@ -17,19 +17,21 @@ var (
)
var (
Org = "org"
User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
Org = "org"
User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
AgentConfigVersion = "agent_config_version"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
AgentConfigVersionReference = `("version_id") REFERENCES "agent_config_version" ("id")`
)
type dialect struct{}
@@ -274,6 +276,8 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, FactorPasswordReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
} else if reference == AgentConfigVersion && !slices.Contains(fkReferences, AgentConfigVersionReference) {
fkReferences = append(fkReferences, AgentConfigVersionReference)
}
}

View File

@@ -10,7 +10,6 @@ import (
"github.com/jackc/pgx/v5/pgconn"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/jackc/pgx/v5/stdlib"
"github.com/jmoiron/sqlx"
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect/pgdialect"
)
@@ -19,7 +18,6 @@ type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
sqlxdb *sqlx.DB
dialect *dialect
}
@@ -61,7 +59,6 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
sqlxdb: sqlx.NewDb(sqldb, "postgres"),
dialect: new(dialect),
}, nil
}
@@ -74,10 +71,6 @@ func (provider *provider) SQLDB() *sql.DB {
return provider.sqldb
}
func (provider *provider) SQLxDB() *sqlx.DB {
return provider.sqlxdb
}
func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}

View File

@@ -78,7 +78,7 @@
"fontfaceobserver": "2.3.0",
"history": "4.10.1",
"html-webpack-plugin": "5.5.0",
"http-proxy-middleware": "3.0.3",
"http-proxy-middleware": "3.0.5",
"http-status-codes": "2.3.0",
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
@@ -213,7 +213,9 @@
"eslint-plugin-simple-import-sort": "^7.0.0",
"eslint-plugin-sonarjs": "^0.12.0",
"husky": "^7.0.4",
"image-webpack-loader": "8.1.0",
"image-minimizer-webpack-plugin": "^4.0.0",
"imagemin": "^8.0.1",
"imagemin-svgo": "^10.0.1",
"is-ci": "^3.0.1",
"jest-styled-components": "^7.0.8",
"lint-staged": "^12.5.0",
@@ -230,6 +232,7 @@
"redux-mock-store": "1.5.4",
"sass": "1.66.1",
"sass-loader": "13.3.2",
"sharp": "^0.33.4",
"ts-jest": "^27.1.5",
"ts-node": "^10.2.1",
"typescript-plugin-css-modules": "5.0.1",
@@ -250,10 +253,11 @@
"xml2js": "0.5.0",
"phin": "^3.7.1",
"body-parser": "1.20.3",
"http-proxy-middleware": "3.0.3",
"http-proxy-middleware": "3.0.5",
"cross-spawn": "7.0.5",
"cookie": "^0.7.1",
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0"
"prismjs": "1.30.0",
"got": "11.8.5"
}
}

View File

@@ -14,8 +14,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -9,8 +9,8 @@
"tooltip_notification_channels": "More details on how to setting notification channels",
"sending_channels_note": "The alerts will be sent to all the configured channels.",
"loading_channels_message": "Loading Channels..",
"page_title_create": "New Notification Channels",
"page_title_edit": "Edit Notification Channels",
"page_title_create": "New Notification Channel",
"page_title_edit": "Edit Notification Channel",
"button_save_channel": "Save",
"button_test_channel": "Test",
"button_return": "Back",
@@ -62,5 +62,8 @@
"channel_test_failed": "Failed to send a test message to this channel, please confirm that the parameters are set correctly",
"channel_test_unexpected": "An unexpected error occurred while sending a message to this channel, please try again",
"webhook_url_required": "Webhook URL is mandatory",
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)"
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)",
"api_key_required": "API Key is mandatory",
"to_required": "To field is mandatory",
"channel_name_required": "Channel name is mandatory"
}

View File

@@ -7,8 +7,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -129,5 +129,6 @@
"text_num_points": "data points in each result group",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query"
"selected_query_placeholder": "Select query",
"alert_rule_not_found": "Alert Rule not found"
}

View File

@@ -9,8 +9,8 @@
"tooltip_notification_channels": "More details on how to setting notification channels",
"sending_channels_note": "The alerts will be sent to all the configured channels.",
"loading_channels_message": "Loading Channels..",
"page_title_create": "New Notification Channels",
"page_title_edit": "Edit Notification Channels",
"page_title_create": "New Notification Channel",
"page_title_edit": "Edit Notification Channel",
"button_save_channel": "Save",
"button_test_channel": "Test",
"button_return": "Back",
@@ -77,5 +77,8 @@
"channel_test_failed": "Failed to send a test message to this channel, please confirm that the parameters are set correctly",
"channel_test_unexpected": "An unexpected error occurred while sending a message to this channel, please try again",
"webhook_url_required": "Webhook URL is mandatory",
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)"
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)",
"api_key_required": "API Key is mandatory",
"to_required": "To field is mandatory",
"channel_name_required": "Channel name is mandatory"
}

View File

@@ -7,8 +7,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -3,6 +3,7 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import getAll from 'api/v1/user/get';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ORG_PREFERENCES } from 'constants/orgPreferences';
import ROUTES from 'constants/routes';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import history from 'lib/history';
@@ -14,6 +15,7 @@ import { matchPath, useLocation } from 'react-router-dom';
import { SuccessResponseV2 } from 'types/api';
import APIError from 'types/api/error';
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
import { OrgPreference } from 'types/api/preferences/preference';
import { Organization } from 'types/api/user/getOrganization';
import { UserResponse } from 'types/api/user/getUser';
import { USER_ROLES } from 'types/roles';
@@ -95,7 +97,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
usersData.data
) {
const isOnboardingComplete = orgPreferences?.find(
(preference: Record<string, any>) => preference.name === 'org_onboarding',
(preference: OrgPreference) =>
preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
)?.value;
const isFirstUser = checkFirstTimeUser();
@@ -123,7 +126,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const isRouteEnabledForWorkspaceBlockedState =
isAdmin &&
(path === ROUTES.ORG_SETTINGS ||
(path === ROUTES.SETTINGS ||
path === ROUTES.ORG_SETTINGS ||
path === ROUTES.BILLING ||
path === ROUTES.MY_SETTINGS);

View File

@@ -71,7 +71,7 @@ function App(): JSX.Element {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].displayName : '';
const { displayName, email, role } = user;
const { displayName, email, role, id, orgId } = user;
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
@@ -105,7 +105,7 @@ function App(): JSX.Element {
logEvent('Domain Identified', groupTraits, 'group');
}
if (window && window.Appcues) {
window.Appcues.identify(email, {
window.Appcues.identify(id, {
name: displayName,
tenant_id: hostNameParts[0],
@@ -131,7 +131,7 @@ function App(): JSX.Element {
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.identify(email, {
posthog?.identify(id, {
email,
name: displayName,
orgName,
@@ -143,7 +143,7 @@ function App(): JSX.Element {
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.group('company', domain, {
posthog?.group('company', orgId, {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -191,19 +191,22 @@ function App(): JSX.Element {
// if the user is on basic plan then remove billing
if (isOnBasicPlan) {
updatedRoutes = updatedRoutes.filter(
(route) => route?.path !== ROUTES.BILLING,
(route) =>
route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
);
if (isEnterpriseSelfHostedUser) {
updatedRoutes.push(LIST_LICENSES);
}
}
if (isEnterpriseSelfHostedUser) {
updatedRoutes.push(LIST_LICENSES);
}
// always add support route for cloud users
updatedRoutes = [...updatedRoutes, SUPPORT_ROUTE];
} else {
// if not a cloud user then remove billing and add list licenses route
updatedRoutes = updatedRoutes.filter(
(route) => route?.path !== ROUTES.BILLING,
(route) =>
route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
);
updatedRoutes = [...updatedRoutes, LIST_LICENSES];
}

View File

@@ -128,12 +128,7 @@ export const AlertOverview = Loadable(
);
export const CreateAlertChannelAlerts = Loadable(
() =>
import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'),
);
export const EditAlertChannelsAlerts = Loadable(
() => import(/* webpackChunkName: "Edit Channels" */ 'pages/ChannelsEdit'),
() => import(/* webpackChunkName: "Create Channels" */ 'pages/Settings'),
);
export const AllAlertChannels = Loadable(
@@ -165,7 +160,7 @@ export const APIKeys = Loadable(
);
export const MySettings = Loadable(
() => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'),
() => import(/* webpackChunkName: "All MySettings" */ 'pages/Settings'),
);
export const CustomDomainSettings = Loadable(
@@ -222,7 +217,7 @@ export const LogsIndexToFields = Loadable(
);
export const BillingPage = Loadable(
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Billing'),
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Settings'),
);
export const SupportPage = Loadable(
@@ -249,7 +244,7 @@ export const WorkspaceAccessRestricted = Loadable(
);
export const ShortcutsPage = Loadable(
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'),
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Settings'),
);
export const InstalledIntegrations = Loadable(

View File

@@ -7,20 +7,15 @@ import {
AlertOverview,
AllAlertChannels,
AllErrors,
APIKeys,
ApiMonitoring,
BillingPage,
CreateAlertChannelAlerts,
CreateNewAlerts,
CustomDomainSettings,
DashboardPage,
DashboardWidget,
EditAlertChannelsAlerts,
EditRulesPage,
ErrorDetails,
Home,
InfrastructureMonitoring,
IngestionSettings,
InstalledIntegrations,
LicensePage,
ListAllALertsPage,
@@ -31,12 +26,10 @@ import {
LogsIndexToFields,
LogsSaveViews,
MetricsExplorer,
MySettings,
NewDashboardPage,
OldLogsExplorer,
Onboarding,
OnboardingV2,
OrganizationSettings,
OrgOnboarding,
PasswordReset,
PipelinePage,
@@ -45,7 +38,6 @@ import {
ServicesTablePage,
ServiceTopLevelOperationsPage,
SettingsPage,
ShortcutsPage,
SignupPage,
SomethingWentWrong,
StatusPage,
@@ -150,7 +142,7 @@ const routes: AppRoutes[] = [
},
{
path: ROUTES.SETTINGS,
exact: true,
exact: false,
component: SettingsPage,
isPrivate: true,
key: 'SETTINGS',
@@ -260,13 +252,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'CHANNELS_NEW',
},
{
path: ROUTES.CHANNELS_EDIT,
exact: true,
component: EditAlertChannelsAlerts,
isPrivate: true,
key: 'CHANNELS_EDIT',
},
{
path: ROUTES.ALL_CHANNELS,
exact: true,
@@ -295,41 +280,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'VERSION',
},
{
path: ROUTES.ORG_SETTINGS,
exact: true,
component: OrganizationSettings,
isPrivate: true,
key: 'ORG_SETTINGS',
},
{
path: ROUTES.INGESTION_SETTINGS,
exact: true,
component: IngestionSettings,
isPrivate: true,
key: 'INGESTION_SETTINGS',
},
{
path: ROUTES.API_KEYS,
exact: true,
component: APIKeys,
isPrivate: true,
key: 'API_KEYS',
},
{
path: ROUTES.MY_SETTINGS,
exact: true,
component: MySettings,
isPrivate: true,
key: 'MY_SETTINGS',
},
{
path: ROUTES.CUSTOM_DOMAIN_SETTINGS,
exact: true,
component: CustomDomainSettings,
isPrivate: true,
key: 'CUSTOM_DOMAIN_SETTINGS',
},
{
path: ROUTES.LOGS,
exact: true,
@@ -393,13 +343,6 @@ const routes: AppRoutes[] = [
key: 'SOMETHING_WENT_WRONG',
isPrivate: false,
},
{
path: ROUTES.BILLING,
exact: true,
component: BillingPage,
key: 'BILLING',
isPrivate: true,
},
{
path: ROUTES.WORKSPACE_LOCKED,
exact: true,
@@ -421,13 +364,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'WORKSPACE_ACCESS_RESTRICTED',
},
{
path: ROUTES.SHORTCUTS,
exact: true,
component: ShortcutsPage,
isPrivate: true,
key: 'SHORTCUTS',
},
{
path: ROUTES.INTEGRATIONS,
exact: true,

View File

@@ -0,0 +1,29 @@
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import axios, { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
const getChangelogByVersion = async (
versionId: string,
): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> => {
try {
const response = await axios.get(`
https://cms.signoz.cloud/api/release-changelogs?filters[version][$eq]=${versionId}&populate[features][sort]=sort_order:asc&populate[features][populate][media][fields]=id,ext,url,mime,alternativeText
`);
if (!Array.isArray(response.data.data) || response.data.data.length === 0) {
throw new Error('No changelog found!');
}
return {
statusCode: 200,
error: null,
message: response.statusText,
payload: response.data.data[0],
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default getChangelogByVersion;

View File

@@ -119,6 +119,7 @@ export const updateFunnelSteps = async (
export interface ValidateFunnelPayload {
start_time: number;
end_time: number;
steps: FunnelStepData[];
}
export interface ValidateFunnelResponse {
@@ -132,12 +133,11 @@ export interface ValidateFunnelResponse {
}
export const validateFunnelSteps = async (
funnelId: string,
payload: ValidateFunnelPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<ValidateFunnelResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/validate`,
`${FUNNELS_BASE_PATH}/analytics/validate`,
payload,
{ signal },
);
@@ -185,6 +185,7 @@ export interface FunnelOverviewPayload {
end_time: number;
step_start?: number;
step_end?: number;
steps: FunnelStepData[];
}
export interface FunnelOverviewResponse {
@@ -202,12 +203,11 @@ export interface FunnelOverviewResponse {
}
export const getFunnelOverview = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelOverviewResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/overview`,
`${FUNNELS_BASE_PATH}/analytics/overview`,
payload,
{
signal,
@@ -235,12 +235,11 @@ export interface SlowTraceData {
}
export const getFunnelSlowTraces = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<SlowTraceData> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/slow-traces`,
`${FUNNELS_BASE_PATH}/analytics/slow-traces`,
payload,
{
signal,
@@ -273,7 +272,7 @@ export const getFunnelErrorTraces = async (
signal?: AbortSignal,
): Promise<SuccessResponse<ErrorTraceData> | ErrorResponse> => {
const response: AxiosResponse = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/error-traces`,
`${FUNNELS_BASE_PATH}/analytics/error-traces`,
payload,
{
signal,
@@ -291,6 +290,7 @@ export const getFunnelErrorTraces = async (
export interface FunnelStepsPayload {
start_time: number;
end_time: number;
steps: FunnelStepData[];
}
export interface FunnelStepGraphMetrics {
@@ -307,12 +307,11 @@ export interface FunnelStepsResponse {
}
export const getFunnelSteps = async (
funnelId: string,
payload: FunnelStepsPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelStepsResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps`,
`${FUNNELS_BASE_PATH}/analytics/steps`,
payload,
{ signal },
);
@@ -330,6 +329,7 @@ export interface FunnelStepsOverviewPayload {
end_time: number;
step_start?: number;
step_end?: number;
steps: FunnelStepData[];
}
export interface FunnelStepsOverviewResponse {
@@ -341,12 +341,11 @@ export interface FunnelStepsOverviewResponse {
}
export const getFunnelStepsOverview = async (
funnelId: string,
payload: FunnelStepsOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelStepsOverviewResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps/overview`,
`${FUNNELS_BASE_PATH}/analytics/steps/overview`,
payload,
{ signal },
);

View File

@@ -1,21 +0,0 @@
import axios from 'api';
import { Props } from 'types/api/userFeedback/sendResponse';
const sendFeedback = async (props: Props): Promise<number> => {
const response = await axios.post(
'/feedback',
{
email: props.email,
message: props.message,
},
{
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
},
},
);
return response.status;
};
export default sendFeedback;

View File

@@ -0,0 +1,161 @@
.changelog-modal {
.ant-modal-content {
padding: unset;
background-color: var(--bg-ink-400, #121317);
.ant-modal-header {
margin-bottom: unset;
}
.ant-modal-footer {
margin-top: unset;
}
}
&-title {
display: flex;
align-items: center;
gap: 8px;
background-color: var(--bg-ink-400, #121317);
padding: 16px;
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-100, #fff);
border-bottom: 1px solid var(--bg-slate-500, #161922);
}
&-footer.scroll-available {
.scroll-btn-container {
display: block;
}
}
&-footer {
position: relative;
border: 1px solid var(--bg-slate-500, #161922);
padding: 12px;
display: flex;
align-items: center;
justify-content: space-between;
&-label {
color: var(--text-robin-400, #7190f9);
font-size: 14px;
line-height: 24px;
position: relative;
padding-left: 14px;
&::before {
content: '';
position: absolute;
top: 50%;
left: 0;
transform: translateY(-50%);
width: 6px;
height: 6px;
border-radius: 100%;
background-color: var(--bg-robin-500, #7190f9);
}
}
&-ctas {
display: flex;
& svg {
font-size: 14px;
}
}
.scroll-btn-container {
display: none;
position: absolute;
top: -40px;
left: 50%;
transform: translateX(-50%);
.scroll-btn {
all: unset;
padding: 4px 12px 4px 10px;
background-color: var(--bg-slate-400, #1d212d);
border-radius: 20px;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
gap: 4px;
transition: background-color 0.1s;
&:hover {
background-color: var(--bg-slate-200, #2c3140);
}
&:active {
background-color: var(--bg-slate-600, #1c1f2a);
}
span {
font-size: 12px;
line-height: 18px;
color: var(--text-vanilla-400, #c0c1c3);
}
// add animation to the chevrons down icon
svg {
animation: pulse 1s infinite;
}
}
}
}
&-content {
max-height: calc(100vh - 300px);
overflow-y: auto;
padding: 16px;
border: 1px solid var(--bg-slate-500, #161922);
border-top-width: 0;
border-bottom-width: 0;
}
}
// pulse for the scroll for more icon
@keyframes pulse {
0% {
opacity: 1;
}
50% {
opacity: 0.5;
}
}
.lightMode {
.changelog-modal {
.ant-modal-content {
background-color: var(--bg-vanilla-100);
border-color: var(--bg-vanilla-300);
}
&-title {
background: var(--bg-vanilla-100);
color: var(--bg-ink-500);
border-color: var(--bg-vanilla-300);
}
&-content {
border-color: var(--bg-vanilla-300);
}
&-footer {
border-color: var(--bg-vanilla-300);
.scroll-btn-container {
.scroll-btn {
background-color: var(--bg-vanilla-300);
span {
color: var(--text-ink-500);
}
}
}
}
}
}

View File

@@ -0,0 +1,131 @@
import './ChangelogModal.styles.scss';
import { CheckOutlined, CloseOutlined } from '@ant-design/icons';
import { Button, Modal } from 'antd';
import cx from 'classnames';
import dayjs from 'dayjs';
import { ChevronsDown, ScrollText } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useCallback, useEffect, useRef, useState } from 'react';
import ChangelogRenderer from './components/ChangelogRenderer';
interface Props {
onClose: () => void;
}
function ChangelogModal({ onClose }: Props): JSX.Element {
const [hasScroll, setHasScroll] = useState(false);
const changelogContentSectionRef = useRef<HTMLDivElement>(null);
const { changelog } = useAppContext();
const formattedReleaseDate = dayjs(changelog?.release_date).format(
'MMMM D, YYYY',
);
const checkScroll = useCallback((): void => {
if (changelogContentSectionRef.current) {
const {
scrollHeight,
clientHeight,
scrollTop,
} = changelogContentSectionRef.current;
const isAtBottom = scrollHeight - clientHeight - scrollTop <= 8;
setHasScroll(scrollHeight > clientHeight + 24 && !isAtBottom); // 24px buffer height before showing the "Scroll for more" hint
}
}, []);
useEffect(() => {
checkScroll();
const changelogContentSection = changelogContentSectionRef.current;
if (changelogContentSection) {
changelogContentSection.addEventListener('scroll', checkScroll);
}
return (): void => {
if (changelogContentSection) {
changelogContentSection.removeEventListener('scroll', checkScroll);
}
};
}, [checkScroll]);
const onClickUpdateWorkspace = (): void => {
window.open(
'https://github.com/SigNoz/signoz/releases',
'_blank',
'noopener,noreferrer',
);
};
const onClickScrollForMore = (): void => {
if (changelogContentSectionRef.current) {
changelogContentSectionRef.current.scrollTo({
top: changelogContentSectionRef.current.scrollTop + 600, // Scroll 600px from the current position
behavior: 'smooth',
});
}
};
return (
<Modal
className={cx('changelog-modal')}
title={
<div className="changelog-modal-title">
<ScrollText size={16} />
Whats New ⎯ Changelog : {formattedReleaseDate}
</div>
}
width={820}
open
onCancel={onClose}
footer={
<div
className={cx('changelog-modal-footer', hasScroll && 'scroll-available')}
>
{changelog?.features && changelog.features.length > 0 && (
<span className="changelog-modal-footer-label">
{changelog.features.length} new&nbsp;
{changelog.features.length > 1 ? 'features' : 'feature'}
</span>
)}
<div className="changelog-modal-footer-ctas">
<Button type="default" icon={<CloseOutlined />} onClick={onClose}>
Skip for now
</Button>
<Button
type="primary"
icon={<CheckOutlined />}
onClick={onClickUpdateWorkspace}
>
Update my workspace
</Button>
</div>
{changelog && (
<div className="scroll-btn-container">
<button
data-testid="scroll-more-btn"
type="button"
className="scroll-btn"
onClick={onClickScrollForMore}
>
<ChevronsDown size={14} />
<span>Scroll for more</span>
</button>
</div>
)}
</div>
}
>
<div
className="changelog-modal-content"
data-testid="changelog-content"
ref={changelogContentSectionRef}
>
{changelog && <ChangelogRenderer changelog={changelog} />}
</div>
</Modal>
);
}
export default ChangelogModal;

View File

@@ -0,0 +1,79 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable sonarjs/no-identical-functions */
/* eslint-disable @typescript-eslint/explicit-function-return-type */
import { fireEvent, render, screen } from '@testing-library/react';
import ChangelogModal from '../ChangelogModal';
const mockChangelog = {
release_date: '2025-06-10',
features: [
{
id: 1,
title: 'Feature 1',
description: 'Description for feature 1',
media: null,
},
],
bug_fixes: 'Bug fix details',
maintenance: 'Maintenance details',
};
// Mock react-markdown to just render children as plain text
jest.mock(
'react-markdown',
() =>
function ReactMarkdown({ children }: any) {
return <div>{children}</div>;
},
);
// mock useAppContext
jest.mock('providers/App/App', () => ({
useAppContext: jest.fn(() => ({ changelog: mockChangelog })),
}));
describe('ChangelogModal', () => {
it('renders modal with changelog data', () => {
render(<ChangelogModal onClose={jest.fn()} />);
expect(
screen.getByText('Whats New ⎯ Changelog : June 10, 2025'),
).toBeInTheDocument();
expect(screen.getByText('Feature 1')).toBeInTheDocument();
expect(screen.getByText('Description for feature 1')).toBeInTheDocument();
expect(screen.getByText('Bug fix details')).toBeInTheDocument();
expect(screen.getByText('Maintenance details')).toBeInTheDocument();
});
it('calls onClose when Skip for now is clicked', () => {
const onClose = jest.fn();
render(<ChangelogModal onClose={onClose} />);
fireEvent.click(screen.getByText('Skip for now'));
expect(onClose).toHaveBeenCalled();
});
it('opens migration docs when Update my workspace is clicked', () => {
window.open = jest.fn();
render(<ChangelogModal onClose={jest.fn()} />);
fireEvent.click(screen.getByText('Update my workspace'));
expect(window.open).toHaveBeenCalledWith(
'https://github.com/SigNoz/signoz/releases',
'_blank',
'noopener,noreferrer',
);
});
it('scrolls for more when Scroll for more is clicked', () => {
render(<ChangelogModal onClose={jest.fn()} />);
const scrollBtn = screen.getByTestId('scroll-more-btn');
const contentDiv = screen.getByTestId('changelog-content');
if (contentDiv) {
contentDiv.scrollTo = jest.fn();
}
fireEvent.click(scrollBtn);
if (contentDiv) {
expect(contentDiv.scrollTo).toHaveBeenCalled();
}
});
});

View File

@@ -0,0 +1,63 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable sonarjs/no-identical-functions */
/* eslint-disable @typescript-eslint/explicit-function-return-type */
import { render, screen } from '@testing-library/react';
import ChangelogRenderer from '../components/ChangelogRenderer';
// Mock react-markdown to just render children as plain text
jest.mock(
'react-markdown',
() =>
function ReactMarkdown({ children }: any) {
return <div>{children}</div>;
},
);
const mockChangelog = {
id: 1,
documentId: 'changelog-doc-1',
version: '1.0.0',
createdAt: '2025-06-09T12:00:00Z',
release_date: '2025-06-10',
features: [
{
id: 1,
documentId: '1',
title: 'Feature 1',
description: 'Description for feature 1',
sort_order: 1,
createdAt: '',
updatedAt: '',
publishedAt: '',
deployment_type: 'All',
media: {
id: 1,
documentId: 'doc1',
ext: '.webp',
url: '/uploads/feature1.webp',
mime: 'image/webp',
alternativeText: null,
},
},
],
bug_fixes: 'Bug fix details',
updatedAt: '2025-06-09T12:00:00Z',
publishedAt: '2025-06-09T12:00:00Z',
maintenance: 'Maintenance details',
};
describe('ChangelogRenderer', () => {
it('renders release date', () => {
render(<ChangelogRenderer changelog={mockChangelog} />);
expect(screen.getByText('June 10, 2025')).toBeInTheDocument();
});
it('renders features, media, and description', () => {
render(<ChangelogRenderer changelog={mockChangelog} />);
expect(screen.getByText('Feature 1')).toBeInTheDocument();
expect(screen.getByAltText('Media')).toBeInTheDocument();
expect(screen.getByText('Description for feature 1')).toBeInTheDocument();
});
});

View File

@@ -0,0 +1,141 @@
.changelog-renderer {
position: relative;
padding-left: 20px;
.changelog-release-date {
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-400, #c0c1c3);
}
&-list {
display: flex;
flex-direction: column;
gap: 28px;
}
&-line {
position: absolute;
left: 0;
top: 6px;
bottom: -30px;
width: 1px;
background-color: var(--bg-slate-400, #1d212d);
.inner-ball {
position: absolute;
left: 50%;
width: 6px;
height: 6px;
border-radius: 100%;
transform: translateX(-50%);
background-color: var(--bg-robin-500, #7190f9);
}
}
ul,
ol {
list-style: none;
display: flex;
flex-direction: column;
gap: 16px;
padding-left: 30px;
li {
position: relative;
&::before {
content: '';
position: absolute;
left: -10px;
top: 10px;
width: 20px;
height: 2px;
background-color: var(--bg-robin-500, #7190f9);
transform: translate(-100%, -50%);
}
}
}
li,
p {
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-400, #c0c1c3);
}
code {
padding: 2px 4px;
background-color: var(--bg-slate-500, #161922);
border-radius: 6px;
font-size: 95%;
vertical-align: middle;
border: 1px solid var(--bg-slate-600, #1c1f2a);
}
a {
color: var(--text-robin-500, #7190f9);
font-weight: 600;
text-decoration: underline;
&:hover {
text-decoration: none;
}
}
h1,
h2,
h3,
h4,
h5,
h6 {
font-weight: 600;
color: var(--text-vanilla-100, #fff);
}
h1 {
font-size: 24px;
line-height: 32px;
}
h2 {
font-size: 20px;
line-height: 28px;
}
.changelog-media-image,
.changelog-media-video {
height: auto;
width: 100%;
overflow: hidden;
border-radius: 4px;
border: 1px solid var(--bg-slate-400, #1d212d);
}
.changelog-media-video {
margin: 12px 0;
}
}
.lightMode {
.changelog-renderer {
.changelog-release-date {
color: var(--text-ink-500);
}
&-line {
background-color: var(--bg-vanilla-300);
}
li,
p {
color: var(--text-ink-500);
}
h1,
h2,
h3,
h4,
h5,
h6 {
color: var(--text-ink-500);
}
}
}

View File

@@ -0,0 +1,89 @@
import './ChangelogRenderer.styles.scss';
import dayjs from 'dayjs';
import ReactMarkdown from 'react-markdown';
import {
ChangelogSchema,
Media,
SupportedImageTypes,
SupportedVideoTypes,
} from 'types/api/changelog/getChangelogByVersion';
interface Props {
changelog: ChangelogSchema;
}
function renderMedia(media: Media): JSX.Element | null {
if (SupportedImageTypes.includes(media.ext)) {
return (
<img
src={media.url}
alt={media.alternativeText || 'Media'}
width={800}
height={450}
className="changelog-media-image"
/>
);
}
if (SupportedVideoTypes.includes(media.ext)) {
return (
<video
autoPlay
controls
controlsList="nodownload noplaybackrate"
loop
className="changelog-media-video"
>
<source src={media.url} type={media.mime} />
<track kind="captions" src="" label="No captions available" default />
Your browser does not support the video tag.
</video>
);
}
return null;
}
function ChangelogRenderer({ changelog }: Props): JSX.Element {
const formattedReleaseDate = dayjs(changelog.release_date).format(
'MMMM D, YYYY',
);
return (
<div className="changelog-renderer">
<div className="changelog-renderer-line">
<div className="inner-ball" />
</div>
<span className="changelog-release-date">{formattedReleaseDate}</span>
{changelog.features && changelog.features.length > 0 && (
<div className="changelog-renderer-list">
{changelog.features.map((feature) => (
<div key={feature.id}>
<h2>{feature.title}</h2>
{feature.media && renderMedia(feature.media)}
<ReactMarkdown>{feature.description}</ReactMarkdown>
</div>
))}
</div>
)}
{changelog.bug_fixes && changelog.bug_fixes.length > 0 && (
<div>
<h2>Bug Fixes</h2>
{changelog.bug_fixes && (
<ReactMarkdown>{changelog.bug_fixes}</ReactMarkdown>
)}
</div>
)}
{changelog.maintenance && changelog.maintenance.length > 0 && (
<div>
<h2>Maintenance</h2>
{changelog.maintenance && (
<ReactMarkdown>{changelog.maintenance}</ReactMarkdown>
)}
</div>
)}
</div>
);
}
export default ChangelogRenderer;

View File

@@ -74,6 +74,7 @@ const formatMap = {
'MM/dd HH:mm': DATE_TIME_FORMATS.SLASH_SHORT,
'MM/DD': DATE_TIME_FORMATS.DATE_SHORT,
'YY-MM': DATE_TIME_FORMATS.YEAR_MONTH,
'MMM d, yyyy, h:mm:ss aaaa': DATE_TIME_FORMATS.DASH_DATETIME,
YY: DATE_TIME_FORMATS.YEAR_SHORT,
};

View File

@@ -194,7 +194,7 @@ function HostMetricTraces({
{!isError && traces.length > 0 && (
<div className="host-metric-traces-table">
<TraceExplorerControls
isLoading={isFetching}
isLoading={isFetching && traces.length === 0}
totalCount={totalCount}
perPageOptions={PER_PAGE_OPTIONS}
showSizeChanger={false}
@@ -203,7 +203,7 @@ function HostMetricTraces({
tableLayout="fixed"
pagination={false}
scroll={{ x: true }}
loading={isFetching}
loading={isFetching && traces.length === 0}
dataSource={traces}
columns={traceListColumns}
onRow={(): Record<string, unknown> => ({

View File

@@ -37,7 +37,7 @@ import {
ScrollText,
X,
} from 'lucide-react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { AppState } from 'store/reducers';
@@ -86,8 +86,12 @@ function HostMetricsDetails({
endTime: endMs,
}));
const lastSelectedInterval = useRef<Time | null>(null);
const [selectedInterval, setSelectedInterval] = useState<Time>(
selectedTime as Time,
lastSelectedInterval.current
? lastSelectedInterval.current
: (selectedTime as Time),
);
const [selectedView, setSelectedView] = useState<VIEWS>(
@@ -150,10 +154,11 @@ function HostMetricsDetails({
}, [initialFilters]);
useEffect(() => {
setSelectedInterval(selectedTime as Time);
const currentSelectedInterval = lastSelectedInterval.current || selectedTime;
setSelectedInterval(currentSelectedInterval as Time);
if (selectedTime !== 'custom') {
const { maxTime, minTime } = GetMinMax(selectedTime);
if (currentSelectedInterval !== 'custom') {
const { maxTime, minTime } = GetMinMax(currentSelectedInterval);
setModalTimeRange({
startTime: Math.floor(minTime / 1000000000),
@@ -181,6 +186,7 @@ function HostMetricsDetails({
const handleTimeChange = useCallback(
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
lastSelectedInterval.current = interval as Time;
setSelectedInterval(interval as Time);
if (interval === 'custom' && dateTimeRange) {
@@ -356,6 +362,7 @@ function HostMetricsDetails({
const handleClose = (): void => {
setSelectedInterval(selectedTime as Time);
lastSelectedInterval.current = null;
setSearchParams({});
if (selectedTime !== 'custom') {
@@ -430,9 +437,13 @@ function HostMetricsDetails({
>
{host.active ? 'ACTIVE' : 'INACTIVE'}
</Tag>
<Tag className="infra-monitoring-tags" bordered>
{host.os}
</Tag>
{host.os ? (
<Tag className="infra-monitoring-tags" bordered>
{host.os}
</Tag>
) : (
<Typography.Text>-</Typography.Text>
)}
<div className="progress-container">
<Progress
percent={Number((host.cpu * 100).toFixed(1))}

View File

@@ -15,11 +15,12 @@ import {
} from 'container/TopNav/DateTimeSelectionV2/config';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useMultiIntersectionObserver } from 'hooks/useMultiIntersectionObserver';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQueries, UseQueryResult } from 'react-query';
import { QueryFunctionContext, useQueries, UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
@@ -53,6 +54,11 @@ function Metrics({
featureFlags?.find((flag) => flag.name === FeatureKeys.DOT_METRICS_ENABLED)
?.active || false;
const {
visibilities,
setElement,
} = useMultiIntersectionObserver(hostWidgetInfo.length, { threshold: 0.1 });
const queryPayloads = useMemo(
() =>
getHostQueryPayload(
@@ -65,11 +71,15 @@ function Metrics({
);
const queries = useQueries(
queryPayloads.map((payload) => ({
queryPayloads.map((payload, index) => ({
queryKey: ['host-metrics', payload, ENTITY_VERSION_V4, 'HOST'],
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
enabled: !!payload,
queryFn: ({
signal,
}: QueryFunctionContext): Promise<
SuccessResponse<MetricRangePayloadProps>
> => GetMetricQueryRange(payload, ENTITY_VERSION_V4, signal),
enabled: !!payload && visibilities[index],
keepPreviousData: true,
})),
);
@@ -143,7 +153,7 @@ function Metrics({
query: UseQueryResult<SuccessResponse<MetricRangePayloadProps>, unknown>,
idx: number,
): JSX.Element => {
if (query.isLoading) {
if ((!query.data && query.isLoading) || !visibilities[idx]) {
return <Skeleton />;
}
@@ -181,7 +191,7 @@ function Metrics({
</div>
<Row gutter={24} className="host-metrics-container">
{queries.map((query, idx) => (
<Col span={12} key={hostWidgetInfo[idx].title}>
<Col ref={setElement(idx)} span={12} key={hostWidgetInfo[idx].title}>
<Typography.Text>{hostWidgetInfo[idx].title}</Typography.Text>
<Card bordered className="host-metrics-card" ref={graphRef}>
{renderCardContent(query, idx)}

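Two things happen in the Metrics hunk above: each chart's query is only enabled once its column becomes visible (via the new useMultiIntersectionObserver hook), and the react-query abort signal is forwarded to GetMetricQueryRange so off-screen requests can be cancelled. A rough sketch of what a per-index observer hook could look like, assuming only the { visibilities, setElement } shape used above (the real hooks/useMultiIntersectionObserver implementation may differ):

import { useCallback, useRef, useState } from 'react';

export function useMultiIntersectionObserver(
  count: number,
  options?: IntersectionObserverInit,
): {
  visibilities: boolean[];
  setElement: (index: number) => (node: HTMLElement | null) => void;
} {
  const [visibilities, setVisibilities] = useState<boolean[]>(
    Array(count).fill(false),
  );
  const observers = useRef<(IntersectionObserver | null)[]>([]);

  // Returns a ref callback for the element at `index`; each element gets its own observer.
  const setElement = useCallback(
    (index: number) => (node: HTMLElement | null): void => {
      observers.current[index]?.disconnect();
      if (!node) return;
      const observer = new IntersectionObserver(([entry]) => {
        setVisibilities((prev) => {
          if (prev[index] === entry.isIntersecting) return prev;
          const next = [...prev];
          next[index] = entry.isIntersecting;
          return next;
        });
      }, options);
      observer.observe(node);
      observers.current[index] = observer;
    },
    [options],
  );

  return { visibilities, setElement };
}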
View File

@@ -71,7 +71,7 @@ function LogDetail({
const [contextQuery, setContextQuery] = useState<Query | undefined>();
const [filters, setFilters] = useState<TagFilter | null>(null);
const [isEdit, setIsEdit] = useState<boolean>(false);
const { initialDataSource, stagedQuery } = useQueryBuilder();
const { stagedQuery } = useQueryBuilder();
const listQuery = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
@@ -81,7 +81,7 @@ function LogDetail({
const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: initialDataSource || DataSource.LOGS,
dataSource: DataSource.LOGS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
});

View File

@@ -5,17 +5,19 @@ import cx from 'classnames';
import { OPERATORS } from 'constants/queryBuilder';
import { FontSize } from 'container/OptionsMenu/types';
import { memo, MouseEvent, ReactNode, useMemo } from 'react';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
function AddToQueryHOC({
fieldKey,
fieldValue,
onAddToQuery,
fontSize,
dataType = DataTypes.EMPTY,
children,
}: AddToQueryHOCProps): JSX.Element {
const handleQueryAdd = (event: MouseEvent<HTMLDivElement>): void => {
event.stopPropagation();
onAddToQuery(fieldKey, fieldValue, OPERATORS['=']);
onAddToQuery(fieldKey, fieldValue, OPERATORS['='], undefined, dataType);
};
const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
@@ -35,9 +37,20 @@ function AddToQueryHOC({
export interface AddToQueryHOCProps {
fieldKey: string;
fieldValue: string;
onAddToQuery: (fieldKey: string, fieldValue: string, operator: string) => void;
onAddToQuery: (
fieldKey: string,
fieldValue: string,
operator: string,
isJSON?: boolean,
dataType?: DataTypes,
) => void;
fontSize: FontSize;
dataType?: DataTypes;
children: ReactNode;
}
AddToQueryHOC.defaultProps = {
dataType: DataTypes.EMPTY,
};
export default memo(AddToQueryHOC);

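With the widened callback signature above, callers can thread a data type through to the query builder alongside the field and operator. A hedged usage sketch — the field values and the DataTypes.String member are illustrative assumptions; only DataTypes.EMPTY appears in the diff:

import { OPERATORS } from 'constants/queryBuilder';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';

const onAddToQuery = (
  fieldKey: string,
  fieldValue: string,
  operator: string,
  isJSON?: boolean,
  dataType?: DataTypes,
): void => {
  // dataType now travels with the filter, so the builder can type it correctly.
  console.log(fieldKey, fieldValue, operator, isJSON, dataType);
};

// Existing call sites keep working; new ones may pass the extra arguments.
onAddToQuery('service.name', 'frontend', OPERATORS['='], undefined, DataTypes.String);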
View File

@@ -20,6 +20,7 @@ export function getDefaultCellStyle(isDarkMode?: boolean): CSSProperties {
export const defaultTableStyle: CSSProperties = {
minWidth: '40rem',
maxWidth: '60rem',
};
export const defaultListViewPanelStyle: CSSProperties = {

View File

@@ -1,7 +1,17 @@
import { Tabs, TabsProps } from 'antd';
import {
generatePath,
matchPath,
useLocation,
useParams,
} from 'react-router-dom';
import { RouteTabProps } from './types';
interface Params {
[key: string]: string;
}
function RouteTab({
routes,
activeKey,
@@ -9,6 +19,18 @@ function RouteTab({
history,
...rest
}: RouteTabProps & TabsProps): JSX.Element {
const params = useParams<Params>();
const location = useLocation();
// Find the matching route for the current pathname
const currentRoute = routes.find((route) => {
const routePath = route.route.split('?')[0];
return matchPath(location.pathname, {
path: routePath,
exact: true,
});
});
const onChange = (activeRoute: string): void => {
if (onChangeHandler) {
onChangeHandler(activeRoute);
@@ -17,7 +39,8 @@ function RouteTab({
const selectedRoute = routes.find((e) => e.key === activeRoute);
if (selectedRoute) {
history.push(selectedRoute.route);
const resolvedRoute = generatePath(selectedRoute.route, params);
history.push(resolvedRoute);
}
};
@@ -32,8 +55,8 @@ function RouteTab({
<Tabs
onChange={onChange}
destroyInactiveTabPane
activeKey={activeKey}
defaultActiveKey={activeKey}
activeKey={currentRoute?.key || activeKey}
defaultActiveKey={currentRoute?.key || activeKey}
animated
items={items}
// eslint-disable-next-line react/jsx-props-no-spreading

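The RouteTab change above boils down to two react-router-dom v5-style helpers: generatePath fills the current URL params into a tab's route template before history.push, and matchPath picks the tab whose template matches the current pathname. A small illustration (the channelId value is made up):

import { generatePath, matchPath } from 'react-router-dom';

const route = '/settings/channels/edit/:channelId';

// Substitute the current URL params into the tab's route template.
const resolved = generatePath(route, { channelId: '42' });
// resolved === '/settings/channels/edit/42'

// Find the tab whose template matches the current pathname (v5 object signature).
const match = matchPath('/settings/channels/edit/42', { path: route, exact: true });
// match is non-null here, so this tab would become the active key.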
View File

@@ -30,5 +30,5 @@ export enum LOCALSTORAGE {
SHOW_EXCEPTIONS_QUICK_FILTERS = 'SHOW_EXCEPTIONS_QUICK_FILTERS',
BANNER_DISMISSED = 'BANNER_DISMISSED',
QUICK_FILTERS_SETTINGS_ANNOUNCEMENT = 'QUICK_FILTERS_SETTINGS_ANNOUNCEMENT',
UNEXECUTED_FUNNELS = 'UNEXECUTED_FUNNELS',
FUNNEL_STEPS = 'FUNNEL_STEPS',
}

View File

@@ -0,0 +1,18 @@
export const ORG_PREFERENCES = {
ORG_ONBOARDING: 'org_onboarding',
WELCOME_CHECKLIST_DO_LATER: 'welcome_checklist_do_later',
WELCOME_CHECKLIST_SEND_LOGS_SKIPPED: 'welcome_checklist_send_logs_skipped',
WELCOME_CHECKLIST_SEND_TRACES_SKIPPED: 'welcome_checklist_send_traces_skipped',
WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED:
'welcome_checklist_setup_alerts_skipped',
WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED:
'welcome_checklist_setup_saved_view_skipped',
WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED:
'welcome_checklist_send_infra_metrics_skipped',
WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED:
'welcome_checklist_setup_dashboards_skipped',
WELCOME_CHECKLIST_SETUP_WORKSPACE_SKIPPED:
'welcome_checklist_setup_workspace_skipped',
WELCOME_CHECKLIST_ADD_DATA_SOURCE_SKIPPED:
'welcome_checklist_add_data_source_skipped',
};

View File

@@ -46,4 +46,5 @@ export enum QueryParams {
msgSystem = 'msgSystem',
destination = 'destination',
kindString = 'kindString',
tab = 'tab',
}

View File

@@ -29,12 +29,12 @@ const ROUTES = {
ALERT_OVERVIEW: '/alerts/overview',
ALL_CHANNELS: '/settings/channels',
CHANNELS_NEW: '/settings/channels/new',
CHANNELS_EDIT: '/settings/channels/:id',
CHANNELS_EDIT: '/settings/channels/edit/:channelId',
ALL_ERROR: '/exceptions',
ERROR_DETAIL: '/error-detail',
VERSION: '/status',
MY_SETTINGS: '/my-settings',
SETTINGS: '/settings',
MY_SETTINGS: '/settings/my-settings',
ORG_SETTINGS: '/settings/org-settings',
CUSTOM_DOMAIN_SETTINGS: '/settings/custom-domain-settings',
API_KEYS: '/settings/api-keys',
@@ -52,7 +52,7 @@ const ROUTES = {
LIST_LICENSES: '/licenses',
LOGS_INDEX_FIELDS: '/logs-explorer/index-fields',
TRACE_EXPLORER: '/trace-explorer',
BILLING: '/billing',
BILLING: '/settings/billing',
SUPPORT: '/support',
LOGS_SAVE_VIEWS: '/logs/saved-views',
TRACES_SAVE_VIEWS: '/traces/saved-views',
@@ -60,10 +60,12 @@ const ROUTES = {
TRACES_FUNNELS_DETAIL: '/traces/funnels/:funnelId',
WORKSPACE_LOCKED: '/workspace-locked',
WORKSPACE_SUSPENDED: '/workspace-suspended',
SHORTCUTS: '/shortcuts',
SHORTCUTS: '/settings/shortcuts',
INTEGRATIONS: '/integrations',
MESSAGING_QUEUES_BASE: '/messaging-queues',
MESSAGING_QUEUES_KAFKA: '/messaging-queues/kafka',
MESSAGING_QUEUES_KAFKA_DETAIL: '/messaging-queues/kafka/detail',
INFRASTRUCTURE_MONITORING_BASE: '/infrastructure-monitoring',
INFRASTRUCTURE_MONITORING_HOSTS: '/infrastructure-monitoring/hosts',
INFRASTRUCTURE_MONITORING_KUBERNETES: '/infrastructure-monitoring/kubernetes',
MESSAGING_QUEUES_CELERY_TASK: '/messaging-queues/celery-task',
@@ -71,6 +73,7 @@ const ROUTES = {
METRICS_EXPLORER: '/metrics-explorer/summary',
METRICS_EXPLORER_EXPLORER: '/metrics-explorer/explorer',
METRICS_EXPLORER_VIEWS: '/metrics-explorer/views',
API_MONITORING_BASE: '/api-monitoring',
API_MONITORING: '/api-monitoring/explorer',
METRICS_EXPLORER_BASE: '/metrics-explorer',
WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',

View File

@@ -0,0 +1,4 @@
export const USER_PREFERENCES = {
SIDENAV_PINNED: 'sidenav_pinned',
NAV_SHORTCUTS: 'nav_shortcuts',
};

View File

@@ -21,9 +21,9 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
const [action] = useComponentPermission(['new_alert_action'], user.role);
const onClickEditHandler = useCallback((id: string) => {
history.replace(
history.push(
generatePath(ROUTES.CHANNELS_EDIT, {
id,
channelId: id,
}),
);
}, []);

View File

@@ -0,0 +1,4 @@
.alert-channels-container {
width: 90%;
margin: 12px auto;
}

View File

@@ -120,14 +120,19 @@ describe('Create Alert Channel', () => {
expect(screen.getByText('button_test_channel')).toBeInTheDocument();
expect(screen.getByText('button_return')).toBeInTheDocument();
});
it('Should check if saving the form without filling the name displays "Something went wrong"', async () => {
it('Should check if saving the form without filling the name displays error notification', async () => {
const saveButton = screen.getByRole('button', {
name: 'button_save_channel',
});
fireEvent.click(saveButton);
await waitFor(() => expect(showErrorModal).toHaveBeenCalled());
await waitFor(() =>
expect(errorNotification).toHaveBeenCalledWith({
message: 'Error',
description: 'channel_name_required',
}),
);
});
it('Should check if clicking on Test button shows "An alert has been sent to this channel" success message if testing passes', async () => {
server.use(

View File

@@ -1,3 +1,5 @@
import './AllAlertChannels.styles.scss';
import { PlusOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import getAll from 'api/channels/getAll';
@@ -56,7 +58,7 @@ function AlertChannels(): JSX.Element {
}
return (
<>
<div className="alert-channels-container">
<ButtonContainer>
<Paragraph ellipsis type="secondary">
{t('sending_channels_note')}
@@ -87,7 +89,7 @@ function AlertChannels(): JSX.Element {
</ButtonContainer>
<AlertChannelsComponent allChannels={data?.data || []} />
</>
</div>
);
}

View File

@@ -1,8 +1,37 @@
// Earlier we used the app-banner-container class;
// we changed it to app-banner-wrapper because ad blockers were blocking the app-banner-container class.
// Keep an eye on which class names are used in the codebase.
.app-banner-wrapper {
position: relative;
width: 100%;
}
.app-layout {
position: relative;
height: 100%;
width: 100%;
&.isWorkspaceRestricted {
height: calc(100% - 32px);
// same styles, since it's either trial expired or payment failed
&.isTrialExpired {
height: calc(100% - 64px);
}
&.isPaymentFailed {
height: calc(100% - 64px);
}
}
&.isTrialExpired {
height: calc(100% - 32px);
}
&.isPaymentFailed {
height: calc(100% - 32px);
}
.app-content {
width: calc(100% - 64px); // width of the sidebar
z-index: 0;
@@ -22,6 +51,12 @@
width: 100%;
}
}
&.side-nav-pinned {
.app-content {
width: calc(100% - 240px);
}
}
}
.chat-support-gateway {
@@ -157,3 +192,9 @@
text-underline-offset: 2px;
}
}
.workspace-restricted-banner,
.trial-expiry-banner,
.payment-failed-banner {
height: 32px;
}

View File

@@ -7,6 +7,7 @@ import * as Sentry from '@sentry/react';
import { Flex } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getChangelogByVersion from 'api/changelog/getChangelogByVersion';
import logEvent from 'api/common/logEvent';
import manageCreditCardApi from 'api/v1/portal/create';
import getUserLatestVersion from 'api/v1/version/getLatestVersion';
@@ -18,6 +19,7 @@ import { Events } from 'constants/events';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import { USER_PREFERENCES } from 'constants/userPreferences';
import SideNav from 'container/SideNav';
import TopNav from 'container/TopNav';
import dayjs from 'dayjs';
@@ -27,7 +29,6 @@ import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { isNull } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { INTEGRATION_TYPES } from 'pages/Integrations/utils';
import { useAppContext } from 'providers/App/App';
import {
ReactNode,
@@ -40,9 +41,10 @@ import {
import { Helmet } from 'react-helmet-async';
import { useTranslation } from 'react-i18next';
import { useMutation, useQueries } from 'react-query';
import { useDispatch } from 'react-redux';
import { matchPath, useLocation } from 'react-router-dom';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import {
UPDATE_CURRENT_ERROR,
@@ -50,14 +52,16 @@ import {
UPDATE_LATEST_VERSION,
UPDATE_LATEST_VERSION_ERROR,
} from 'types/actions/app';
import { SuccessResponseV2 } from 'types/api';
import { ErrorResponse, SuccessResponse, SuccessResponseV2 } from 'types/api';
import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';
import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
import APIError from 'types/api/error';
import {
LicenseEvent,
LicensePlatform,
LicenseState,
} from 'types/api/licensesV3/getActive';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import { eventEmitter } from 'utils/getEventEmitter';
import {
@@ -80,6 +84,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
featureFlags,
isFetchingFeatureFlags,
featureFlagsFetchError,
userPreferences,
updateChangelog,
} = useAppContext();
const { notifications } = useNotifications();
@@ -92,6 +98,10 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
const [showSlowApiWarning, setShowSlowApiWarning] = useState(false);
const [slowApiWarningShown, setSlowApiWarningShown] = useState(false);
const { latestVersion } = useSelector<AppState, AppReducer>(
(state) => state.app,
);
const handleBillingOnSuccess = (
data: SuccessResponseV2<CheckoutSuccessPayloadProps>,
): void => {
@@ -128,7 +138,11 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
const { isCloudUser: isCloudUserVal } = useGetTenantLicense();
const [getUserVersionResponse, getUserLatestVersionResponse] = useQueries([
const [
getUserVersionResponse,
getUserLatestVersionResponse,
getChangelogByVersionResponse,
] = useQueries([
{
queryFn: getUserVersion,
queryKey: ['getUserVersion', user?.accessJwt],
@@ -139,6 +153,12 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
queryKey: ['getUserLatestVersion', user?.accessJwt],
enabled: isLoggedIn,
},
{
queryFn: (): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> =>
getChangelogByVersion(latestVersion),
queryKey: ['getChangelogByVersion', latestVersion],
enabled: isLoggedIn && !isCloudUserVal && Boolean(latestVersion),
},
]);
useEffect(() => {
@@ -197,7 +217,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
if (
getUserVersionResponse.isFetched &&
getUserLatestVersionResponse.isSuccess &&
getUserVersionResponse.isSuccess &&
getUserVersionResponse.data &&
getUserVersionResponse.data.payload
) {
@@ -235,12 +255,31 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
getUserVersionResponse.isLoading,
getUserVersionResponse.isError,
getUserVersionResponse.data,
getUserVersionResponse.isSuccess,
getUserLatestVersionResponse.isFetched,
getUserVersionResponse.isFetched,
getUserLatestVersionResponse.isSuccess,
notifications,
]);
useEffect(() => {
if (
getChangelogByVersionResponse.isFetched &&
getChangelogByVersionResponse.isSuccess &&
getChangelogByVersionResponse.data &&
getChangelogByVersionResponse.data.payload
) {
updateChangelog(getChangelogByVersionResponse.data.payload);
}
}, [
updateChangelog,
getChangelogByVersionResponse.isFetched,
getChangelogByVersionResponse.isLoading,
getChangelogByVersionResponse.isError,
getChangelogByVersionResponse.data,
getChangelogByVersionResponse.isSuccess,
]);
const isToDisplayLayout = isLoggedIn;
const routeKey = useMemo(() => getRouteKey(pathname), [pathname]);
@@ -330,53 +369,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
});
}, [manageCreditCard]);
const isHome = (): boolean => routeKey === 'HOME';
const isLogsView = (): boolean =>
routeKey === 'LOGS' ||
routeKey === 'LOGS_EXPLORER' ||
routeKey === 'LOGS_PIPELINES' ||
routeKey === 'LOGS_SAVE_VIEWS';
const isApiMonitoringView = (): boolean => routeKey === 'API_MONITORING';
const isExceptionsView = (): boolean => routeKey === 'ALL_ERROR';
const isTracesView = (): boolean =>
routeKey === 'TRACES_EXPLORER' || routeKey === 'TRACES_SAVE_VIEWS';
const isMessagingQueues = (): boolean =>
routeKey === 'MESSAGING_QUEUES_KAFKA' ||
routeKey === 'MESSAGING_QUEUES_KAFKA_DETAIL' ||
routeKey === 'MESSAGING_QUEUES_CELERY_TASK' ||
routeKey === 'MESSAGING_QUEUES_OVERVIEW';
const isCloudIntegrationPage = (): boolean =>
routeKey === 'INTEGRATIONS' &&
new URLSearchParams(window.location.search).get('integration') ===
INTEGRATION_TYPES.AWS_INTEGRATION;
const isDashboardListView = (): boolean => routeKey === 'ALL_DASHBOARD';
const isAlertHistory = (): boolean => routeKey === 'ALERT_HISTORY';
const isAlertOverview = (): boolean => routeKey === 'ALERT_OVERVIEW';
const isInfraMonitoring = (): boolean =>
routeKey === 'INFRASTRUCTURE_MONITORING_HOSTS' ||
routeKey === 'INFRASTRUCTURE_MONITORING_KUBERNETES';
const isTracesFunnels = (): boolean => routeKey === 'TRACES_FUNNELS';
const isTracesFunnelDetails = (): boolean =>
!!matchPath(pathname, ROUTES.TRACES_FUNNELS_DETAIL);
const isPathMatch = (regex: RegExp): boolean => regex.test(pathname);
const isDashboardView = (): boolean =>
isPathMatch(/^\/dashboard\/[a-zA-Z0-9_-]+$/);
const isDashboardWidgetView = (): boolean =>
isPathMatch(/^\/dashboard\/[a-zA-Z0-9_-]+\/new$/);
const isTraceDetailsView = (): boolean =>
isPathMatch(/^\/trace\/[a-zA-Z0-9]+(\?.*)?$/);
useEffect(() => {
if (isDarkMode) {
document.body.classList.remove('lightMode');
@@ -593,61 +585,84 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
</div>
);
const sideNavPinned = userPreferences?.find(
(preference) => preference.name === USER_PREFERENCES.SIDENAV_PINNED,
)?.value as boolean;
const SHOW_TRIAL_EXPIRY_BANNER =
showTrialExpiryBanner && !showPaymentFailedWarning;
const SHOW_WORKSPACE_RESTRICTED_BANNER = showWorkspaceRestricted;
const SHOW_PAYMENT_FAILED_BANNER =
!showTrialExpiryBanner && showPaymentFailedWarning;
return (
<Layout className={cx(isDarkMode ? 'darkMode dark' : 'lightMode')}>
<Helmet>
<title>{pageTitle}</title>
</Helmet>
{showTrialExpiryBanner && !showPaymentFailedWarning && (
<div className="trial-expiry-banner">
You are in free trial period. Your free trial will end on{' '}
<span>{getFormattedDate(trialInfo?.trialEnd || Date.now())}.</span>
{user.role === USER_ROLES.ADMIN ? (
<span>
{' '}
Please{' '}
<a className="upgrade-link" onClick={handleUpgrade}>
upgrade
</a>
to continue using SigNoz features.
</span>
) : (
'Please contact your administrator for upgrading to a paid plan.'
{isLoggedIn && (
<div className={cx('app-banner-wrapper')}>
{SHOW_TRIAL_EXPIRY_BANNER && (
<div className="trial-expiry-banner">
You are in free trial period. Your free trial will end on{' '}
<span>{getFormattedDate(trialInfo?.trialEnd || Date.now())}.</span>
{user.role === USER_ROLES.ADMIN ? (
<span>
{' '}
Please{' '}
<a className="upgrade-link" onClick={handleUpgrade}>
upgrade
</a>
to continue using SigNoz features.
</span>
) : (
'Please contact your administrator for upgrading to a paid plan.'
)}
</div>
)}
</div>
)}
{showWorkspaceRestricted && renderWorkspaceRestrictedBanner()}
{SHOW_WORKSPACE_RESTRICTED_BANNER && renderWorkspaceRestrictedBanner()}
{!showTrialExpiryBanner && showPaymentFailedWarning && (
<div className="payment-failed-banner">
Your bill payment has failed. Your workspace will get suspended on{' '}
<span>
{getFormattedDateWithMinutes(
dayjs(activeLicense?.event_queue?.scheduled_at).unix() || Date.now(),
)}
.
</span>
{user.role === USER_ROLES.ADMIN ? (
<span>
{' '}
Please{' '}
<a className="upgrade-link" onClick={handleFailedPayment}>
pay the bill
</a>
to continue using SigNoz features.
</span>
) : (
' Please contact your administrator to pay the bill.'
{SHOW_PAYMENT_FAILED_BANNER && (
<div className="payment-failed-banner">
Your bill payment has failed. Your workspace will get suspended on{' '}
<span>
{getFormattedDateWithMinutes(
dayjs(activeLicense?.event_queue?.scheduled_at).unix() || Date.now(),
)}
.
</span>
{user.role === USER_ROLES.ADMIN ? (
<span>
{' '}
Please{' '}
<a className="upgrade-link" onClick={handleFailedPayment}>
pay the bill
</a>
to continue using SigNoz features.
</span>
) : (
' Please contact your administrator to pay the bill.'
)}
</div>
)}
</div>
)}
<Flex
className={cx('app-layout', isDarkMode ? 'darkMode dark' : 'lightMode')}
className={cx(
'app-layout',
isDarkMode ? 'darkMode dark' : 'lightMode',
sideNavPinned ? 'side-nav-pinned' : '',
SHOW_WORKSPACE_RESTRICTED_BANNER ? 'isWorkspaceRestricted' : '',
SHOW_TRIAL_EXPIRY_BANNER ? 'isTrialExpired' : '',
SHOW_PAYMENT_FAILED_BANNER ? 'isPaymentFailed' : '',
)}
>
{isToDisplayLayout && !renderFullScreen && <SideNav />}
{isToDisplayLayout && !renderFullScreen && (
<SideNav isPinned={sideNavPinned} />
)}
<div
className={cx('app-content', {
'full-screen-content': renderFullScreen,
@@ -657,32 +672,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<LayoutContent data-overlayscrollbars-initialize>
<OverlayScrollbar>
<ChildrenContainer
style={{
margin:
isHome() ||
isLogsView() ||
isTracesView() ||
isDashboardView() ||
isDashboardWidgetView() ||
isDashboardListView() ||
isAlertHistory() ||
isAlertOverview() ||
isMessagingQueues() ||
isCloudIntegrationPage() ||
isInfraMonitoring() ||
isApiMonitoringView() ||
isExceptionsView()
? 0
: '0 1rem',
...(isTraceDetailsView() ||
isTracesFunnels() ||
isTracesFunnelDetails()
? { margin: 0 }
: {}),
}}
>
<ChildrenContainer>
{isToDisplayLayout && !renderFullScreen && <TopNav />}
{children}
</ChildrenContainer>

View File

@@ -1,7 +1,8 @@
.billing-container {
margin-bottom: 40px;
padding-top: 36px;
width: 65%;
width: 90%;
margin: 0 auto;
.billing-summary {
margin: 24px 8px;

View File

@@ -14,7 +14,7 @@ function CloudIntegrationPage(): JSX.Element {
<HeroSection />
<RequestIntegrationBtn
type={IntegrationType.AWS_SERVICES}
message="Cannot find the AWS service you're looking for? Request more integrations"
message="Can't find the AWS service you're looking for? Request more integrations"
/>
<ServicesTabs />
</div>

View File

@@ -60,26 +60,30 @@ function CloudServiceDataCollected({
return (
<div className="cloud-service-data-collected">
<div className="cloud-service-data-collected__table">
<div className="cloud-service-data-collected__table-heading">Logs</div>
<Table
columns={logsColumns}
dataSource={logsData}
// eslint-disable-next-line react/jsx-props-no-spreading
{...tableProps}
className="cloud-service-data-collected__table-logs"
/>
</div>
<div className="cloud-service-data-collected__table">
<div className="cloud-service-data-collected__table-heading">Metrics</div>
<Table
columns={metricsColumns}
dataSource={metricsData}
// eslint-disable-next-line react/jsx-props-no-spreading
{...tableProps}
className="cloud-service-data-collected__table-metrics"
/>
</div>
{logsData && logsData.length > 0 && (
<div className="cloud-service-data-collected__table">
<div className="cloud-service-data-collected__table-heading">Logs</div>
<Table
columns={logsColumns}
dataSource={logsData}
// eslint-disable-next-line react/jsx-props-no-spreading
{...tableProps}
className="cloud-service-data-collected__table-logs"
/>
</div>
)}
{metricsData && metricsData.length > 0 && (
<div className="cloud-service-data-collected__table">
<div className="cloud-service-data-collected__table-heading">Metrics</div>
<Table
columns={metricsColumns}
dataSource={metricsData}
// eslint-disable-next-line react/jsx-props-no-spreading
{...tableProps}
className="cloud-service-data-collected__table-metrics"
/>
</div>
)}
</div>
);
}

View File

@@ -51,6 +51,33 @@ function ServiceStatus({
return <div className={`service-status ${className}`}>{text}</div>;
}
function getTabItems(serviceDetailsData: any): TabsProps['items'] {
const dashboards = serviceDetailsData?.assets.dashboards || [];
const dataCollected = serviceDetailsData?.data_collected || {};
const items: TabsProps['items'] = [];
if (dashboards.length) {
items.push({
key: 'dashboards',
label: `Dashboards (${dashboards.length})`,
children: <CloudServiceDashboards service={serviceDetailsData} />,
});
}
items.push({
key: 'data-collected',
label: 'Data Collected',
children: (
<CloudServiceDataCollected
logsData={dataCollected.logs || []}
metricsData={dataCollected.metrics || []}
/>
),
});
return items;
}
function ServiceDetails(): JSX.Element | null {
const urlQuery = useUrlQuery();
const cloudAccountId = urlQuery.get('cloudAccountId');
@@ -106,23 +133,7 @@ function ServiceDetails(): JSX.Element | null {
return null;
}
const tabItems: TabsProps['items'] = [
{
key: 'dashboards',
label: `Dashboards (${serviceDetailsData?.assets.dashboards.length})`,
children: <CloudServiceDashboards service={serviceDetailsData} />,
},
{
key: 'data-collected',
label: 'Data Collected',
children: (
<CloudServiceDataCollected
logsData={serviceDetailsData?.data_collected.logs || []}
metricsData={serviceDetailsData?.data_collected.metrics || []}
/>
),
},
];
const tabItems = getTabItems(serviceDetailsData);
return (
<div className="service-details">

View File

@@ -34,7 +34,7 @@ describe('Request AWS integration', () => {
expect(
screen.getByText(
/cannot find what youre looking for\? request more integrations/i,
/can't find what youre looking for\? request more integrations/i,
),
).toBeInTheDocument();

View File

@@ -0,0 +1,15 @@
.create-alert-channels-container {
width: 90%;
margin: 12px auto;
border: 1px solid var(--Slate-500, #161922);
background: var(--Ink-400, #121317);
border-radius: 3px;
padding: 16px;
.form-alert-channels-title {
margin-top: 0px;
margin-bottom: 16px;
}
}

View File

@@ -1,3 +1,5 @@
import './CreateAlertChannels.styles.scss';
import { Form } from 'antd';
import createEmail from 'api/channels/createEmail';
import createMsTeamsApi from 'api/channels/createMsTeams';
@@ -136,6 +138,14 @@ function CreateAlertChannels({
);
const onSlackHandler = useCallback(async () => {
if (!selectedConfig.api_url) {
notifications.error({
message: 'Error',
description: t('webhook_url_required'),
});
return;
}
setSavingState(true);
try {
@@ -152,7 +162,7 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [prepareSlackRequest, notifications, t, showErrorModal]);
}, [selectedConfig, notifications, t, prepareSlackRequest, showErrorModal]);
const prepareWebhookRequest = useCallback(() => {
// initial api request without auth params
@@ -190,6 +200,14 @@ function CreateAlertChannels({
}, [notifications, t, selectedConfig]);
const onWebhookHandler = useCallback(async () => {
if (!selectedConfig.api_url) {
notifications.error({
message: 'Error',
description: t('webhook_url_required'),
});
return;
}
setSavingState(true);
try {
const request = prepareWebhookRequest();
@@ -206,7 +224,13 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [prepareWebhookRequest, notifications, t, showErrorModal]);
}, [
selectedConfig.api_url,
notifications,
t,
prepareWebhookRequest,
showErrorModal,
]);
const preparePagerRequest = useCallback(() => {
const validationError = ValidatePagerChannel(selectedConfig as PagerChannel);
@@ -270,6 +294,14 @@ function CreateAlertChannels({
);
const onOpsgenieHandler = useCallback(async () => {
if (!selectedConfig.api_key) {
notifications.error({
message: 'Error',
description: t('api_key_required'),
});
return;
}
setSavingState(true);
try {
await createOpsgenie(prepareOpsgenieRequest());
@@ -285,7 +317,13 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [prepareOpsgenieRequest, notifications, t, showErrorModal]);
}, [
selectedConfig.api_key,
notifications,
t,
prepareOpsgenieRequest,
showErrorModal,
]);
const prepareEmailRequest = useCallback(
() => ({
@@ -299,6 +337,14 @@ function CreateAlertChannels({
);
const onEmailHandler = useCallback(async () => {
if (!selectedConfig.to) {
notifications.error({
message: 'Error',
description: t('to_required'),
});
return;
}
setSavingState(true);
try {
const request = prepareEmailRequest();
@@ -315,7 +361,7 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [prepareEmailRequest, notifications, t, showErrorModal]);
}, [prepareEmailRequest, notifications, t, showErrorModal, selectedConfig.to]);
const prepareMsTeamsRequest = useCallback(
() => ({
@@ -329,6 +375,14 @@ function CreateAlertChannels({
);
const onMsTeamsHandler = useCallback(async () => {
if (!selectedConfig.webhook_url) {
notifications.error({
message: 'Error',
description: t('webhook_url_required'),
});
return;
}
setSavingState(true);
try {
@@ -345,10 +399,24 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [prepareMsTeamsRequest, notifications, t, showErrorModal]);
}, [
selectedConfig.webhook_url,
notifications,
t,
prepareMsTeamsRequest,
showErrorModal,
]);
const onSaveHandler = useCallback(
async (value: ChannelType) => {
if (!selectedConfig.name) {
notifications.error({
message: 'Error',
description: t('channel_name_required'),
});
return;
}
const functionMapper = {
[ChannelType.Slack]: onSlackHandler,
[ChannelType.Webhook]: onWebhookHandler,
@@ -477,26 +545,28 @@ function CreateAlertChannels({
);
return (
<FormAlertChannels
{...{
formInstance,
onTypeChangeHandler,
setSelectedConfig,
type,
onTestHandler,
onSaveHandler,
savingState,
testingState,
title: t('page_title_create'),
initialValue: {
<div className="create-alert-channels-container">
<FormAlertChannels
{...{
formInstance,
onTypeChangeHandler,
setSelectedConfig,
type,
...selectedConfig,
...PagerInitialConfig,
...OpsgenieInitialConfig,
...EmailInitialConfig,
},
}}
/>
onTestHandler,
onSaveHandler,
savingState,
testingState,
title: t('page_title_create'),
initialValue: {
type,
...selectedConfig,
...PagerInitialConfig,
...OpsgenieInitialConfig,
...EmailInitialConfig,
},
}}
/>
</div>
);
}

View File

@@ -28,7 +28,6 @@ import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { useCallback, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useParams } from 'react-router-dom';
import APIError from 'types/api/error';
function EditAlertChannels({
@@ -53,7 +52,11 @@ function EditAlertChannels({
const [savingState, setSavingState] = useState<boolean>(false);
const [testingState, setTestingState] = useState<boolean>(false);
const { notifications } = useNotifications();
const { id } = useParams<{ id: string }>();
// Extract channelId from URL pathname since useParams doesn't work in nested routing
const { pathname } = window.location;
const channelIdMatch = pathname.match(/\/settings\/channels\/edit\/([^/]+)/);
const id = channelIdMatch ? channelIdMatch[1] : '';
const [type, setType] = useState<ChannelType>(
initialValue?.type ? (initialValue.type as ChannelType) : ChannelType.Slack,

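The regex fallback above is straightforward to trace through; a quick illustration with a made-up channel id:

const pathname = '/settings/channels/edit/123';
const channelIdMatch = pathname.match(/\/settings\/channels\/edit\/([^/]+)/);
const id = channelIdMatch ? channelIdMatch[1] : '';
// id === '123'; a pathname that doesn't match yields ''.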
View File

@@ -1,30 +1,173 @@
.empty-logs-search-container {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 240px;
.empty-logs-search-container-content {
.empty-logs-search {
&__container {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 240px;
}
&__content {
display: flex;
flex-direction: column;
gap: 4px;
color: var(--text-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
line-height: 18px;
letter-spacing: -0.07px;
align-items: flex-start;
.empty-state-svg {
height: 50px;
width: 50px;
}
}
&__sub-text {
font-weight: 600;
}
.sub-text {
font-weight: 600;
&__container {
&--custom-message {
height: 445px;
.empty-state-svg {
height: 32px;
width: 32px;
}
.empty-logs-search {
&__header {
display: flex;
align-items: center;
gap: 4px;
}
&__title {
color: var(--bg-vanilla-100);
font-size: 14px;
font-weight: 500;
line-height: 20px;
letter-spacing: -0.07px;
}
&__subtitle {
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
}
&__description {
font-size: 14px;
color: var(--text-vanilla-400);
line-height: 20px;
}
&__description-list {
margin: 0;
margin-top: 8px;
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
display: flex;
flex-direction: column;
gap: 6px;
list-style: none;
padding: 0;
font-family: Inter;
}
&__description-list li {
position: relative;
padding-left: 20px;
}
&__description-list li::before {
content: '';
font-family: Inter;
position: absolute;
left: 0;
color: var(--bg-robin-400);
font-weight: bold;
font-size: 16px;
line-height: 20px;
}
&__clear-filters-btn {
display: flex;
width: 468px;
font-family: Inter;
padding: 12px;
justify-content: space-between;
align-items: flex-start;
border-radius: 3px;
border: 1px dashed var(--bg-slate-500);
background: transparent;
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.07px;
cursor: pointer;
margin-top: 12px;
}
&__clear-filters-btn-icon {
display: flex;
align-items: center;
gap: 6px;
}
&__row {
display: flex;
flex-direction: row;
align-items: flex-end;
max-width: 825px;
gap: 25px;
justify-content: center;
margin-left: 21px;
}
&__content {
display: flex;
flex-direction: column;
gap: 4px;
min-width: 260px;
}
&__resources-card {
background: var(--bg-ink-400);
border: 1px solid var(--bg-slate-500);
border-radius: 4px;
width: 332px;
}
&__resources-title {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 11px;
font-weight: 600;
line-height: 18px;
letter-spacing: 0.88px;
text-transform: uppercase;
padding: 16px 16px 12px;
border-bottom: 1px solid var(--bg-slate-500);
height: 46px;
}
&__resources-links {
padding: 16px;
display: flex;
flex-direction: column;
gap: 16px;
.learn-more {
height: 18px;
}
}
}
}
}
}

View File

@@ -2,16 +2,24 @@ import './EmptyLogsSearch.styles.scss';
import { Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import LearnMore from 'components/LearnMore/LearnMore';
import { EmptyLogsListConfig } from 'container/LogsExplorerList/utils';
import { Delete } from 'lucide-react';
import { useEffect, useRef } from 'react';
import { DataSource, PanelTypeKeys } from 'types/common/queryBuilder';
interface EmptyLogsSearchProps {
dataSource: DataSource;
panelType: PanelTypeKeys;
customMessage?: EmptyLogsListConfig;
}
export default function EmptyLogsSearch({
dataSource,
panelType,
}: {
dataSource: DataSource;
panelType: PanelTypeKeys;
}): JSX.Element {
customMessage,
}: EmptyLogsSearchProps): JSX.Element {
const logEventCalledRef = useRef(false);
useEffect(() => {
if (!logEventCalledRef.current) {
@@ -30,18 +38,80 @@ export default function EmptyLogsSearch({
}, []);
return (
<div className="empty-logs-search-container">
<div className="empty-logs-search-container-content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
<Typography.Text>
<span className="sub-text">This query had no results. </span>
Edit your query and try again!
</Typography.Text>
<div
className={cx('empty-logs-search__container', {
'empty-logs-search__container--custom-message': !!customMessage,
})}
>
<div className="empty-logs-search__row">
<div className="empty-logs-search__content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
{customMessage ? (
<>
<div className="empty-logs-search__header">
<Typography.Text className="empty-logs-search__title">
{customMessage.title}
</Typography.Text>
{customMessage.subTitle && (
<Typography.Text className="empty-logs-search__subtitle">
{customMessage.subTitle}
</Typography.Text>
)}
</div>
{Array.isArray(customMessage.description) ? (
<ul className="empty-logs-search__description-list">
{customMessage.description.map((desc) => (
<li key={desc}>{desc}</li>
))}
</ul>
) : (
<Typography.Text className="empty-logs-search__description">
{customMessage.description}
</Typography.Text>
)}
{/* Clear filters button */}
{customMessage.showClearFiltersButton && (
<button
type="button"
className="empty-logs-search__clear-filters-btn"
onClick={customMessage.onClearFilters}
>
{customMessage.clearFiltersButtonText}
<span className="empty-logs-search__clear-filters-btn-icon">
<Delete size={14} />
Clear filters
</span>
</button>
)}
</>
) : (
<Typography.Text>
<span className="empty-logs-search__sub-text">
This query had no results.{' '}
</span>
Edit your query and try again!
</Typography.Text>
)}
</div>
{customMessage?.documentationLinks && (
<div className="empty-logs-search__resources-card">
<div className="empty-logs-search__resources-title">RESOURCES</div>
<div className="empty-logs-search__resources-links">
{customMessage.documentationLinks.map((link) => (
<LearnMore key={link.text} text={link.text} url={link.url} />
))}
</div>
</div>
)}
</div>
</div>
);
}
EmptyLogsSearch.defaultProps = {
customMessage: null,
};

View File

@@ -57,7 +57,9 @@ function FormAlertChannels({
return (
<>
<Typography.Title level={3}>{title}</Typography.Title>
<Typography.Title level={4} className="form-alert-channels-title">
{title}
</Typography.Title>
<Form initialValues={initialValue} layout="vertical" form={formInstance}>
<Form.Item label={t('field_channel_name')} labelAlign="left" name="name">
@@ -147,7 +149,7 @@ function FormAlertChannels({
</Button>
<Button
onClick={(): void => {
history.replace(ROUTES.SETTINGS);
history.replace(ROUTES.ALL_CHANNELS);
}}
>
{t('button_return')}

View File

@@ -212,9 +212,12 @@ function QuerySection({
return null;
}
};
const step2Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '2' : '1';
return (
<>
<StepHeading> {t('alert_form_step2')}</StepHeading>
<StepHeading> {t('alert_form_step2', { step: step2Label })}</StepHeading>
<FormContainer>
<div>{renderTabs(alertType)}</div>
{renderQuerySection(currentTab)}

View File

@@ -371,9 +371,11 @@ function RuleOptions({
selectedCategory?.name,
);
const step3Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '3' : '2';
return (
<>
<StepHeading>{t('alert_form_step3')}</StepHeading>
<StepHeading>{t('alert_form_step3', { step: step3Label })}</StepHeading>
<FormContainer>
{queryCategory === EQueryType.PROM && renderPromRuleOptions()}
{queryCategory !== EQueryType.PROM &&

View File

@@ -85,7 +85,13 @@ function LabelSelect({
}, [handleBlur]);
const handleLabelChange = (event: ChangeEvent<HTMLInputElement>): void => {
setCurrentVal(event.target?.value.replace(':', ''));
// Remove the colon if it's the last character,
// since the colon separates the key and value in the query.
setCurrentVal(
event.target?.value.endsWith(':')
? event.target?.value.slice(0, -1)
: event.target?.value,
);
};
const handleClose = (key: string): void => {

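The colon handling above is a behavioural fix, not just a refactor: the old replace(':', '') removed the first colon anywhere in the value, while the new version only strips a trailing one. A small illustration (inputs are made up):

const stripTrailingColon = (value: string): string =>
  value.endsWith(':') ? value.slice(0, -1) : value;

stripTrailingColon('severity:'); // 'severity'  – trailing separator removed
stripTrailingColon('key:value'); // 'key:value' – interior colon preserved
stripTrailingColon('env');       // 'env'       – unchanged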
View File

@@ -2,6 +2,7 @@
overflow: auto;
margin: 8px -8px;
margin-right: 0;
margin-bottom: 64px;
.react-grid-layout {
border: none !important;

View File

@@ -109,7 +109,7 @@ describe('GridCardLayout Utils', () => {
builder: {
queryData: [
{
stepInterval: 30,
stepInterval: 60,
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
queryName: 'A',
@@ -181,7 +181,7 @@ describe('GridCardLayout Utils', () => {
expect(result.builder.queryData).toHaveLength(2);
expect(result.builder.queryData[0].stepInterval).toBe(180);
expect(result.builder.queryData[1].stepInterval).toBe(180);
expect(result.builder.queryData[1].stepInterval).toBe(45); // 45 is the second query's custom stepInterval and is preserved
});
it('should use calculated stepInterval when original is undefined', () => {

View File

@@ -2,7 +2,7 @@ import { FORMULA_REGEXP } from 'constants/regExp';
import { isEmpty, isEqual } from 'lodash-es';
import { Layout } from 'react-grid-layout';
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
export const removeUndefinedValuesFromLayout = (layout: Layout[]): Layout[] =>
layout.map((obj) =>
@@ -99,6 +99,12 @@ export function updateStepInterval(
): Query {
const stepIntervalPoints = getStepIntervalPoints(minTime, maxTime);
// If the user hasn't entered anything manually (i.e. we still have the default value of 60), adjust the interval for the bar chart; otherwise apply the user's value.
const getSteps = (queryData: IBuilderQuery): number =>
queryData.stepInterval === 60
? stepIntervalPoints || 60
: queryData?.stepInterval || 60;
return {
...query,
builder: {
@@ -106,7 +112,7 @@ export function updateStepInterval(
queryData: [
...(query?.builder?.queryData ?? []).map((queryData) => ({
...queryData,
stepInterval: stepIntervalPoints || queryData?.stepInterval || 60,
stepInterval: getSteps(queryData),
})),
],
},

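The getSteps helper above encodes a simple rule: 60 is treated as the untouched default and replaced with the computed interval, while any other value is assumed to be user-entered and preserved. A worked illustration of that rule (the helper shape is simplified; the values mirror the adjacent test expectations):

const resolveStepInterval = (
  stepInterval: number | undefined,
  computed: number | undefined,
): number =>
  stepInterval === 60 ? computed || 60 : stepInterval || 60;

resolveStepInterval(60, 180);        // 180 – default gets adjusted
resolveStepInterval(45, 180);        // 45  – custom value wins
resolveStepInterval(undefined, 180); // 180 – falls back to the computed interval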
View File

@@ -12,6 +12,7 @@ import Header from 'components/Header/Header';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ORG_PREFERENCES } from 'constants/orgPreferences';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
@@ -184,18 +185,25 @@ export default function Home(): JSX.Element {
);
const processUserPreferences = (userPreferences: UserPreference[]): void => {
const checklistSkipped = userPreferences?.find(
(preference) => preference.name === 'welcome_checklist_do_later',
)?.value;
const checklistSkipped = Boolean(
userPreferences?.find(
(preference) =>
preference.name === ORG_PREFERENCES.WELCOME_CHECKLIST_DO_LATER,
)?.value,
);
const updatedChecklistItems = cloneDeep(checklistItems);
const newChecklistItems = updatedChecklistItems.map((item) => {
const newItem = { ...item };
newItem.isSkipped =
const isSkipped = Boolean(
userPreferences?.find(
(preference) => preference.name === item.skippedPreferenceKey,
)?.value || false;
)?.value,
);
newItem.isSkipped = isSkipped || false;
return newItem;
});
@@ -239,7 +247,7 @@ export default function Home(): JSX.Element {
setUpdatingUserPreferences(true);
updateUserPreference({
name: 'welcome_checklist_do_later',
name: ORG_PREFERENCES.WELCOME_CHECKLIST_DO_LATER,
value: true,
});
};

View File

@@ -1,17 +1,19 @@
import { ORG_PREFERENCES } from 'constants/orgPreferences';
import ROUTES from 'constants/routes';
import { ChecklistItem } from './HomeChecklist/HomeChecklist';
export const checkListStepToPreferenceKeyMap = {
WILL_DO_LATER: 'welcome_checklist_do_later',
SEND_LOGS: 'welcome_checklist_send_logs_skipped',
SEND_TRACES: 'welcome_checklist_send_traces_skipped',
SEND_INFRA_METRICS: 'welcome_checklist_send_infra_metrics_skipped',
SETUP_DASHBOARDS: 'welcome_checklist_setup_dashboards_skipped',
SETUP_ALERTS: 'welcome_checklist_setup_alerts_skipped',
SETUP_SAVED_VIEWS: 'welcome_checklist_setup_saved_view_skipped',
SETUP_WORKSPACE: 'welcome_checklist_setup_workspace_skipped',
ADD_DATA_SOURCE: 'welcome_checklist_add_data_source_skipped',
WILL_DO_LATER: ORG_PREFERENCES.WELCOME_CHECKLIST_DO_LATER,
SEND_LOGS: ORG_PREFERENCES.WELCOME_CHECKLIST_SEND_LOGS_SKIPPED,
SEND_TRACES: ORG_PREFERENCES.WELCOME_CHECKLIST_SEND_TRACES_SKIPPED,
SEND_INFRA_METRICS:
ORG_PREFERENCES.WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED,
SETUP_DASHBOARDS: ORG_PREFERENCES.WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED,
SETUP_ALERTS: ORG_PREFERENCES.WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED,
SETUP_SAVED_VIEWS: ORG_PREFERENCES.WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED,
SETUP_WORKSPACE: ORG_PREFERENCES.WELCOME_CHECKLIST_SETUP_WORKSPACE_SKIPPED,
ADD_DATA_SOURCE: ORG_PREFERENCES.WELCOME_CHECKLIST_ADD_DATA_SOURCE_SKIPPED,
};
export const DOCS_LINKS = {

View File

@@ -96,11 +96,41 @@ function HostsList(): JSX.Element {
};
}, [pageSize, currentPage, filters, minTime, maxTime, orderBy]);
const queryKey = useMemo(() => {
if (selectedHostName) {
return [
'hostList',
String(pageSize),
String(currentPage),
JSON.stringify(filters),
JSON.stringify(orderBy),
];
}
return [
'hostList',
String(pageSize),
String(currentPage),
JSON.stringify(filters),
JSON.stringify(orderBy),
String(minTime),
String(maxTime),
];
}, [
pageSize,
currentPage,
filters,
orderBy,
selectedHostName,
minTime,
maxTime,
]);
const { data, isFetching, isLoading, isError } = useGetHostList(
query as HostListPayload,
{
queryKey: ['hostList', query],
queryKey,
enabled: !!query,
keepPreviousData: true,
},
);
@@ -212,6 +242,7 @@ function HostsList(): JSX.Element {
<HostsListControls
filters={filters}
handleFiltersChange={handleFiltersChange}
showAutoRefresh={!selectedHostData}
/>
</div>
<HostsListTable

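The queryKey memo above drops minTime/maxTime from the key whenever a host's detail view is open, so an auto-refreshing global time range doesn't trigger background refetches of the list underneath the drawer. A reduced sketch of that rule (helper name and values are illustrative):

const buildHostListQueryKey = (
  selectedHostName: string | null,
  base: string[],
  minTime: number,
  maxTime: number,
): string[] =>
  selectedHostName ? base : [...base, String(minTime), String(maxTime)];

buildHostListQueryKey('my-host', ['hostList', '10', '1'], 1, 2); // time excluded – key stays stable
buildHostListQueryKey(null, ['hostList', '10', '1'], 1, 2);      // time included – key changes with the range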
View File

@@ -11,9 +11,11 @@ import { DataSource } from 'types/common/queryBuilder';
function HostsListControls({
handleFiltersChange,
filters,
showAutoRefresh,
}: {
handleFiltersChange: (value: IBuilderQuery['filters']) => void;
filters: IBuilderQuery['filters'];
showAutoRefresh: boolean;
}): JSX.Element {
const currentQuery = initialQueriesMap[DataSource.METRICS];
const updatedCurrentQuery = useMemo(
@@ -58,7 +60,7 @@ function HostsListControls({
<div className="time-selector">
<DateTimeSelectionV2
showAutoRefresh
showAutoRefresh={showAutoRefresh}
showRefreshText={false}
hideShareModal
/>

View File

@@ -93,9 +93,13 @@ export default function HostsListTable({
const showHostsEmptyState =
!isFetching &&
!isLoading &&
formattedHostMetricsData.length === 0 &&
(!sentAnyHostMetricsData || isSendingIncorrectK8SAgentMetrics) &&
!filters.items.length;
const showTableLoadingState =
(isLoading || isFetching) && formattedHostMetricsData.length === 0;
if (isError) {
return <Typography>{data?.error || 'Something went wrong'}</Typography>;
}
@@ -127,7 +131,7 @@ export default function HostsListTable({
);
}
if (isLoading || isFetching) {
if (showTableLoadingState) {
return (
<div className="hosts-list-loading-state">
<Skeleton.Input
@@ -155,7 +159,7 @@ export default function HostsListTable({
return (
<Table
className="hosts-list-table"
dataSource={isLoading || isFetching ? [] : formattedHostMetricsData}
dataSource={showTableLoadingState ? [] : formattedHostMetricsData}
columns={columns}
pagination={{
current: currentPage,
@@ -170,7 +174,7 @@ export default function HostsListTable({
}}
scroll={{ x: true }}
loading={{
spinning: isFetching || isLoading,
spinning: showTableLoadingState,
indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
}}
tableLayout="fixed"

View File

@@ -172,6 +172,13 @@
.ant-table-cell:nth-child(n + 3) {
padding-right: 24px;
}
.memory-usage-header {
display: flex;
align-items: center;
justify-content: flex-end;
gap: 4px;
margin-right: 4px;
}
.column-header-right {
text-align: right;
}

View File

@@ -0,0 +1,43 @@
import { render, screen } from '@testing-library/react';
import HostsEmptyOrIncorrectMetrics from '../HostsEmptyOrIncorrectMetrics';
describe('HostsEmptyOrIncorrectMetrics', () => {
it('shows no data message when noData is true', () => {
render(<HostsEmptyOrIncorrectMetrics noData incorrectData={false} />);
expect(
screen.getByText('No host metrics data received yet.'),
).toBeInTheDocument();
expect(
screen.getByText(/Infrastructure monitoring requires the/),
).toBeInTheDocument();
});
it('shows incorrect data message when incorrectData is true', () => {
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData />);
expect(
screen.getByText(
'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
),
).toBeInTheDocument();
});
it('does not show no data message when noData is false', () => {
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
expect(
screen.queryByText('No host metrics data received yet.'),
).not.toBeInTheDocument();
expect(
screen.queryByText(/Infrastructure monitoring requires the/),
).not.toBeInTheDocument();
});
it('does not show incorrect data message when incorrectData is false', () => {
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
expect(
screen.queryByText(
'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
),
).not.toBeInTheDocument();
});
});

View File

@@ -0,0 +1,166 @@
/* eslint-disable react/button-has-type */
import { render } from '@testing-library/react';
import ROUTES from 'constants/routes';
import * as useGetHostListHooks from 'hooks/infraMonitoring/useGetHostList';
import * as appContextHooks from 'providers/App/App';
import * as timezoneHooks from 'providers/Timezone';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import store from 'store';
import { LicenseEvent } from 'types/api/licensesV3/getActive';
import HostsList from '../HostsList';
jest.mock('lib/getMinMax', () => ({
__esModule: true,
default: jest.fn().mockImplementation(() => ({
minTime: 1713734400000,
maxTime: 1713738000000,
isValidTimeFormat: jest.fn().mockReturnValue(true),
})),
}));
jest.mock('components/CustomTimePicker/CustomTimePicker', () => ({
__esModule: true,
default: ({ onSelect, selectedTime, selectedValue }: any): JSX.Element => (
<div data-testid="custom-time-picker">
<button onClick={(): void => onSelect('custom')}>
{selectedTime} - {selectedValue}
</button>
</div>
),
}));
const queryClient = new QueryClient();
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useSelector: (): any => ({
globalTime: {
selectedTime: {
startTime: 1713734400000,
endTime: 1713738000000,
},
maxTime: 1713738000000,
minTime: 1713734400000,
},
}),
}));
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: jest.fn().mockReturnValue({
pathname: ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
}),
}));
jest.mock('react-router-dom-v5-compat', () => {
const actual = jest.requireActual('react-router-dom-v5-compat');
return {
...actual,
useSearchParams: jest
.fn()
.mockReturnValue([
{ get: jest.fn(), entries: jest.fn().mockReturnValue([]) },
jest.fn(),
]),
useNavigationType: (): any => 'PUSH',
};
});
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: jest.fn(),
}),
}));
jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
timezone: {
offset: 0,
},
browserTimezone: {
offset: 0,
},
} as any);
jest.spyOn(useGetHostListHooks, 'useGetHostList').mockReturnValue({
data: {
payload: {
data: {
records: [
{
hostName: 'test-host',
active: true,
cpu: 0.75,
memory: 0.65,
wait: 0.03,
},
],
isSendingK8SAgentMetrics: false,
sentAnyHostMetricsData: true,
},
},
},
isLoading: false,
isError: false,
} as any);
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
user: {
role: 'admin',
},
activeLicenseV3: {
event_queue: {
created_at: '0',
event: LicenseEvent.NO_EVENT,
scheduled_at: '0',
status: '',
updated_at: '0',
},
license: {
license_key: 'test-license-key',
license_type: 'trial',
org_id: 'test-org-id',
plan_id: 'test-plan-id',
plan_name: 'test-plan-name',
plan_type: 'trial',
plan_version: 'test-plan-version',
},
},
} as any);
describe('HostsList', () => {
it('renders hosts list table', () => {
const { container } = render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<HostsList />
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(container.querySelector('.hosts-list-table')).toBeInTheDocument();
});
it('renders filters', () => {
const { container } = render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<HostsList />
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(container.querySelector('.filters')).toBeInTheDocument();
});
});

View File

@@ -0,0 +1,38 @@
import { render, screen } from '@testing-library/react';
import HostsListControls from '../HostsListControls';
jest.mock('container/QueryBuilder/filters/QueryBuilderSearch', () => ({
__esModule: true,
default: (): JSX.Element => (
<div data-testid="query-builder-search">Search</div>
),
}));
jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
__esModule: true,
default: (): JSX.Element => (
<div data-testid="date-time-selection">Date Time</div>
),
}));
describe('HostsListControls', () => {
const mockHandleFiltersChange = jest.fn();
const mockFilters = {
items: [],
op: 'AND',
};
it('renders search and date time filters', () => {
render(
<HostsListControls
handleFiltersChange={mockHandleFiltersChange}
filters={mockFilters}
showAutoRefresh={false}
/>,
);
expect(screen.getByTestId('query-builder-search')).toBeInTheDocument();
expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
});
});

View File

@@ -0,0 +1,167 @@
/* eslint-disable react/jsx-props-no-spreading */
import { render, screen } from '@testing-library/react';
import HostsListTable from '../HostsListTable';
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
const EMPTY_STATE_CONTAINER_CLASS = '.hosts-empty-state-container';
describe('HostsListTable', () => {
const mockHost = {
hostName: 'test-host-1',
active: true,
cpu: 0.75,
memory: 0.65,
wait: 0.03,
load15: 1.5,
os: 'linux',
};
const mockTableData = {
payload: {
data: {
hosts: [mockHost],
},
},
};
const mockOnHostClick = jest.fn();
const mockSetCurrentPage = jest.fn();
const mockSetOrderBy = jest.fn();
const mockSetPageSize = jest.fn();
const mockProps = {
isLoading: false,
isError: false,
isFetching: false,
tableData: mockTableData,
hostMetricsData: [mockHost],
filters: {
items: [],
op: 'AND',
},
onHostClick: mockOnHostClick,
currentPage: 1,
setCurrentPage: mockSetCurrentPage,
pageSize: 10,
setOrderBy: mockSetOrderBy,
setPageSize: mockSetPageSize,
} as any;
it('renders loading state if isLoading is true and tableData is empty', () => {
const { container } = render(
<HostsListTable
{...mockProps}
isLoading
hostMetricsData={[]}
tableData={{ payload: { data: { hosts: [] } } }}
/>,
);
expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
});
it('renders loading state if isFetching is true and tableData is empty', () => {
const { container } = render(
<HostsListTable
{...mockProps}
isFetching
hostMetricsData={[]}
tableData={{ payload: { data: { hosts: [] } } }}
/>,
);
expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
});
it('renders error state if isError is true', () => {
render(<HostsListTable {...mockProps} isError />);
expect(screen.getByText('Something went wrong')).toBeTruthy();
});
it('renders empty state if no hosts are found', () => {
const { container } = render(
<HostsListTable
{...mockProps}
hostMetricsData={[]}
tableData={{
payload: {
data: { hosts: [] },
},
}}
/>,
);
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
});
it('renders empty state if sentAnyHostMetricsData is false', () => {
const { container } = render(
<HostsListTable
{...mockProps}
hostMetricsData={[]}
tableData={{
...mockTableData,
payload: {
...mockTableData.payload,
data: {
...mockTableData.payload.data,
sentAnyHostMetricsData: false,
hosts: [],
},
},
}}
/>,
);
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
});
it('renders empty state if isSendingIncorrectK8SAgentMetrics is true', () => {
const { container } = render(
<HostsListTable
{...mockProps}
hostMetricsData={[]}
tableData={{
...mockTableData,
payload: {
...mockTableData.payload,
data: {
...mockTableData.payload.data,
isSendingIncorrectK8SAgentMetrics: true,
hosts: [],
},
},
}}
/>,
);
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
});
it('renders table data', () => {
const { container } = render(
<HostsListTable
{...mockProps}
tableData={{
...mockTableData,
payload: {
...mockTableData.payload,
data: {
...mockTableData.payload.data,
isSendingIncorrectK8SAgentMetrics: false,
sentAnyHostMetricsData: true,
},
},
}}
/>,
);
expect(container.querySelector('.hosts-list-table')).toBeTruthy();
});
});

View File

@@ -0,0 +1,104 @@
import { render } from '@testing-library/react';
import { formatDataForTable, GetHostsQuickFiltersConfig } from '../utils';
const PROGRESS_BAR_CLASS = '.progress-bar';
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
describe('InfraMonitoringHosts utils', () => {
describe('formatDataForTable', () => {
it('should format host data correctly', () => {
const mockData = [
{
hostName: 'test-host',
active: true,
cpu: 0.95,
memory: 0.85,
wait: 0.05,
load15: 2.5,
os: 'linux',
},
] as any;
const result = formatDataForTable(mockData);
expect(result[0].hostName).toBe('test-host');
expect(result[0].wait).toBe('5%');
expect(result[0].load15).toBe(2.5);
// Test active tag rendering
const activeTag = render(result[0].active as JSX.Element);
expect(activeTag.container.textContent).toBe('ACTIVE');
expect(activeTag.container.querySelector('.active')).toBeTruthy();
// Test CPU progress bar
const cpuProgress = render(result[0].cpu as JSX.Element);
const cpuProgressBar = cpuProgress.container.querySelector(
PROGRESS_BAR_CLASS,
);
expect(cpuProgressBar).toBeTruthy();
// Test memory progress bar
const memoryProgress = render(result[0].memory as JSX.Element);
const memoryProgressBar = memoryProgress.container.querySelector(
PROGRESS_BAR_CLASS,
);
expect(memoryProgressBar).toBeTruthy();
});
it('should handle inactive hosts', () => {
const mockData = [
{
hostName: 'test-host',
active: false,
cpu: 0.3,
memory: 0.4,
wait: 0.02,
load15: 1.2,
os: 'linux',
cpuTimeSeries: [],
memoryTimeSeries: [],
waitTimeSeries: [],
load15TimeSeries: [],
},
] as any;
const result = formatDataForTable(mockData);
const inactiveTag = render(result[0].active as JSX.Element);
expect(inactiveTag.container.textContent).toBe('INACTIVE');
expect(inactiveTag.container.querySelector('.inactive')).toBeTruthy();
});
});
describe('GetHostsQuickFiltersConfig', () => {
it('should return correct config when dotMetricsEnabled is true', () => {
const result = GetHostsQuickFiltersConfig(true);
expect(result[0].attributeKey.key).toBe('host.name');
expect(result[1].attributeKey.key).toBe('os.type');
expect(result[0].aggregateAttribute).toBe('system.cpu.load_average.15m');
});
it('should return correct config when dotMetricsEnabled is false', () => {
const result = GetHostsQuickFiltersConfig(false);
expect(result[0].attributeKey.key).toBe('host_name');
expect(result[1].attributeKey.key).toBe('os_type');
expect(result[0].aggregateAttribute).toBe('system_cpu_load_average_15m');
});
});
});

View File

@@ -1,7 +1,8 @@
import './InfraMonitoring.styles.scss';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Color } from '@signozhq/design-tokens';
import { Progress, TabsProps, Tag } from 'antd';
import { Progress, TabsProps, Tag, Tooltip } from 'antd';
import { ColumnType } from 'antd/es/table';
import {
HostData,
@@ -93,7 +94,14 @@ export const getHostsListColumns = (): ColumnType<HostRowData>[] => [
align: 'right',
},
{
title: <div className="column-header-right">Memory Usage</div>,
title: (
<div className="column-header-right memory-usage-header">
Memory Usage
<Tooltip title="Excluding cache memory">
<InfoCircleOutlined />
</Tooltip>
</div>
),
dataIndex: 'memory',
key: 'memory',
width: 100,
@@ -210,6 +218,10 @@ export function GetHostsQuickFiltersConfig(
? 'system.cpu.load_average.15m'
: 'system_cpu_load_average_15m';
const environmentKey = dotMetricsEnabled
? 'deployment.environment'
: 'deployment_environment';
return [
{
type: FiltersType.CHECKBOX,
@@ -241,5 +253,17 @@ export function GetHostsQuickFiltersConfig(
dataSource: DataSource.METRICS,
defaultOpen: true,
},
{
type: FiltersType.CHECKBOX,
title: 'Environment',
attributeKey: {
key: environmentKey,
dataType: DataTypes.String,
type: 'resource',
isColumn: false,
isJSON: false,
},
defaultOpen: true,
},
];
}

View File

@@ -38,7 +38,7 @@ import {
ScrollText,
X,
} from 'lucide-react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { AppState } from 'store/reducers';
@@ -85,8 +85,12 @@ function ClusterDetails({
endTime: endMs,
}));
const lastSelectedInterval = useRef<Time | null>(null);
const [selectedInterval, setSelectedInterval] = useState<Time>(
selectedTime as Time,
lastSelectedInterval.current
? lastSelectedInterval.current
: (selectedTime as Time),
);
const [searchParams, setSearchParams] = useSearchParams();
@@ -195,10 +199,11 @@ function ClusterDetails({
}, [initialFilters, initialEventsFilters]);
useEffect(() => {
setSelectedInterval(selectedTime as Time);
const currentSelectedInterval = lastSelectedInterval.current || selectedTime;
setSelectedInterval(currentSelectedInterval as Time);
if (selectedTime !== 'custom') {
const { maxTime, minTime } = GetMinMax(selectedTime);
if (currentSelectedInterval !== 'custom') {
const { maxTime, minTime } = GetMinMax(currentSelectedInterval);
setModalTimeRange({
startTime: Math.floor(minTime / 1000000000),
@@ -226,6 +231,7 @@ function ClusterDetails({
const handleTimeChange = useCallback(
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
lastSelectedInterval.current = interval as Time;
setSelectedInterval(interval as Time);
if (interval === 'custom' && dateTimeRange) {
@@ -462,6 +468,7 @@ function ClusterDetails({
};
const handleClose = (): void => {
lastSelectedInterval.current = null;
setSelectedInterval(selectedTime as Time);
if (selectedTime !== 'custom') {
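Aside: the ClusterDetails change above keeps the last explicitly chosen interval in a ref so that global time updates do not overwrite it while the details drawer is open, and clears it again on close. A minimal sketch of the same pattern in isolation, assuming a Time string union and standard React hooks; the hook name and shape here are illustrative, not part of the PR:

import { useCallback, useEffect, useRef, useState } from 'react';

type Time = '5m' | '1h' | 'custom'; // illustrative subset of the real Time union

function usePersistedInterval(globalTime: Time): {
	interval: Time;
	onIntervalChange: (next: Time) => void;
	onClose: () => void;
} {
	// Remembers the user's explicit selection across global time updates.
	const lastSelectedInterval = useRef<Time | null>(null);
	const [interval, setSelectedInterval] = useState<Time>(
		lastSelectedInterval.current ?? globalTime,
	);

	useEffect(() => {
		// Prefer the explicit selection; fall back to the global time.
		setSelectedInterval(lastSelectedInterval.current ?? globalTime);
	}, [globalTime]);

	const onIntervalChange = useCallback((next: Time): void => {
		lastSelectedInterval.current = next;
		setSelectedInterval(next);
	}, []);

	const onClose = useCallback((): void => {
		lastSelectedInterval.current = null;
		setSelectedInterval(globalTime);
	}, [globalTime]);

	return { interval, onIntervalChange, onClose };
}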

View File

@@ -51,8 +51,8 @@ export const getClusterMetricsQueryPayload = (
const getKey = (dotKey: string, underscoreKey: string): string =>
dotMetricsEnabled ? dotKey : underscoreKey;
const k8sPodCpuUtilizationKey = getKey(
'k8s.pod.cpu.utilization',
'k8s_pod_cpu_utilization',
'k8s.pod.cpu.usage',
'k8s_pod_cpu_usage',
);
const k8sNodeAllocatableCpuKey = getKey(
'k8s.node.allocatable_cpu',
@@ -146,7 +146,7 @@ export const getClusterMetricsQueryPayload = (
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_pod_cpu_utilization--float64--Gauge--true',
id: 'k8s_pod_cpu_usage--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sPodCpuUtilizationKey,
@@ -189,7 +189,7 @@ export const getClusterMetricsQueryPayload = (
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_pod_cpu_utilization--float64--Gauge--true',
id: 'k8s_pod_cpu_usage--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sPodCpuUtilizationKey,
@@ -232,7 +232,7 @@ export const getClusterMetricsQueryPayload = (
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_pod_cpu_utilization--float64--Gauge--true',
id: 'k8s_pod_cpu_usage--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sPodCpuUtilizationKey,
@@ -731,7 +731,7 @@ export const getClusterMetricsQueryPayload = (
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
graphType: PANEL_TYPES.TABLE,
query: {
builder: {
queryData: [
@@ -751,7 +751,7 @@ export const getClusterMetricsQueryPayload = (
filters: {
items: [
{
id: 'd7779183',
id: 'a7da59c7',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -786,12 +786,12 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'available',
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
},
@@ -804,14 +804,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDeploymentDesiredKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
id: '55110885',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -846,14 +846,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'desired',
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'avg',
},
],
queryFormulas: [],
@@ -890,13 +890,13 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
formatForWeb: true,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
graphType: PANEL_TYPES.TABLE,
query: {
builder: {
queryData: [
@@ -909,14 +909,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetCurrentPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'max',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'd7779183',
id: '3c57b4d1',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -951,14 +951,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'current',
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'max',
},
{
aggregateAttribute: {
@@ -969,14 +969,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetDesiredPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'max',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
id: '0f49fe64',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1011,14 +1011,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'desired',
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'max',
},
{
aggregateAttribute: {
@@ -1029,14 +1029,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetReadyPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'max',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: 'd7779183',
id: '0bebf625',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1071,14 +1071,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'ready',
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'max',
},
{
aggregateAttribute: {
@@ -1089,14 +1089,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetUpdatedPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'max',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'D',
filters: {
items: [
{
id: 'd7779183',
id: '1ddacbbe',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1131,14 +1131,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'updated',
limit: null,
orderBy: [],
queryName: 'D',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'max',
},
],
queryFormulas: [],
@@ -1199,13 +1199,13 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
formatForWeb: true,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
graphType: PANEL_TYPES.TABLE,
query: {
builder: {
queryData: [
@@ -1218,14 +1218,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetCurrentScheduledNodesKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'd7779183',
id: 'e0bea554',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1250,24 +1250,16 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
legend: 'current_nodes',
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'avg',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'avg',
},
{
aggregateAttribute: {
@@ -1278,14 +1270,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetDesiredScheduledNodesKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
id: '741052f7',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1310,24 +1302,16 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
legend: 'desired_nodes',
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'avg',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'avg',
},
{
aggregateAttribute: {
@@ -1338,14 +1322,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetReadyNodesKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: 'd7779183',
id: 'f23759f2',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1370,24 +1354,16 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
legend: 'ready_nodes',
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'avg',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'avg',
},
],
queryFormulas: [],
@@ -1436,316 +1412,7 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_active_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobActivePodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_successful_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobSuccessfulPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_failed_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobFailedPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_desired_successful_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobDesiredSuccessfulPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'D',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'D',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
],
queryFormulas: [],
},
clickhouse_sql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
id: v4(),
promql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
formatForWeb: true,
start,
end,
},
@@ -1777,7 +1444,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'k8s_cluster_name',
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
@@ -1837,7 +1504,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'k8s_cluster_name',
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
@@ -1897,7 +1564,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'k8s_cluster_name',
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
@@ -1957,7 +1624,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'k8s_cluster_name',
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
@@ -2005,6 +1672,24 @@ export const getClusterMetricsQueryPayload = (
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
id: v4(),
promql: [
@@ -2014,6 +1699,24 @@ export const getClusterMetricsQueryPayload = (
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},
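Aside: the widget payloads above resolve metric and attribute names through a small dot-vs-underscore switch (the getKey helper near the top of this file), so the same panels work whether DOT_METRICS_ENABLED is on or off. A minimal, self-contained sketch of that selection using names that appear in the diff; the standalone function signature here is illustrative:

// Pick OTel dot-notation names when dot metrics are enabled,
// otherwise fall back to the legacy underscore-separated names.
const getMetricKey = (
	dotKey: string,
	underscoreKey: string,
	dotMetricsEnabled: boolean,
): string => (dotMetricsEnabled ? dotKey : underscoreKey);

// Names taken from the diff above.
const k8sPodCpuUsageKey = getMetricKey(
	'k8s.pod.cpu.usage',
	'k8s_pod_cpu_usage',
	true,
);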

View File

@@ -189,6 +189,32 @@ function K8sClustersList({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [minTime, maxTime, orderBy, selectedRowData, groupBy]);
const groupedByRowDataQueryKey = useMemo(() => {
if (selectedClusterName) {
return [
'clusterList',
JSON.stringify(queryFilters),
JSON.stringify(orderBy),
JSON.stringify(selectedRowData),
];
}
return [
'clusterList',
JSON.stringify(queryFilters),
JSON.stringify(orderBy),
JSON.stringify(selectedRowData),
String(minTime),
String(maxTime),
];
}, [
queryFilters,
orderBy,
selectedClusterName,
minTime,
maxTime,
selectedRowData,
]);
const {
data: groupedByRowData,
isFetching: isFetchingGroupedByRowData,
@@ -198,7 +224,7 @@ function K8sClustersList({
} = useGetK8sClustersList(
fetchGroupedByRowDataQuery as K8sClustersListPayload,
{
queryKey: ['clusterList', fetchGroupedByRowDataQuery],
queryKey: groupedByRowDataQueryKey,
enabled: !!fetchGroupedByRowDataQuery && !!selectedRowData,
},
undefined,
@@ -254,11 +280,44 @@ function K8sClustersList({
return groupedByRowData?.payload?.data?.records || [];
}, [groupedByRowData, selectedRowData]);
const queryKey = useMemo(() => {
if (selectedClusterName) {
return [
'clusterList',
String(pageSize),
String(currentPage),
JSON.stringify(queryFilters),
JSON.stringify(orderBy),
JSON.stringify(groupBy),
];
}
return [
'clusterList',
String(pageSize),
String(currentPage),
JSON.stringify(queryFilters),
JSON.stringify(orderBy),
JSON.stringify(groupBy),
String(minTime),
String(maxTime),
];
}, [
selectedClusterName,
pageSize,
currentPage,
queryFilters,
orderBy,
groupBy,
minTime,
maxTime,
]);
const { data, isFetching, isLoading, isError } = useGetK8sClustersList(
query as K8sClustersListPayload,
{
queryKey: ['clusterList', query],
queryKey,
enabled: !!query,
keepPreviousData: true,
},
undefined,
dotMetricsEnabled,
@@ -583,6 +642,9 @@ function K8sClustersList({
});
};
const showTableLoadingState =
(isFetching || isLoading) && formattedClustersData.length === 0;
return (
<div className="k8s-list">
<K8sHeader
@@ -595,12 +657,13 @@ function K8sClustersList({
handleGroupByChange={handleGroupByChange}
selectedGroupBy={groupBy}
entity={K8sCategory.NODES}
showAutoRefresh={!selectedClusterData}
/>
{isError && <Typography>{data?.error || 'Something went wrong'}</Typography>}
<Table
className="k8s-list-table clusters-list-table"
dataSource={isFetching || isLoading ? [] : formattedClustersData}
dataSource={showTableLoadingState ? [] : formattedClustersData}
columns={columns}
pagination={{
current: currentPage,
@@ -612,26 +675,25 @@ function K8sClustersList({
}}
scroll={{ x: true }}
loading={{
spinning: isFetching || isLoading,
spinning: showTableLoadingState,
indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
}}
locale={{
emptyText:
isFetching || isLoading ? null : (
<div className="no-filtered-hosts-message-container">
<div className="no-filtered-hosts-message-content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
emptyText: showTableLoadingState ? null : (
<div className="no-filtered-hosts-message-container">
<div className="no-filtered-hosts-message-content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
<Typography.Text className="no-filtered-hosts-message">
This query had no results. Edit your query and try again!
</Typography.Text>
</div>
<Typography.Text className="no-filtered-hosts-message">
This query had no results. Edit your query and try again!
</Typography.Text>
</div>
),
</div>
),
}}
tableLayout="fixed"
onChange={handleTableChange}
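Aside: the memoized query keys above intentionally drop minTime/maxTime while a cluster row is selected, so global time ticks (for example, auto-refresh) do not invalidate the list query behind the open details drawer. A minimal sketch of that idea with react-query's useQuery; the fetchClusters fetcher and hook name are placeholders, not part of the PR:

import { useMemo } from 'react';
import { useQuery, UseQueryResult } from 'react-query';

// Placeholder fetcher for illustration only.
declare function fetchClusters(
	filters: unknown,
	orderBy: unknown,
): Promise<unknown>;

function useClustersListQuery(
	filters: unknown,
	orderBy: unknown,
	minTime: number,
	maxTime: number,
	selectedClusterName: string | null,
): UseQueryResult<unknown> {
	const queryKey = useMemo(() => {
		const base = [
			'clusterList',
			JSON.stringify(filters),
			JSON.stringify(orderBy),
		];
		// While a cluster is selected, omit the time range so moving the
		// global time window does not refetch the list behind the drawer.
		return selectedClusterName
			? base
			: [...base, String(minTime), String(maxTime)];
	}, [filters, orderBy, minTime, maxTime, selectedClusterName]);

	return useQuery(queryKey, () => fetchClusters(filters, orderBy), {
		keepPreviousData: true,
	});
}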

View File

@@ -33,7 +33,7 @@ import {
ScrollText,
X,
} from 'lucide-react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { AppState } from 'store/reducers';
@@ -84,8 +84,12 @@ function DaemonSetDetails({
endTime: endMs,
}));
const lastSelectedInterval = useRef<Time | null>(null);
const [selectedInterval, setSelectedInterval] = useState<Time>(
selectedTime as Time,
lastSelectedInterval.current
? lastSelectedInterval.current
: (selectedTime as Time),
);
const [searchParams, setSearchParams] = useSearchParams();
@@ -211,10 +215,11 @@ function DaemonSetDetails({
}, [initialFilters, initialEventsFilters]);
useEffect(() => {
setSelectedInterval(selectedTime as Time);
const currentSelectedInterval = lastSelectedInterval.current || selectedTime;
setSelectedInterval(currentSelectedInterval as Time);
if (selectedTime !== 'custom') {
const { maxTime, minTime } = GetMinMax(selectedTime);
if (currentSelectedInterval !== 'custom') {
const { maxTime, minTime } = GetMinMax(currentSelectedInterval);
setModalTimeRange({
startTime: Math.floor(minTime / 1000000000),
@@ -242,6 +247,7 @@ function DaemonSetDetails({
const handleTimeChange = useCallback(
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
lastSelectedInterval.current = interval as Time;
setSelectedInterval(interval as Time);
if (interval === 'custom' && dateTimeRange) {
@@ -476,6 +482,7 @@ function DaemonSetDetails({
};
const handleClose = (): void => {
lastSelectedInterval.current = null;
setSelectedInterval(selectedTime as Time);
if (selectedTime !== 'custom') {

View File

@@ -33,8 +33,8 @@ export const getDaemonSetMetricsQueryPayload = (
dotMetricsEnabled: boolean,
): GetQueryResultsProps[] => {
const k8sPodCpuUtilizationKey = dotMetricsEnabled
? 'k8s.pod.cpu.utilization'
: 'k8s_pod_cpu_utilization';
? 'k8s.pod.cpu.usage'
: 'k8s_pod_cpu_usage';
const k8sContainerCpuRequestKey = dotMetricsEnabled
? 'k8s.container.cpu_request'
@@ -84,7 +84,7 @@ export const getDaemonSetMetricsQueryPayload = (
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_pod_cpu_utilization--float64--Gauge--true',
id: 'k8s_pod_cpu_usage--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sPodCpuUtilizationKey,

Some files were not shown because too many files have changed in this diff.