Compare commits

..

359 Commits

Author SHA1 Message Date
Srikanth Chekuri
04a68ad444 chore: adjust timestamps 2024-05-21 08:06:49 +05:30
Srikanth Chekuri
0c0f9a0481 chore: skip test 2024-05-21 07:30:39 +05:30
Srikanth Chekuri
3c27d59ad4 chore: add for only formula 2024-05-21 06:22:22 +05:30
Srikanth Chekuri
a891dd4b50 chore: fix timestamps 2024-05-20 21:49:23 +05:30
Srikanth Chekuri
10530582ab fix: add zero value for missing timestamps in alert eval 2024-05-20 15:22:19 +05:30
Srikanth Chekuri
9ff0e34038 chore: migrate alerts to v4 for supported operators (#5010) 2024-05-17 07:45:03 +05:30
Vikrant Gupta
d313f44556 fix: multiple widgets getting created and hence blocking the delete (#5015)
* fix: multiple widgets getting created and hence blocking the delete

* fix: allow multiple deletes when multiple widgets present with same id

* chore: use the avg for limit

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-05-17 07:44:33 +05:30
Raj Kamal Singh
5a778dcb18 Chore: integrations: populate updatedAt for integration dashboards (#5019)
* chore: add test for updatedAt value being populated in integration dashboards and get it passing

* chore: also populate createdAt, createBy and updateBy for installed integration dashboards

* chore: update clickhouse integration config instructions
2024-05-16 21:44:46 +05:30
Srikanth Chekuri
7e31b4ca01 fix: several issues (#5001) 2024-05-15 18:52:01 +05:30
Raj Kamal Singh
3efd9801a1 Chore: restrict logs connection test for integrations to use log attributes for identifying logs (#4977)
* chore: change logs connection test spec to be based on an attrib value

* chore: disallow unknown fields while unmarshalling JSON for an integration

* chore: add description field to collected metric spec

* chore: update logs connection test for builtin integrations

* chore: update logic for calculating logs connection status
2024-05-15 14:36:52 +05:30
Vishal Sharma
0cbaa17d9f chore: allow unlimited dashboards and alerts in community version (#4989)
* chore: allow unlimited dashboards and alerts in community version

* chore: update ee plan
2024-05-14 18:05:59 +05:30
Nityananda Gohain
30bfad527f chore: enable limits for trace queries (#4997) 2024-05-14 17:03:29 +05:30
Srikanth Chekuri
9f1c45bc32 chore: add toUnixTimestamp to supported functions (#4877) 2024-05-14 10:34:43 +05:30
Vikrant Gupta
51becf7cfb fix: added right padding to the notifications bar to show cancel button (#4969) 2024-05-12 16:45:16 +05:30
Vibhu Pandey
7460e650af feat(workflow): integrate with workflow identity pool (#4945)
* feat(workflows): add wif workflow
* feat(workflows): add name of compute instance
* feat(workflows): fix permissions
* feat(workflows):  add an OR true since github runs with -e
* ci(testing-deployment): include GITHUB envs
* ci(testing-deployment): move GCP information to secrets
* ci(staging-deployment): wif workflow

---------

Co-authored-by: Prashant Shahi <prashant@signoz.io>
2024-05-10 23:23:31 +05:30
Vikrant Gupta
211fe4fdd5 fix: prevent page from crashing in case items in filters is null (#4964)
* fix: prevent page from crashing in case items in filters is null

* fix: added null check for filters as well
2024-05-06 19:18:27 +05:30
Vikrant Gupta
e2992b42c1 fix: make integrations available for the ee cloud user (#4963) 2024-05-06 19:17:50 +05:30
Nityananda Gohain
3957d91a9b fix: add read-in-order config (#4918) 2024-05-06 15:01:53 +05:30
Vishal Sharma
967aa16f21 feat: sort tags and events in trace detail (#4962) 2024-05-05 09:03:31 +05:30
Vikrant Gupta
08b1a87cb5 Revert "fix: step interval not getting updated on time range change (#4944)" (#4955)
This reverts commit 5c1c09c790.
2024-05-01 22:46:32 +05:30
SagarRajput-7
03ddcdd20e feat: added test cases for pipeline pages (#4872)
* feat: added test cases for pipeline pages

* feat: added test cases for changeHistory

* chore: change history table test case added

* chore: added create pipeline button test cases

* chore: updated useAnalytics mocking
2024-05-01 18:49:04 +05:30
SagarRajput-7
1aec7f3ca6 feat: added tooltips for facing issue btn (#4948) 2024-05-01 18:36:56 +05:30
Vikrant Gupta
241edcb88a fix: text change for saved views in traces (#4953) 2024-05-01 18:28:05 +05:30
Srikanth Chekuri
27d12871af chore: disallow small step intervals for large durations (#4950) 2024-05-01 17:03:46 +05:30
Yunus M
e78e1d4b63 fix: add safety checks to handle null response from query range API (#4939) 2024-05-01 15:49:30 +05:30
Yunus M
64bf580323 feat: show milliseconds in timestamp in logs views (#4949)
* feat: show milliseconds in timestamp in logs views

* fix: remove console log

---------

Co-authored-by: Vikrant Gupta <vikrant.thomso@gmail.com>
2024-05-01 15:27:48 +05:30
SagarRajput-7
152aa4b518 fix: fixed facing issue btn alignment issue (#4936)
* fix: fixed facing issue btn alignment issue

* fix: fixed facing issue btn alignment issue

* fix: moved intercom help messages to util file
2024-05-01 14:49:42 +05:30
Vikrant Gupta
b3d5831574 fix: ch queries sending builder as query type in query range api for exceptions alerts (#4941)
* fix: ch queries sending builder as query type in query range api for exceptions alerts

* fix: ch queries sending builder as query type in query range api for exceptions alerts

* fix: alerts routing from logs explorer and dashboards
2024-05-01 14:39:39 +05:30
Vikrant Gupta
b85b9f42ed fix: time interval not getting updated in case of edit dashboard (#4940) 2024-05-01 13:00:18 +05:30
Vikrant Gupta
5c1c09c790 fix: step interval not getting updated on time range change (#4944) 2024-05-01 12:47:33 +05:30
Vishal Sharma
33960b05fd chore: update facing issues text (#4942) 2024-04-30 23:38:15 +05:30
Vikrant Gupta
191d9b0648 feat: introducing collapsable rows for dashboards (#4806)
* feat: dashboard panel grouping initial setup

* feat: added panel map to the dashboard response and subsequent types for the same

* feat: added panel map to the dashboard response and subsequent types for the same

* feat: added settings modal

* feat: handle panel collapse and open changes

* feat: handle creating panel map when dashboard layout changes

* feat: handle creating panel map when dashboard layout changes

* feat: refactor code

* feat: handle multiple collapsable rows

* fix: type issues

* feat: handle row collapse button and scroll

* feat: handle y axis movement for rows

* feat: handle delete row

* feat: handle settings name change

* feat: disable collapse/uncollapse when dashboard loading to avoid async states

* feat: decrease the height of the grouping row

* fix: row height management

* fix: handle empty row case

* feat: remove resize handle from the row

* feat: handle re-arrangement of panels

* feat: increase height of default new widget

* feat: added safety checks
2024-04-30 14:36:47 +05:30
Srikanth Chekuri
7d81bc3417 fix: value panel restriction should be on enabled queries (#4934) 2024-04-30 09:53:03 +05:30
Srikanth Chekuri
506916661d fix: metric limit works with cache (#4935) 2024-04-30 01:25:50 +05:30
Nityananda Gohain
5326f2d23b fix: dont enrich if non empty keys are not same (#4930)
* fix: dont enrich if non empty keys are not same

* fix: update if any of the type and dataType is empty but other is matching
2024-04-29 22:40:40 +05:30
dependabot[bot]
dfaa344dce chore(deps): bump express from 4.18.2 to 4.19.2 in /frontend (#4840)
Bumps [express](https://github.com/expressjs/express) from 4.18.2 to 4.19.2.
- [Release notes](https://github.com/expressjs/express/releases)
- [Changelog](https://github.com/expressjs/express/blob/master/History.md)
- [Commits](https://github.com/expressjs/express/compare/4.18.2...4.19.2)

---
updated-dependencies:
- dependency-name: express
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-04-29 21:29:27 +05:30
SagarRajput-7
882b540a0b chore: [SIG-583]: Jest coverage collection config (#4920)
* chore: [SIG-583]: Jest coverage collection config

* fix: added missing attribute
2024-04-27 11:31:37 +05:30
Ankit Nayan
1c4b579c3d fix: frontend/package.json & frontend/yarn.lock to reduce vulnerabilities (#4341)
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-AXIOS-6144788

Co-authored-by: snyk-bot <snyk-bot@snyk.io>
Co-authored-by: Prashant Shahi <prashant@signoz.io>
2024-04-26 15:00:06 +05:30
Yunus M
706f25cc5d fix: frontend/Dockerfile to reduce vulnerabilities (#4913)
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-ALPINE318-EXPAT-6241039
- https://snyk.io/vuln/SNYK-ALPINE318-LIBX11-6042398
- https://snyk.io/vuln/SNYK-ALPINE318-LIBXML2-6245694
- https://snyk.io/vuln/SNYK-ALPINE318-OPENSSL-6032386
- https://snyk.io/vuln/SNYK-ALPINE318-OPENSSL-6032386

Co-authored-by: snyk-bot <snyk-bot@snyk.io>
Co-authored-by: Prashant Shahi <prashant@signoz.io>
2024-04-26 11:41:53 +05:30
Raj Kamal Singh
e6e0a59f5f Feat: integrations: clickhouse (#4879)
* chore: get built-in clickhouse integration started

* chore: update config pre-requisites for clickhouse integration

* chore: add details of metrics data collected for clickhouse integration

* chore: clickhouse integration: move list of data-collected to its own file

* chore: clickhouse integration: get overview dashboard started

* chore: start with logs collection instructions for clickhouse

* chore: regex parsing for clickhouse text logs

* chore: timestamp parsing for clickhouse logs

* chore: severity parsing for clickhouse logs

* chore: clickhouse logs parsing: move parsed message to body if available

* chore: update pre-reqs for collecting from system.query_log table

* feat: add instructions for collecting from system.query_log table

* feat: add logs attribs collected

* chore: some cleanup of clickhouse overview dashboard

* feat: finish up with clickhouse overview dashboard for clickhouse integration
2024-04-26 09:45:57 +05:30
Prashant Shahi
b2c170c752 Merge pull request #4919 from SigNoz/release/v0.44
post-release: sync release changes in develop
2024-04-25 22:13:44 +05:30
Prashant Shahi
453be9074d Merge branch 'main' into release/v0.44 2024-04-25 18:07:31 +05:30
Prashant Shahi
3272444e13 chore(signoz): 📌 pin versions: SigNoz 0.44.0
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-04-25 18:01:10 +05:30
Vishal Sharma
71b3e6d522 fix: rate in table panel (#4916)
* fix: rate in table panel

* test: added test cases for rate operation in table panel
2024-04-25 14:15:33 +05:30
Prashant Shahi
6cf7cc9f4f chore: bump SigNoz/prometheus from v1.10.1 to v1.11.0 (#4912)
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-04-25 13:12:30 +05:30
Prashant Shahi
5ec2f17d09 chore: pin SigNoz OtelCollector 0.88.21 and update ClickHouse dsn (#4909)
* chore: 📌 pin versions: SigNoz OtelCollector 0.88.21
* chore(clickhouse): update dsn as per the new parsing logic

---------

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-04-24 21:15:07 +05:30
Prashant Shahi
a45fb8ec0c fix(clickhouse): update endpoint of the healthcheck in deployment files (#4908)
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-04-24 19:06:05 +05:30
SagarRajput-7
bd148bbd5a fix: restrict visibility of facing-issue button to only cloud users with intercom setup (#4907)
* fix: restrict visibility of facing-issue button to only cloud users with intercom setup

* fix: restrict visibility of facing-issue button to only cloud users with intercom setup

* fix: added a comment

* fix: added chat support feature flag condition

* fix: added a comment

* fix: changed folder structure
2024-04-24 18:56:19 +05:30
SagarRajput-7
1306e99ca8 fix: alert threshold form is resetting to default query option on stage & run (#4876)
* fix: alert threshold form is resetting to default query option on stage & run

* fix: alert threshold - added safety check when the queryOption is deleted
2024-04-24 15:58:59 +05:30
SagarRajput-7
1a8f063b4b feat: [SIG-585]: Added facingIssueBtn at Dashboard list, detail and alert list, detail etc. pages (#4899)
* feat: [SIG-585]: Added facingIssueBtn at Dashboard list, detail and alert list, detail etc. pages

* feat: [SIG-585]: Added facingIssueBtn for dashboard & alert listing

* feat: [SIG-585]: Added facingIssueBtn for dashboard & alert detail and dashboard panel edit

* feat: [SIG-585]: Code cleanup

* feat: [SIG-585]: Changed logEvent attribute and event content

* feat: [SIG-585]: Changed alignment of button and button text

* feat: [SIG-585]: Changed button color to amber

* feat: [SIG-585]: Code cleanup
2024-04-24 15:48:48 +05:30
SagarRajput-7
c7668b9a78 chore: added jest coverage setup (#4871)
* chore: added jest coverage setup

* chore: added sample test changes

* chore: revert sample code change

* chore: changed script to add yarn install handle and removed checkouts to develop

* chore: added fix for checkout issue

* chore: added fix for checkout issue

* chore: added fix for checkout issue

* chore: added fix for checkout issue

* chore: added sample test changes

* chore: revert sample test cases

* chore: adding coverage threshold

* chore: added coverage threshold and sample test code

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: testing fetch and checkout

* chore: code cleanup and threshold adjustment

* chore: testing fetch and checkout
2024-04-23 20:06:02 +05:30
Vikrant Gupta
5e3dba2587 fix: do not save dashboard panel on creating a new panel if discard is pressed (#4884)
* fix: do not save dashboard panel on creating a new panel if discard is pressed

* fix: remove console log
2024-04-23 19:39:41 +05:30
Vikrant Gupta
374f30e0cd fix: billing container scroll issue when trial banner present (#4893) 2024-04-22 19:31:54 +05:30
Vikrant Gupta
38d2833931 fix: handle the old variables by name instead of id (#4890) 2024-04-20 17:54:29 +05:30
SagarRajput-7
731eacbbca feat: [SIG-584]: moved facing issue btn tracking from trackEvent to logEvent (#4888)
* feat: [SIG-584]: moved facing issue btn tracking from trackEvent to logEvent

* feat: [SIG-584]: removed logEvent from useAnalytic hook
2024-04-20 16:40:47 +05:30
dependabot[bot]
a63bb139bf chore(deps): bump golang.org/x/net from 0.21.0 to 0.23.0 (#4889)
Bumps [golang.org/x/net](https://github.com/golang/net) from 0.21.0 to 0.23.0.
- [Commits](https://github.com/golang/net/compare/v0.21.0...v0.23.0)

---
updated-dependencies:
- dependency-name: golang.org/x/net
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-04-20 07:52:44 +05:30
Vikrant Gupta
a140bef0e6 fix: handle the case where the functions are received as undefined in the query response (#4880) 2024-04-18 11:17:28 +05:30
Vikrant Gupta
48e5436167 fix: handle the edge cases for alerts create form (#4875) 2024-04-17 16:40:21 +05:30
Yunus M
0fc664a387 feat: show timestamp in list view of trace explorer (#4860)
Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2024-04-16 12:41:13 +05:30
Prashant Shahi
5817d50652 Merge pull request #4853 from SigNoz/release/v0.43.x
Release/v0.43.x
2024-04-15 20:07:57 +05:45
Prashant Shahi
bb318cf52a Merge pull request #4852 from SigNoz/release/v0.43.x
chore(pre-release): 📌 pin versions: SigNoz 0.43.0, OtelCollector 0.88.20
2024-04-15 19:50:20 +05:45
Prashant Shahi
ec0185da61 Merge branch 'develop' into release/v0.43.x 2024-04-15 18:24:21 +05:45
Srikanth Chekuri
fc2bdb610f chore: make send resolved notifs configurable (#4833) 2024-04-15 13:46:12 +05:30
Srikanth Chekuri
a9464de62d chore: use last 1day data for apdex latency metric meta (#4846) 2024-04-15 13:37:08 +05:30
Yunus M
57bfdedfe1 feat: send event if users click in facing issues button in get started (#4859)
Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2024-04-15 13:26:20 +05:30
SagarRajput-7
7bdc9c0cb0 fix: fixed sidenav alignment with and without get-started (#4829) 2024-04-15 11:40:40 +05:30
SagarRajput-7
0d5934d56b chore: added test cases for Logs (#4828)
* chore: add test cases for Logs

* chore: add test cases for Logs - explorer

* chore: add test cases for Logs - toolbarAction

* chore: add test cases for Logs - list and table view

* chore: add test cases for Logs - list and table view

* chore: code fix
2024-04-15 11:30:25 +05:30
Vikrant Gupta
3a5a61aff9 fix: wrong payload being sent in the dashboard payload (#4854)
* fix: wrong payload being sent in the dashboard payload

* fix: sync the update set dashboard function

* fix: synchronise the var updates

* fix: jest test cases

* fix: added review comments

* fix: do not make query range API call until the queue is empty

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-04-15 11:11:14 +05:30
Rajat Dabade
a54b7baa7d feat: add support for pie chart panel type (#4751) 2024-04-13 09:55:02 +05:30
Prashant Shahi
cd63dd972d chore(pre-release): 📌 pin versions: SigNoz 0.43.0, SigNoz OtelCollector 0.88.20
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-04-10 23:55:39 +05:45
Nityananda Gohain
389058b9b4 feat: allow query restrictions for log queries (#4778)
* feat: allow query restrictions for log queries

* fix: error check

* fix: set default only if not present

* chore: add error log for query restriction error

* fix: add limtations for traces

* fix: fix wrapper

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-04-10 17:25:57 +05:30
Rajat Dabade
27e412d1ee refactor: removed the error body (#4850)
* refactor: removed the error body

* refactor: check for error response body condition

* refactor: empty object check
2024-04-10 15:19:01 +05:30
Vikrant Gupta
03dccb0101 fix: trace page breaking (#4844)
* fix: trace page breaking

* fix: crash on reload

* fix: hide update button for viewer role in collapsed mode
2024-04-09 19:20:08 +05:30
Rajat Dabade
25b74b48a5 [Fix]: selected time in right container to be used in query-range (#4842) 2024-04-09 13:48:54 +05:30
Vikrant Gupta
6815a96d29 feat: support of changing panel type in dashboards (#4759)
* feat: support of changing panel type in dashboards

* feat: add handle query change function

* feat: last bit of minor change

* feat: apply current query updates to superset query

* feat: pr cleanup

* feat: handle list type change

* fix: build issues

* fix: changes required due to refactor

* fix: handle offset and page size for list queries

* feat: handle functions propagation

* feat: handle the spaceAggregation value to retain

* fix: handle list panel type changes

* feat: handle removing the graph list from the side selection in case of metrics

* feat: handle list type qb changes

* feat: handle page breaking

* feat: pick dataSource from newQUeryItem

* feat: handle page reload
2024-04-09 13:36:19 +05:30
SagarRajput-7
e9bb05cc5d fix: added billing page condition to not show when its not either cloud or enterprise (#4827) 2024-04-09 12:05:39 +05:30
dependabot[bot]
31c0b94ae6 chore(deps): bump webpack-dev-middleware in /frontend (#4838)
Bumps [webpack-dev-middleware](https://github.com/webpack/webpack-dev-middleware) from 5.3.3 to 5.3.4.
- [Release notes](https://github.com/webpack/webpack-dev-middleware/releases)
- [Changelog](https://github.com/webpack/webpack-dev-middleware/blob/v5.3.4/CHANGELOG.md)
- [Commits](https://github.com/webpack/webpack-dev-middleware/compare/v5.3.3...v5.3.4)

---
updated-dependencies:
- dependency-name: webpack-dev-middleware
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-04-09 11:58:39 +05:30
Vikrant Gupta
59c242961f feat: revamp integration flow (#4832)
* feat: revamp integration flow

* feat: final design changes

* feat: make test connection button grey
2024-04-09 11:29:54 +05:30
Yunus M
872ed9e963 fix: time preference from the panel should be used to fetch data (#4836) 2024-04-09 11:05:49 +05:30
Yunus M
d6cd155988 feat: update styles for toolbar (#4824) 2024-04-08 10:52:58 +05:30
Prashant Shahi
7f4a61ffb1 Merge pull request #4822 from SigNoz/fix/heartbeat-ratelimit
fix: avoid rate limiting heartbeat events
2024-04-06 01:24:36 +05:45
makeavish
7737d513a7 fix: avoid rate limiting heartbeat events 2024-04-06 00:54:14 +05:30
Vishal Sharma
2bd666efae fix: query range API validation (#4821) 2024-04-05 23:57:11 +05:30
Srikanth Chekuri
d98265f345 chore: add log comment for prom queries (#4819) 2024-04-05 21:11:53 +05:30
Vikrant Gupta
b480ff1e48 revert: prevent stage and run query to apply legends (#4816) 2024-04-05 02:58:09 +05:30
Vishal Sharma
af353b9340 fix: sidebar jitter (#4815) 2024-04-04 20:53:36 +05:30
Raj Kamal Singh
96e7505922 Chore: logs pipelines cover all available processors with tests (#4454)
* chore: add explicit happy case test for regex parsing processor

* chore: add explicit happy case test for grok parsing processor

* chore: add explicit happy case test for JSON parsing processor

* chore: log pipelines: move trace parser test to processors_test.go

* chore: add explicit happy case test for ADD processor

* chore: add explicit happy case test for remove processor

* chore: add explicit happy case test for copy processor

* chore: add explicit happy case test for move processor

* fix: fix broken grok parser test: change test int value to int64

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2024-04-04 18:48:01 +05:30
Yunus M
8f6f2f0018 feat: views toolbar visibility, light mode (#4813) 2024-04-04 18:47:41 +05:30
Yunus M
1f25d386df feat: retry on chunk load error (#4803)
* feat: retry on chunk load error

* feat: consistent return

* feat: use lazyRetry for help tooltip

* feat: enable sourcemaps
2024-04-04 18:00:50 +05:30
SagarRajput-7
2d7a3733da fix: [SIG-578]: changed color of function block connector (#4800) 2024-04-04 16:45:59 +05:30
SagarRajput-7
ff2a3bc4b0 fix: [SIG-577]: QB functions - states not clearing (#4810) 2024-04-04 16:45:15 +05:30
Vikrant Gupta
33383a4503 fix: alerts tag popup vibrating (#4812) 2024-04-04 13:58:26 +05:30
CheetoDa
f05b94c01e Merge pull request #4799 from SigNoz/java-instructions-update
chore: fixed java instructions
2024-04-04 13:14:18 +05:30
CheetoDa
fd632f9952 Merge pull request #4798 from SigNoz/otel-version-change
feat: updated otel version number
2024-04-04 13:14:04 +05:30
CheetoDa
fd84d7b492 Merge pull request #4797 from SigNoz/docker-instructions
Docker instructions
2024-04-04 13:13:48 +05:30
Vikrant Gupta
e4808e585a fix: app content overlapping sidenav (#4811) 2024-04-04 12:46:33 +05:30
CheetoDa
5cfeb56f9c Created a variable for OTel version 2024-04-04 12:13:19 +05:30
CheetoDa
b947f823d7 chore: minor fix 2024-04-04 12:02:52 +05:30
CheetoDa
1520c1c57d Merge branch 'develop' into otel-version-change 2024-04-04 12:00:09 +05:30
CheetoDa
f8477981d8 chore: fixed review comments 2024-04-04 11:53:46 +05:30
Vishal Sharma
9b1d596816 chore: update events (#4808)
* chore: update events

* Update ee/query-service/app/server.go

Co-authored-by: Prashant Shahi <prashant@signoz.io>

---------

Co-authored-by: Prashant Shahi <prashant@signoz.io>
2024-04-04 11:31:27 +05:30
Yunus M
6a4aa9a956 QB - Logs - Enable TimeShift function (#4792)
* feat: qb - logs - enable time shift function

* feat: qb - logs - enable time shift function

* feat: show functions for logs in v3 version too
2024-04-04 11:05:58 +05:30
Nityananda Gohain
a7b0ef55ad fix: querier v2 synced and tablePanel result processor updated (#4807) 2024-04-03 17:52:45 +05:30
Srikanth Chekuri
87534b6fb6 fix: incorrect error rate query (#4805) 2024-04-03 16:42:00 +05:30
Vikrant Gupta
c76cef47ba fix: remove integrations page view and add event for the same (#4802) 2024-04-03 12:31:38 +05:30
CheetoDa
3276dfa03e chore: fixed java instructions 2024-04-03 04:42:19 +05:30
CheetoDa
1a14cc305c feat: updated otel version number 2024-04-03 04:28:15 +05:30
CheetoDa
0c7e63d735 chore: saved unsaved files 2024-04-03 04:08:46 +05:30
CheetoDa
eb74cb4c5e feat: docker completed 2024-04-03 04:03:00 +05:30
hulk
a47d3289d0 fix: typo in the log message (#4769) 2024-04-02 20:39:05 +05:30
Yunus M
8ad827130e feat: docked sidebar (#4794)
* feat: docked sidebar

* feat: update styles
2024-04-02 19:43:03 +05:30
CheetoDa
93bdfd3d83 chore: merged latest develop 2024-04-02 18:19:20 +05:30
CheetoDa
22d8889a07 feat: added docker instructions 2024-04-02 18:05:39 +05:30
Vikrant Gupta
7c93944d40 fix: chunk load webpack error (#4795) 2024-04-02 17:55:36 +05:30
Rajat Dabade
ec9dbb6853 Dashboard Clean up and list view improvement. (#4675)
* refactor: initial setup

* refactor: created panelWrapper to separate panel data

* fix: type error

* fix: the dimension issue for graphs

* refactor: done with table value uplot panels

* refactor: done with logs panel component

* refactor: updated props for log panel component

* fix: query range duplicate issue for logs

* refactor: trace list view done

* fix: full view support

* refactor: done with edit mode for panels

* refactor: type and props

* refactor: reduce an extra api call on edit for list view

* refactor: done with full graph visibility handler

* refactor: removed commented code

* refactor: removed commented code

* fix: build failure

* refactor: updated service layer graphs

* refactor: updated top level operation query key

* refactor: added drag select

* refactor: done with drag select in chart

* refactor: code cleanup

* refactor: legend should not need stage and run query
2024-04-02 16:40:41 +05:30
Vikrant Gupta
7a7d814288 fix: sidenav items overlapping in small screens (#4789) 2024-04-02 12:38:10 +05:30
Vikrant Gupta
3babce3ecf fix: added dashboard and QB shortcuts to the sidenav (#4791) 2024-04-02 11:31:42 +05:30
Yunus M
1610b95b84 feat: onboarding flow - enable users to submit request for a new data… (#4786)
* feat: onboarding flow - enable users to submit request for a new data source, environment

* chore: request data source to be available for all modules

* chore: remove hardcoded value
2024-04-01 19:09:16 +05:30
Vishal Sharma
8c02f8ec31 chore: rate limit param (#4785) 2024-04-01 15:06:38 +05:30
Nityananda Gohain
5e0e9da6c4 fix: hotfix bug in enhance query (#4783) 2024-04-01 14:51:40 +05:30
Vikrant Gupta
51abe71421 fix: do not move to next step if env not selected in onboarding (#4784) 2024-04-01 13:56:59 +05:30
Vikrant Gupta
00d74bfebb feat: add integrations to the side-nav for cloud users (#4756)
* feat: add integrations to the side-nav for cloud users

* feat: change the route from integrations/installed to /integrations

* feat: light mode table color

* feat: increase the width of the integrations panel by 25 percent

* feat: added telemetry constants and page view

* feat: added telemetry events for integrations

* feat: address review comments
2024-04-01 12:40:15 +05:30
Raj Kamal Singh
39e0ef68ca chore: integration instructions: add typical log file locations on macOS (#4779) 2024-04-01 12:06:08 +05:30
Ankit Nayan
cff20f88cd merging main 2024-03-30 18:30:46 +01:00
Nityananda Gohain
a34c59762b feat: allow characters in attribute names (#4775) 2024-03-30 17:57:01 +05:30
Nityananda Gohain
397da5857f fix: enrich all queries with non materialized attributes (#4772) 2024-03-30 08:55:46 +05:30
Vikrant Gupta
43ceb052d8 feat: do not retry query range API's with i/o timeout error (#4768)
* feat: do not retry query range API's with i/o timeout error

* feat: do not retry query range API's with i/o timeout error
2024-03-29 16:00:22 +05:30
Yunus M
6eced60bf5 feat: update time range selection flows to handle relative and absolu… (#4742)
* feat: update time range selection flows to handle relative and absolute times

* fix: lint error

* fix: lint error

* feat: update logic to handle custom relative times on load and standardize relative time formats

* fix: type issue

* fix: handle light mode and on custom time range select

* chore: update alert frequency corresponding times

* chore: update copy URL

* feat: update styles
2024-03-29 14:53:48 +05:30
Rajat Dabade
7c2f5352d2 [Refactor]: Table Grid Formula issue. (#4758)
* refactor: change the logic to match data from another query

* refactor: updated logic

* refactor: clean up

* refactor: updated case to handle formula

* chore: nit

* refactor: isEqual instead of nested loops

* chore: added comments

* refactor: updated logic

* refactor: clean up

* refactor: updated case to handle formula

* chore: nit

* refactor: isEqual instead of nested loops
2024-03-29 14:41:16 +05:30
Vikrant Gupta
e6e377beff fix: billing graph page crash (#4764) 2024-03-29 11:08:33 +05:30
Prashant Shahi
6da9de6591 Merge pull request #4588 from SigNoz/chore/send-language-service-as-list
chore: send language and service name events as list
2024-03-28 22:21:55 +05:45
Vishal Sharma
7549aee656 Merge branch 'develop' into chore/send-language-service-as-list 2024-03-28 21:54:16 +05:30
Vishal Sharma
da4a6266c5 feat: add events API (#4761) 2024-03-28 21:43:41 +05:30
Vishal Sharma
6ac938f2a6 Merge branch 'develop' into chore/send-language-service-as-list 2024-03-28 21:40:57 +05:30
Raj Kamal Singh
990fc83269 Feat/integrations v0 mongo and nginx (#4763)
* feat: flesh out pre-requisites for collecting mongodb logs and metrics

* chore: remove stale pipelines in bundled integrations

* chore: clean up 'collect metrics' step for mongodb

* feat: add instructions for collecting and parsing mongodb logs

* feat: add metrics and logs attributes to mongodb data collected list

* feat: nginx logs collection instructions and some other cleanup

* feat: add list of parsed log attributes to data collected list for nginx

* chore: do not run pipeline population integration test if no built-in integration has a pipeline
2024-03-28 19:57:07 +05:30
Yunus M
5d5ff47d5e fix: update devtool property to eval-source-map (#4760) 2024-03-28 16:58:35 +05:30
Yunus M
9f30bba9a8 feat: add support to pin attributes in logs details view (#4692)
* feat: add support to pin attributes in logs details view

* feat: add safety checks

* feat: update styles

* feat: update styles

* feat: move json parsing in try catch block
2024-03-28 16:55:59 +05:30
Yunus M
6014bb76b6 feat: support drag select in chart - alerts page (#4618)
* feat: support drag select in chart - alerts page

* feat: handle back navigation after drag select
2024-03-28 16:51:29 +05:30
Vikrant Gupta
e25b54f86a fix: 404 resource not found issues (#4757) 2024-03-28 16:46:16 +05:30
Vikrant Gupta
5959963b9d fix: [SIG-575]: no data in new trace explorer page specific scenario (#4748)
Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2024-03-28 16:34:09 +05:30
Prashant Shahi
4fbb71484d Merge pull request #4755 from SigNoz/release/v0.42.0
Release/v0.42.0
2024-03-27 23:40:08 +05:45
Prashant Shahi
f8e8132b58 chore(signoz): 📌 pin versions: SigNoz 0.42.0, SigNoz OtelCollector 0.88.17
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-03-27 23:23:15 +05:45
Prashant Shahi
a1dd170641 Merge branch 'main' into release/v0.42.0 2024-03-27 23:17:30 +05:45
Prashant Shahi
fe2ddf9d60 Merge branch 'develop' into main-public 2024-03-27 23:15:57 +05:45
Prashant Shahi
dfc99a7756 Revert "Revert "Explorer Toolbar maximised and minimised (#4656)" (#4705)"
This reverts commit c04d0e9419.
2024-03-27 23:15:40 +05:45
Prashant Shahi
c2556facc2 Merge branch 'main' into release/v0.42.0 2024-03-27 22:49:48 +05:45
Srikanth Chekuri
31b1d58a70 chore: fix alerting options (#4752) 2024-03-27 20:25:18 +05:30
Raj Kamal Singh
0ac9f6f663 Feat: QS: redis integration v0: instructions for collecting and parsing logs (#4753)
* chore: minor cleanups to postgres integration instructions

* chore: update instructions for connecting redis integration

* feat: add instructions for collecting redis logs

* chore: flesh out prerequisites for connecting redis integration

* chore: add list of metrics collected for redis
2024-03-27 20:03:27 +05:30
Yunus M
a30b75a2a8 feat: show environments in a separate dropdown (#4717)
* feat: show environments in a separate dropdown
2024-03-27 18:46:05 +05:30
SagarRajput-7
dbd4363ff8 feat: [SIG-573]: Fixed billing page issues (#4744)
* feat: [SIG-573]: Fixed billing page issues

* feat: [SIG-573]: Fixed jest test case
2024-03-27 11:55:28 +05:30
SagarRajput-7
ad1b01f225 feat: [SIG-566]: Added message to alert user about their past due - subscription status (#4724)
* feat: [SIG-566]: Added message to alert user about their past due - subscription status

* feat: [SIG-566]: Added message string to billings.json

* feat: [SIG-566]: Added strings to billings.json

* feat: [SIG-566]: updated test cases

* feat: [SIG-566]: updated message text

* feat: [SIG-566]: code fix

* feat: [SIG-566]: code fix
2024-03-27 10:23:57 +05:30
Vikrant Gupta
e1679790f7 fix: log chips not forming, making filtering not work (#4749)
* fix: log chips not forming, making filtering not work

* fix: remove console log
2024-03-27 01:01:24 +05:30
Srikanth Chekuri
ae594061e9 chore: fix query-service logging (#4696) 2024-03-27 00:07:29 +05:30
Vikrant Gupta
9e02147d4c fix: [SIG-574]: support __ in the groupBy clause (#4747) 2024-03-26 23:54:31 +05:30
Tan Wei Been
2b3d1c8ee5 [Fix]: Using exported dashboards as input to dashboard provisioning #2 (#4726)
* fix(be,fe): upsert dashboard on provision, export with uuid from frontend

* chore(fe): formatting in dashboard description

* fix: miss out while merging

---------

Co-authored-by: Håvard <haavard.markhus@inmeta.no>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
Co-authored-by: Haavasma <61970295+Haavasma@users.noreply.github.com>
2024-03-26 17:09:59 +05:30
Rajat Dabade
4c91dbcff0 Explorer Toolbar maximised and minimised (#4721) 2024-03-26 17:09:13 +05:30
Srikanth Chekuri
83f68f13db feat: add ability to customize alert frequency (#4697) 2024-03-26 12:40:53 +05:30
Vibhu Pandey
994814864c fix: send 403 on wrong password entry during change password operation (#4733) 2024-03-26 06:20:35 +05:30
Raj Kamal Singh
f24135f5b0 Feat: QS: postgres integration: instructions for collecting and parsing logs (#4738)
* chore: offer metrics config instructions for signoz cloud only

* chore: some more cleanups

* chore: get log collection instructions started

* feat: flesh out log collection otel config for postgres

* chore: some cleanup

* chore: some more cleanup

* chore: some more cleanup
2024-03-23 11:39:28 +05:30
Vikrant Gupta
5745727031 fix: [SIG-565]: design feedback for integrations (#4723)
* fix: [SIG-565]: design feedback for integrations

* feat: added dotted line in the test connection modal

* feat: handle the URL change for integration details page to support back navigation

* feat: added ghost loading states

* feat: added margin for details header

* feat: added margin for details header

* feat: increase the list sizes to 20

* fix: handle icons

* fix: remove unused classes
2024-03-22 14:59:43 +05:30
Vikrant Gupta
ae0d685b29 feat: [SIG-572]: allow number of lines changing in the logs list view (#4737)
* feat: [SIG-572]: allow number of lines changing in the logs list view

* feat: [SIG-572]: allow number of lines changing in the logs list view

* feat: added options to change row values in table view

* fix: build issues
2024-03-22 13:40:55 +05:30
Vikrant Gupta
f34a49e19c fix: [SIG-570]: handle case where - being present in the key (#4735) 2024-03-22 13:40:43 +05:30
Vikrant Gupta
9e557a0ebe feat: [SIG-571]: added support for has and nhas operator for json filter (#4736)
* feat: [SIG-571]: added support for has and nhas operator for json filter

* fix: address review comments

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2024-03-22 13:39:47 +05:30
Vikrant Gupta
0df3c26f04 feat: implement download logs feature for logs explorer new design (#4728)
* feat: implement download logs feature for logs explorer new design

* feat: address review comments

* feat: added timestamp and body to the start

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2024-03-22 13:28:38 +05:30
SagarRajput-7
0df86454ce fix: [SIG-567]: prevented stage-&-run API on legend change (#4720)
* fix: prevented stage-&-run API on legend change

* fix: code refactor

---------

Co-authored-by: Sagar Rajput <sagarrajput@192.168.1.2>
2024-03-21 16:31:59 +05:30
Vishal Sharma
63f0ae1c7c chore: update events (#4725)
* chore: update events

* chore: disable TELEMETRY_EVENT_QUERY_RANGE_API for saas

* chore: don't use mustCompile as it can cause panics
2024-03-20 19:59:28 +05:30
CheetoDa
d9f232683d Merge pull request #4688 from SigNoz/php-onboarding-docs
feat: php flow
2024-03-20 09:06:34 +05:30
CheetoDa
ad9d77d33f feat: php flow 2024-03-19 17:04:11 +05:30
CheetoDa
5a8479f4e9 feat: php flow 2024-03-19 17:04:11 +05:30
Raj Kamal Singh
f4e94c0ad1 chore: update postgres config instructions to work for both signoz cloud and self-hosted (#4718) 2024-03-19 12:28:22 +05:30
Raj Kamal Singh
6f3183823f Feat: postgres integration v0 (#4704)
* chore: update annotations for pre blocks in configuration instructions

* chore: update list of collected metrics for postgres integration

* chore: change non-string units to string in metrics collected list

* chore: some cleanups for postgres config instructions

* chore: some cleanup to metrics connection status resource labels

* chore: remove stub pipeline in postgres integration - no interesting log parsing to be done
2024-03-18 18:20:12 +05:30
Raj Kamal Singh
01bb39da6a chore: some cleanups in plumbing for integration connection status (#4716) 2024-03-18 15:22:31 +05:30
Raj Kamal Singh
43f9830e8d Feat: integrations v0 metrics connection status (#4715)
* chore: add test expectations for integration metrics connection status

* chore: reorg logs connection status calculation for parallelization

* chore: add interface for reader.GetMetricLastReceivedTsMillis

* chore: add plumbing for calculating integration metrics connection status

* chore: impl and test mocks for reader.GetMetricReceivedLatest

* chore: wrap things up and get test passing

* chore: some cleanup

* chore: some more cleanup

* chore: use prom metric names for integration connection test
2024-03-18 10:01:53 +05:30
Yunus M
4c2174958f chore: remove share invite link message (#4691) 2024-03-15 13:38:43 +05:30
Yunus M
07747e73d6 fix: context filter input overflow issue, min height for logs list view (#4710) 2024-03-15 13:25:06 +05:30
Vikrant Gupta
60946b5e9d feat: remove disabled in case of dashboard locked (#4709) 2024-03-15 12:28:03 +05:30
SagarRajput-7
0365fa5421 feat: handled inactive tab handling by removing the display flex override (#4708)
Co-authored-by: Sagar Rajput <sagarrajput@192.168.1.2>
2024-03-15 12:19:07 +05:30
Prashant Shahi
2a7ad596a1 Merge pull request #4707 from SigNoz/release/v0.41.1
chore(pre-release): 📌 pin versions: SigNoz 0.41.1
2024-03-15 02:19:23 +05:30
Prashant Shahi
6c455ab5ce Merge branch 'main' into release/v0.41.1 2024-03-15 02:11:34 +05:30
Prashant Shahi
7c062163a1 chore(release): 📌 pin versions: SigNoz 0.41.1
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-03-15 02:24:09 +05:45
Prashant Shahi
d6a256247c Merge pull request #4706 from SigNoz/release/v0.41.1
Release/v0.41.1
2024-03-15 02:00:37 +05:30
Prashant Shahi
0e2c699518 Merge branch 'main' into release/v0.41.1 2024-03-15 01:46:02 +05:30
Vikrant Gupta
c04d0e9419 Revert "Explorer Toolbar maximised and minimised (#4656)" (#4705)
This reverts commit aadb962b6c.

(cherry picked from commit cf22039562)
2024-03-15 01:57:03 +05:45
Vikrant Gupta
cf22039562 Revert "Explorer Toolbar maximised and minimised (#4656)" (#4705)
This reverts commit aadb962b6c.
2024-03-15 01:26:31 +05:30
Vikrant Gupta
2a62982885 feat: support case insensitive operators (#4379) 2024-03-14 13:33:35 +05:30
Vikrant Gupta
1e1624ed4c fix: [GH-3932]: do not retry API's in case of 4XX status code (#4376)
* fix: [GH-3932]: do not retry API's in case of 400 status code

* feat: do not retry 4XX response status
2024-03-14 12:07:47 +05:30
Prashant Shahi
d0feff00a7 Merge pull request #4690 from SigNoz/release/v0.41.0
Release/v0.41.0
2024-03-14 00:50:36 +05:30
Prashant Shahi
6c2a3d5d43 chore(signoz): 📌 pin versions: SigNoz 0.41.0, SigNoz OtelCollector 0.88.15
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-03-14 00:23:51 +05:45
Prashant Shahi
914b035b3f Merge branch 'main' into release/v0.41.0 2024-03-14 00:23:02 +05:45
SagarRajput-7
6b3af78873 Merge pull request #4689 from SagarRajput-7/billing-graph
feat: added isFetching condition and changed series color
2024-03-13 17:58:48 +05:30
Sagar Rajput
6adeef7e70 feat: added isFetching condition and changed series color 2024-03-13 17:50:17 +05:30
Rajat Dabade
44dc55c5ac FE: Design update for log context tab (#4601)
* refactor: initial setup for context view

* refactor: updated design for log context view

* refactor: updated comments and remove commented code

* refactor: updated comments

* refactor: handle height issue

* refactor: initial setup for context view

* refactor: updated design for log context view

* refactor: updated comments and remove commented code

* refactor: updated comments

* refactor: handle height issue

* refactor: added api version

* refactor: height set to parent height and remove unnecessary code

* refactor: removed commented code

---------

Co-authored-by: Yunus M <myounis.ar@live.com>
2024-03-13 17:37:48 +05:30
Srikanth Chekuri
3c419677e1 feat: add support for alerting on absent metric (#3245) 2024-03-13 17:37:32 +05:30
Rajat Dabade
aadb962b6c Explorer Toolbar maximised and minimised (#4656)
* refactor: done log explorer minimising save view toolbar

* refactor: local storage visibility support and done with traces toolbar

* refactor: added toolbar and removed commented code

* chore: css updates

* refactor: removed the background highlighted code for droppable area

* refactor: merge conflict resolve and updated variable name
2024-03-13 17:28:09 +05:30
SagarRajput-7
c6080ca02e feat: [SIG-557]: added Billing usage graph - daily and weekly (#4686)
* feat: added Billing usage graph - daily and weekly

* feat: removed mocked response

* feat: removed weekly chart and fixed data transformations

* feat: added loading states

* feat: moved function to util file

* feat: fixed review comments

* feat: fixed JEST test case

* feat: test fix - commit

* feat: test fix - commit

* feat: test fix - commit

* feat: edited title conditionally

* feat: edited tooltip content

* feat: removed time from tooltip content and skeleton for cycleInfo Alert

---------

Co-authored-by: Sagar Rajput <sagarrajput@192.168.1.2>
2024-03-13 14:30:49 +05:30
Vishal Sharma
506448fe61 chore: dashboard info update (#4684) 2024-03-13 01:47:51 +05:30
Srikanth Chekuri
a42176599f chore: add day wise usage breakdown (#4648) 2024-03-12 22:45:31 +05:30
Srikanth Chekuri
adef0a4138 chore: add LogCommentEnricher middleware (#4681) 2024-03-12 18:39:28 +05:30
Srikanth Chekuri
c9816cce18 fix: use same time window as the logs/traces query (#4682) 2024-03-12 17:30:01 +05:30
Srikanth Chekuri
c6c2b9d809 chore: limit number of top level operations in services list (#4666) 2024-03-12 17:22:48 +05:30
Rajat Dabade
d9b379ae51 refactor: onblur convert to tag (#4662)
* refactor: onblur convert to tag

* refactor: on blur log body contains

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-03-12 14:34:10 +05:30
Rajat Dabade
dd2afe19f6 [Fix]: full view bar chart (#4667) 2024-03-12 14:22:27 +05:30
Rajat Dabade
0326a4d42a [Refactor]: Height for log explorer list panel (#4657)
* refactor: added fullview and fix height issue for logs

* refactor: hide full view button on full view mode

* refactor: removed fullview for logs

* refactor: remove max width for body
2024-03-12 12:20:45 +05:30
Srikanth Chekuri
b4d12966f3 fix: interpret missing point as zero value in formula evaluation (#4668) 2024-03-11 23:47:19 +05:30
Raj Kamal Singh
5a2d729ba9 Feat: qs autopopulate installed integration dashboards (#4680)
* chore: add test for dashboards for installed integrations

* feat: include dashboards for installed integrations in API response

* chore: add test expectation for getting installed integration dashboard by id

* feat: add support for retrieving installed integration dashboards by id

* chore: add dashboard id validation for integrations
2024-03-11 20:06:59 +05:30
Yunus M
666916fae2 fix: handle defaults for apm and aws (#4678) 2024-03-11 16:59:04 +05:30
Vishal Sharma
4b4008642d chore: extract dashboard/alert query info and send event (#4665)
* chore: extract dashboard/alert query info and send event

* chore: add totalDashboardsWithPanelAndName attribute in event
2024-03-11 16:45:06 +05:30
Srikanth Chekuri
7c2007faa3 fix: remove early return for services call in usage explorer (#4669) 2024-03-11 15:20:59 +05:30
Vikrant Gupta
6b87118fc6 feat: [SIG-546]: user with viewer roles can only view saved views (#4663)
* feat: [SIG-543]: Users with VIEWER access can create/edit/delete views for logs and traces

* feat: [SIG-543]: remove extra code

* feat: [SIG-543]: role changes in the save views toolbar

* feat: [SIG-543]: role changes in the save views toolbar

* feat: remove the save feature / dashboard / alert feature for viewer roles

* feat: remove the save feature / dashboard / alert feature for viewer roles

* fix: address review comments
2024-03-11 14:49:10 +05:30
Vikrant Gupta
49aba4fb1c feat: [SIG-543]: add time selection in the custom date selection (#4658)
* feat: [SIG-543]: inital commit

* feat: [SIG-543]: refactor date time modal to separate component

* feat: [SIG-543]: refactor date time modal to separate component

* feat: add back the time support according to the older designs in the date time picker

* fix: custom time picker minor UI fixes
2024-03-11 14:39:17 +05:30
Raj Kamal Singh
9ace374855 Feat: QS: Log Pipelines for installed integrations (#4674)
* chore: refactor: inject sqlx.DB into opamp.initDB instead of DB file name

* chore: reorganize test utils a little

* chore: add test validating pipelines for installed integrations show up in pipelines list

* chore: get basic integration pipelines testcase passing

* chore: reconcile experimental changes with latest state of develop

* chore: add integration test for reordering of pipelines

* chore: marker for integration pipelines using Id

* chore: hookup propagation of installed integration pipelines by opamp

* chore: add util for mapping slices

* chore: add support for reordering integration pipelines

* chore: exclude user saved integration pipelines if no longer installed

* chore: flesh out rest of integration pipelines scenarios

* chore: handle scenario when an integration is installed before any pipelines exist

* chore: notify agentConf of update after uninstalling an integration

* chore: some minor cleanup

* chore: some more cleanup

* chore: update ee server for changed controllers

* chore: some more cleanup

* chore: change builtin integration id prefix to avoid using colons that break yaml

* chore: update builtin integration id in test
2024-03-11 14:15:11 +05:30
Vikrant Gupta
a4d5774ae3 fix: update the md file for integrations config (#4677) 2024-03-11 11:48:36 +05:30
Nityananda Gohain
d0d10daa44 feat: support for timeshift in logs (#4607)
* feat: support for timeshift in logs

* fix: post process the timeshift function result

* fix: start and end times adjusted

* fix: only apply functions instead of entire post process

* fix: unnecessary error handling removed

* fix: apply functions for all sources

* feat: test added for timeshift

* fix: comments corrected

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-03-08 21:12:53 +05:30
Rajat Dabade
e519539468 Graph visibility state management. (#4632)
* refactor: change the state handling of graph visibility

* refactor: removed commented code
2024-03-08 14:11:38 +05:30
Vikrant Gupta
7051831539 fix: [SIG-542]: handle special characters in servicename (#4649)
* fix: [SIG-542]: handle special characters in servicename

* feat: handle . operator in the URL

* chore: left files to update
2024-03-08 00:08:12 +05:30
Raj Kamal Singh
c842e68288 chore: minor integrations UI cleanups (#4661) 2024-03-07 19:57:14 +05:30
Raj Kamal Singh
a295bf2fb6 Feat: QS: structure for built in integrations (#4655)
* feat: get builtin integrations started with nginx

* feat: get started with embedding and parsing of builtin integrations

* chore: add icons for nginx and redis integrations

* chore: stash current state of work

* chore: remove all yaml annotations since moved to JSON assets for bundled integrations

* chore: add file uri hydration in integration spec

* chore: refactor file uri hydration logic

* chore: add support for referencing JSON files with file uri

* chore: bring in initial integration assets

* chore: hookup builtin integrations and get all tests passing

* chore: update icons for postgres and mongo and some cleanup

* chore: some more cleanup

---------

Co-authored-by: Raj Singh <raj@Rajs-MacBook-Pro.local>
2024-03-07 19:26:20 +05:30
Vikrant Gupta
4cd40391c5 feat: route to the onboarding flow when clicking the sending logs in case of no logs (#4600)
* feat: refactor onboarding flow to add path params when selecting any module

* feat: added re-route to the onboarding flow in case of no logs and no traces

* chore: remove console logs

* chore: increase type safety

* chore: updated tab names

* chore: remove development conditions

* chore: handle cloud user

* feat: handle aws monitoring cases

* fix: apm framework not getting selected

* fix: apm framework not getting selected

* fix: apm framework not getting selected
2024-03-07 14:23:28 +05:30
Vikrant Gupta
7af4ba34af chore: [SIG-526]: Improve the light theme designs for integrations UI (#4659) 2024-03-07 14:12:06 +05:30
Rajat Dabade
54c69311ed Logs Strip color according to severity_text (#4643)
* refactor: initial setup

* refactor: done with setup

* refactor: done with severity text split color

* refactor: initial setup

* refactor: done with setup

* refactor: done with severity text split color

* chore: added unit test case

* refactor: pointed to the correct variable

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2024-03-07 12:25:00 +05:30
Vikrant Gupta
62af836554 fix: [SIG-549]: bring the ts config for pre-commit hook in sync with github CI (#4660)
* chore: dummy commit

* chore: dummy commit

* chore: dummy commit
2024-03-07 11:53:05 +05:30
Yunus M
f9b3ca01f9 feat: add clone query functionality (#4617)
* feat: add clone query functionality

* feat: update ui
2024-03-07 11:41:29 +05:30
Vikrant Gupta
0c4149225f feat: [SIG-526]: UI Integrations V0 (#4595)
* feat: integrations v0 base setup routes and components

* chore: typecheck fix

* feat: integrations landing page changes

* feat: initial header setup

* feat: integrations list page setup

* feat: integrations details content root setup

* feat: integration detail content setup

* feat: added overview tab

* feat: added data tab

* feat: handle configuration tab

* feat: add min height for the container

* feat: generate apis and hooks for usage

* feat: added remove integration modal

* feat: added remove integration modal

* feat: added remove integration modal

* feat: added test connection bars

* chore: add bottom margins

* feat: added test connection modal

* feat: add all types of test connection

* feat: add all types of test connection

* fix: address review comments

* fix: address review comments

* feat: added get all integrations API and search bar implementation

* feat: navigate to overview section in case of row click and configure in btn

* feat: integrate get integration details api

* feat: handle integration details page gracefully

* feat: integrate uninstall API and the connection states

* feat: add install integration API call

* feat: added api error handling

* feat: handle error states for list and details api

* feat: handle the logs and metrics columns

* feat: add TODOs for pending tasks

* feat: comment from side nav

* feat: added support for custom tags in react markdown

* chore: revert the temporary change for merge

* feat: integrate the status api calls and polling logic

* chore: add markdown components and correct the polling issue

* chore: handle light mode

* chore: remove integrations from sideNav

* fix: address review comments

* fix: address review comments
2024-03-06 22:25:02 +05:30
Raj Kamal Singh
7136ecc2fe chore: accept connection test lookback seconds as request param (#4650)
Also removes connectionStatus from integration details.
2024-03-06 11:06:04 +05:30
Vikrant Gupta
0c14145ef9 fix: [SIG-532]: Copy Log Link Functionality for new designs (#4644)
* fix: [SIG-532]: timeRange not updating correctly for copy log link

* fix: [SIG-532]: use virtuoso props to scroll to some initial position rather than API hit

* fix: added styles for highlighted colors

* fix: handle colors for copy log link

* fix: update colors for copy log lines
2024-03-05 21:31:15 +05:30
Srikanth Chekuri
6618b47123 chore: bump github.com/SigNoz/prometheus to v1.9.79-0.1 (#4651) 2024-03-05 21:04:35 +05:30
Raj Kamal Singh
ab5285dee6 Feat: qs api integration connection status (#4628)
* chore: add integration attribs for connection tests and status

* chore: add connection status to integration details response

* chore: update integration lifecycle test to check for connection status too

* feat: add GetIntegrationConnectionTests to integrations manager and controller

* chore: add tests for querying integration connection status

* feat: add http API support for integration connection status

* chore: some cleanups

* chore: use PostableRule for integration alerts

* chore: some more cleanup
2024-03-05 15:23:56 +05:30
Vikrant Gupta
fdd7e022e9 fix: offset added hinders the timestamps for the traces (#4642)
* fix: offset added hinders the timestamps for the traces

* fix: offset added hinders the timestamps for the traces
2024-03-04 18:47:38 +05:30
Rajat Dabade
90d7f0200a FE: updated routes and modal for create alert css (#4602)
* refactor: updated routes and modal for create alert css

* refactor: button color changed

* refactor: redirect old routes to new routes
2024-03-04 17:42:17 +05:30
Vikrant Gupta
2713e186d3 fix: dashboard variables reset on tab visibility change (#4619) 2024-03-04 13:54:55 +05:30
Raj Kamal Singh
ffdb4cfff0 chore: update version for clickhouse mock dependency (#4640) 2024-03-04 12:00:05 +05:30
Srikanth Chekuri
b3b7522250 chore: update APM metrics to use v4 query range (#4638) 2024-03-04 10:15:43 +05:30
Raj Kamal Singh
0870030d1c Feat: qs integrations http api (#4622)
* chore: add http api test for signoz integrations

* chore: add controller for integrations

* chore: add http API handlers for integrations API

* chore: hook up integrations API in new servers

* chore: add remaining fields in Integration DTO

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-03-02 10:11:51 +05:30
Raj Kamal Singh
3fece44aef chore: update dependency github.com/srikanthccv/ClickHouse-go-mock from v0.4.0 -> v0.6.0 (#4637) 2024-03-01 15:55:15 +05:30
Yunus M
e5de35a769 fix: default version to v4 if version param not present in url params (#4636) 2024-03-01 15:37:55 +05:30
Yunus M
44ff1517d1 fix: update date picker styles (#4635) 2024-03-01 15:20:21 +05:30
Srikanth Chekuri
d77389abe3 feat: add support for email alert channel (#4599) 2024-03-01 15:05:28 +05:30
Yunus M
1a62a13aea chore: update query builder to support spatial aggregations and functions (#4569) 2024-03-01 14:51:50 +05:30
Rajat Dabade
97fdba0fae Update panel type on selecting new panel on dashboards. (#4634) 2024-03-01 13:04:53 +05:30
Vishal Sharma
5c2a9e8362 Merge branch 'develop' into chore/send-language-service-as-list 2024-02-29 15:58:24 +05:30
Nityananda Gohain
1aaafa4638 feat: use attribute if present for json query (#4458)
* feat: use attribute if present for json query

* fix: refactor test cases
2024-02-29 15:32:37 +05:30
Prashant Shahi
71c4fcc382 Merge pull request #4616 from SigNoz/release/v0.40.0
Release/v0.40.0
2024-02-28 19:46:06 +05:30
Prashant Shahi
9af1c2320b chore(signoz): 📌 pin versions: SigNoz 0.40.0, SigNoz OtelCollector 0.88.14
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-28 19:13:23 +05:45
Prashant Shahi
cdabf9060e Merge branch 'main' into release/v0.40.0 2024-02-28 19:11:34 +05:45
CheetoDa
4eb1948e4c feat: added aws monitoring section (#4614)
* feat: added aws monitoring section

* chore: fix lint issues

* chore: fix lint issues

* feat: handle redirect for aws monitoring

---------

Co-authored-by: Yunus M <myounis.ar@live.com>
2024-02-28 18:20:15 +05:30
Vishal Sharma
fe0ba5e3ba fix: create PAT not null error (#4613)
* fix: create PAT not null error
allow all admins to view all pats

* fix: allow revoking of token by all admin users
2024-02-28 17:37:30 +05:30
Srikanth Chekuri
8add13743a fix: remove unknown setting from connection string (#4612) 2024-02-28 15:06:47 +05:30
Rajat Dabade
9964e3425a Feat: Bar chart (#4562)
* feat: added bar panel and configuration for bar chart
2024-02-28 14:56:50 +05:30
Raj Kamal Singh
ddaa464d97 feat: QS package for integrations (#4578)
* chore: bring in latest state of QS api work for integrations

* chore: integrations v0 qs API: refactor installed integration struct

* chore: finish up with integration lifecycle tests

* chore: some cleanup

* chore: some more cleanup

* chore: some more cleanup

* chore: some more cleanup

* chore: some more cleanup

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-02-28 09:54:50 +05:30
Hayden
8f9d643923 Add basic support for secure clickhouse connections (#4178) 2024-02-27 23:41:00 +05:30
Prashant Shahi
d9ab100da3 ci(telemetry): include environment variables for ee build (#4603)
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-27 17:15:23 +05:30
Yunus M
7d32c63398 feat: frontend telemetry setup (#4560) 2024-02-27 16:40:29 +05:30
Yunus M
89c6eba913 feat: update naming from API keys to Access Tokens (#4597)
* feat: update naming from API keys to Access Tokens

* feat: update api-keys route to access-tokens
2024-02-27 13:38:43 +05:30
Vikrant Gupta
c38247abe4 fix: [SIG-528]: precommit typescript check for md files (#4596) 2024-02-26 18:17:34 +05:30
Yunus M
f9eddc9b18 fix: update no logs text and link based on the datasource (#4594) 2024-02-26 12:09:31 +05:30
Yunus M
17de5836bd feat: send only required details in billingevents (#4587) 2024-02-23 22:53:09 +05:30
Vikrant Gupta
fe37a2e7e0 fix: traceID link not opening from log details page (#4590) 2024-02-23 22:19:45 +05:30
makeavish
aad840da59 chore: send language and service name events as list 2024-02-23 14:08:17 +05:30
Yunus M
f2d5d21581 fix: redirect old logs routes to new routes (#4584) 2024-02-22 16:57:06 +05:30
Vikrant Gupta
f3bc1a8f8a fix: date time value initialising to start of day in case of typing (#4585) 2024-02-22 16:32:30 +05:30
Prashant Shahi
f069ecdb76 Merge pull request #4582 from SigNoz/release/v0.39.1
Release/v0.39.1
2024-02-21 17:53:50 +05:30
Prashant Shahi
493aef0241 chore(signoz): 📌 pin versions: SigNoz 0.39.1, SigNoz OtelCollector 0.88.13
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-21 16:22:48 +05:45
Yunus M
7bca847f11 fix: show expired token label (#4581)
* fix: show expired token label

* fix: handle no expiry

---------

Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2024-02-21 15:38:18 +05:30
Vishal Sharma
0cb60e1c10 chore: update heartbeat, language event and add serviceName event (#4571)
* chore: update heartbeat, language event and add serviceName event

* chore: update tagsInfo
2024-02-21 14:49:33 +05:30
Rajat Dabade
ecd5ce92c2 List View for Dashboard (#4517)
* refactor: initial setup for list view logs

* feat: done with basic functionality panel view logs

* feat: added panel view

* fix: discard and edit issue

* refactor: removed unnecessary params from uselogdata

* feat: trace list view

* fix: loader

* refactor: traces table component css update

* refactor: added Open Sans font and updated css

* fix: full view traces issue and search column css update

* refactor: remove consoles

* refactor: removed commented code and updated logic

* chore: build failure

* refactor: icons change for add panels

* refactor: rebased to develop

* refactor: added support for light mode

* refactor: fix tsc

* fix: query select issue

* chore: table column to lower case

* refactor: updated styling for both log and traces tables

* chore: removed comment code

* chore: remove the resizable block from table header traces

* refactor: log table header and body styling updated

* fix: query range on every column add

* refactor: styling updates

* fix: query range log respect global time

* refactor: css update log table header

* refactor: removed unnecessary code

* refactor: log query range respect globaltime

* refactor: dropdown support to qb

* refactor: remove creating alert for list view

* refactor: fix the height of column select dropdown

* fix: dropdown suggestion for orderby

* refactor: remove the commented code

* refactor: full view respect global time

* refactor: css updates

* refactor: should fire query range on variable change

* refactor: css updates for log list view

* refactor: removed the unused changes

* refactor: handle error state for explorer columns

* refactor: handle error state for explorer columns

* chore: generate yarn lock file

* refactor: pagination for order by timestamp

* fix: full text body contain issue

* refactor: inverted the operator for next and previous button config

* refactor: rename variable handle light mode

* fix: no log issue

* chore: renamed variables

---------

Co-authored-by: Vikrant Gupta <vikrant.thomso@gmail.com>
2024-02-20 16:21:07 +05:30
Srikanth Chekuri
aa67b47053 fix: address gaps in alert to notification link (#4573) 2024-02-20 10:42:30 +05:30
Yunus M
e2669eb370 API ingestion keys - CRUD (#4524)
* feat: api keys crud - integration v0.1

* feat: add test cases

* fix: add review comments

* feat: api integration and ui updates

* feat: update test cases

* feat: update expiriesAt request payload

* feat: ui feedback updates

* feat: api keys crud - integration v0.1

* feat: add test cases

* fix: add review comments

* feat: api integration and ui updates

* feat: update test cases

* feat: update expiriesAt request payload

* feat: ui feedback updates

* feat: handle light mode styles

* feat: hide pagination on single page

* feat: do not show last used if not present or 0

* feat: show tooltip on role

---------

Co-authored-by: Rajat Dabade <rajat@signoz.io>
2024-02-19 09:19:06 +05:30
Srikanth Chekuri
c4bbbf372c fix: change the order of local and distributed table (#4565) 2024-02-17 22:05:33 +05:30
Yunus M
0c59953cb5 feat: open left nav items in new tab on cmd ctrl click (#4561) 2024-02-17 14:59:49 +05:30
Vikrant Gupta
b10f17de78 chore: Added jest cases for logs explorer page (#4553)
* chore: base file for logs explorer jest test cases

* chore: added base setup for logs explorer jest fixing the uplot/d3-interpolate/antd-config errors

* chore: added test for rendering of logs explorer page without API calls

* chore: added test for rendering of logs with API call

* chore: used virtuoso mock to render items on the screen

* chore: used virtuoso mock to render items on the screen

* chore: update dummy data
2024-02-16 18:22:33 +05:30
Ankit Nayan
bbf9787fb3 merging main 2024-02-16 14:13:37 +05:30
Vishal Sharma
d11c1eb439 feat: api management (#4557)
* feat: api management

* chore: address review comments and typos

* chore: add sort and created by user object on create

* chore: replace expiresAt with expiresInDays for request body
2024-02-16 12:46:33 +05:30
Vikrant Gupta
548c531956 fix: dashboard panel light theme (#4556)
* fix: dashboard panel light theme

* fix: logs pipeline page crashing on opening context for simulated logs

* fix: logs pipeline page crashing on opening context for simulated logs
2024-02-15 16:25:55 +05:30
Yunus M
4e75479831 feat: ui updates - traces explorer (#4555) 2024-02-15 14:38:21 +05:30
Vikrant Gupta
633b551e5d fix: qb search not respecting the saved views panel type (#4554) 2024-02-15 14:13:05 +05:30
Srikanth Chekuri
f734142419 chore: add ExponentialHistogram support for metrics v4 query range (#4525) 2024-02-14 23:33:19 +05:30
Yunus M
aa9a3e9349 fix: remove duplicate settings tab (#4552) 2024-02-14 22:50:18 +05:30
Yunus M
ab950135ff fix: disable cloud features for oss (#4551) 2024-02-14 22:00:17 +05:30
Prashant Shahi
b4e0e89b05 Merge pull request #4549 from SigNoz/release/v0.39.0
Release/v0.39.0
2024-02-14 18:23:52 +05:30
Prashant Shahi
12a33960ff chore: update go.mod
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-14 18:27:30 +05:45
Prashant Shahi
65ed0c0c05 chore(signoz): 📌 pin versions: SigNoz 0.39, SigNoz OtelCollector 0.88.12
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-14 18:17:28 +05:45
Prashant Shahi
6eb7693294 Merge branch 'main' into release/v0.39.0 2024-02-14 18:15:20 +05:45
Yunus M
7ec25b4f62 fix: invalid custom time value (#4547) 2024-02-14 17:26:33 +05:30
Vikrant Gupta
b3bc78d23c fix: date time value retain on location switch (#4546)
* fix: date time value retain on location switch

* chore: added inline comments

* feat: added shortcut strings based on user os

* feat: added shortcut strings based on user os

* feat: added shortcut strings based on user os
2024-02-14 16:47:39 +05:30
Yunus M
bd4786f128 feat: add borders to keyboard shortcut and settings tables (#4545) 2024-02-14 15:27:01 +05:30
Vikrant Gupta
81241170e5 feat: support zoom in for bar chart logs explorer (#4542)
* feat: support zoom in for bar chart logs explorer

* feat: add back navigation support for bar chart zoom in
2024-02-14 15:18:04 +05:30
Vikrant Gupta
e0df371a8d fix: added missing updated time on first load for back navigation (#4544)
* fix: added missing updated time on first load for back navigation

* fix: cleanup console
2024-02-14 14:00:17 +05:30
Vikrant Gupta
cfea51d9ee fix: relative time preferences not respected (#4543) 2024-02-14 12:56:54 +05:30
Vikrant Gupta
037f5ae4c8 feat: added focus for qb search for last search bar (#4534) 2024-02-14 12:49:24 +05:30
Yunus M
d6b7587bbe feat: show retention info for cloud users and show count next to team… (#4536)
* feat: show retention info for cloud users and show count next to team members and pending invites

* feat: add safety check for saved views response
2024-02-14 12:42:36 +05:30
Vikrant Gupta
0dffd86287 fix: number attributes not showing up in raw view (#4541) 2024-02-14 12:22:12 +05:30
Nityananda Gohain
c75a44c620 fix: orderby not working for attributes fixed (#4540) 2024-02-14 10:49:31 +05:30
Ankit Nayan
cbf3041dde Update Saved Views mutation to require Editor access (#4538) 2024-02-13 21:50:52 +05:30
Rajat Dabade
d0b43f3802 fix: button border color for tab in log explorer (#4535) 2024-02-13 19:18:00 +05:30
CheetoDa
1ee672c020 feat: onboarding flows for rust swift and elixir (#4507)
* feat: onboarding flows for rust swift and elixir

* fix: fixed some issues

* fix: spellcheck done

* fix: feedback incorporated

* chore: fixed swift docs

* chore: minor fixes

* fix: lint errors

---------

Co-authored-by: Yunus M <myounis.ar@live.com>
2024-02-13 16:52:28 +05:30
Rajat Dabade
ad8924ed13 [Refactor]: added extra param to check isdepend on query build (#4523)
* refactor: added extra param to check isdepend on query build

* refactor: added extra param to check isdepend on query build

* refactor: updated enabled query condition

* refactor: added extra param to check isdepend on query build

* refactor: updated enabled query condition

* chore: clean code
2024-02-13 15:19:15 +05:30
Vikrant Gupta
cff0e1cf1e fix: old and new dashboards in sync with local storage (#4520)
* fix: old and new dashboards in sync with local storage

* fix: remove code

* fix: remove console log

* fix: remove console log
2024-02-13 13:57:24 +05:30
Vikrant Gupta
02f83e4b4a fix: shortcuts break when there are multiple queries (#4532) 2024-02-13 01:40:11 +05:30
Rajat Dabade
6bc5ceac3e [Fix]: delete formula issue (#4526) 2024-02-12 21:42:56 +05:30
Vikrant Gupta
3a20862d0c feat: added shortcuts page in the side nav (#4506)
* feat: added shortcuts page in the side nav

* fix: update shortcuts for add to dashboard and alerts

* fix: cmd+enter should stage and run query

* chore: refactor the shortcuts utils

* feat: support run query even when input is focussed

* fix: dropdown visibility change

* feat: add shortcuts for sideNav

* feat: auto focus logs explorer search bar with hotkey

* fix: update the shortcuts for sideNav and dependencies

* fix: remove dashboard and alert shortcuts

* fix: minor typo changes
2024-02-12 19:53:35 +05:30
Nityananda Gohain
0e331dd177 feat: support cache in logs (#4516)
* feat: support cache in logs

* fix: revert fluxinterval changes

* feat: support for limit queries

* feat: support for formula

* fix: refactor code and don't return all points

* fix: add nil params check

* fix: error handling updated

* fix: start and end params fix
2024-02-12 18:45:21 +05:30
Yunus M
ab4f6adb19 Logs explorer design update (#4352)
* feat: logs explorer - new design

* feat: update styles

* feat: added new toolbar for logs explorer (#4336)

* feat: logs list view changes (#4348)

* feat: logs list view changes

* fix: list view and toolbar styles

* feat: side btns

* feat: added auto refresh handler

* feat: handle popover close for btn click date time

* feat: extract the common log actions btn component

* feat: update the button for log line actions

* fix: event propagation from context button

* feat: use styles from ui-library

* Query builder design update (#4359)

* feat: QB design update

* fix: add functionality and light mode styles

* fix: ts issues

* fix: update all css color variables to correct names

* fix: lint errors

* feat: new table view for logs explorer list section  (#4353)

* feat: table view changes for logs list

* feat: code refactor to support log line actions

* feat: code refactor to support log line actions

* fix: the positioning of the btns

* feat: fix the table onclick

* fix: header issue

* fix: on hover

* fix: type issue

* fix: eslint error

* fix: type errors (#4360)

* feat: handle light theme for logs explorer design changes (#4363)

* feat: handle light theme for list tables and dateTime selection

* feat: handle light theme for popover

* fix: address review comments

* feat: date time custom time modal to render inside the new popover (#4366)

* feat: single calendar for range picker

* fix: edgecases

* feat: integrate date time selector across app

* fix: remove dangling border after element removal

* feat: handle qb design changes across the application

* feat: handle light theme

* feat: handle light theme

* fix: virtuoso scroll refresh issue

* feat: handle new typing changes for date time picker v2 (#4386)

Co-authored-by: Yunus M <myounis.ar@live.com>

* chore: styles improvement across new design (#4389)

* fix: improve date time styles

* feat: table view changes according to new design

* fix: button visibility in clickhouse and promQL headers (#4390)

* feat: change the tabs to new design buttons for query builder

* Settings theme change (#4368)

* feat: settings theme change

* [Refactor]: New design for Log details page (#4362)

New design for Log details page 

Co-authored-by: Vikrant Gupta <vikrant.thomso@gmail.com>
Co-authored-by: Yunus M <myounis.ar@live.com>

* feat: save view for new design (#4392)

* feat: save view for new design

* refactor: done with save view

* feat: update styles for logs detail view (#4407)

* feat: update styles for logs detail view

* feat: update styles for logs detail view

* feat: add raw view attributes in the logs list view (#4422)

* feat: add raw view attributes in the logs list view

* feat: add raw view attributes in the logs list view

* fix: raw attributes

* fix: logs UI improvements (#4426)

* fix: remove fixed times from the date time picker v2

* fix: added old logs explorer CTA in new designs

* feat: handle active logs indicator update

* fix: address review comments

* fix: old logs explorer page

* fix: remove info text and add relative time buttons

* fix: update logs explorer tab designs

* fix: update logs explorer tab designs

* fix: update logs explorer tab designs

* refactor: New design for Save views. (#4435)

* feat: [GH-4436]: date range enhancements (#4448)

* feat: [GH-4436]: when selecting custom time range it should be from start of day to end of day

* fix: custom time width and refresh text visibility issues (#4428)

---------

Co-authored-by: Yunus M <myounis.ar@live.com>

* feat: update ui (#4449)

* feat: added loading and error states for logs design (#4452)

* feat: added loading and error states for logs design

* feat: added error states for table view and time series view

* feat: handle error and loading states

* feat: loading states

* [Refactor]: Tab Switch delay issue and UI improvement for Clickhouse (#4409)

* fix: switching between logs display tabs (#4457)

* [Feat]: View in Traces (#4450)

* refactor: datetime selector beside run query removed add to dashboard

* refactor: added tab for traces view details page

* refactor: done with the save view in traces

* fix: the jittery effect when navigating from views

* refactor: view tab view title light mode support

* refactor: removed console

* fix: jittery effect when switching view from views tabs

* refactor: separate traces routes

* refactor: remove query params

* chore: fix tsc issues

* fix: jest config issues

* fix: update TODO and remove extra braces

* feat: handle loading states and incorporate ui feedback (#4479)

* UI feedback updates (#4482)

* feat: handle loading and fix ui issues

* feat: ui updates

* fix: logs explorer issues (#4483)

* fix: logs explorer issues

* fix: jest test cases

* feat: support custom times unique to pages new design changes (#4485)

* fix: loading states for list log view (#4486)

* fix: logs search view query fix, logs details view - attribute tags alignment fix (#4489)

* fix: delete empty file

* fix: chart loading when scrolling logs (#4495)

* fix: chart should not load when scrolling the logs as it is already fetched

* fix: make the search bar the default rather than an advanced option

* fix: rename show context to show in context

* fix: query range api not triggering on default select first load (#4498)

* Refactor: Log Explorer UI changes.  (#4502)

* refactor: used selected view enum

* refactor: updated height of switch old button and tab border

* refactor: import fixes

* refactor: query builder border and button groups

* refactor: removed hyphen from refreshed

* refactor: show delete button only when there is more than one query

* refactor: square up the query builder button groups

* refactor: updated css

* fix: additional filter color button shadow

* refactor: removed commented code and used selected panel enum

* refactor: updated typecheck script

* refactor: used enum selected view (#4504)

* fix: retain the current query on date time change (#4510)

* feat: added new icon for promQL and added tooltips for dashboards and alerts (#4512)

* feat: added new icon for promQL and added tooltips for dashboards and alerts

* fix: styles at 1440 px zoom

* fix: rename clickhouse to clickHouse

---------

Co-authored-by: Vikrant Gupta <54737045+Vikrant2520@users.noreply.github.com>
Co-authored-by: Vikrant Gupta <vikrant.thomso@gmail.com>
Co-authored-by: Rajat Dabade <rajat@signoz.io>
2024-02-12 00:23:19 +05:30
Vikrant Gupta
50834be4db fix: update the extend trial from default mailto client to notification panel (#4514) 2024-02-11 23:55:54 +05:30
Srikanth Chekuri
260d21afd0 fix: update prom rule to use range query (#4461) 2024-02-11 22:31:46 +05:30
Srikanth Chekuri
3b98073ad4 chore: add time shift function and some refactoring (#4445) 2024-02-11 00:31:47 +05:30
Liran Tal
6bd2c1ba74 docs: use the proper Node.js written convention form (#3140) 2024-02-10 17:05:29 +05:30
Lars Lehtonen
968cc0eb82 chore: fix dropped errors in tests
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-02-10 15:56:54 +05:30
Ankit Nayan
3ce385ef23 fix: ee/query-service/Dockerfile to reduce vulnerabilities (#4443)
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-ALPINE318-OPENSSL-6152404
- https://snyk.io/vuln/SNYK-ALPINE318-OPENSSL-6152404
- https://snyk.io/vuln/SNYK-ALPINE318-OPENSSL-6160000
- https://snyk.io/vuln/SNYK-ALPINE318-OPENSSL-6160000
- https://snyk.io/vuln/SNYK-ALPINE318-OPENSSL-6191692

Co-authored-by: snyk-bot <snyk-bot@snyk.io>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2024-02-10 15:48:22 +05:30
Srikanth Chekuri
c6581782d0 chore: add formula eval in query-service (#4402) 2024-02-06 22:29:12 +05:30
Srikanth Chekuri
61977ebe86 chore: bump clickhouse-server to 24.1.2-alpine (#4492)
Co-authored-by: Prashant Shahi <prashant@signoz.io>
2024-02-06 18:53:50 +05:30
Prashant Shahi
56b71d0f02 ci(testing-deploy): update workflow to handle force-push scenario (#4503)
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-06 16:49:23 +05:30
Vikrant Gupta
f6ab060545 feat: setup the context for keyboard hotkeys (#4493)
* feat: setup the context for keyboard hotkeys

* feat: add error handling for duplicate callbacks

* feat: support added for caps and documented the return value

* feat: added shortcut for cmd+b for sideNav open and close

* feat: added jest test

* fix: address review comments

* fix: block the browser default actions wherever possible

* fix: remove browser overrides prevention code
2024-02-06 14:17:27 +05:30
Nityananda Gohain
554c4332c4 fix: don't throw error while fetching orgname (#4496)
* fix: don't throw error while fetching orgname

* fix: don't ignore the error
2024-02-05 20:23:20 +05:30
Prashant Shahi
9d689693b4 chore(gh-workflows): 💚 bump up GH action versions (#3702)
* chore(gh-workflows): 💚 bump up GH action versions

* ci(e2e-k3s): fix the test environment

* chore: upgrade checkout/setup-node to v4

* chore: upgrade appleboy/ssh-action to v1.0.3

---------

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-05 14:46:06 +05:30
Vikrant Gupta
26bc94fc46 feat: support dashboard local state (#4475) 2024-02-05 12:07:55 +05:30
Nityananda Gohain
6837c41090 fix: remove sending_queue and retry_on_failure settings (#3815)
Co-authored-by: Prashant Shahi <prashant@signoz.io>
2024-02-05 11:26:45 +05:30
Srikanth Chekuri
8fe0e60208 fix: make label compliant with prometheus spec (#4488) 2024-02-03 06:31:10 +05:30
Srikanth Chekuri
00b111fbe3 feat: add alerts to explorer link in notification (#4446) 2024-02-02 21:16:14 +05:30
Prashant Shahi
0bebd3e338 Merge pull request #4487 from SigNoz/release/v0.38.2
Release/v0.38.2
2024-02-02 17:59:34 +05:30
Prashant Shahi
d5e0a26f55 chore(signoz): 📌 pin versions: SigNoz OtelCollector 0.88.11
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-02 17:58:55 +05:45
Prashant Shahi
48ebe91713 chore(signoz): 📌 pin versions: SigNoz 0.38.2
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-02-02 17:44:59 +05:45
Prashant Shahi
5bc3c074f8 Merge branch 'main' into release/v0.38.2 2024-02-02 17:44:16 +05:45
Yunus M
f5b5a9a657 fix: maintain existing pagination configs (#4484)
* fix: maintain existing pagination configs

* fix: pass pagination info to services table

* fix: general setting - cloud - add email mailto link
2024-02-02 16:44:27 +05:30
Vikrant Gupta
ac835c80e9 fix: custom range should be unique to pages (#4460)
* fix: custom range should be unique to pages

* fix: added type safety

* fix: added type safety
2024-02-02 14:43:59 +05:30
Vikrant Gupta
2cf0bb4fa5 feat: add typecheck on pre-commit hook (#4472)
* feat: add typecheck on pre-commit hook
2024-02-02 12:58:14 +05:30
Yunus M
0f44246795 feat: all line series with same labels should have same color in a dashboard (#4478) 2024-02-01 18:06:32 +05:30
Yunus M
64307f323f feat: show info message in general settings for cloud users to reach out for retention change (#4476) 2024-02-01 04:04:19 +05:30
Srikanth Chekuri
616b8e0a45 Merge pull request #4474 from SigNoz/release/v0.38.1
Release v0.38.1
2024-02-01 01:07:16 +05:30
Srikanth Chekuri
2c0690a8ee chore: bump version to 0.38.1 2024-02-01 00:55:56 +05:30
Ankit Nayan
2f361de693 merging main 2024-02-01 00:50:21 +05:30
Yunus M
457380c065 Revert "fix: Logs UI: querybuildersearch: avoid emptying out query on sourceK…" (#4473)
This reverts commit 085cf34a49.
2024-02-01 00:13:42 +05:30
Prashant Shahi
96e3d00e74 Merge pull request #4469 from SigNoz/release/v0.38.0
Release/v0.38.0
2024-01-31 18:28:11 +05:30
Prashant Shahi
d224e08145 Merge branch 'main' into release/v0.38.0 2024-01-31 17:34:02 +05:45
Prashant Shahi
13ced00a35 chore(signoz): 📌 pin versions: SigNoz 0.38, SigNoz OtelCollector 0.88.9
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-01-31 17:27:41 +05:45
Yunus M
5c60a862e5 fix: update only showTotal property by spreading pagination object (#4467)
* fix: update only showTotal property by spreading pagination object
2024-01-31 15:59:40 +05:30
Yunus M
78c9330666 fix: set light bg for full screen in dashboard (#4465) 2024-01-31 14:25:27 +05:30
Vikrant Gupta
01fc7a7fd4 fix: [GH-4451]: custom time range modal closed on focussing closed date (#4456)
* fix: [GH-4451]: custom time range modal closed on focussing closed date

* fix: jest test
2024-01-30 18:50:02 +05:30
Yunus M
0200fb3a21 fix: close delete modal on delete success (#4459) 2024-01-30 16:47:58 +05:30
Yunus M
e977963763 feat: show total items count in table (#4453) 2024-01-29 18:21:51 +05:30
Yunus M
824d9aaf85 feat: users should choose either to broadcast all or enter specific channels to alert (#4441)
* feat: users should choose either to broadcast all or enter specific channels to alert

* fix: remove console logs
2024-01-29 11:12:41 +05:30
Srikanth Chekuri
4db3e5e542 chore: include status (#4447) 2024-01-29 00:42:19 +05:30
Yunus M
a8b293a510 fix: variable selection flow - dependent variable option not updated … (#4438)
* fix: variable selection flow - dependent variable option not updated on change

* fix: dropdown width and parent element update

* fix: add key to variable item inputs
2024-01-27 12:59:28 +05:30
Srikanth Chekuri
4a4f48cec8 chore: support p9{9,5,0},75,50 for space aggregation (#4382) 2024-01-26 17:07:23 +05:30
Vikrant Gupta
7e5cf65ea3 fix: [GH-4434]: dashboard variables performance issues (#4437) 2024-01-25 23:12:19 +05:30
Keshav Gupta
bb7417ffbd fix: edit the name on sign up page if name is blank (#4216)
Co-authored-by: keshav <keshav.gupta@jarvis.consulting>
2024-01-25 19:42:14 +05:30
Raj Kamal Singh
085cf34a49 fix: Logs UI: querybuildersearch: avoid emptying out query on sourceKeys update if tags are yet to be populated (#4355)
* fix: querybuildersearch: do not call query onChange from sourceKeys useEffect if tags is empty

* chore: add comment explaining change
2024-01-25 12:35:36 +05:30
Srikanth Chekuri
be27a92fc9 chore: add functions support (#4381) 2024-01-25 01:14:45 +05:30
Vikrant Gupta
253137a6b8 fix: center align the dashboard delete modal (#4429) 2024-01-25 01:05:47 +05:30
Prashanth Banda
fce7ab7d24 fix(frontend,serviceMap): dynamically truncate service map node label (#4365) 2024-01-25 00:47:09 +05:30
Prashant Shahi
71f6b355c4 Merge pull request #4430 from SigNoz/release/v0.37.2
Release/v0.37.2
2024-01-24 23:19:16 +05:30
Prashant Shahi
110b545454 chore(signoz): 📌 pin versions: SigNoz 0.37.2, SigNoz OtelCollector 0.88.9
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-01-24 23:15:12 +05:45
Yunus M
5b0e3d375a fix: custom time width and refresh text visibility issues (#4428) 2024-01-24 16:09:32 +05:30
Rajat Dabade
9e05cb48fe refactor: fill span for full view and dashboard view (#4424)
* refactor: fill span for full view and dashboard view

* refactor: fill span works in full view and dashboard
2024-01-24 15:37:34 +05:30
Yunus M
6d67ca72a0 fix: update search logic in dashboard to search for title, description, tags (#4427) 2024-01-24 14:18:15 +05:30
Vikrant Gupta
0626081eee feat: added log attributes in the raw view old designs (#4423)
* feat: added log attributes in the raw view old designs

* feat: support it in old explorer page
2024-01-24 11:32:48 +05:30
Rajat Dabade
199d52b39f refactor: added null check while searching for dashboard (#4421)
* refactor: added null check while searching for dashboard

* refactor: filtering null values out

* chore: removed extra space

* refactor: remove unnecessary null check
2024-01-23 16:36:25 +05:30
Ankit Nayan
204cad8448 merging main 2024-01-22 23:45:18 +05:30
Yunus M
8c6096d60e fix: reset env on data source select, set logo center aligned (#4417) 2024-01-22 21:47:55 +05:30
Prashant Shahi
9de9fb5863 Merge pull request #4413 from SigNoz/release/v0.37.1
Release/v0.37.1
2024-01-22 19:54:11 +05:30
1213 changed files with 84138 additions and 8821 deletions

View File

@@ -19,4 +19,4 @@ jobs:
- name: 'Dependency Review'
with:
fail-on-severity: high
uses: actions/dependency-review-action@v2
uses: actions/dependency-review-action@v3

View File

@@ -15,6 +15,11 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup golang
uses: actions/setup-go@v4
with:
go-version: "1.21"
- name: Build query-service image
env:
DEV_BUILD: 1
@@ -65,9 +70,9 @@ jobs:
- name: Kick off a sample-app workload
run: |
# start the locust swarm
kubectl -n sample-application run strzal --image=djbingham/curl \
--restart='OnFailure' -i --rm --command -- curl -X POST -F \
'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm
kubectl --namespace sample-application run strzal --image=djbingham/curl \
--restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \
'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm
- name: Get short commit SHA, display tunnel URL and IP Address of the worker node
id: get-subdomain

View File

@@ -0,0 +1,31 @@
name: Jest Coverage - changed files
on:
pull_request:
branches: develop
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: "refs/heads/develop"
token: ${{ secrets.GITHUB_TOKEN }} # Provide the GitHub token for authentication
- name: Fetch branch
run: git fetch origin ${{ github.event.pull_request.head.ref }}
- run: |
git checkout ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Install dependencies
run: cd frontend && npm install -g yarn && yarn
- name: npm run test:changedsince
run: cd frontend && npm run i18n:generate-hash && npm run test:changedsince

View File

@@ -20,13 +20,13 @@ jobs:
with:
go-version: "1.21"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
with:
version: latest
- name: Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -64,13 +64,13 @@ jobs:
with:
go-version: "1.21"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
with:
version: latest
- name: Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -115,11 +115,11 @@ jobs:
run: npm run lint
continue-on-error: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
with:
version: latest
- name: Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -152,6 +152,12 @@ jobs:
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'CLARITY_PROJECT_ID="${{ secrets.CLARITY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
- name: Install dependencies
working-directory: frontend
run: yarn install
@@ -164,11 +170,11 @@ jobs:
run: npm run lint
continue-on-error: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
with:
version: latest
- name: Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

View File

@@ -9,34 +9,46 @@ jobs:
name: Deploy latest develop branch to staging
runs-on: ubuntu-latest
environment: staging
permissions:
contents: 'read'
id-token: 'write'
steps:
- name: Executing remote ssh commands using ssh key
uses: appleboy/ssh-action@v0.1.8
env:
GITHUB_BRANCH: develop
GITHUB_SHA: ${{ github.sha }}
- id: 'auth'
uses: 'google-github-actions/auth@v2'
with:
host: ${{ secrets.HOST_DNS }}
username: ${{ secrets.USERNAME }}
key: ${{ secrets.SSH_KEY }}
envs: GITHUB_BRANCH,GITHUB_SHA
command_timeout: 60m
script: |
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export OTELCOL_TAG="main"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
docker pull signoz/signoz-otel-collector:main
docker pull signoz/signoz-schema-migrator:main
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout ${GITHUB_BRANCH}
git pull
make build-ee-query-service-amd64
make build-frontend-amd64
make run-signoz
workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
- name: 'sdk'
uses: 'google-github-actions/setup-gcloud@v2'
- name: 'ssh'
shell: bash
env:
GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
GITHUB_SHA: ${{ github.sha }}
GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
GCP_ZONE: ${{ secrets.GCP_ZONE }}
GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
run: |
read -r -d '' COMMAND <<EOF || true
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export OTELCOL_TAG="main"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
docker pull signoz/signoz-otel-collector:main
docker pull signoz/signoz-schema-migrator:main
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout ${GITHUB_BRANCH}
git pull
make build-ee-query-service-amd64
make build-frontend-amd64
make run-signoz
EOF
gcloud compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"

View File

@@ -9,32 +9,47 @@ jobs:
runs-on: ubuntu-latest
environment: testing
if: ${{ github.event.label.name == 'testing-deploy' }}
permissions:
contents: 'read'
id-token: 'write'
steps:
- name: Executing remote ssh commands using ssh key
uses: appleboy/ssh-action@v0.1.8
- id: 'auth'
uses: 'google-github-actions/auth@v2'
with:
workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
- name: 'sdk'
uses: 'google-github-actions/setup-gcloud@v2'
- name: 'ssh'
shell: bash
env:
GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
GITHUB_SHA: ${{ github.sha }}
with:
host: ${{ secrets.HOST_DNS }}
username: ${{ secrets.USERNAME }}
key: ${{ secrets.SSH_KEY }}
envs: GITHUB_BRANCH,GITHUB_SHA
command_timeout: 60m
script: |
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export DEV_BUILD="1"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout ${GITHUB_BRANCH}
git pull
make build-ee-query-service-amd64
make build-frontend-amd64
make run-signoz
GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
GCP_ZONE: ${{ secrets.GCP_ZONE }}
GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
run: |
read -r -d '' COMMAND <<EOF || true
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export DEV_BUILD="1"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout develop
git pull
# This is added to include the scenerio when new commit in PR is force-pushed
git branch -D ${GITHUB_BRANCH}
git checkout --track origin/${GITHUB_BRANCH}
make build-ee-query-service-amd64
make build-frontend-amd64
make run-signoz
EOF
gcloud compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"

View File

@@ -108,7 +108,7 @@ We support [OpenTelemetry](https://opentelemetry.io) as the library which you ca
- Java
- Python
- NodeJS
- Node.js
- Go
- PHP
- .NET

View File

@@ -1,7 +1,7 @@
version: "3.9"
x-clickhouse-defaults: &clickhouse-defaults
image: clickhouse/clickhouse-server:23.11.1-alpine
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
deploy:
restart_policy:
@@ -22,7 +22,7 @@ x-clickhouse-defaults: &clickhouse-defaults
"wget",
"--spider",
"-q",
"localhost:8123/ping"
"0.0.0.0:8123/ping"
]
interval: 30s
timeout: 5s
@@ -133,7 +133,7 @@ services:
# - ./data/clickhouse-3/:/var/lib/clickhouse/
alertmanager:
image: signoz/alertmanager:0.23.4
image: signoz/alertmanager:0.23.5
volumes:
- ./data/alertmanager:/data
command:
@@ -146,7 +146,7 @@ services:
condition: on-failure
query-service:
image: signoz/query-service:0.37.1
image: signoz/query-service:0.44.0
command:
[
"-config=/root/config/prometheus.yml",
@@ -160,7 +160,7 @@ services:
- ../dashboards:/root/config/dashboards
- ./data/signoz/:/var/lib/signoz/
environment:
- ClickHouseUrl=tcp://clickhouse:9000/?database=signoz_traces
- ClickHouseUrl=tcp://clickhouse:9000
- ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
- SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db
- DASHBOARDS_PATH=/root/config/dashboards
@@ -186,7 +186,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:0.37.1
image: signoz/frontend:0.44.0
deploy:
restart_policy:
condition: on-failure
@@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.88.8
image: signoz/signoz-otel-collector:0.88.21
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
- query-service
otel-collector-migrator:
image: signoz/signoz-schema-migrator:0.88.8
image: signoz/signoz-schema-migrator:0.88.21
deploy:
restart_policy:
condition: on-failure

View File

@@ -98,6 +98,7 @@ processors:
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
enable_exp_histogram: true
dimensions:
- name: service.namespace
default: default
@@ -110,28 +111,20 @@ processors:
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/?database=signoz_traces
datasource: tcp://clickhouse:9000/signoz_traces
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
endpoint: tcp://clickhouse:9000/signoz_metrics
# logging: {}
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/
dsn: tcp://clickhouse:9000/signoz_logs
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
timeout: 5s
sending_queue:
queue_size: 100
retry_on_failure:
enabled: true
initial_interval: 5s
max_interval: 30s
max_elapsed_time: 300s
timeout: 10s
extensions:
health_check:
endpoint: 0.0.0.0:13133

View File

@@ -22,4 +22,4 @@ rule_files:
scrape_configs: []
remote_read:
- url: tcp://clickhouse:9000/?database=signoz_metrics
- url: tcp://clickhouse:9000/signoz_metrics

View File

@@ -19,7 +19,7 @@ services:
- ZOO_AUTOPURGE_INTERVAL=1
clickhouse:
image: clickhouse/clickhouse-server:23.7.3-alpine
image: clickhouse/clickhouse-server:24.1.2-alpine
container_name: signoz-clickhouse
# ports:
# - "9000:9000"
@@ -46,7 +46,7 @@ services:
"wget",
"--spider",
"-q",
"localhost:8123/ping"
"0.0.0.0:8123/ping"
]
interval: 30s
timeout: 5s
@@ -54,7 +54,7 @@ services:
alertmanager:
container_name: signoz-alertmanager
image: signoz/alertmanager:0.23.4
image: signoz/alertmanager:0.23.5
volumes:
- ./data/alertmanager:/data
depends_on:
@@ -66,7 +66,7 @@ services:
- --storage.path=/data
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.8}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.21}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:0.88.8
image: signoz/signoz-otel-collector:0.88.21
command:
[
"--config=/etc/otel-collector-config.yaml",

View File

@@ -3,7 +3,7 @@ version: "2.4"
x-clickhouse-defaults: &clickhouse-defaults
restart: on-failure
# addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
image: clickhouse/clickhouse-server:23.11.1-alpine
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
depends_on:
- zookeeper-1
@@ -21,7 +21,7 @@ x-clickhouse-defaults: &clickhouse-defaults
"wget",
"--spider",
"-q",
"localhost:8123/ping"
"0.0.0.0:8123/ping"
]
interval: 30s
timeout: 5s
@@ -149,7 +149,7 @@ services:
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
alertmanager:
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.4}
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5}
container_name: signoz-alertmanager
volumes:
- ./data/alertmanager:/data
@@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.37.1}
image: signoz/query-service:${DOCKER_TAG:-0.44.0}
container_name: signoz-query-service
command:
[
@@ -179,7 +179,7 @@ services:
- ../dashboards:/root/config/dashboards
- ./data/signoz/:/var/lib/signoz/
environment:
- ClickHouseUrl=tcp://clickhouse:9000/?database=signoz_traces
- ClickHouseUrl=tcp://clickhouse:9000
- ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
- SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db
- DASHBOARDS_PATH=/root/config/dashboards
@@ -203,7 +203,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.37.1}
image: signoz/frontend:${DOCKER_TAG:-0.44.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -215,7 +215,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.8}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.21}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.8}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.21}
container_name: signoz-otel-collector
command:
[

View File

@@ -101,6 +101,7 @@ processors:
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
enable_exp_histogram: true
dimensions:
- name: service.namespace
default: default
@@ -121,28 +122,20 @@ extensions:
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/?database=signoz_traces
datasource: tcp://clickhouse:9000/signoz_traces
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
# logging: {}
endpoint: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/
dsn: tcp://clickhouse:9000/signoz_logs
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
timeout: 5s
sending_queue:
queue_size: 100
retry_on_failure:
enabled: true
initial_interval: 5s
max_interval: 30s
max_elapsed_time: 300s
timeout: 10s
# logging: {}
service:
telemetry:

View File

@@ -22,4 +22,4 @@ rule_files:
scrape_configs: []
remote_read:
- url: tcp://clickhouse:9000/?database=signoz_metrics
- url: tcp://clickhouse:9000/signoz_metrics

View File

@@ -1,5 +1,5 @@
# use a minimal alpine image
FROM alpine:3.18.5
FROM alpine:3.18.6
# Add Maintainer Info
LABEL maintainer="signoz"

View File

@@ -10,6 +10,7 @@ import (
"go.signoz.io/signoz/ee/query-service/license"
"go.signoz.io/signoz/ee/query-service/usage"
baseapp "go.signoz.io/signoz/pkg/query-service/app"
"go.signoz.io/signoz/pkg/query-service/app/integrations"
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
"go.signoz.io/signoz/pkg/query-service/cache"
baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
@@ -31,6 +32,7 @@ type APIHandlerOptions struct {
UsageManager *usage.Manager
FeatureFlags baseint.FeatureLookup
LicenseManager *license.Manager
IntegrationsController *integrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Cache cache.Cache
// Querier Influx Interval
@@ -56,6 +58,7 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
AppDao: opts.AppDao,
RuleManager: opts.RulesManager,
FeatureFlags: opts.FeatureFlags,
IntegrationsController: opts.IntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
Cache: opts.Cache,
FluxInterval: opts.FluxInterval,
@@ -149,12 +152,12 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/query_range", am.ViewAccess(ah.queryRangeMetricsV2)).Methods(http.MethodPost)
// PAT APIs
router.HandleFunc("/api/v1/pat", am.OpenAccess(ah.createPAT)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/pat", am.OpenAccess(ah.getPATs)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/pat/{id}", am.OpenAccess(ah.deletePAT)).Methods(http.MethodDelete)
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.getPATs)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.updatePAT)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.revokePAT)).Methods(http.MethodDelete)
router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)

View File

@@ -74,7 +74,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
requestBody, err := io.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("received no input in api\n", err)
zap.L().Error("received no input in api", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -82,7 +82,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
err = json.Unmarshal(requestBody, &req)
if err != nil {
zap.S().Errorf("received invalid user registration request", zap.Error(err))
zap.L().Error("received invalid user registration request", zap.Error(err))
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
return
}
@@ -90,13 +90,13 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
// get invite object
invite, err := baseauth.ValidateInvite(ctx, req)
if err != nil {
zap.S().Errorf("failed to validate invite token", err)
zap.L().Error("failed to validate invite token", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
if invite == nil {
zap.S().Errorf("failed to validate invite token: it is either empty or invalid", err)
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
return
}
@@ -104,7 +104,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
// get auth domain from email domain
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
if apierr != nil {
zap.S().Errorf("failed to get domain from email", apierr)
zap.L().Error("failed to get domain from email", zap.Error(apierr))
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
}
@@ -205,24 +205,24 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
ctx := context.Background()
if !ah.CheckFeature(model.SSO) {
zap.S().Errorf("[receiveGoogleAuth] sso requested but feature unavailable %s in org domain %s", model.SSO)
zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
q := r.URL.Query()
if errType := q.Get("error"); errType != "" {
zap.S().Errorf("[receiveGoogleAuth] failed to login with google auth", q.Get("error_description"))
zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
return
}
relayState := q.Get("state")
zap.S().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.S().Errorf("[receiveGoogleAuth] failed to process response - invalid response from IDP", err, r)
zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
@@ -244,14 +244,14 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
identity, err := callbackHandler.HandleCallback(r)
if err != nil {
zap.S().Errorf("[receiveGoogleAuth] failed to process HandleCallback ", domain.String(), zap.Error(err))
zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email)
if err != nil {
zap.S().Errorf("[receiveGoogleAuth] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
@@ -266,14 +266,14 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
if !ah.CheckFeature(model.SSO) {
zap.S().Errorf("[receiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
err := r.ParseForm()
if err != nil {
zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
@@ -281,11 +281,11 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
// the relay state is sent when a login request is submitted to
// Idp.
relayState := r.FormValue("RelayState")
zap.S().Debug("[receiveML] relay state", zap.String("relayState", relayState))
zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState))
parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
@@ -302,34 +302,34 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
sp, err := domain.PrepareSamlRequest(parsedState)
if err != nil {
zap.S().Errorf("[receiveSAML] failed to prepare saml request for domain (%s): %v", domain.String(), err)
zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
if err != nil {
zap.S().Errorf("[receiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domain.String(), err)
zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
if assertionInfo.WarningInfo.InvalidTime {
zap.S().Errorf("[receiveSAML] expired saml response for organization (%s): %v", domain.String(), err)
zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
email := assertionInfo.NameID
if email == "" {
zap.S().Errorf("[receiveSAML] invalid email in the SSO response (%s)", domain.String())
zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String()))
handleSsoError(w, r, redirectUri)
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email)
if err != nil {
zap.S().Errorf("[receiveSAML] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
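For readers skimming the logging hunks above: the old calls use zap's sugared logger (zap.S()) with printf-style formatting and loosely typed trailing arguments, while the new calls use the structured logger (zap.L()) with a constant message plus typed fields. A minimal, hand-written sketch of the two styles (illustrative only, not code from this changeset; the domain value is made up):

package main

import (
	"errors"

	"go.uber.org/zap"
)

func main() {
	logger, _ := zap.NewProduction()
	defer logger.Sync()
	zap.ReplaceGlobals(logger) // zap.L() and zap.S() now return this logger

	err := errors.New("invalid response from IDP")

	// Sugared style (the removed lines): printf formatting, untyped trailing args.
	zap.S().Errorf("failed to process response - invalid response from IDP: %v", err)

	// Structured style (the added lines): constant message plus typed fields.
	zap.L().Error("failed to process response - invalid response from IDP",
		zap.Error(err),
		zap.String("domain", "example.okta.com"), // hypothetical value for illustration
	)
}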

View File

@@ -12,6 +12,20 @@ import (
"go.uber.org/zap"
)
type DayWiseBreakdown struct {
Type string `json:"type"`
Breakdown []DayWiseData `json:"breakdown"`
}
type DayWiseData struct {
Timestamp int64 `json:"timestamp"`
Count float64 `json:"count"`
Size float64 `json:"size"`
UnitPrice float64 `json:"unitPrice"`
Quantity float64 `json:"quantity"`
Total float64 `json:"total"`
}
type tierBreakdown struct {
UnitPrice float64 `json:"unitPrice"`
Quantity float64 `json:"quantity"`
@@ -21,9 +35,10 @@ type tierBreakdown struct {
}
type usageResponse struct {
Type string `json:"type"`
Unit string `json:"unit"`
Tiers []tierBreakdown `json:"tiers"`
Type string `json:"type"`
Unit string `json:"unit"`
Tiers []tierBreakdown `json:"tiers"`
DayWiseBreakdown DayWiseBreakdown `json:"dayWiseBreakdown"`
}
type details struct {
@@ -40,6 +55,7 @@ type billingDetails struct {
BillingPeriodEnd int64 `json:"billingPeriodEnd"`
Details details `json:"details"`
Discount float64 `json:"discount"`
SubscriptionStatus string `json:"subscriptionStatus"`
} `json:"data"`
}
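To make the extended billing payload concrete, here is a rough sketch (assumptions: only the fields visible in the hunk above are included, tierBreakdown is truncated to the two fields shown, and all values are invented) of how usageResponse marshals with the new dayWiseBreakdown field:

package main

import (
	"encoding/json"
	"fmt"
)

// Struct and JSON tag names mirror the additions in the hunk above.
type DayWiseData struct {
	Timestamp int64   `json:"timestamp"`
	Count     float64 `json:"count"`
	Size      float64 `json:"size"`
	UnitPrice float64 `json:"unitPrice"`
	Quantity  float64 `json:"quantity"`
	Total     float64 `json:"total"`
}

type DayWiseBreakdown struct {
	Type      string        `json:"type"`
	Breakdown []DayWiseData `json:"breakdown"`
}

type tierBreakdown struct {
	UnitPrice float64 `json:"unitPrice"`
	Quantity  float64 `json:"quantity"`
}

type usageResponse struct {
	Type             string           `json:"type"`
	Unit             string           `json:"unit"`
	Tiers            []tierBreakdown  `json:"tiers"`
	DayWiseBreakdown DayWiseBreakdown `json:"dayWiseBreakdown"`
}

func main() {
	sample := usageResponse{
		Type:  "metrics",
		Unit:  "million samples",
		Tiers: []tierBreakdown{{UnitPrice: 0.1, Quantity: 120}},
		DayWiseBreakdown: DayWiseBreakdown{
			Type: "metrics",
			Breakdown: []DayWiseData{
				{Timestamp: 1706745600000, Count: 120, Size: 2.4, UnitPrice: 0.1, Quantity: 120, Total: 12},
			},
		},
	}
	out, _ := json.MarshalIndent(sample, "", "  ")
	fmt.Println(string(out))
}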
@@ -175,7 +191,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
url := fmt.Sprintf("%s/trial?licenseKey=%s", constants.LicenseSignozIo, currentActiveLicenseKey)
req, err := http.NewRequest("GET", url, nil)
if err != nil {
zap.S().Error("Error while creating request for trial details", err)
zap.L().Error("Error while creating request for trial details", zap.Error(err))
// If there is an error in fetching trial details, we will still return the license details
// to avoid blocking the UI
ah.Respond(w, resp)
@@ -184,7 +200,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
trialResp, err := hClient.Do(req)
if err != nil {
zap.S().Error("Error while fetching trial details", err)
zap.L().Error("Error while fetching trial details", zap.Error(err))
// If there is an error in fetching trial details, we will still return the license details
// to avoid incorrectly blocking the UI
ah.Respond(w, resp)
@@ -195,7 +211,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
trialRespBody, err := io.ReadAll(trialResp.Body)
if err != nil || trialResp.StatusCode != http.StatusOK {
zap.S().Error("Error while fetching trial details", err)
zap.L().Error("Error while fetching trial details", zap.Error(err))
// If there is an error in fetching trial details, we will still return the license details
// to avoid incorrectly blocking the UI
ah.Respond(w, resp)
@@ -206,7 +222,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
var trialRespData model.SubscriptionServerResp
if err := json.Unmarshal(trialRespBody, &trialRespData); err != nil {
zap.S().Error("Error while decoding trial details", err)
zap.L().Error("Error while decoding trial details", zap.Error(err))
// If there is an error in fetching trial details, we will still return the license details
// to avoid incorrectly blocking the UI
ah.Respond(w, resp)

View File

@@ -1,236 +0,0 @@
package api
import (
"bytes"
"fmt"
"net/http"
"sync"
"text/template"
"time"
"go.signoz.io/signoz/pkg/query-service/app/metrics"
"go.signoz.io/signoz/pkg/query-service/app/parser"
"go.signoz.io/signoz/pkg/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate"
"go.uber.org/zap"
)
func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.CustomMetricsFunction) {
zap.S().Info("CustomMetricsFunction feature is not enabled in this plan")
ah.APIHandler.QueryRangeMetricsV2(w, r)
return
}
metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)
if apiErrorObj != nil {
zap.S().Errorf(apiErrorObj.Err.Error())
RespondError(w, apiErrorObj, nil)
return
}
// prometheus instant query needs same timestamp
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.PROM {
metricsQueryRangeParams.Start = metricsQueryRangeParams.End
}
// round up the end to nearest multiple
if metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER {
end := (metricsQueryRangeParams.End) / 1000
step := metricsQueryRangeParams.Step
metricsQueryRangeParams.End = (end / step * step) * 1000
}
type channelResult struct {
Series []*basemodel.Series
TableName string
Err error
Name string
Query string
}
execClickHouseQueries := func(queries map[string]string) ([]*basemodel.Series, []string, error, map[string]string) {
var seriesList []*basemodel.Series
var tableName []string
ch := make(chan channelResult, len(queries))
var wg sync.WaitGroup
for name, query := range queries {
wg.Add(1)
go func(name, query string) {
defer wg.Done()
seriesList, tableName, err := ah.opts.DataConnector.GetMetricResultEE(r.Context(), query)
for _, series := range seriesList {
series.QueryName = name
}
if err != nil {
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query}
return
}
ch <- channelResult{Series: seriesList, TableName: tableName}
}(name, query)
}
wg.Wait()
close(ch)
var errs []error
errQuriesByName := make(map[string]string)
// read values from the channel
for r := range ch {
if r.Err != nil {
errs = append(errs, r.Err)
errQuriesByName[r.Name] = r.Query
continue
}
seriesList = append(seriesList, r.Series...)
tableName = append(tableName, r.TableName)
}
if len(errs) != 0 {
return nil, nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
}
return seriesList, tableName, nil, nil
}
execPromQueries := func(metricsQueryRangeParams *basemodel.QueryRangeParamsV2) ([]*basemodel.Series, error, map[string]string) {
var seriesList []*basemodel.Series
ch := make(chan channelResult, len(metricsQueryRangeParams.CompositeMetricQuery.PromQueries))
var wg sync.WaitGroup
for name, query := range metricsQueryRangeParams.CompositeMetricQuery.PromQueries {
if query.Disabled {
continue
}
wg.Add(1)
go func(name string, query *basemodel.PromQuery) {
var seriesList []*basemodel.Series
defer wg.Done()
tmpl := template.New("promql-query")
tmpl, tmplErr := tmpl.Parse(query.Query)
if tmplErr != nil {
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
return
}
var queryBuf bytes.Buffer
tmplErr = tmpl.Execute(&queryBuf, metricsQueryRangeParams.Variables)
if tmplErr != nil {
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
return
}
query.Query = queryBuf.String()
queryModel := basemodel.QueryRangeParams{
Start: time.UnixMilli(metricsQueryRangeParams.Start),
End: time.UnixMilli(metricsQueryRangeParams.End),
Step: time.Duration(metricsQueryRangeParams.Step * int64(time.Second)),
Query: query.Query,
}
promResult, _, err := ah.opts.DataConnector.GetQueryRangeResult(r.Context(), &queryModel)
if err != nil {
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query.Query}
return
}
matrix, _ := promResult.Matrix()
for _, v := range matrix {
var s basemodel.Series
s.QueryName = name
s.Labels = v.Metric.Copy().Map()
for _, p := range v.Floats {
s.Points = append(s.Points, basemodel.MetricPoint{Timestamp: p.T, Value: p.F})
}
seriesList = append(seriesList, &s)
}
ch <- channelResult{Series: seriesList}
}(name, query)
}
wg.Wait()
close(ch)
var errs []error
errQuriesByName := make(map[string]string)
// read values from the channel
for r := range ch {
if r.Err != nil {
errs = append(errs, r.Err)
errQuriesByName[r.Name] = r.Query
continue
}
seriesList = append(seriesList, r.Series...)
}
if len(errs) != 0 {
return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
}
return seriesList, nil, nil
}
var seriesList []*basemodel.Series
var tableName []string
var err error
var errQuriesByName map[string]string
switch metricsQueryRangeParams.CompositeMetricQuery.QueryType {
case basemodel.QUERY_BUILDER:
runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME)
if runQueries.Err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: runQueries.Err}, nil)
return
}
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(runQueries.Queries)
case basemodel.CLICKHOUSE:
queries := make(map[string]string)
for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries {
if chQuery.Disabled {
continue
}
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(chQuery.Query)
if err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
return
}
var query bytes.Buffer
// replace go template variables
querytemplate.AssignReservedVars(metricsQueryRangeParams)
err = tmpl.Execute(&query, metricsQueryRangeParams.Variables)
if err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
return
}
queries[name] = query.String()
}
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(queries)
case basemodel.PROM:
seriesList, err, errQuriesByName = execPromQueries(metricsQueryRangeParams)
default:
err = fmt.Errorf("invalid query type")
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, errQuriesByName)
return
}
if err != nil {
apiErrObj := &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
return
}
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
len(seriesList) > 1 &&
(metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER ||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.CLICKHOUSE) {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
return
}
type ResponseFormat struct {
ResultType string `json:"resultType"`
Result []*basemodel.Series `json:"result"`
TableName []string `json:"tableName"`
}
resp := ResponseFormat{ResultType: "matrix", Result: seriesList, TableName: tableName}
ah.Respond(w, resp)
}
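Editor's note: the file deleted above held the EE-specific queryRangeMetricsV2 handler. Its end-timestamp adjustment is easy to misread, so here is a worked example of that arithmetic; note that truncating integer division aligns the end down to the previous step boundary, despite the "round up" comment in the removed code. Values are illustrative:

package main

import "fmt"

func main() {
	var end int64 = 1715600123456 // request end, in milliseconds
	var step int64 = 60           // step, in seconds

	endSec := end / 1000                     // 1715600123
	aligned := (endSec / step * step) * 1000 // 1715600100000

	fmt.Println(aligned) // end aligned to the nearest earlier step boundary, back in ms
}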

View File

@@ -12,6 +12,7 @@ import (
"github.com/gorilla/mux"
"go.signoz.io/signoz/ee/query-service/model"
"go.signoz.io/signoz/pkg/query-service/auth"
baseconstants "go.signoz.io/signoz/pkg/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@@ -28,7 +29,7 @@ func generatePATToken() string {
func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := model.PAT{}
req := model.CreatePATRequestBody{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
@@ -41,30 +42,87 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
}, nil)
return
}
// All the PATs are associated with the user creating the PAT. Hence, the permissions
// associated with the PAT is also equivalent to that of the user.
req.UserID = user.Id
req.CreatedAt = time.Now().Unix()
req.Token = generatePATToken()
// default expiry is 30 days
if req.ExpiresAt == 0 {
req.ExpiresAt = time.Now().AddDate(0, 0, 30).Unix()
pat := model.PAT{
Name: req.Name,
Role: req.Role,
ExpiresAt: req.ExpiresInDays,
}
// max expiry is 1 year
if req.ExpiresAt > time.Now().AddDate(1, 0, 0).Unix() {
req.ExpiresAt = time.Now().AddDate(1, 0, 0).Unix()
err = validatePATRequest(pat)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
zap.S().Debugf("Got PAT request: %+v", req)
// All the PATs are associated with the user creating the PAT.
pat.UserID = user.Id
pat.CreatedAt = time.Now().Unix()
pat.UpdatedAt = time.Now().Unix()
pat.LastUsed = 0
pat.Token = generatePATToken()
if pat.ExpiresAt != 0 {
// convert expiresAt to unix timestamp from days
pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
}
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
var apierr basemodel.BaseApiError
if req, apierr = ah.AppDao().CreatePAT(ctx, req); apierr != nil {
if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, &req)
ah.Respond(w, &pat)
}
func validatePATRequest(req model.PAT) error {
if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
return fmt.Errorf("valid role is required")
}
if req.ExpiresAt < 0 {
return fmt.Errorf("valid expiresAt is required")
}
if req.Name == "" {
return fmt.Errorf("valid name is required")
}
return nil
}
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := model.PAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
}
err = validatePATRequest(req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
req.UpdatedByUserID = user.Id
id := mux.Vars(r)["id"]
req.UpdatedAt = time.Now().Unix()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, map[string]string{"data": "pat updated successfully"})
}
func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
@@ -77,8 +135,8 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
}, nil)
return
}
zap.S().Infof("Get PATs for user: %+v", user.Id)
pats, apierr := ah.AppDao().ListPATs(ctx, user.Id)
zap.L().Info("Get PATs for user", zap.String("user_id", user.Id))
pats, apierr := ah.AppDao().ListPATs(ctx)
if apierr != nil {
RespondError(w, apierr, nil)
return
@@ -86,7 +144,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
ah.Respond(w, pats)
}
func (ah *APIHandler) deletePAT(w http.ResponseWriter, r *http.Request) {
func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
id := mux.Vars(r)["id"]
user, err := auth.GetUserFromRequest(r)
@@ -97,22 +155,11 @@ func (ah *APIHandler) deletePAT(w http.ResponseWriter, r *http.Request) {
}, nil)
return
}
pat, apierr := ah.AppDao().GetPATByID(ctx, id)
if apierr != nil {
zap.L().Info("Revoke PAT with id", zap.String("id", id))
if apierr := ah.AppDao().RevokePAT(ctx, id, user.Id); apierr != nil {
RespondError(w, apierr, nil)
return
}
if pat.UserID != user.Id {
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: fmt.Errorf("unauthorized PAT delete request"),
}, nil)
return
}
zap.S().Debugf("Delete PAT with id: %+v", id)
if apierr := ah.AppDao().DeletePAT(ctx, id); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, map[string]string{"data": "pat deleted successfully"})
ah.Respond(w, map[string]string{"data": "pat revoked successfully"})
}
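Editor's note: a minimal sketch of the expiry handling introduced in createPAT above. ExpiresInDays from the request is copied into pat.ExpiresAt and, when non-zero, converted from a day count into an absolute unix timestamp; zero is kept to mean "never expires". The helper name below is local to this sketch:

package main

import (
	"fmt"
	"time"
)

// expiresAtFromDays mirrors the conversion in createPAT above (sketch only).
func expiresAtFromDays(days int64) int64 {
	if days == 0 {
		return 0 // zero means the token never expires
	}
	return time.Now().Unix() + days*24*60*60
}

func main() {
	fmt.Println(expiresAtFromDays(0))  // 0: no expiry
	fmt.Println(expiresAtFromDays(30)) // unix timestamp 30 days from now
}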

View File

@@ -15,7 +15,7 @@ import (
func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.SmartTraceDetail) {
zap.S().Info("SmartTraceDetail feature is not enabled in this plan")
zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
ah.APIHandler.SearchTraces(w, r)
return
}
@@ -26,7 +26,7 @@ func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
}
spanLimit, err := strconv.Atoi(constants.SpanLimitStr)
if err != nil {
zap.S().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable: ", err)
zap.L().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable", zap.Error(err))
return
}
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm)

View File

@@ -22,7 +22,7 @@ import (
func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {
defer utils.Elapsed("GetMetricResult")()
zap.S().Infof("Executing metric result query: %s", query)
zap.L().Info("Executing metric result query: ", zap.String("query", query))
var hash string
// If getSubTreeSpans function is used in the clickhouse query
@@ -38,9 +38,8 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
}
rows, err := r.conn.Query(ctx, query)
zap.S().Debug(query)
if err != nil {
zap.S().Debug("Error in processing query: ", err)
zap.L().Error("Error in processing query", zap.Error(err))
return nil, "", fmt.Errorf("error in processing query")
}
@@ -117,7 +116,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
}
default:
zap.S().Errorf("invalid var found in metric builder query result", v, colName)
zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName))
}
}
sort.Strings(groupBy)
@@ -140,7 +139,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
}
// err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash)
// if err != nil {
// zap.S().Error("Error in dropping temporary table: ", err)
// zap.L().Error("Error in dropping temporary table: ", err)
// return nil, err
// }
if hash == "" {
@@ -152,7 +151,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) {
zap.S().Debugf("Executing getSubTreeSpans function")
zap.L().Debug("Executing getSubTreeSpans function")
// str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;`
@@ -162,28 +161,28 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash)
if err != nil {
zap.S().Error("Error in dropping temporary table: ", err)
zap.L().Error("Error in dropping temporary table", zap.Error(err))
return query, hash, err
}
// Create temporary table to store the getSubTreeSpans() results
zap.S().Debugf("Creating temporary table getSubTreeSpans%s", hash)
zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash))
err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)")
if err != nil {
zap.S().Error("Error in creating temporary table: ", err)
zap.L().Error("Error in creating temporary table", zap.Error(err))
return query, hash, err
}
var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse
getSpansSubQuery := subtreeInput
// Execute the subTree query
zap.S().Debugf("Executing subTree query: %s", getSpansSubQuery)
zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery))
err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery)
// zap.S().Info(getSpansSubQuery)
// zap.L().Info(getSpansSubQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("Error in processing sql query")
}
@@ -196,16 +195,16 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
if len(getSpansSubQueryDBResponses) == 0 {
return query, hash, fmt.Errorf("No spans found for the given query")
}
zap.S().Debugf("Executing query to fetch all the spans from the same TraceID: %s", modelQuery)
zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery))
err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("Error in processing sql query")
}
// Process model to fetch the spans
zap.S().Debugf("Processing model to fetch the spans")
zap.L().Debug("Processing model to fetch the spans")
searchSpanResponses := []basemodel.SearchSpanResponseItem{}
for _, item := range searchScanResponses {
var jsonItem basemodel.SearchSpanResponseItem
@@ -218,17 +217,17 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
}
// Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash
// Use map to store pointer to the spans to avoid duplicates and save memory
zap.S().Debugf("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans%s", hash)
zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses)
if err != nil {
zap.S().Error("Error in getSubTreeAlgorithm function: ", err)
zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err))
return query, hash, err
}
zap.S().Debugf("Preparing batch to store subtree spans in temporary table getSubTreeSpans%s", hash)
zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash))
if err != nil {
zap.S().Error("Error in preparing batch statement: ", err)
zap.L().Error("Error in preparing batch statement", zap.Error(err))
return query, hash, err
}
for _, span := range treeSearchResponse {
@@ -251,14 +250,14 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
span.Events,
)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, err
}
}
zap.S().Debugf("Inserting the subtree spans in temporary table getSubTreeSpans%s", hash)
zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
err = statement.Send()
if err != nil {
zap.S().Error("Error in sending statement: ", err)
zap.L().Error("Error in sending statement", zap.Error(err))
return query, hash, err
}
return query, hash, nil
@@ -323,7 +322,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
spans = append(spans, span)
}
zap.S().Debug("Building Tree")
zap.L().Debug("Building Tree")
roots, err := buildSpanTrees(&spans)
if err != nil {
return nil, err
@@ -333,7 +332,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
// For each root, get the subtree spans
for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses {
targetSpan := &model.SpanForTraceDetails{}
// zap.S().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
// zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
// Search target span object in the tree
for _, root := range roots {
targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID)
@@ -341,7 +340,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
break
}
if err != nil {
zap.S().Error("Error during BreadthFirstSearch(): ", err)
zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
return nil, err
}
}

View File

@@ -49,7 +49,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
break
}
if err != nil {
zap.S().Error("Error during BreadthFirstSearch(): ", err)
zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
return nil, err
}
}
@@ -186,7 +186,7 @@ func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTra
// If the parent span is not found, add current span to list of roots
if parent == nil {
// zap.S().Debug("Parent Span not found parent_id: ", span.ParentID)
// zap.L().Debug("Parent Span not found parent_id: ", span.ParentID)
roots = append(roots, span)
span.ParentID = ""
continue

View File

@@ -10,6 +10,7 @@ import (
"net/http"
_ "net/http/pprof" // http profiler
"os"
"regexp"
"time"
"github.com/gorilla/handlers"
@@ -20,10 +21,11 @@ import (
"github.com/soheilhy/cmux"
"go.signoz.io/signoz/ee/query-service/app/api"
"go.signoz.io/signoz/ee/query-service/app/db"
"go.signoz.io/signoz/ee/query-service/auth"
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/dao"
"go.signoz.io/signoz/ee/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/auth"
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
baseInterface "go.signoz.io/signoz/pkg/query-service/interfaces"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
@@ -34,10 +36,10 @@ import (
baseapp "go.signoz.io/signoz/pkg/query-service/app"
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
baseexplorer "go.signoz.io/signoz/pkg/query-service/app/explorer"
"go.signoz.io/signoz/pkg/query-service/app/integrations"
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
"go.signoz.io/signoz/pkg/query-service/app/opamp"
opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model"
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/cache"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/healthcheck"
@@ -133,7 +135,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
var reader interfaces.DataConnector
storage := os.Getenv("STORAGE")
if storage == "clickhouse" {
zap.S().Info("Using ClickHouse as datastore ...")
zap.L().Info("Using ClickHouse as datastore ...")
qb := db.NewDataConnector(
localDB,
serverOptions.PromConfigPath,
@@ -171,13 +173,22 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
// initiate opamp
_, err = opAmpModel.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH)
_, err = opAmpModel.InitDB(localDB)
if err != nil {
return nil, err
}
integrationsController, err := integrations.NewController(localDB)
if err != nil {
return nil, fmt.Errorf(
"couldn't create integrations controller: %w", err,
)
}
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(localDB, "sqlite")
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
localDB, "sqlite", integrationsController.GetPipelinesForInstalledIntegrations,
)
if err != nil {
return nil, err
}
@@ -233,6 +244,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
UsageManager: usageManager,
FeatureFlags: lm,
LicenseManager: lm,
IntegrationsController: integrationsController,
LogsParsingPipelineController: logParsingPipelineController,
Cache: c,
FluxInterval: fluxInterval,
@@ -278,6 +290,7 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
r := mux.NewRouter()
r.Use(baseapp.LogCommentEnricher)
r.Use(setTimeoutMiddleware)
r.Use(s.analyticsMiddleware)
r.Use(loggingMiddlewarePrivate)
@@ -304,32 +317,20 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
r := mux.NewRouter()
// add auth middleware
getUserFromRequest := func(r *http.Request) (*basemodel.UserPayload, error) {
patToken := r.Header.Get("SIGNOZ-API-KEY")
if len(patToken) > 0 {
zap.S().Debugf("Received a non-zero length PAT token")
ctx := context.Background()
dao := apiHandler.AppDao()
user, err := dao.GetUserByPAT(ctx, patToken)
if err == nil && user != nil {
zap.S().Debugf("Found valid PAT user: %+v", user)
return user, nil
}
if err != nil {
zap.S().Debugf("Error while getting user for PAT: %+v", err)
}
}
return baseauth.GetUserFromRequest(r)
return auth.GetUserFromRequest(r, apiHandler)
}
am := baseapp.NewAuthMiddleware(getUserFromRequest)
r.Use(baseapp.LogCommentEnricher)
r.Use(setTimeoutMiddleware)
r.Use(s.analyticsMiddleware)
r.Use(loggingMiddleware)
apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterMetricsRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)
@@ -392,13 +393,14 @@ func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush()
}
func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFrom := "/api/v3/query_range"
func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFromV3 := "/api/v3/query_range"
pathToExtractBodyFromV4 := "/api/v4/query_range"
data := map[string]interface{}{}
var postData *v3.QueryRangeParamsV3
if path == pathToExtractBodyFrom && (r.Method == "POST") {
if (r.Method == "POST") && ((path == pathToExtractBodyFromV3) || (path == pathToExtractBodyFromV4)) {
if r.Body != nil {
bodyBytes, err := io.ReadAll(r.Body)
if err != nil {
@@ -416,32 +418,68 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface
return nil, false
}
referrer := r.Header.Get("Referer")
dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the referrer", zap.Error(err))
}
alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the alert: ", zap.Error(err))
}
logsExplorerMatched, err := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the logs explorer: ", zap.Error(err))
}
traceExplorerMatched, err := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the trace explorer: ", zap.Error(err))
}
signozMetricsUsed := false
signozLogsUsed := false
dataSources := []string{}
signozTracesUsed := false
if postData != nil {
if postData.CompositeQuery != nil {
data["queryType"] = postData.CompositeQuery.QueryType
data["panelType"] = postData.CompositeQuery.PanelType
signozLogsUsed, signozMetricsUsed = telemetry.GetInstance().CheckSigNozSignals(postData)
signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData)
}
}
if signozMetricsUsed || signozLogsUsed {
if signozMetricsUsed || signozLogsUsed || signozTracesUsed {
if signozMetricsUsed {
dataSources = append(dataSources, "metrics")
telemetry.GetInstance().AddActiveMetricsUser()
}
if signozLogsUsed {
dataSources = append(dataSources, "logs")
telemetry.GetInstance().AddActiveLogsUser()
}
data["dataSources"] = dataSources
userEmail, err := auth.GetEmailFromJwt(r.Context())
if signozTracesUsed {
telemetry.GetInstance().AddActiveTracesUser()
}
data["metricsUsed"] = signozMetricsUsed
data["logsUsed"] = signozLogsUsed
data["tracesUsed"] = signozTracesUsed
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, userEmail, true)
// switch case to set data["screen"] based on the referrer
switch {
case dashboardMatched:
data["screen"] = "panel"
case alertMatched:
data["screen"] = "alert"
case logsExplorerMatched:
data["screen"] = "logs-explorer"
case traceExplorerMatched:
data["screen"] = "traces-explorer"
default:
data["screen"] = "unknown"
return data, true
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail, true, false)
}
}
return data, true
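Editor's note: a small sketch of the referrer classification added above. The Referer header is matched against the dashboard, alert, and explorer patterns, and the first match decides data["screen"]. The regex literals are copied from the diff; the helper name and sample URLs are illustrative:

package main

import (
	"fmt"
	"regexp"
)

// screenFromReferrer mirrors the switch added above (sketch only).
func screenFromReferrer(referrer string) string {
	dashboard, _ := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer)
	alert, _ := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer)
	logs, _ := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer)
	traces, _ := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer)
	switch {
	case dashboard:
		return "panel"
	case alert:
		return "alert"
	case logs:
		return "logs-explorer"
	case traces:
		return "traces-explorer"
	default:
		return "unknown"
	}
}

func main() {
	fmt.Println(screenFromReferrer("https://example.signoz.io/dashboard/abc-123/edit?relativeTime=30m")) // panel
	fmt.Println(screenFromReferrer("https://example.signoz.io/logs/logs-explorer?q=error"))              // logs-explorer
}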
@@ -463,12 +501,12 @@ func getActiveLogs(path string, r *http.Request) {
func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := auth.AttachJwtToContext(r.Context(), r)
ctx := baseauth.AttachJwtToContext(r.Context(), r)
r = r.WithContext(ctx)
route := mux.CurrentRoute(r)
path, _ := route.GetPathTemplate()
queryRangeV3data, metadataExists := extractQueryRangeV3Data(path, r)
queryRangeData, metadataExists := extractQueryRangeData(path, r)
getActiveLogs(path, r)
lrw := NewLoggingResponseWriter(w)
@@ -476,15 +514,15 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if metadataExists {
for key, value := range queryRangeV3data {
for key, value := range queryRangeData {
data[key] = value
}
}
if _, ok := telemetry.EnabledPaths()[path]; ok {
userEmail, err := auth.GetEmailFromJwt(r.Context())
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail)
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail, true, false)
}
}
@@ -521,7 +559,7 @@ func (s *Server) initListeners() error {
return err
}
zap.S().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
// listen on private port to support internal services
privateHostPort := s.serverOptions.PrivateHostPort
@@ -534,7 +572,7 @@ func (s *Server) initListeners() error {
if err != nil {
return err
}
zap.S().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
return nil
}
@@ -546,7 +584,7 @@ func (s *Server) Start() error {
if !s.serverOptions.DisableRules {
s.ruleManager.Start()
} else {
zap.S().Info("msg: Rules disabled as rules.disable is set to TRUE")
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
}
err := s.initListeners()
@@ -560,23 +598,23 @@ func (s *Server) Start() error {
}
go func() {
zap.S().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
switch err := s.httpServer.Serve(s.httpConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
// normal exit, nothing to do
default:
zap.S().Error("Could not start HTTP server", zap.Error(err))
zap.L().Error("Could not start HTTP server", zap.Error(err))
}
s.unavailableChannel <- healthcheck.Unavailable
}()
go func() {
zap.S().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
if err != nil {
zap.S().Error("Could not start pprof server", zap.Error(err))
zap.L().Error("Could not start pprof server", zap.Error(err))
}
}()
@@ -586,14 +624,14 @@ func (s *Server) Start() error {
}
go func() {
zap.S().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
switch err := s.privateHTTP.Serve(s.privateConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
// normal exit, nothing to do
zap.S().Info("private http server closed")
zap.L().Info("private http server closed")
default:
zap.S().Error("Could not start private HTTP server", zap.Error(err))
zap.L().Error("Could not start private HTTP server", zap.Error(err))
}
s.unavailableChannel <- healthcheck.Unavailable
@@ -601,10 +639,10 @@ func (s *Server) Start() error {
}()
go func() {
zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
if err != nil {
zap.S().Info("opamp ws server failed to start", err)
zap.L().Error("opamp ws server failed to start", zap.Error(err))
s.unavailableChannel <- healthcheck.Unavailable
}
}()
@@ -680,7 +718,7 @@ func makeRulesManager(
return nil, fmt.Errorf("rule manager error: %v", err)
}
zap.S().Info("rules manager is ready")
zap.L().Info("rules manager is ready")
return manager, nil
}

View File

@@ -0,0 +1,56 @@
package auth
import (
"context"
"fmt"
"net/http"
"time"
"go.signoz.io/signoz/ee/query-service/app/api"
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.uber.org/zap"
)
func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel.UserPayload, error) {
patToken := r.Header.Get("SIGNOZ-API-KEY")
if len(patToken) > 0 {
zap.L().Debug("Received a non-zero length PAT token")
ctx := context.Background()
dao := apiHandler.AppDao()
pat, err := dao.GetPAT(ctx, patToken)
if err == nil && pat != nil {
zap.L().Debug("Found valid PAT: ", zap.Any("pat", pat))
if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
zap.L().Info("PAT has expired: ", zap.Any("pat", pat))
return nil, fmt.Errorf("PAT has expired")
}
group, apiErr := dao.GetGroupByName(ctx, pat.Role)
if apiErr != nil {
zap.L().Error("Error while getting group for PAT: ", zap.Any("apiErr", apiErr))
return nil, apiErr
}
user, err := dao.GetUser(ctx, pat.UserID)
if err != nil {
zap.L().Error("Error while getting user for PAT: ", zap.Error(err))
return nil, err
}
telemetry.GetInstance().SetPatTokenUser()
dao.UpdatePATLastUsed(ctx, patToken, time.Now().Unix())
user.User.GroupId = group.Id
user.User.Id = pat.Id
return &basemodel.UserPayload{
User: user.User,
Role: pat.Role,
}, nil
}
if err != nil {
zap.L().Error("Error while getting user for PAT: ", zap.Error(err))
return nil, err
}
}
return baseauth.GetUserFromRequest(r)
}
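Editor's note: the PAT expiry check in GetUserFromRequest above treats a token as expired only when ExpiresAt is non-zero and already in the past, so a zero ExpiresAt still means "never expires". A tiny sketch of that predicate, with names local to the sketch:

package main

import (
	"fmt"
	"time"
)

// patExpired mirrors the check in GetUserFromRequest above (sketch only).
func patExpired(expiresAt, now int64) bool {
	return expiresAt != 0 && expiresAt < now
}

func main() {
	now := time.Now().Unix()
	fmt.Println(patExpired(0, now))        // false: no expiry set
	fmt.Println(patExpired(now-3600, now)) // true: expired an hour ago
	fmt.Println(patExpired(now+3600, now)) // false: still valid
}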

View File

@@ -34,9 +34,11 @@ type ModelDao interface {
GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError)
CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, p model.PAT, id string) (basemodel.BaseApiError)
GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
UpdatePATLastUsed(ctx context.Context, pat string, lastUsed int64) basemodel.BaseApiError
GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError)
ListPATs(ctx context.Context, userID string) ([]model.PAT, basemodel.BaseApiError)
DeletePAT(ctx context.Context, id string) basemodel.BaseApiError
ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, id string, userID string) basemodel.BaseApiError
}

View File

@@ -22,19 +22,19 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.S().Errorf("failed to get domain from email", apierr)
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, model.InternalErrorStr("failed to get domain from email")
}
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {
zap.S().Errorf("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
return nil, model.InternalErrorStr("failed to generate password hash")
}
group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup)
if apiErr != nil {
zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err)
zap.L().Error("GetGroupByName failed", zap.Error(apiErr))
return nil, apiErr
}
@@ -51,7 +51,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
user, apiErr = m.CreateUser(ctx, user, false)
if apiErr != nil {
zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err)
zap.L().Error("CreateUser failed", zap.Error(apiErr))
return nil, apiErr
}
@@ -65,7 +65,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.S().Errorf(" failed to get user with email received from auth provider", apierr.Error())
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
return "", model.BadRequestStr("invalid user email received from the auth provider")
}
@@ -75,7 +75,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
user = newUser
if apiErr != nil {
zap.S().Errorf("failed to create user with email received from auth provider: %v", apierr.Error())
zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
return "", apiErr
}
} else {
@@ -84,7 +84,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
tokenStore, err := baseauth.GenerateJWTForUser(user)
if err != nil {
zap.S().Errorf("failed to generate token for SSO login user", err)
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", model.InternalErrorStr("failed to generate token for the user")
}
@@ -143,8 +143,8 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
// do nothing, just skip sso
ssoAvailable = false
default:
zap.S().Errorf("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, model.BadRequest(err)
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, model.BadRequestStr(err.Error())
}
}
@@ -160,7 +160,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
if len(emailComponents) > 0 {
emailDomain = emailComponents[1]
}
zap.S().Errorf("failed to get org domain from email", zap.String("emailDomain", emailDomain), apierr.ToError())
zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
return resp, apierr
}
@@ -176,7 +176,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
escapedUrl, _ := url.QueryUnescape(sourceUrl)
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
zap.S().Errorf("failed to parse referer", err)
zap.L().Error("failed to parse referer", zap.Error(err))
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}
@@ -185,7 +185,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
if err != nil {
zap.S().Errorf("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), err)
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return resp, model.InternalError(err)
}

View File

@@ -48,13 +48,13 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
if domainIdStr != "" {
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
zap.S().Errorf("failed to parse domainId from relay state", err)
zap.L().Error("failed to parse domainId from relay state", zap.Error(err))
return nil, fmt.Errorf("failed to parse domainId from IdP response")
}
domain, err = m.GetDomain(ctx, domainId)
if (err != nil) || domain == nil {
zap.S().Errorf("failed to find domain from domainId received in IdP response", err.Error())
zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
}
@@ -64,7 +64,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
domain = domainFromDB
if (err != nil) || domain == nil {
zap.S().Errorf("failed to find domain from domainName received in IdP response", err.Error())
zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
}
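Editor's note: GetDomainFromSsoResponse above resolves the org domain either by domainId or by domainName taken from the SAML relay state. A short sketch of what that lookup key extraction could look like, assuming (not confirmed by the visible diff) that both values travel as query parameters of the relay-state URL; the URL below is purely illustrative:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Illustrative relay state; the real value is whatever was sent to the IdP on login.
	relayState, _ := url.Parse("https://example.signoz.io/complete/saml?domainId=0b0e9a5c-0000-4000-8000-1234567890ab")

	// Assumption: domainId/domainName are query parameters of the relay state,
	// matching the domainIdStr/domainNameStr handling above.
	fmt.Println(relayState.Query().Get("domainId"))
	fmt.Println(relayState.Query().Get("domainName")) // empty when only domainId is present
}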
@@ -132,7 +132,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo
for _, s := range stored {
domain := model.OrgDomain{Id: s.Id, Name: s.Name, OrgId: s.OrgId}
if err := domain.LoadConfig(s.Data); err != nil {
zap.S().Errorf("ListDomains() failed", zap.Error(err))
zap.L().Error("ListDomains() failed", zap.Error(err))
}
domains = append(domains, domain)
}
@@ -153,7 +153,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
configJson, err := json.Marshal(domain)
if err != nil {
zap.S().Errorf("failed to unmarshal domain config", zap.Error(err))
zap.L().Error("failed to unmarshal domain config", zap.Error(err))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
@@ -167,7 +167,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
time.Now().Unix())
if err != nil {
zap.S().Errorf("failed to insert domain in db", zap.Error(err))
zap.L().Error("failed to insert domain in db", zap.Error(err))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
@@ -178,13 +178,13 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
if domain.Id == uuid.Nil {
zap.S().Errorf("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return model.InternalError(fmt.Errorf("domain update failed"))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.S().Errorf("domain update failed", zap.Error(err))
zap.L().Error("domain update failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain update failed"))
}
@@ -195,7 +195,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba
domain.Id)
if err != nil {
zap.S().Errorf("domain update failed", zap.Error(err))
zap.L().Error("domain update failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain update failed"))
}
@@ -206,7 +206,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
if id == uuid.Nil {
zap.S().Errorf("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
@@ -215,7 +215,7 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.Bas
id)
if err != nil {
zap.S().Errorf("domain delete failed", zap.Error(err))
zap.L().Error("domain delete failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain delete failed"))
}

View File

@@ -7,6 +7,7 @@ import (
basedao "go.signoz.io/signoz/pkg/query-service/dao"
basedsql "go.signoz.io/signoz/pkg/query-service/dao/sqlite"
baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
"go.uber.org/zap"
)
type modelDao struct {
@@ -28,6 +29,41 @@ func (m *modelDao) checkFeature(key string) error {
return m.flags.CheckFeature(key)
}
func columnExists(db *sqlx.DB, tableName, columnName string) bool {
query := fmt.Sprintf("PRAGMA table_info(%s);", tableName)
rows, err := db.Query(query)
if err != nil {
zap.L().Error("Failed to query table info", zap.Error(err))
return false
}
defer rows.Close()
var (
cid int
name string
ctype string
notnull int
dflt_value *string
pk int
)
for rows.Next() {
err := rows.Scan(&cid, &name, &ctype, &notnull, &dflt_value, &pk)
if err != nil {
zap.L().Error("Failed to scan table info", zap.Error(err))
return false
}
if name == columnName {
return true
}
}
err = rows.Err()
if err != nil {
zap.L().Error("Failed to scan table info", zap.Error(err))
return false
}
return false
}
// InitDB creates and extends base model DB repository
func InitDB(dataSourceName string) (*modelDao, error) {
dao, err := basedsql.InitDB(dataSourceName)
@@ -51,11 +87,16 @@ func InitDB(dataSourceName string) (*modelDao, error) {
);
CREATE TABLE IF NOT EXISTS personal_access_tokens (
id INTEGER PRIMARY KEY AUTOINCREMENT,
role TEXT NOT NULL,
user_id TEXT NOT NULL,
token TEXT NOT NULL UNIQUE,
name TEXT NOT NULL,
created_at INTEGER NOT NULL,
expires_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL,
last_used INTEGER NOT NULL,
revoked BOOLEAN NOT NULL,
updated_by_user_id TEXT NOT NULL,
FOREIGN KEY(user_id) REFERENCES users(id)
);
`
@@ -65,6 +106,36 @@ func InitDB(dataSourceName string) (*modelDao, error) {
return nil, fmt.Errorf("error in creating tables: %v", err.Error())
}
if !columnExists(m.DB(), "personal_access_tokens", "role") {
_, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN role TEXT NOT NULL DEFAULT 'ADMIN';")
if err != nil {
return nil, fmt.Errorf("error in adding column: %v", err.Error())
}
}
if !columnExists(m.DB(), "personal_access_tokens", "updated_at") {
_, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN updated_at INTEGER NOT NULL DEFAULT 0;")
if err != nil {
return nil, fmt.Errorf("error in adding column: %v", err.Error())
}
}
if !columnExists(m.DB(), "personal_access_tokens", "last_used") {
_, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;")
if err != nil {
return nil, fmt.Errorf("error in adding column: %v", err.Error())
}
}
if !columnExists(m.DB(), "personal_access_tokens", "revoked") {
_, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN revoked BOOLEAN NOT NULL DEFAULT FALSE;")
if err != nil {
return nil, fmt.Errorf("error in adding column: %v", err.Error())
}
}
if !columnExists(m.DB(), "personal_access_tokens", "updated_by_user_id") {
_, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN updated_by_user_id TEXT NOT NULL DEFAULT '';")
if err != nil {
return nil, fmt.Errorf("error in adding column: %v", err.Error())
}
}
return m, nil
}

View File

@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"strconv"
"time"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
@@ -12,40 +13,124 @@ import (
func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError) {
result, err := m.DB().ExecContext(ctx,
"INSERT INTO personal_access_tokens (user_id, token, name, created_at, expires_at) VALUES ($1, $2, $3, $4, $5)",
"INSERT INTO personal_access_tokens (user_id, token, role, name, created_at, expires_at, updated_at, updated_by_user_id, last_used, revoked) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)",
p.UserID,
p.Token,
p.Role,
p.Name,
p.CreatedAt,
p.ExpiresAt)
p.ExpiresAt,
p.UpdatedAt,
p.UpdatedByUserID,
p.LastUsed,
p.Revoked,
)
if err != nil {
zap.S().Errorf("Failed to insert PAT in db, err: %v", zap.Error(err))
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
id, err := result.LastInsertId()
if err != nil {
zap.S().Errorf("Failed to get last inserted id, err: %v", zap.Error(err))
zap.L().Error("Failed to get last inserted id, err: %v", zap.Error(err))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
p.Id = strconv.Itoa(int(id))
createdByUser, _ := m.GetUser(ctx, p.UserID)
if createdByUser == nil {
p.CreatedByUser = model.User{
NotFound: true,
}
} else {
p.CreatedByUser = model.User{
Id: createdByUser.Id,
Name: createdByUser.Name,
Email: createdByUser.Email,
CreatedAt: createdByUser.CreatedAt,
ProfilePictureURL: createdByUser.ProfilePictureURL,
NotFound: false,
}
}
return p, nil
}
func (m *modelDao) ListPATs(ctx context.Context, userID string) ([]model.PAT, basemodel.BaseApiError) {
func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemodel.BaseApiError {
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET role=$1, name=$2, updated_at=$3, updated_by_user_id=$4 WHERE id=$5 and revoked=false;",
p.Role,
p.Name,
p.UpdatedAt,
p.UpdatedByUserID,
id)
if err != nil {
zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT update failed"))
}
return nil
}
func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed int64) basemodel.BaseApiError {
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET last_used=$1 WHERE token=$2 and revoked=false;",
lastUsed,
token)
if err != nil {
zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT last used update failed"))
}
return nil
}
func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE user_id=?;`, userID); err != nil {
zap.S().Errorf("Failed to fetch PATs for user: %s, err: %v", userID, zap.Error(err))
if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil {
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}
for i := range pats {
createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
if createdByUser == nil {
pats[i].CreatedByUser = model.User{
NotFound: true,
}
} else {
pats[i].CreatedByUser = model.User{
Id: createdByUser.Id,
Name: createdByUser.Name,
Email: createdByUser.Email,
CreatedAt: createdByUser.CreatedAt,
ProfilePictureURL: createdByUser.ProfilePictureURL,
NotFound: false,
}
}
updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
if updatedByUser == nil {
pats[i].UpdatedByUser = model.User{
NotFound: true,
}
} else {
pats[i].UpdatedByUser = model.User{
Id: updatedByUser.Id,
Name: updatedByUser.Name,
Email: updatedByUser.Email,
CreatedAt: updatedByUser.CreatedAt,
ProfilePictureURL: updatedByUser.ProfilePictureURL,
NotFound: false,
}
}
}
return pats, nil
}
func (m *modelDao) DeletePAT(ctx context.Context, id string) basemodel.BaseApiError {
_, err := m.DB().ExecContext(ctx, `DELETE from personal_access_tokens where id=?;`, id)
func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3",
userID, updatedAt, id)
if err != nil {
zap.S().Errorf("Failed to delete PAT, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("failed to delete PAT"))
zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT revoke failed"))
}
return nil
}
@@ -53,7 +138,7 @@ func (m *modelDao) DeletePAT(ctx context.Context, id string) basemodel.BaseApiEr
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE token=?;`, token); err != nil {
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE token=? and revoked=false;`, token); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
@@ -70,7 +155,7 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemo
func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE id=?;`, id); err != nil {
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE id=? and revoked=false;`, id); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
@@ -84,6 +169,7 @@ func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, basem
return &pats[0], nil
}
// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError) {
users := []basemodel.UserPayload{}

View File

@@ -47,13 +47,13 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError)
httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
zap.S().Errorf("failed to connect to license.signoz.io", err)
zap.L().Error("failed to connect to license.signoz.io", zap.Error(err))
return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
}
httpBody, err := io.ReadAll(httpResponse.Body)
if err != nil {
zap.S().Errorf("failed to read activation response from license.signoz.io", err)
zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err))
return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
}
@@ -63,7 +63,7 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError)
result := ActivationResult{}
err = json.Unmarshal(httpBody, &result)
if err != nil {
zap.S().Errorf("failed to marshal activation response from license.signoz.io", err)
zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err))
return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
}

View File

@@ -97,7 +97,7 @@ func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error {
l.ValidationMessage)
if err != nil {
zap.S().Errorf("error in inserting license data: ", zap.Error(err))
zap.L().Error("error in inserting license data: ", zap.Error(err))
return fmt.Errorf("failed to insert license in db: %v", err)
}
@@ -121,7 +121,7 @@ func (r *Repo) UpdatePlanDetails(ctx context.Context,
_, err := r.db.ExecContext(ctx, query, planDetails, time.Now(), key)
if err != nil {
zap.S().Errorf("error in updating license: ", zap.Error(err))
zap.L().Error("error in updating license: ", zap.Error(err))
return fmt.Errorf("failed to update license in db: %v", err)
}

View File

@@ -100,7 +100,7 @@ func (lm *Manager) SetActive(l *model.License) {
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.S().Panicf("Couldn't activate features: %v", err)
zap.L().Panic("Couldn't activate features", zap.Error(err))
}
if !lm.validatorRunning {
// we want to make sure only one validator runs,
@@ -125,13 +125,13 @@ func (lm *Manager) LoadActiveLicense() error {
if active != nil {
lm.SetActive(active)
} else {
zap.S().Info("No active license found, defaulting to basic plan")
zap.L().Info("No active license found, defaulting to basic plan")
// if no active license is found, we default to basic(free) plan with all default features
lm.activeFeatures = model.BasicPlan
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.S().Error("Couldn't initialize features: ", err)
zap.L().Error("Couldn't initialize features", zap.Error(err))
return err
}
}
@@ -191,7 +191,7 @@ func (lm *Manager) Validator(ctx context.Context) {
// Validate validates the current active license
func (lm *Manager) Validate(ctx context.Context) (reterr error) {
zap.S().Info("License validation started")
zap.L().Info("License validation started")
if lm.activeLicense == nil {
return nil
}
@@ -201,12 +201,12 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
lm.lastValidated = time.Now().Unix()
if reterr != nil {
zap.S().Errorf("License validation completed with error", reterr)
zap.L().Error("License validation completed with error", zap.Error(reterr))
atomic.AddUint64(&lm.failedAttempts, 1)
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
map[string]interface{}{"err": reterr.Error()}, "")
map[string]interface{}{"err": reterr.Error()}, "", true, false)
} else {
zap.S().Info("License validation completed with no errors")
zap.L().Info("License validation completed with no errors")
}
lm.mutex.Unlock()
@@ -214,7 +214,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
if apiError != nil {
zap.S().Errorf("failed to validate license", apiError)
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError.Err
}
@@ -235,7 +235,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
}
if err := l.ParsePlan(); err != nil {
zap.S().Errorf("failed to parse updated license", zap.Error(err))
zap.L().Error("failed to parse updated license", zap.Error(err))
return err
}
@@ -245,7 +245,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
if err != nil {
// unexpected db write issue but we can let the user continue
// and wait for update to work in next cycle.
zap.S().Errorf("failed to validate license", zap.Error(err))
zap.L().Error("failed to validate license", zap.Error(err))
}
}
@@ -263,14 +263,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
userEmail, err := auth.GetEmailFromJwt(ctx)
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
map[string]interface{}{"err": errResponse.Err.Error()}, userEmail)
map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false)
}
}
}()
response, apiError := validate.ActivateLicense(key, "")
if apiError != nil {
zap.S().Errorf("failed to activate license", zap.Error(apiError.Err))
zap.L().Error("failed to activate license", zap.Error(apiError.Err))
return nil, apiError
}
@@ -284,14 +284,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
err := l.ParsePlan()
if err != nil {
zap.S().Errorf("failed to activate license", zap.Error(err))
zap.L().Error("failed to activate license", zap.Error(err))
return nil, model.InternalError(err)
}
// store the license before activating it
err = lm.repo.InsertLicense(ctx, l)
if err != nil {
zap.S().Errorf("failed to activate license", zap.Error(err))
zap.L().Error("failed to activate license", zap.Error(err))
return nil, model.InternalError(err)
}
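Most hunks in this file (and several files below) swap zap's sugared logger, zap.S() with printf-style messages, for the structured logger, zap.L() with typed fields. A minimal sketch of the difference, assuming zap v1 and a globally replaced logger; the error and messages here are illustrative only.

package main

import (
    "errors"

    "go.uber.org/zap"
)

func main() {
    logger, _ := zap.NewProduction()
    defer logger.Sync()
    zap.ReplaceGlobals(logger)

    err := errors.New("license server unreachable") // placeholder error

    // Sugared logger: format verbs and loosely typed arguments.
    zap.S().Errorf("failed to activate license: %v", err)

    // Structured logger: a constant message plus typed fields,
    // which is easier to index and query downstream.
    zap.L().Error("failed to activate license", zap.Error(err))
}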

View File

@@ -16,8 +16,10 @@ import (
"go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/constants"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/migrate"
"go.signoz.io/signoz/pkg/query-service/version"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"
@@ -27,18 +29,19 @@ import (
)
func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
config := zap.NewDevelopmentConfig()
config := zap.NewProductionConfig()
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
defer stop()
config.EncoderConfig.EncodeDuration = zapcore.StringDurationEncoder
otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
consoleEncoder := zapcore.NewConsoleEncoder(config.EncoderConfig)
defaultLogLevel := zapcore.DebugLevel
config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder
config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
config.EncoderConfig.TimeKey = "timestamp"
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig)
defaultLogLevel := zapcore.InfoLevel
res := resource.NewWithAttributes(
semconv.SchemaURL,
semconv.ServiceNameKey.String("query-service"),
@@ -48,14 +51,15 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
)
if enableQueryServiceLogOTLPExport == true {
conn, err := grpc.DialContext(ctx, constants.OTLPTarget, grpc.WithBlock(), grpc.WithInsecure(), grpc.WithTimeout(time.Second*30))
if enableQueryServiceLogOTLPExport {
ctx, _ := context.WithTimeout(ctx, time.Second*30)
conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials()))
if err != nil {
log.Println("failed to connect to otlp collector to export query service logs with error:", err)
log.Fatalf("failed to establish connection: %v", err)
} else {
logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize)
if err != nil {
logExportBatchSizeInt = 1000
logExportBatchSizeInt = 512
}
ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{
BatchSize: logExportBatchSizeInt,
@@ -113,7 +117,6 @@ func main() {
zap.ReplaceGlobals(loggerMgr)
defer loggerMgr.Sync() // flushes buffer, if any
logger := loggerMgr.Sugar()
version.PrintVersion()
serverOptions := &app.ServerOptions{
@@ -137,22 +140,28 @@ func main() {
auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET")
if len(auth.JwtSecret) == 0 {
zap.S().Warn("No JWT secret key is specified.")
zap.L().Warn("No JWT secret key is specified.")
} else {
zap.S().Info("No JWT secret key set successfully.")
zap.L().Info("JWT secret key set successfully.")
}
if err := migrate.Migrate(constants.RELATIONAL_DATASOURCE_PATH); err != nil {
zap.L().Error("Failed to migrate", zap.Error(err))
} else {
zap.L().Info("Migration successful")
}
server, err := app.NewServer(serverOptions)
if err != nil {
logger.Fatal("Failed to create server", zap.Error(err))
zap.L().Fatal("Failed to create server", zap.Error(err))
}
if err := server.Start(); err != nil {
logger.Fatal("Could not start servers", zap.Error(err))
zap.L().Fatal("Could not start server", zap.Error(err))
}
if err := auth.InitAuthCache(context.Background()); err != nil {
logger.Fatal("Failed to initialize auth cache", zap.Error(err))
zap.L().Fatal("Failed to initialize auth cache", zap.Error(err))
}
signalsChannel := make(chan os.Signal, 1)
@@ -161,9 +170,9 @@ func main() {
for {
select {
case status := <-server.HealthCheckStatus():
logger.Info("Received HealthCheck status: ", zap.Int("status", int(status)))
zap.L().Info("Received HealthCheck status: ", zap.Int("status", int(status)))
case <-signalsChannel:
logger.Fatal("Received OS Interrupt Signal ... ")
zap.L().Fatal("Received OS Interrupt Signal ... ")
server.Stop()
}
}
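The initZapLog changes above also move the OTLP export connection off the deprecated grpc.WithInsecure() and grpc.WithTimeout options, bounding the blocking dial with a context deadline instead. A hedged sketch of the equivalent dial, assuming a collector at localhost:4317; note that keeping the cancel function (rather than discarding it) avoids leaking the timeout context.

package main

import (
    "context"
    "log"
    "time"

    "google.golang.org/grpc"
    "google.golang.org/grpc/credentials/insecure"
)

func dialCollector(target string) (*grpc.ClientConn, error) {
    // Deadline-bound dial; cancel releases the context's resources.
    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
    defer cancel()

    return grpc.DialContext(ctx, target,
        grpc.WithBlock(), // block until connected or the deadline hits
        grpc.WithTransportCredentials(insecure.NewCredentials()), // plaintext, replaces grpc.WithInsecure()
    )
}

func main() {
    conn, err := dialCollector("localhost:4317") // assumed collector endpoint
    if err != nil {
        log.Fatalf("failed to connect to OTLP collector: %v", err)
    }
    defer conn.Close()
}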

View File

@@ -9,8 +9,8 @@ import (
"github.com/google/uuid"
"github.com/pkg/errors"
saml2 "github.com/russellhaering/gosaml2"
"go.signoz.io/signoz/ee/query-service/sso/saml"
"go.signoz.io/signoz/ee/query-service/sso"
"go.signoz.io/signoz/ee/query-service/sso/saml"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@@ -24,16 +24,16 @@ const (
// OrgDomain identify org owned web domains for auth and other purposes
type OrgDomain struct {
Id uuid.UUID `json:"id"`
Name string `json:"name"`
OrgId string `json:"orgId"`
SsoEnabled bool `json:"ssoEnabled"`
SsoType SSOType `json:"ssoType"`
Id uuid.UUID `json:"id"`
Name string `json:"name"`
OrgId string `json:"orgId"`
SsoEnabled bool `json:"ssoEnabled"`
SsoType SSOType `json:"ssoType"`
SamlConfig *SamlConfig `json:"samlConfig"`
SamlConfig *SamlConfig `json:"samlConfig"`
GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"`
Org *basemodel.Organization
Org *basemodel.Organization
}
func (od *OrgDomain) String() string {
@@ -100,8 +100,8 @@ func (od *OrgDomain) GetSAMLCert() string {
return ""
}
// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
// requesting OAuth and also used in processing response from google
// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
// requesting OAuth and also used in processing response from google
func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {
if od.GoogleAuthConfig == nil {
return nil, fmt.Errorf("Google auth is not setup correctly for this domain")
@@ -137,38 +137,36 @@ func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServicePro
}
func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1)
// build redirect url from window.location sent by frontend
redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path)
// prepare state that gets relayed back when the auth provider
// calls back our url. here we pass the app url (where signoz runs)
// and the domain Id. The domain Id helps in identifying sso config
// when the call back occurs and the app url is useful in redirecting user
// back to the right path.
// when the call back occurs and the app url is useful in redirecting user
// back to the right path.
// why do we need to pass app url? the callback typically is handled by backend
// and sometimes backend might right at a different port or is unaware of frontend
// endpoint (unless SITE_URL param is set). hence, we receive this build sso request
// along with frontend window.location and use it to relay the information through
// auth provider to the backend (HandleCallback or HandleSSO method).
// along with frontend window.location and use it to relay the information through
// auth provider to the backend (HandleCallback or HandleSSO method).
relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId)
switch (od.SsoType) {
switch od.SsoType {
case SAML:
sp, err := od.PrepareSamlRequest(siteUrl)
if err != nil {
return "", err
}
return sp.BuildAuthURL(relayState)
case GoogleAuth:
googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl)
if err != nil {
return "", err
@@ -176,9 +174,8 @@ func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
return googleProvider.BuildAuthURL(relayState)
default:
zap.S().Errorf("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
return "", fmt.Errorf("unsupported SSO config for the domain")
zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
return "", fmt.Errorf("unsupported SSO config for the domain")
}
}
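The SSO flow above packs the frontend redirect URL and the org domain id into a single relayState string that the identity provider echoes back to the callback handler. A small, hedged sketch of building and later parsing such a value; the query-parameter name domainId mirrors the code above, everything else is illustrative.

package main

import (
    "fmt"
    "net/url"
    "strings"
)

// buildRelayState packs the app redirect URL and the org domain id into one string,
// following the "<redirectURL>?domainId=<id>" shape used above.
func buildRelayState(siteURL *url.URL, domainID string) string {
    fmtDomainID := strings.ReplaceAll(domainID, "-", ":")
    redirect := fmt.Sprintf("%s://%s%s", siteURL.Scheme, siteURL.Host, siteURL.Path)
    return fmt.Sprintf("%s?domainId=%s", redirect, fmtDomainID)
}

// parseRelayState recovers the redirect URL and domain id when the callback arrives.
func parseRelayState(relayState string) (redirect, domainID string, err error) {
    u, err := url.Parse(relayState)
    if err != nil {
        return "", "", err
    }
    domainID = strings.ReplaceAll(u.Query().Get("domainId"), ":", "-")
    u.RawQuery = ""
    return u.String(), domainID, nil
}

func main() {
    site, _ := url.Parse("https://signoz.example.com/login") // hypothetical site URL
    rs := buildRelayState(site, "9f1c45bc-aaaa-bbbb-cccc-000000000001")
    fmt.Println(rs)
    fmt.Println(parseRelayState(rs))
}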

View File

@@ -1,10 +1,32 @@
package model
type PAT struct {
Id string `json:"id" db:"id"`
UserID string `json:"userId" db:"user_id"`
Token string `json:"token" db:"token"`
Name string `json:"name" db:"name"`
CreatedAt int64 `json:"createdAt" db:"created_at"`
ExpiresAt int64 `json:"expiresAt" db:"expires_at"`
type User struct {
Id string `json:"id" db:"id"`
Name string `json:"name" db:"name"`
Email string `json:"email" db:"email"`
CreatedAt int64 `json:"createdAt" db:"created_at"`
ProfilePictureURL string `json:"profilePictureURL" db:"profile_picture_url"`
NotFound bool `json:"notFound"`
}
type CreatePATRequestBody struct {
Name string `json:"name"`
Role string `json:"role"`
ExpiresInDays int64 `json:"expiresInDays"`
}
type PAT struct {
Id string `json:"id" db:"id"`
UserID string `json:"userId" db:"user_id"`
CreatedByUser User `json:"createdByUser"`
UpdatedByUser User `json:"updatedByUser"`
Token string `json:"token" db:"token"`
Role string `json:"role" db:"role"`
Name string `json:"name" db:"name"`
CreatedAt int64 `json:"createdAt" db:"created_at"`
ExpiresAt int64 `json:"expiresAt" db:"expires_at"`
UpdatedAt int64 `json:"updatedAt" db:"updated_at"`
LastUsed int64 `json:"lastUsed" db:"last_used"`
Revoked bool `json:"revoked" db:"revoked"`
UpdatedByUserID string `json:"updatedByUserId" db:"updated_by_user_id"`
}
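The expanded PAT model above nests the creating and updating users and adds revocation metadata. A hedged sketch of roughly what the JSON shape looks like when such a struct is marshalled; the field tags mirror the definition above, the values are made up.

package main

import (
    "encoding/json"
    "fmt"
    "time"
)

type User struct {
    Id                string `json:"id"`
    Name              string `json:"name"`
    Email             string `json:"email"`
    CreatedAt         int64  `json:"createdAt"`
    ProfilePictureURL string `json:"profilePictureURL"`
    NotFound          bool   `json:"notFound"`
}

type PAT struct {
    Id              string `json:"id"`
    UserID          string `json:"userId"`
    CreatedByUser   User   `json:"createdByUser"`
    UpdatedByUser   User   `json:"updatedByUser"`
    Token           string `json:"token"`
    Role            string `json:"role"`
    Name            string `json:"name"`
    CreatedAt       int64  `json:"createdAt"`
    ExpiresAt       int64  `json:"expiresAt"`
    UpdatedAt       int64  `json:"updatedAt"`
    LastUsed        int64  `json:"lastUsed"`
    Revoked         bool   `json:"revoked"`
    UpdatedByUserID string `json:"updatedByUserId"`
}

func main() {
    now := time.Now().Unix()
    p := PAT{
        Id:            "pat-01", // illustrative values only
        UserID:        "user-01",
        Name:          "ci-token",
        Role:          "VIEWER",
        CreatedAt:     now,
        ExpiresAt:     now + 30*24*3600,
        CreatedByUser: User{Id: "user-01", Name: "Jane", Email: "jane@example.com"},
    }
    out, _ := json.MarshalIndent(p, "", "  ")
    fmt.Println(string(out))
}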

View File

@@ -52,14 +52,14 @@ var BasicPlan = basemodel.FeatureSet{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: 20,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: 10,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
@@ -90,6 +90,13 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: false,
@@ -177,6 +184,13 @@ var ProPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
@@ -264,6 +278,13 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelEmail,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AlertChannelMsTeams,
Active: true,
@@ -279,17 +300,17 @@ var EnterprisePlan = basemodel.FeatureSet{
Route: "",
},
basemodel.Feature{
Name: Onboarding,
Active: true,
Usage: 0,
Name: Onboarding,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
Route: "",
},
basemodel.Feature{
Name: ChatSupport,
Active: true,
Usage: 0,
Name: ChatSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
Route: "",
},
}
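In the plan definitions above, a UsageLimit of -1 is the sentinel for "unlimited", which is how panels and alerts become uncapped in the community plan. A hedged sketch of how such a gate might be evaluated; the Feature shape mirrors the fields above, the check itself is illustrative rather than the project's actual enforcement code.

package main

import "fmt"

type Feature struct {
    Name       string
    Active     bool
    Usage      int64
    UsageLimit int64 // -1 means unlimited
}

// allowed reports whether one more unit of usage fits under the feature's limit.
func allowed(f Feature) bool {
    if !f.Active {
        return false
    }
    if f.UsageLimit < 0 { // -1 sentinel: no cap
        return true
    }
    return f.Usage < f.UsageLimit
}

func main() {
    panels := Feature{Name: "QUERY_BUILDER_PANELS", Active: true, Usage: 57, UsageLimit: -1}
    alerts := Feature{Name: "QUERY_BUILDER_ALERTS", Active: true, Usage: 10, UsageLimit: 10}
    fmt.Println(allowed(panels)) // true: unlimited
    fmt.Println(allowed(alerts)) // false: at the cap
}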

View File

@@ -102,6 +102,6 @@ func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (*
IDPCertificateStore: certStore,
SPKeyStore: randomKeyStore,
}
zap.S().Debugf("SAML request:", sp)
zap.L().Debug("SAML request", zap.Any("sp", sp))
return sp, nil
}

View File

@@ -91,12 +91,12 @@ func (lm *Manager) UploadUsage() {
// check if license is present or not
license, err := lm.licenseRepo.GetActiveLicense(ctx)
if err != nil {
zap.S().Errorf("failed to get active license: %v", zap.Error(err))
zap.L().Error("failed to get active license", zap.Error(err))
return
}
if license == nil {
// we will not start the usage reporting if license is not present.
zap.S().Info("no license present, skipping usage reporting")
zap.L().Info("no license present, skipping usage reporting")
return
}
@@ -123,7 +123,7 @@ func (lm *Manager) UploadUsage() {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
zap.S().Errorf("failed to get usage from clickhouse: %v", zap.Error(err))
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
return
}
for _, u := range dbusages {
@@ -133,37 +133,33 @@ func (lm *Manager) UploadUsage() {
}
if len(usages) <= 0 {
zap.S().Info("no snapshots to upload, skipping.")
zap.L().Info("no snapshots to upload, skipping.")
return
}
zap.S().Info("uploading usage data")
zap.L().Info("uploading usage data")
// Try to get the org name
orgName := ""
orgNames, err := lm.modelDao.GetOrgs(ctx)
if err != nil {
zap.S().Errorf("failed to get org data: %v", zap.Error(err))
} else {
if len(orgNames) != 1 {
zap.S().Errorf("expected one org but got %d orgs", len(orgNames))
} else {
orgName = orgNames[0].Name
}
orgNames, orgError := lm.modelDao.GetOrgs(ctx)
if orgError != nil {
zap.L().Error("failed to get org data: %v", zap.Error(orgError))
}
if len(orgNames) == 1 {
orgName = orgNames[0].Name
}
usagesPayload := []model.Usage{}
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
zap.S().Errorf("error while decrypting usage data: %v", zap.Error(err))
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
return
}
usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
zap.S().Errorf("error while unmarshalling usage data: %v", zap.Error(err))
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
return
}
@@ -188,13 +184,13 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
for i := 1; i <= MaxRetries; i++ {
apiErr := licenseserver.SendUsage(ctx, payload)
if apiErr != nil && i == MaxRetries {
zap.S().Errorf("retries stopped : %v", zap.Error(apiErr))
zap.L().Error("retries stopped : %v", zap.Error(apiErr))
// not returning error here since it is captured in the failed count
return
} else if apiErr != nil {
// sleeping for exponential backoff
sleepDuration := RetryInterval * time.Duration(i)
zap.S().Errorf("failed to upload snapshot retrying after %v secs : %v", sleepDuration.Seconds(), zap.Error(apiErr.Err))
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)
} else {
break
@@ -205,7 +201,7 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
func (lm *Manager) Stop() {
lm.scheduler.Stop()
zap.S().Debug("sending usage data before shutting down")
zap.L().Info("sending usage data before shutting down")
// send usage before shutting down
lm.UploadUsage()
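The upload path above retries failed snapshot uploads with a sleep that grows with the attempt number before giving up after MaxRetries. A hedged, generic sketch of that retry shape; the constants and the send function are placeholders, not the project's actual uploader.

package main

import (
    "errors"
    "fmt"
    "time"
)

const (
    maxRetries    = 3
    retryInterval = 5 * time.Second
)

// uploadWithBackoff retries send, sleeping retryInterval*attempt between failures,
// and stops (without returning an error) once maxRetries is exhausted.
func uploadWithBackoff(send func() error) {
    for attempt := 1; attempt <= maxRetries; attempt++ {
        err := send()
        if err == nil {
            return
        }
        if attempt == maxRetries {
            fmt.Printf("retries stopped after %d attempts: %v\n", attempt, err)
            return
        }
        sleep := retryInterval * time.Duration(attempt)
        fmt.Printf("upload failed, retrying in %s: %v\n", sleep, err)
        time.Sleep(sleep)
    }
}

func main() {
    uploadWithBackoff(func() error { return errors.New("license server unreachable") })
}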

frontend/.gitignore vendored (new file, 3 lines added)
View File

@@ -0,0 +1,3 @@
# Sentry Config File
.env.sentry-build-plugin

View File

@@ -1,4 +1,4 @@
FROM nginx:1.25.2-alpine
FROM nginx:1.26-alpine
# Add Maintainer Info
LABEL maintainer="signoz"

View File

@@ -4,6 +4,7 @@ const config: Config.InitialOptions = {
clearMocks: true,
coverageDirectory: 'coverage',
coverageReporters: ['text', 'cobertura', 'html', 'json-summary'],
collectCoverageFrom: ['src/**/*.{ts,tsx}'],
moduleFileExtensions: ['ts', 'tsx', 'js', 'json'],
modulePathIgnorePatterns: ['dist'],
moduleNameMapper: {
@@ -20,9 +21,11 @@ const config: Config.InitialOptions = {
transform: {
'^.+\\.(ts|tsx)?$': 'ts-jest',
'^.+\\.(js|jsx)$': 'babel-jest',
'^.+\\.(css|scss|sass|less)$': 'jest-preview/transforms/css',
'^(?!.*\\.(js|jsx|mjs|cjs|ts|tsx|css|json)$)': 'jest-preview/transforms/file',
},
transformIgnorePatterns: [
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios)/)',
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color)/)',
],
setupFilesAfterEnv: ['<rootDir>jest.setup.ts'],
testPathIgnorePatterns: ['/node_modules/', '/public/'],
@@ -33,6 +36,14 @@ const config: Config.InitialOptions = {
browsers: ['chromium', 'firefox', 'webkit'],
},
},
coverageThreshold: {
global: {
statements: 80,
branches: 65,
functions: 80,
lines: 80,
},
},
};
export default config;

View File

@@ -7,6 +7,7 @@
*/
import '@testing-library/jest-dom';
import 'jest-styled-components';
import './src/styles.scss';
import { server } from './src/mocks-server/server';
// Establish API mocking before all tests.

View File

@@ -13,13 +13,17 @@
"jest": "jest",
"jest:coverage": "jest --coverage",
"jest:watch": "jest --watch",
"jest-preview": "jest-preview",
"test:debug": "npm-run-all -p test jest-preview",
"postinstall": "is-ci || yarn husky:configure",
"playwright": "npm run i18n:generate-hash && NODE_ENV=testing playwright test --config=./playwright.config.ts",
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
"playwright:codegen:local": "playwright codegen http://localhost:3301",
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
"commitlint": "commitlint --edit $1"
"commitlint": "commitlint --edit $1",
"test": "jest --coverage",
"test:changedsince": "jest --changedSince=develop --coverage --silent"
},
"engines": {
"node": ">=16.15.0"
@@ -36,13 +40,20 @@
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",
"@monaco-editor/react": "^4.3.1",
"@signozhq/design-tokens": "0.0.6",
"@radix-ui/react-tabs": "1.0.4",
"@radix-ui/react-tooltip": "1.0.7",
"@sentry/react": "7.102.1",
"@sentry/webpack-plugin": "2.16.0",
"@signozhq/design-tokens": "0.0.8",
"@uiw/react-md-editor": "3.23.5",
"@visx/group": "3.3.0",
"@visx/shape": "3.5.0",
"@visx/tooltip": "3.3.0",
"@xstate/react": "^3.0.0",
"ansi-to-html": "0.7.2",
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"axios": "1.6.2",
"axios": "1.6.4",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",
@@ -67,6 +78,7 @@
"fontfaceobserver": "2.3.0",
"history": "4.10.1",
"html-webpack-plugin": "5.5.0",
"http-proxy-middleware": "2.0.6",
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
"i18next-http-backend": "^1.3.2",
@@ -75,11 +87,12 @@
"less": "^4.1.2",
"less-loader": "^10.2.0",
"lodash-es": "^4.17.21",
"lucide-react": "0.288.0",
"lucide-react": "0.321.0",
"mini-css-extract-plugin": "2.4.5",
"papaparse": "5.4.1",
"react": "18.2.0",
"react-addons-update": "15.6.3",
"react-beautiful-dnd": "13.1.1",
"react-dnd": "16.0.1",
"react-dnd-html5-backend": "16.0.1",
"react-dom": "18.2.0",
@@ -99,6 +112,7 @@
"react-virtuoso": "4.0.3",
"redux": "^4.0.5",
"redux-thunk": "^2.3.0",
"rehype-raw": "7.0.0",
"stream": "^0.0.2",
"style-loader": "1.3.0",
"styled-components": "^5.3.11",
@@ -112,6 +126,7 @@
"web-vitals": "^0.2.4",
"webpack": "5.88.2",
"webpack-dev-server": "^4.15.1",
"webpack-retry-chunk-load-plugin": "3.1.1",
"xstate": "^4.31.0"
},
"browserslist": {
@@ -153,6 +168,7 @@
"@types/papaparse": "5.3.7",
"@types/react": "18.0.26",
"@types/react-addons-update": "0.14.21",
"@types/react-beautiful-dnd": "13.1.8",
"@types/react-dom": "18.0.10",
"@types/react-grid-layout": "^1.1.2",
"@types/react-helmet-async": "1.0.3",
@@ -190,9 +206,11 @@
"husky": "^7.0.4",
"is-ci": "^3.0.1",
"jest-playwright-preset": "^1.7.2",
"jest-preview": "0.3.1",
"jest-styled-components": "^7.0.8",
"lint-staged": "^12.5.0",
"msw": "1.3.2",
"npm-run-all": "latest",
"portfinder-sync": "^0.0.2",
"prettier": "2.2.1",
"raw-loader": "4.0.2",
@@ -210,7 +228,8 @@
},
"lint-staged": {
"*.(js|jsx|ts|tsx)": [
"eslint --fix"
"eslint --fix",
"sh scripts/typecheck-staged.sh"
]
},
"resolutions": {

View File

@@ -0,0 +1 @@
<svg width="32" height="33" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M15.91 28.675c-6.199 0-12.888-3.888-12.888-12.421S9.711 3.832 15.911 3.832c3.444 0 6.621 1.134 8.977 3.2 2.555 2.267 3.91 5.466 3.91 9.222 0 3.755-1.355 6.933-3.91 9.2-2.356 2.066-5.555 3.221-8.977 3.221z" fill="url(#prefix__paint0_radial_2122_6520)"/><path d="M26.552 8.87c1.185 1.91 1.803 4.186 1.803 6.717 0 3.756-1.356 6.933-3.911 9.2-2.356 2.066-5.556 3.222-8.978 3.222-4.013 0-8.221-1.634-10.706-5.098 2.391 3.924 6.889 5.764 11.15 5.764 3.423 0 6.623-1.155 8.978-3.222 2.555-2.266 3.911-5.444 3.911-9.2 0-2.83-.771-5.346-2.247-7.383z" fill="#EB8F00"/><path d="M20.123 22.905c0 1.685-1.846 2.667-4.124 2.667-2.277 0-4.124-.989-4.124-2.667 0-1.677 1.847-3.522 4.124-3.522 2.278 0 4.124 1.838 4.124 3.522zM12.06 14.852l1.88-1.748c.267-.331.307-.778.038-1.045-.353-.355-.98-.269-1.32.136-.018.033-.03.042-.049.075l-1.333 1.938-1.804-1.682c-.027-.03-.042-.034-.067-.062-.42-.32-1.05-.267-1.315.157-.207.32-.07.745.264 1.011l2.313 1.372-1.96 1.833c-.262.326-.31.77-.04 1.044.351.358.978.276 1.32-.127.018-.033.031-.042.051-.075l1.405-2.031 1.706 1.609c.027.029.043.035.067.064.418.322 1.049.273 1.318-.149.206-.32.07-.746-.26-1.013l-2.213-1.307zM20.61 14.852l-1.879-1.748c-.267-.331-.307-.778-.036-1.045.354-.355.978-.269 1.318.136.018.033.034.042.051.075l1.334 1.938 1.806-1.682c.025-.03.04-.034.065-.062.422-.32 1.05-.267 1.317.157.205.32.067.745-.266 1.011L22 15.004l1.96 1.833c.268.33.313.775.042 1.044-.349.358-.976.276-1.318-.127-.02-.033-.033-.042-.051-.075l-1.404-2.031-1.71 1.609c-.024.029-.04.035-.066.064-.418.322-1.046.273-1.315-.149-.21-.32-.074-.746.257-1.013l2.216-1.307zM11.911 8.696c.511.044.711-.645.178-.8a4.07 4.07 0 00-1.289-.133A4.596 4.596 0 007.689 9.14c-.378.4.156.89.556.6a5.829 5.829 0 013.666-1.044zM20.044 8.696a5.85 5.85 0 013.689 1.044c.4.29.933-.2.555-.6a4.645 4.645 0 00-3.11-1.377 4.07 4.07 0 00-1.29.133.408.408 0 00-.282.504c.053.194.24.318.438.296z" fill="#422B0D"/><defs><radialGradient id="prefix__paint0_radial_2122_6520" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(15.91 16.254) scale(12.657)"><stop offset=".5" stop-color="#FDE030"/><stop offset=".92" stop-color="#F7C02B"/><stop offset="1" stop-color="#F4A223"/></radialGradient></defs></svg>


View File

@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#prefix__clip0_2022_1972)" stroke="#fff" stroke-width="1.333" stroke-linecap="round" stroke-linejoin="round"><path d="M6.667 2h.006M9.333 1.333h.007M1.333 6l13.334-3.333M8 8V4.333M11.333 8H4.667a2 2 0 00-2 2v2.667a2 2 0 002 2h6.666a2 2 0 002-2V10a2 2 0 00-2-2zM6 8v3.333M10 8v3.333M2.667 11.334h10.666"/></g><defs><clipPath id="prefix__clip0_2022_1972"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>


View File

@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.333" stroke-linecap="round"><path d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z" stroke-linejoin="round"/><path d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3M10.75 10.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"/></g></svg>


File diff suppressed because one or more lines are too long (new image asset, ~5.6 KiB)

View File

@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.333" stroke-linecap="round" stroke-linejoin="round"><path d="M2 4.667V3.333C2 2.6 2.6 2 3.333 2h1.334M11.333 2h1.334C13.4 2 14 2.6 14 3.333v1.334M14 11.334v1.333C14 13.4 13.4 14 12.667 14h-1.334M4.667 14H3.333C2.6 14 2 13.4 2 12.667v-1.333M8.667 4.667H5.333a.667.667 0 00-.666.666v2c0 .368.298.667.666.667h3.334a.667.667 0 00.666-.667v-2a.667.667 0 00-.666-.667zM10.667 8H7.333a.667.667 0 00-.666.667v2c0 .368.298.666.666.666h3.334a.667.667 0 00.666-.666v-2A.667.667 0 0010.667 8z"/></g></svg>


Binary file not shown (new image, 88 KiB).

View File

@@ -0,0 +1 @@
<svg width="24" height="24" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M12 2c1 2.538 2.5 2.962 3.5 3.808.942.78 1.481 1.845 1.5 2.961 0 1.122-.527 2.198-1.464 2.992C14.598 12.554 13.326 13 12 13s-2.598-.446-3.536-1.24C7.527 10.968 7 9.892 7 8.77c0-.255 0-.508.1-.762.085.25.236.48.443.673.207.193.463.342.75.437a2.334 2.334 0 001.767-.128c.263-.135.485-.32.65-.539.166-.22.269-.468.301-.727a1.452 1.452 0 00-.11-.765 1.699 1.699 0 00-.501-.644C8 4.115 11 2 12 2zM17 16l-5 6-5-6h10z" stroke="#fff" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/></svg>


View File

@@ -0,0 +1 @@
<svg width="24" height="24" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M23.06 17.526c-1.281.668-7.916 3.396-9.328 4.132-1.413.736-2.198.73-3.314.196C9.303 21.32 2.242 18.468.97 17.86c-.636-.303-.97-.56-.97-.802v-2.426s9.192-2.001 10.676-2.534c1.484-.532 1.999-.551 3.262-.089 1.263.463 8.814 1.826 10.062 2.283v2.391c0 .24-.288.503-.94.843z" fill="#912626"/><path d="M23.06 15.114c-1.281.668-7.916 3.396-9.329 4.132-1.412.737-2.197.73-3.313.196C9.302 18.91 2.242 16.056.97 15.45c-1.272-.608-1.298-1.027-.049-1.516 1.25-.49 8.271-3.244 9.755-3.776 1.484-.533 1.999-.552 3.262-.09 1.263.463 7.858 3.088 9.106 3.546 1.248.457 1.296.834.015 1.501z" fill="#C6302B"/><path d="M23.06 13.6c-1.281.668-7.916 3.396-9.328 4.133-1.413.736-2.198.73-3.314.196S2.242 14.543.97 13.935c-.636-.304-.97-.56-.97-.802v-2.426s9.192-2.001 10.676-2.534c1.484-.532 1.999-.551 3.262-.089C15.2 8.547 22.752 9.91 24 10.366v2.392c0 .24-.288.503-.94.843z" fill="#912626"/><path d="M23.06 11.19c-1.281.667-7.916 3.395-9.329 4.131-1.412.737-2.197.73-3.313.196-1.116-.533-8.176-3.386-9.448-3.993-1.272-.608-1.298-1.027-.049-1.516 1.25-.49 8.271-3.244 9.755-3.776 1.484-.533 1.999-.552 3.262-.09 1.263.463 7.858 3.088 9.106 3.545 1.248.458 1.296.835.015 1.502z" fill="#C6302B"/><path d="M23.06 9.53c-1.281.668-7.916 3.396-9.328 4.132-1.413.737-2.198.73-3.314.196-1.116-.533-8.176-3.386-9.448-3.993C.334 9.56 0 9.305 0 9.062V6.636s9.192-2 10.676-2.533c1.484-.533 1.999-.552 3.262-.09C15.2 4.477 22.752 5.84 24 6.297v2.392c0 .24-.288.502-.94.842z" fill="#912626"/><path d="M23.06 7.118c-1.281.668-7.916 3.396-9.329 4.132-1.412.737-2.197.73-3.313.196C9.303 10.913 2.242 8.061.97 7.453-.302 6.845-.328 6.427.921 5.937c1.25-.489 8.271-3.244 9.755-3.776 1.484-.532 1.999-.552 3.262-.089 1.263.463 7.858 3.088 9.106 3.545 1.248.457 1.296.834.015 1.501z" fill="#C6302B"/><path d="M14.933 4.758l-2.064.215-.462 1.111-.746-1.24L9.28 4.63l1.778-.641-.534-.985 1.665.651 1.569-.513-.424 1.017 1.6.6zm-2.649 5.393l-3.85-1.597 5.517-.847-1.667 2.444zM6.945 5.376c1.63 0 2.95.512 2.95 1.143 0 .632-1.32 1.144-2.95 1.144-1.629 0-2.95-.512-2.95-1.144 0-.63 1.321-1.143 2.95-1.143z" fill="#fff"/><path d="M17.371 5.062l3.266 1.29-3.263 1.29-.003-2.58z" fill="#621B1C"/><path d="M13.758 6.492l3.613-1.43.003 2.58-.354.139-3.262-1.29z" fill="#9A2928"/></svg>


View File

@@ -0,0 +1 @@
<svg width="32" height="33" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M14.309 13.108l-6.704-3.32s-.016-.317.284-.477c.302-.16 5.053-2.107 5.435-2.107.383 0 2.62.431 4.249.793 1.629.363 5.933 1.287 5.953 1.57.02.281-4.404 4.806-4.404 4.806l-4.813-1.265z" fill="#C3FECE"/><path d="M20.423 11.037s-2.811-.826-5.546-1.469c-1.274-.3-5.016-1.084-5.016-1.084s.398-.173.698-.3c.305-.127.547-.193.547-.193s2.44.486 4.253.873c2.453.522 5.886 1.547 5.966 1.709.082.16-.902.464-.902.464z" fill="#fff"/><path d="M14.98 10.26c-.598.415-.011.666 1.09.924 1.207.282 2.127.698 2.903.247.7-.405-1.014-.845-1.8-1.014-.6-.129-1.731-.478-2.193-.158z" fill="#ACB1B2"/><path d="M17.17 11.095c-.005 0 .02-4.869.02-5.049 0-.18-.203-.342.02-.724.222-.382.804-.342.804-.342s2.416-.702 3.38-.945c.964-.242 3.098-.804 3.098-.804l.142 1.22s-2.236.631-3.342.913c-1.107.282-2.616.745-2.616.745l-.222.202.064 4.757s-.206.231-.668.231c-.45-.002-.68-.204-.68-.204z" fill="#FFD816"/><path d="M24.095 3.855c.018.38.22.616.46.616.24 0 .404-.307.369-.707-.038-.398-.296-.58-.516-.506-.22.073-.327.32-.313.597zM18.46 6.422a.209.209 0 01-.123-.038l-1.153-.769a.225.225 0 01-.063-.309.222.222 0 01.31-.062l1.153.769a.224.224 0 01.062.309.228.228 0 01-.187.1z" fill="#FEB804"/><path d="M18.636 6.235a.225.225 0 01-.178-.089c-.295-.393-.633-.84-.693-.909a.225.225 0 01-.031-.284.222.222 0 01.309-.062c.04.027.062.042.771.986.073.098.007.238-.091.312-.04.03-.04.046-.087.046z" fill="#FEB804"/><path d="M18.365 6.609c-.01 0-.022 0-.035-.003l-1.111-.175a.221.221 0 11.069-.438l1.11.176c.12.02.225.042.205.164-.016.107-.129.276-.238.276z" fill="#FEB804"/><path d="M7.596 9.764c.353 0 3.188.744 4.65 1.013 1.463.27 5.878 1.314 6.027 1.342.149.03.12 1.94.12 1.94s2.089 10.8 2.029 11.309c-.06.506-1.431 4.415-1.431 4.415s-.807.12-2.865-.478c-2.057-.598-7.488-2.089-7.817-2.506-.329-.418-.12-5.938-.298-9.338-.182-3.402-.415-7.697-.415-7.697z" fill="#79DD8A"/><path d="M24.06 27.036c.113-.375-.518-4.402-.607-8.101-.089-3.698.229-9.324.076-9.369-.154-.042-5.256 2.553-5.256 2.553s-.022 3.671.04 7.133c.08 4.48.438 10.41.676 10.53.238.12 2.302-1.035 2.924-1.372 1.102-.598 2.058-1.074 2.147-1.374z" fill="#02AB46"/><path d="M20.408 13.82l.011-2.787.914-.45.026 3.056-.422.74-.529-.56z" fill="#DBDFE1"/><path d="M12.322 14.797c-1.973-.211-3.34 1.549-3.233 3.842.127 2.709 1.91 4.704 3.842 5.102 1.93.398 3.802-.44 3.842-3.402.044-3.087-2.669-5.353-4.451-5.542z" fill="#FEFEFD"/><path d="M13.637 17.27s-.4-1.344-1.602-.986c-1.202.357-1.853 2.973.187 4.15 1.96 1.131 3.764-.944 3.133-2.288-.574-1.227-1.718-.876-1.718-.876z" fill="#EF5B44"/><path d="M13.18 15.626c-.136.049-.243.602-.1 1.13.106.396.446.939.643.903.158-.029.278-.651.13-1.173-.174-.602-.516-.918-.674-.86z" fill="#B8CF17"/><path d="M13.15 18.746c-.564-.171-1.2 1.769-.057 2.977 1.26 1.331 2.73.158 2.69-.1-.057-.358-1.044-.615-1.53-1.215-.487-.605-.774-1.562-1.102-1.662z" fill="#FD8F01"/><path d="M11.346 18.417s.113-.849-.673-.802c-.76.046-.574.944-.574.944s-.633.076-.526.778c.08.53.64.524.64.524s-.616.242-.336.945c.249.624.822.373.822.373s-.21.609.287.93c.42.272.787.043.787.043s-.023.52.557.616c.703.115 1.007-.74.507-1.136-.38-.3-.724-.067-.724-.067s.07-.166.004-.357c-.045-.125-.116-.171-.116-.171s.616-.058.516-.758c-.1-.702-.716-.616-.716-.616s.358-.286.216-.802c-.14-.518-.671-.444-.671-.444z" fill="#A281D0"/><path d="M21.04 14.595c-.511 0-2.691-2.167-2.711-2.189a.222.222 0 01.024-.313.224.224 0 01.314.022c.14.155 1.806 1.702 2.286 2 .311-.465 1.322-2.498 2.191-4.333a.224.224 0 01.296-.107.223.223 0 
01.106.296c-2.142 4.526-2.353 4.586-2.466 4.617-.013.007-.027.007-.04.007z" fill="#2D802D"/></svg>


View File

@@ -0,0 +1,19 @@
<svg width="32" height="33" viewBox="0 0 32 33" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.36806 25.9481C5.93935 25.9481 3.15283 21.7098 3.15283 16.5002C3.15283 11.2907 5.94157 7.05238 9.36806 7.05238C12.7945 7.05238 15.5833 11.2907 15.5833 16.5002C15.5833 21.7098 12.7945 25.9481 9.36806 25.9481Z" fill="#FAFAFA"/>
<path d="M9.36815 7.49694C10.8414 7.49694 12.2524 8.38594 13.3391 10.0017C14.499 11.7241 15.139 14.0333 15.139 16.5003C15.139 18.9673 14.499 21.2764 13.3391 22.9989C12.2524 24.6146 10.8414 25.5036 9.36815 25.5036C7.89489 25.5036 6.48385 24.6146 5.39724 22.9989C4.23508 21.2764 3.59734 18.9673 3.59734 16.5003C3.59734 14.0333 4.23731 11.7241 5.39724 10.0017C6.48385 8.38594 7.89267 7.49694 9.36815 7.49694ZM9.36815 6.60794C5.69056 6.60794 2.7085 11.0374 2.7085 16.5003C2.7085 21.9632 5.69056 26.3926 9.36815 26.3926C13.0457 26.3926 16.0278 21.9632 16.0278 16.5003C16.0278 11.0374 13.0457 6.60794 9.36815 6.60794Z" fill="#B0BEC5"/>
<path d="M7.47266 15.5762C6.87269 15.0118 7.00602 13.8919 7.77487 13.0741C7.81486 13.0319 7.85486 12.9919 7.89708 12.9541C7.55488 12.7608 7.17934 12.6519 6.78381 12.6519C5.18611 12.6519 3.89062 14.414 3.89062 16.585C3.89062 18.756 5.18611 20.5182 6.78381 20.5182C8.3815 20.5182 9.67699 18.756 9.67699 16.585C9.67699 16.1962 9.63477 15.8184 9.55699 15.4629C8.83703 15.9806 7.97708 16.0495 7.47266 15.5762Z" fill="url(#paint0_linear_2122_5062)"/>
<path d="M22.6294 26.3932C26.3074 26.3932 29.289 21.9642 29.289 16.5008C29.289 11.0374 26.3074 6.60847 22.6294 6.60847C18.9514 6.60847 15.9697 11.0374 15.9697 16.5008C15.9697 21.9642 18.9514 26.3932 22.6294 26.3932Z" fill="#EEEEEE"/>
<path d="M22.6283 25.9493C19.2018 25.9493 16.4131 21.711 16.4131 16.5014C16.4131 11.2919 19.2018 7.05357 22.6283 7.05357C26.0548 7.05357 28.8435 11.2919 28.8435 16.5014C28.8435 21.711 26.057 25.9493 22.6283 25.9493Z" fill="#FAFAFA"/>
<path d="M22.6284 7.49816C24.1017 7.49816 25.5127 8.38716 26.5993 10.0029C27.7592 11.7254 28.3992 14.0345 28.3992 16.5015C28.3992 18.9685 27.7592 21.2777 26.5993 23.0001C25.5127 24.6159 24.1017 25.5049 22.6284 25.5049C21.1551 25.5049 19.7441 24.6159 18.6575 23.0001C17.4976 21.2777 16.8576 18.9685 16.8576 16.5015C16.8576 14.0345 17.4976 11.7254 18.6575 10.0029C19.7441 8.38716 21.1551 7.49816 22.6284 7.49816ZM22.6284 6.60916C18.9508 6.60916 15.9688 11.0386 15.9688 16.5015C15.9688 21.9644 18.9508 26.3939 22.6284 26.3939C26.306 26.3939 29.2881 21.9644 29.2881 16.5015C29.2881 11.0386 26.306 6.60916 22.6284 6.60916Z" fill="#B0BEC5"/>
<path d="M20.7339 15.5767C20.1339 15.0123 20.2672 13.8924 21.0361 13.0746C21.0761 13.0324 21.1161 12.9924 21.1583 12.9546C20.8161 12.7613 20.4406 12.6524 20.045 12.6524C18.4473 12.6524 17.1519 14.4146 17.1519 16.5856C17.1519 18.7566 18.4473 20.5187 20.045 20.5187C21.6427 20.5187 22.9382 18.7566 22.9382 16.5856C22.9382 16.1967 22.896 15.8189 22.8182 15.4634C22.1005 15.9812 21.2383 16.05 20.7339 15.5767Z" fill="url(#paint1_linear_2122_5062)"/>
<defs>
<linearGradient id="paint0_linear_2122_5062" x1="6.78232" y1="12.651" x2="6.78232" y2="20.5188" gradientUnits="userSpaceOnUse">
<stop stop-color="#424242"/>
<stop offset="1" stop-color="#212121"/>
</linearGradient>
<linearGradient id="paint1_linear_2122_5062" x1="20.0449" y1="12.6515" x2="20.0449" y2="20.5193" gradientUnits="userSpaceOnUse">
<stop stop-color="#424242"/>
<stop offset="1" stop-color="#212121"/>
</linearGradient>
</defs>
</svg>


View File

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="80px" height="80px" viewBox="0 0 80 80" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<!-- Generator: Sketch 64 (93537) - https://sketch.com -->
<title>Icon-Architecture/64/Arch_Amazon-EC2_64</title>
<desc>Created with Sketch.</desc>
<defs>
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
<stop stop-color="#C8511B" offset="0%"></stop>
<stop stop-color="#FF9900" offset="100%"></stop>
</linearGradient>
</defs>
<g id="Icon-Architecture/64/Arch_Amazon-EC2_64" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="Icon-Architecture-BG/64/Compute" fill="url(#linearGradient-1)">
<rect id="Rectangle" x="0" y="0" width="80" height="80"></rect>
</g>
<path d="M27,53 L52,53 L52,28 L27,28 L27,53 Z M54,28 L58,28 L58,30 L54,30 L54,34 L58,34 L58,36 L54,36 L54,39 L58,39 L58,41 L54,41 L54,45 L58,45 L58,47 L54,47 L54,51 L58,51 L58,53 L54,53 L54,53.136 C54,54.164 53.164,55 52.136,55 L52,55 L52,59 L50,59 L50,55 L46,55 L46,59 L44,59 L44,55 L41,55 L41,59 L39,59 L39,55 L35,55 L35,59 L33,59 L33,55 L29,55 L29,59 L27,59 L27,55 L26.864,55 C25.836,55 25,54.164 25,53.136 L25,53 L22,53 L22,51 L25,51 L25,47 L22,47 L22,45 L25,45 L25,41 L22,41 L22,39 L25,39 L25,36 L22,36 L22,34 L25,34 L25,30 L22,30 L22,28 L25,28 L25,27.864 C25,26.836 25.836,26 26.864,26 L27,26 L27,22 L29,22 L29,26 L33,26 L33,22 L35,22 L35,26 L39,26 L39,22 L41,22 L41,26 L44,26 L44,22 L46,22 L46,26 L50,26 L50,22 L52,22 L52,26 L52.136,26 C53.164,26 54,26.836 54,27.864 L54,28 Z M41,65.876 C41,65.944 40.944,66 40.876,66 L14.124,66 C14.056,66 14,65.944 14,65.876 L14,39.124 C14,39.056 14.056,39 14.124,39 L20,39 L20,37 L14.124,37 C12.953,37 12,37.953 12,39.124 L12,65.876 C12,67.047 12.953,68 14.124,68 L40.876,68 C42.047,68 43,67.047 43,65.876 L43,61 L41,61 L41,65.876 Z M68,14.124 L68,40.876 C68,42.047 67.047,43 65.876,43 L60,43 L60,41 L65.876,41 C65.944,41 66,40.944 66,40.876 L66,14.124 C66,14.056 65.944,14 65.876,14 L39.124,14 C39.056,14 39,14.056 39,14.124 L39,20 L37,20 L37,14.124 C37,12.953 37.953,12 39.124,12 L65.876,12 C67.047,12 68,12.953 68,14.124 L68,14.124 Z" id="Amazon-EC2_Icon_64_Squid" fill="#FFFFFF"></path>
</g>
</svg>


View File

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="80px" height="80px" viewBox="0 0 80 80" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<!-- Generator: Sketch 64 (93537) - https://sketch.com -->
<title>Icon-Architecture/64/Arch_Amazon-Elastic-Container-Service_64</title>
<desc>Created with Sketch.</desc>
<defs>
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
<stop stop-color="#C8511B" offset="0%"></stop>
<stop stop-color="#FF9900" offset="100%"></stop>
</linearGradient>
</defs>
<g id="Icon-Architecture/64/Arch_Amazon-Elastic-Container-Service_64" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="Icon-Architecture-BG/64/Containers" fill="url(#linearGradient-1)">
<rect id="Rectangle" x="0" y="0" width="80" height="80"></rect>
</g>
<path d="M64,48.2340095 L56,43.4330117 L56,32.0000169 C56,31.6440171 55.812,31.3150172 55.504,31.1360173 L44,24.4260204 L44,14.7520248 L64,26.5710194 L64,48.2340095 Z M65.509,25.13902 L43.509,12.139026 C43.199,11.9560261 42.818,11.9540261 42.504,12.131026 C42.193,12.3090259 42,12.6410257 42,13.0000256 L42,25.0000201 C42,25.3550199 42.189,25.6840198 42.496,25.8640197 L54,32.5740166 L54,44.0000114 C54,44.3510113 54.185,44.6770111 54.486,44.857011 L64.486,50.8570083 C64.644,50.9520082 64.822,51 65,51 C65.17,51 65.34,50.9570082 65.493,50.8700083 C65.807,50.6930084 66,50.3600085 66,50 L66,26.0000196 C66,25.6460198 65.814,25.31902 65.509,25.13902 L65.509,25.13902 Z M40.445,66.863001 L17,54.3990067 L17,26.5710194 L37,14.7520248 L37,24.4510204 L26.463,31.1560173 C26.175,31.3400172 26,31.6580171 26,32.0000169 L26,49.0000091 C26,49.373009 26.208,49.7150088 26.538,49.8870087 L39.991,56.8870055 C40.28,57.0370055 40.624,57.0380055 40.912,56.8880055 L53.964,50.1440086 L61.996,54.9640064 L40.445,66.863001 Z M64.515,54.1420068 L54.515,48.1420095 C54.217,47.9640096 53.849,47.9520096 53.541,48.1120095 L40.455,54.8730065 L28,48.3930094 L28,32.5490167 L38.537,25.8440197 C38.825,25.6600198 39,25.3420199 39,25.0000201 L39,13.0000256 C39,12.6410257 38.808,12.3090259 38.496,12.131026 C38.184,11.9540261 37.802,11.9560261 37.491,12.139026 L15.491,25.13902 C15.187,25.31902 15,25.6460198 15,26.0000196 L15,55 C15,55.3690062 15.204,55.7090061 15.53,55.883006 L39.984,68.8830001 C40.131,68.961 40.292,69 40.453,69 C40.62,69 40.786,68.958 40.937,68.8750001 L64.484,55.875006 C64.797,55.7020061 64.993,55.3750062 65.0001416,55.0180064 C65.006,54.6600066 64.821,54.3260067 64.515,54.1420068 L64.515,54.1420068 Z" id="Amazon-Elastic-Container-Service_Icon_64_Squid" fill="#FFFFFF"></path>
</g>
</svg>


View File

@@ -0,0 +1,2 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" fill="none"><path fill="url(#amazon-eks-color-16__paint0_linear_879_141)" fill-rule="evenodd" d="M6.381 10.148h.897V8.121l1.837 2.027h1.164L7.997 7.642l2.169-2.195H8.963L7.278 7.146V5.447h-.897v4.701z" clip-rule="evenodd"/><path fill="url(#amazon-eks-color-16__paint1_linear_879_141)" d="M8.532 3.803l3.186 1.81a.173.173 0 01.088.149v3.62c0 .06.033.118.088.149l2.842 1.615a.176.176 0 00.264-.15V3.947a.173.173 0 00-.088-.15L8.708.274a.176.176 0 00-.264.15v3.23c0 .062.034.119.088.15z"/><path fill="url(#amazon-eks-color-16__paint2_linear_879_141)" d="M11.273 10.288l-3.185 1.81a.178.178 0 01-.176 0l-3.63-2.062a.173.173 0 01-.088-.15V5.762c0-.062.034-.119.088-.15l3.186-1.81a.172.172 0 00.088-.15V.424a.176.176 0 00-.264-.15L1.088 3.798a.173.173 0 00-.088.15V11.7c0 .061.033.118.088.15l6.824 3.876c.054.03.122.03.176 0l6.204-3.524a.172.172 0 000-.3l-2.843-1.615a.178.178 0 00-.176 0z"/><defs><linearGradient id="amazon-eks-color-16__paint0_linear_879_141" x1="10.691" x2="8.521" y1="9.879" y2="4.634" gradientUnits="userSpaceOnUse"><stop stop-color="#426DDB"/><stop offset="1" stop-color="#3B4BDB"/></linearGradient><linearGradient id="amazon-eks-color-16__paint1_linear_879_141" x1="15.693" x2="9.546" y1="10.544" y2="-.213" gradientUnits="userSpaceOnUse"><stop stop-color="#426DDB"/><stop offset="1" stop-color="#3B4BDB"/></linearGradient><linearGradient id="amazon-eks-color-16__paint2_linear_879_141" x1="9.433" x2="2.732" y1="14.904" y2="2.88" gradientUnits="userSpaceOnUse"><stop stop-color="#2775FF"/><stop offset="1" stop-color="#188DFF"/></linearGradient></defs></svg>


Binary file not shown (image updated: 2.2 KiB before, 16 KiB after).

Binary file not shown (new image, 13 KiB).

View File

@@ -37,11 +37,16 @@
"text_condition1": "Send a notification when",
"text_condition2": "the threshold",
"text_condition3": "during the last",
"option_1min": "1 min",
"option_5min": "5 mins",
"option_10min": "10 mins",
"option_15min": "15 mins",
"option_30min": "30 mins",
"option_60min": "60 mins",
"option_4hours": "4 hours",
"option_3hours": "3 hours",
"option_6hours": "6 hours",
"option_12hours": "12 hours",
"option_24hours": "24 hours",
"field_threshold": "Alert Threshold",
"option_allthetimes": "all the times",
@@ -62,6 +67,7 @@
"button_cancel": "No",
"field_promql_expr": "PromQL Expression",
"field_alert_name": "Alert Name",
"field_notification_channel": "Notification Channel",
"field_alert_desc": "Alert Description",
"field_labels": "Labels",
"field_severity": "Severity",
@@ -100,7 +106,7 @@
"user_guide_ch_step3a": "Set alert severity, name and descriptions",
"user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
"user_tooltip_more_help": "More details on how to create alerts",
"choose_alert_type": "Choose a type for the alert:",
"choose_alert_type": "Choose a type for the alert",
"metric_based_alert": "Metric based Alert",
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
"log_based_alert": "Log-based Alert",
@@ -110,5 +116,8 @@
"exceptions_based_alert": "Exceptions-based Alert",
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
"field_unit": "Threshold unit",
"text_alert_on_absent": "Send a notification if data is missing for",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query"
}

View File

@@ -15,6 +15,7 @@
"button_test_channel": "Test",
"button_return": "Back",
"field_channel_name": "Name",
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"field_slack_recipient": "Recipient",

View File

@@ -16,6 +16,7 @@
"new_dashboard_title": "Sample Title",
"layout_saved_successfully": "Layout saved successfully",
"add_panel": "Add Panel",
"add_row": "Add Row",
"save_layout": "Save Layout",
"variable_updated_successfully": "Variable updated successfully",
"error_while_updating_variable": "Error while updating variable",
@@ -25,5 +26,5 @@
"dashboard_unsave_changes": "There are unsaved changes in the Query builder, please stage and run the query or the changes will be lost. Press OK to discard.",
"dashboard_save_changes": "Your graph built with {{queryTag}} query will be saved. Press OK to confirm.",
"your_graph_build_with": "Your graph built with",
"dashboar_ok_confirm": "query will be saved. Press OK to confirm."
"dashboard_ok_confirm": "query will be saved. Press OK to confirm."
}

View File

@@ -14,6 +14,5 @@
"delete_domain_message": "Are you sure you want to delete this domain?",
"delete_domain": "Delete Domain",
"add_domain": "Add Domains",
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly",
"invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually"
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly"
}

View File

@@ -3,6 +3,7 @@
"alert_channels": "Alert Channels",
"organization_settings": "Organization Settings",
"ingestion_settings": "Ingestion Settings",
"api_keys": "Access Tokens",
"my_settings": "My Settings",
"overview_metrics": "Overview Metrics",
"dbcall_metrics": "Database Calls",

View File

@@ -54,6 +54,7 @@
"field_promql_expr": "PromQL Expression",
"field_alert_name": "Alert Name",
"field_alert_desc": "Alert Description",
"field_notification_channel": "Notification Channel",
"field_labels": "Labels",
"field_severity": "Severity",
"option_critical": "Critical",

View File

@@ -26,6 +26,7 @@
"MY_SETTINGS": "SigNoz | My Settings",
"ORG_SETTINGS": "SigNoz | Organization Settings",
"INGESTION_SETTINGS": "SigNoz | Ingestion Settings",
"API_KEYS": "SigNoz | Access Tokens",
"SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong",
"UN_AUTHORIZED": "SigNoz | Unauthorized",
"NOT_FOUND": "SigNoz | Page Not Found",

View File

@@ -37,11 +37,16 @@
"text_condition1": "Send a notification when",
"text_condition2": "the threshold",
"text_condition3": "during the last",
"option_1min": "1 min",
"option_5min": "5 mins",
"option_10min": "10 mins",
"option_15min": "15 mins",
"option_30min": "30 mins",
"option_60min": "60 mins",
"option_3hours": "3 hours",
"option_4hours": "4 hours",
"option_6hours": "6 hours",
"option_12hours": "12 hours",
"option_24hours": "24 hours",
"field_threshold": "Alert Threshold",
"option_allthetimes": "all the times",
@@ -63,6 +68,7 @@
"field_promql_expr": "PromQL Expression",
"field_alert_name": "Alert Name",
"field_alert_desc": "Alert Description",
"field_notification_channel": "Notification Channel",
"field_labels": "Labels",
"field_severity": "Severity",
"option_critical": "Critical",
@@ -100,7 +106,7 @@
"user_guide_ch_step3a": "Set alert severity, name and descriptions",
"user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
"user_tooltip_more_help": "More details on how to create alerts",
"choose_alert_type": "Choose a type for the alert:",
"choose_alert_type": "Choose a type for the alert",
"metric_based_alert": "Metric based Alert",
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
"log_based_alert": "Log-based Alert",
@@ -110,5 +116,8 @@
"exceptions_based_alert": "Exceptions-based Alert",
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
"field_unit": "Threshold unit",
"text_alert_on_absent": "Send a notification if data is missing for",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query"
}

View File

@@ -0,0 +1,3 @@
{
"delete_confirm_message": "Are you sure you want to delete {{keyName}} token? Deleting a token is irreversible and cannot be undone."
}

View File

@@ -0,0 +1,14 @@
{
"days_remaining": "days remaining in your billing period.",
"billing": "Billing",
"manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.",
"enterprise_cloud": "Enterprise Cloud",
"enterprise": "Enterprise",
"card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.",
"upgrade_plan": "Upgrade Plan",
"manage_billing": "Manage Billing",
"upgrade_now_text": "Upgrade now to have uninterrupted access",
"billing_start_info": "Your billing will start only after the trial period",
"checkout_plans": "Check out features in paid plans",
"here": "here"
}

View File

@@ -15,6 +15,7 @@
"button_test_channel": "Test",
"button_return": "Back",
"field_channel_name": "Name",
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"field_slack_recipient": "Recipient",
@@ -23,6 +24,12 @@
"field_opsgenie_api_key": "API Key",
"field_opsgenie_description": "Description",
"placeholder_opsgenie_description": "Description",
"help_email_to": "Email address(es) to send alerts to (comma separated)",
"field_email_to": "To",
"placeholder_email_to": "To",
"help_email_html": "Send email in html format",
"field_email_html": "Email body template",
"placeholder_email_html": "Email body template",
"field_webhook_username": "User Name (optional)",
"field_webhook_password": "Password (optional)",
"field_pager_routing_key": "Routing Key",

View File

@@ -16,6 +16,7 @@
"new_dashboard_title": "Sample Title",
"layout_saved_successfully": "Layout saved successfully",
"add_panel": "Add Panel",
"add_row": "Add Row",
"save_layout": "Save Layout",
"full_view": "Full Screen View",
"variable_updated_successfully": "Variable updated successfully",
@@ -28,5 +29,5 @@
"dashboard_unsave_changes": "There are unsaved changes in the Query builder, please stage and run the query or the changes will be lost. Press OK to discard.",
"dashboard_save_changes": "Your graph built with {{queryTag}} query will be saved. Press OK to confirm.",
"your_graph_build_with": "Your graph built with",
"dashboar_ok_confirm": "query will be saved. Press OK to confirm."
"dashboard_ok_confirm": "query will be saved. Press OK to confirm."
}

View File

@@ -1,3 +1,4 @@
{
"name_of_the_view": "Name of the view"
"name_of_the_view": "Name of the view",
"delete_confirm_message": "Are you sure you want to delete {{viewName}} view? Deleting a view is irreversible and cannot be undone."
}

View File

@@ -14,6 +14,5 @@
"delete_domain_message": "Are you sure you want to delete this domain?",
"delete_domain": "Delete Domain",
"add_domain": "Add Domains",
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly",
"invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually"
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly"
}

View File

@@ -3,6 +3,7 @@
"alert_channels": "Alert Channels",
"organization_settings": "Organization Settings",
"ingestion_settings": "Ingestion Settings",
"api_keys": "Access Tokens",
"my_settings": "My Settings",
"overview_metrics": "Overview Metrics",
"dbcall_metrics": "Database Calls",

View File

@@ -54,6 +54,7 @@
"field_promql_expr": "PromQL Expression",
"field_alert_name": "Alert Name",
"field_alert_desc": "Alert Description",
"field_notification_channel": "Notification Channel",
"field_labels": "Labels",
"field_severity": "Severity",
"option_critical": "Critical",

View File

@@ -4,6 +4,10 @@
"SERVICE_METRICS": "SigNoz | Service Metrics",
"SERVICE_MAP": "SigNoz | Service Map",
"GET_STARTED": "SigNoz | Get Started",
"GET_STARTED_APPLICATION_MONITORING": "SigNoz | Get Started | APM",
"GET_STARTED_LOGS_MANAGEMENT": "SigNoz | Get Started | Logs",
"GET_STARTED_INFRASTRUCTURE_MONITORING": "SigNoz | Get Started | Infrastructure",
"GET_STARTED_AWS_MONITORING": "SigNoz | Get Started | AWS",
"TRACE": "SigNoz | Trace",
"TRACE_DETAIL": "SigNoz | Trace Detail",
"TRACES_EXPLORER": "SigNoz | Traces Explorer",
@@ -26,6 +30,7 @@
"MY_SETTINGS": "SigNoz | My Settings",
"ORG_SETTINGS": "SigNoz | Organization Settings",
"INGESTION_SETTINGS": "SigNoz | Ingestion Settings",
"API_KEYS": "SigNoz | Access Tokens",
"SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong",
"UN_AUTHORIZED": "SigNoz | Unauthorized",
"NOT_FOUND": "SigNoz | Page Not Found",
@@ -39,5 +44,9 @@
"LIST_LICENSES": "SigNoz | List of Licenses",
"WORKSPACE_LOCKED": "SigNoz | Workspace Locked",
"SUPPORT": "SigNoz | Support",
"DEFAULT": "Open source Observability Platform | SigNoz"
"LOGS_SAVE_VIEWS": "SigNoz | Logs Saved Views",
"TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views",
"DEFAULT": "Open source Observability Platform | SigNoz",
"SHORTCUTS": "SigNoz | Shortcuts",
"INTEGRATIONS": "SigNoz | Integrations"
}


@@ -0,0 +1,25 @@
files="";
# lint-staged will pass all files in $1 $2 $3 etc. iterate and concat.
for var in "$@"
do
files="$files \"$var\","
done
# create temporary tsconfig which includes only passed files
str="{
\"extends\": \"./tsconfig.json\",
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./conf/default.conf\",\"./public\",\"./tests\",\"./playwright.config.ts\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
}"
echo $str > tsconfig.tmp
# run typecheck using temp config
tsc -p ./tsconfig.tmp
# capture exit code of tsc
code=$?
# delete temp config
rm ./tsconfig.tmp
exit $code
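
For reference, a script like this is typically invoked from a lint-staged configuration. The sketch below is a hypothetical wiring only; the script path, file glob, and config filename are assumptions and do not appear in this diff.

// lint-staged.config.js (hypothetical wiring)
module.exports = {
  // lint-staged passes each staged file as a separate argument,
  // which the script above collects via "$@".
  '**/*.{ts,tsx}': (filenames) =>
    `./scripts/typecheck-staged.sh ${filenames.join(' ')}`,
};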


@@ -20,16 +20,21 @@ import { UPDATE_USER_IS_FETCH } from 'types/actions/app';
import AppReducer from 'types/reducer/app';
import { routePermission } from 'utils/permission';
import routes from './routes';
import routes, {
LIST_LICENSES,
oldNewRoutesMapping,
oldRoutes,
} from './routes';
import afterLogin from './utils';
function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const { pathname } = useLocation();
const location = useLocation();
const { pathname } = location;
const mapRoutes = useMemo(
() =>
new Map(
routes.map((e) => {
[...routes, LIST_LICENSES].map((e) => {
const currentPath = matchPath(pathname, {
path: e.path,
});
@@ -59,6 +64,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const currentRoute = mapRoutes.get('current');
const isOldRoute = oldRoutes.indexOf(pathname) > -1;
const isLocalStorageLoggedIn =
getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true';
@@ -98,6 +105,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
if (
userResponse &&
route &&
route.find((e) => e === userResponse.payload.role) === undefined
) {
history.push(ROUTES.UN_AUTHORIZED);
@@ -157,10 +165,20 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
useEffect(() => {
(async (): Promise<void> => {
try {
if (isOldRoute) {
const redirectUrl = oldNewRoutesMapping[pathname];
const newLocation = {
...location,
pathname: redirectUrl,
};
history.replace(newLocation);
}
if (currentRoute) {
const { isPrivate, key } = currentRoute;
if (isPrivate && key !== ROUTES.WORKSPACE_LOCKED) {
if (isPrivate && key !== String(ROUTES.WORKSPACE_LOCKED)) {
handlePrivateRoutes(key);
} else {
// no need to fetch the user and make user fetching false


@@ -8,6 +8,7 @@ import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
import useAnalytics from 'hooks/analytics/useAnalytics';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import useGetFeatureFlag from 'hooks/useGetFeatureFlag';
import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
@@ -28,7 +29,11 @@ import AppReducer, { User } from 'types/reducer/app';
import { extractDomain, isCloudUser, isEECloudUser } from 'utils/app';
import PrivateRoute from './Private';
import defaultRoutes, { AppRoutes, SUPPORT_ROUTE } from './routes';
import defaultRoutes, {
AppRoutes,
LIST_LICENSES,
SUPPORT_ROUTE,
} from './routes';
function App(): JSX.Element {
const themeConfig = useThemeConfig();
@@ -142,7 +147,11 @@ function App(): JSX.Element {
}
}
if (isOnBasicPlan || (isLoggedInState && role && role !== 'ADMIN')) {
if (
isOnBasicPlan ||
(isLoggedInState && role && role !== 'ADMIN') ||
!(isCloudUserVal || isEECloudUser())
) {
const newRoutes = routes.filter((route) => route?.path !== ROUTES.BILLING);
setRoutes(newRoutes);
}
@@ -150,6 +159,10 @@ function App(): JSX.Element {
if (isCloudUserVal || isEECloudUser()) {
const newRoutes = [...routes, SUPPORT_ROUTE];
setRoutes(newRoutes);
} else {
const newRoutes = [...routes, LIST_LICENSES];
setRoutes(newRoutes);
}
@@ -169,22 +182,24 @@ function App(): JSX.Element {
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<KeyboardHotkeysProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>


@@ -15,9 +15,20 @@ export const ServiceMapPage = Loadable(
() => import(/* webpackChunkName: "ServiceMapPage" */ 'modules/Servicemap'),
);
export const LogsSaveViews = Loadable(
() => import(/* webpackChunkName: "LogsSaveViews" */ 'pages/LogsModulePage'), // TODO: Add a wrapper so that the same component can be used in traces
);
export const TracesExplorer = Loadable(
() =>
import(/* webpackChunkName: "Traces Explorer Page" */ 'pages/TracesExplorer'),
import(
/* webpackChunkName: "Traces Explorer Page" */ 'pages/TracesModulePage'
),
);
export const TracesSaveViews = Loadable(
() =>
import(/* webpackChunkName: "Traces Save Views" */ 'pages/TracesModulePage'),
);
export const TraceFilter = Loadable(
@@ -107,6 +118,10 @@ export const IngestionSettings = Loadable(
() => import(/* webpackChunkName: "Ingestion Settings" */ 'pages/Settings'),
);
export const APIKeys = Loadable(
() => import(/* webpackChunkName: "All Settings" */ 'pages/Settings'),
);
export const MySettings = Loadable(
() => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'),
);
@@ -171,3 +186,14 @@ export const WorkspaceBlocked = Loadable(
() =>
import(/* webpackChunkName: "WorkspaceLocked" */ 'pages/WorkspaceLocked'),
);
export const ShortcutsPage = Loadable(
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'),
);
export const InstalledIntegrations = Loadable(
() =>
import(
/* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage'
),
);


@@ -1,10 +1,10 @@
import ROUTES from 'constants/routes';
import WorkspaceBlocked from 'pages/WorkspaceLocked';
import { RouteProps } from 'react-router-dom';
import {
AllAlertChannels,
AllErrors,
APIKeys,
BillingPage,
CreateAlertChannelAlerts,
CreateNewAlerts,
@@ -14,6 +14,7 @@ import {
EditRulesPage,
ErrorDetails,
IngestionSettings,
InstalledIntegrations,
LicensePage,
ListAllALertsPage,
LiveLogs,
@@ -21,6 +22,7 @@ import {
Logs,
LogsExplorer,
LogsIndexToFields,
LogsSaveViews,
MySettings,
NewDashboardPage,
OldLogsExplorer,
@@ -32,6 +34,7 @@ import {
ServiceMetricsPage,
ServicesTablePage,
SettingsPage,
ShortcutsPage,
SignupPage,
SomethingWentWrong,
StatusPage,
@@ -39,8 +42,10 @@ import {
TraceDetail,
TraceFilter,
TracesExplorer,
TracesSaveViews,
UnAuthorized,
UsageExplorerPage,
WorkspaceBlocked,
} from './pageComponents';
const routes: AppRoutes[] = [
@@ -53,7 +58,7 @@ const routes: AppRoutes[] = [
},
{
path: ROUTES.GET_STARTED,
exact: true,
exact: false,
component: Onboarding,
isPrivate: true,
key: 'GET_STARTED',
@@ -86,6 +91,13 @@ const routes: AppRoutes[] = [
exact: true,
key: 'SERVICE_MAP',
},
{
path: ROUTES.LOGS_SAVE_VIEWS,
component: LogsSaveViews,
isPrivate: true,
exact: true,
key: 'LOGS_SAVE_VIEWS',
},
{
path: ROUTES.TRACE_DETAIL,
exact: true,
@@ -163,6 +175,13 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'TRACES_EXPLORER',
},
{
path: ROUTES.TRACES_SAVE_VIEWS,
exact: true,
component: TracesSaveViews,
isPrivate: true,
key: 'TRACES_SAVE_VIEWS',
},
{
path: ROUTES.CHANNELS_NEW,
exact: true,
@@ -191,13 +210,6 @@ const routes: AppRoutes[] = [
component: AllErrors,
key: 'ALL_ERROR',
},
{
path: ROUTES.LIST_LICENSES,
exact: true,
component: LicensePage,
isPrivate: true,
key: 'LIST_LICENSES',
},
{
path: ROUTES.ERROR_DETAIL,
exact: true,
@@ -226,6 +238,13 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'INGESTION_SETTINGS',
},
{
path: ROUTES.API_KEYS,
exact: true,
component: APIKeys,
isPrivate: true,
key: 'API_KEYS',
},
{
path: ROUTES.MY_SETTINGS,
exact: true,
@@ -261,6 +280,13 @@ const routes: AppRoutes[] = [
key: 'LIVE_LOGS',
isPrivate: true,
},
{
path: ROUTES.LOGS_PIPELINES,
exact: true,
component: PipelinePage,
key: 'LOGS_PIPELINES',
isPrivate: true,
},
{
path: ROUTES.LOGIN,
exact: true,
@@ -289,13 +315,6 @@ const routes: AppRoutes[] = [
key: 'SOMETHING_WENT_WRONG',
isPrivate: false,
},
{
path: ROUTES.LOGS_PIPELINES,
exact: true,
component: PipelinePage,
key: 'LOGS_PIPELINES',
isPrivate: true,
},
{
path: ROUTES.BILLING,
exact: true,
@@ -310,6 +329,20 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'WORKSPACE_LOCKED',
},
{
path: ROUTES.SHORTCUTS,
exact: true,
component: ShortcutsPage,
isPrivate: true,
key: 'SHORTCUTS',
},
{
path: ROUTES.INTEGRATIONS,
exact: true,
component: InstalledIntegrations,
isPrivate: true,
key: 'INTEGRATIONS',
},
];
export const SUPPORT_ROUTE: AppRoutes = {
@@ -320,6 +353,34 @@ export const SUPPORT_ROUTE: AppRoutes = {
isPrivate: true,
};
export const LIST_LICENSES: AppRoutes = {
path: ROUTES.LIST_LICENSES,
exact: true,
component: LicensePage,
isPrivate: true,
key: 'LIST_LICENSES',
};
export const oldRoutes = [
'/pipelines',
'/logs/old-logs-explorer',
'/logs-explorer',
'/logs-explorer/live',
'/logs-save-views',
'/traces-save-views',
'/settings/api-keys',
];
export const oldNewRoutesMapping: Record<string, string> = {
'/pipelines': '/logs/pipelines',
'/logs/old-logs-explorer': '/logs/old-logs-explorer',
'/logs-explorer': '/logs/logs-explorer',
'/logs-explorer/live': '/logs/logs-explorer/live',
'/logs-save-views': '/logs/saved-views',
'/traces-save-views': '/traces/saved-views',
'/settings/api-keys': '/settings/access-tokens',
};
export interface AppRoutes {
component: RouteProps['component'];
path: RouteProps['path'];


@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { APIKeyProps, CreateAPIKeyProps } from 'types/api/pat/types';
const createAPIKey = async (
props: CreateAPIKeyProps,
): Promise<SuccessResponse<APIKeyProps> | ErrorResponse> => {
try {
const response = await axios.post('/pats', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createAPIKey;
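
A hypothetical call site for the wrapper above; the CreateAPIKeyProps fields are not shown in this diff, so the props value is left abstract.

// Sketch only: `props` is a CreateAPIKeyProps built elsewhere (its fields are not shown in this diff).
const res = await createAPIKey(props);
if (res.statusCode === 200 && res.payload) {
  // res.payload is the newly created key (APIKeyProps)
} else {
  // res.error and res.message are populated by ErrorResponseHandler
}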


@@ -0,0 +1,24 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllAPIKeyProps } from 'types/api/pat/types';
const deleteAPIKey = async (
id: string,
): Promise<SuccessResponse<AllAPIKeyProps> | ErrorResponse> => {
try {
const response = await axios.delete(`/pats/${id}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteAPIKey;


@@ -0,0 +1,24 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/get';
const get = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(`/pats/${props.id}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default get;


@@ -0,0 +1,6 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import { AllAPIKeyProps } from 'types/api/pat/types';
export const getAllAPIKeys = (): Promise<AxiosResponse<AllAPIKeyProps>> =>
axios.get(`/pats`);


@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, UpdateAPIKeyProps } from 'types/api/pat/types';
const updateAPIKey = async (
props: UpdateAPIKeyProps,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/pats/${props.id}`, {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateAPIKey;


@@ -30,7 +30,8 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
statusCode,
payload: null,
error: errorMessage,
message: null,
message: (response.data as any)?.status,
body: JSON.stringify((response.data as any).data),
};
}


@@ -0,0 +1,7 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import { AllIntegrationsProps } from 'types/api/integrations/types';
export const getAllIntegrations = (): Promise<
AxiosResponse<AllIntegrationsProps>
> => axios.get(`/integrations`);


@@ -0,0 +1,11 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import {
GetIntegrationPayloadProps,
GetIntegrationProps,
} from 'types/api/integrations/types';
export const getIntegration = (
props: GetIntegrationPayloadProps,
): Promise<AxiosResponse<GetIntegrationProps>> =>
axios.get(`/integrations/${props.integrationId}`);


@@ -0,0 +1,11 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import {
GetIntegrationPayloadProps,
GetIntegrationStatusProps,
} from 'types/api/integrations/types';
export const getIntegrationStatus = (
props: GetIntegrationPayloadProps,
): Promise<AxiosResponse<GetIntegrationStatusProps>> =>
axios.get(`/integrations/${props.integrationId}/connection_status`);
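
A sketch of how the two fetchers above could be consumed together. The react-query usage and the relative import paths are assumptions, not taken from this diff.

// Hypothetical consumer hook; import paths and react-query usage are assumptions.
import { useQuery } from 'react-query';
import { getIntegration } from './getIntegration';
import { getIntegrationStatus } from './getIntegrationStatus';

function useIntegrationDetails(integrationId: string) {
  const details = useQuery(['integration', integrationId], () =>
    getIntegration({ integrationId }),
  );
  const connectionStatus = useQuery(['integration', integrationId, 'status'], () =>
    getIntegrationStatus({ integrationId }),
  );
  return { details, connectionStatus };
}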


@@ -0,0 +1,31 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
InstalledIntegrationsSuccessResponse,
InstallIntegrationKeyProps,
} from 'types/api/integrations/types';
const installIntegration = async (
props: InstallIntegrationKeyProps,
): Promise<
SuccessResponse<InstalledIntegrationsSuccessResponse> | ErrorResponse
> => {
try {
const response = await axios.post('/integrations/install', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default installIntegration;


@@ -0,0 +1,31 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
UninstallIntegrationProps,
UninstallIntegrationSuccessResponse,
} from 'types/api/integrations/types';
const unInstallIntegration = async (
props: UninstallIntegrationProps,
): Promise<
SuccessResponse<UninstallIntegrationSuccessResponse> | ErrorResponse
> => {
try {
const response = await axios.post('/integrations/uninstall', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default unInstallIntegration;


@@ -8,7 +8,7 @@ const listAllDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(`orgs/${props.orgId}/domains`);
const response = await axios.get(`/orgs/${props.orgId}/domains`);
return {
statusCode: 200,


@@ -2,6 +2,7 @@ const apiV1 = '/api/v1/';
export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiAlertManager = '/api/alertmanager';
export default apiV1;


@@ -13,6 +13,7 @@ export interface UsageResponsePayloadProps {
billTotal: number;
};
discount: number;
subscriptionStatus?: string;
}
const getUsage = async (


@@ -0,0 +1,34 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createEmail';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/channels', {
name: props.name,
email_configs: [
{
send_resolved: props.send_resolved,
to: props.to,
html: props.html,
headers: props.headers,
},
],
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default create;
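
A hypothetical call into the email channel wrapper above, showing how the new send_resolved flag flows into email_configs; all values are illustrative only.

// Sketch only; values are illustrative, and the html field is assumed to carry the
// "Email body template" from the channel form.
const emailBodyTemplate = '{{ range .Alerts }}{{ .Annotations.summary }}{{ end }}';
const res = await create({
  name: 'oncall-email',
  send_resolved: true, // the new "Send resolved alerts" toggle
  to: 'oncall@example.com',
  html: emailBodyTemplate,
  headers: {},
});
if (res.statusCode === 200 && res.payload) {
  // channel created successfully
}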


@@ -12,7 +12,7 @@ const create = async (
name: props.name,
msteams_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
webhook_url: props.webhook_url,
title: props.title,
text: props.text,


@@ -12,7 +12,7 @@ const create = async (
name: props.name,
pagerduty_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
routing_key: props.routing_key,
client: props.client,
client_url: props.client_url,


@@ -12,7 +12,7 @@ const create = async (
name: props.name,
slack_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
api_url: props.api_url,
channel: props.channel,
title: props.title,


@@ -30,7 +30,7 @@ const create = async (
name: props.name,
webhook_configs: [
{
send_resolved: true,
send_resolved: props.send_resolved,
url: props.api_url,
http_config: httpConfig,
},

Some files were not shown because too many files have changed in this diff.