Compare commits

...

187 Commits

Author SHA1 Message Date
srikanthccv
8d03e35d84 chore: more updates 2025-06-18 15:30:29 +05:30
srikanthccv
f267ed8ad1 chore: fix step interval auto 2025-06-17 14:06:36 +05:30
Srikanth Chekuri
1542b9d6e9 chore: disallow unknown fields and address gaps (#8237) 2025-06-16 23:11:28 +05:30
Nityananda Gohain
8455349459 fix: support orgId and postgres in agents (#7327)
* fix: initial commit for agents

* fix: remove frontend package manager commit

* fix: use sqlstore

* fix: opamp server changes

* fix: tests

* fix: tests

* fix: minor changes

* fix: migrations

* fix: use uuid7

* fix: use default orgID for single tenant

* fix: pipelines tests fixed

* fix: use correct agentId

* fix: use orgID in coordinator

* fix: fix tests

* fix: remove redundant hash check

* fix: migration

* fix: migration

* fix: address comments

* fix: rename migration file

* fix: remove unwanted orgid code

* fix: use orggetter

* fix: comment

* fix: schema cleanup

* fix: minor changes

* chore: addressed changes

* fix: add back agentID as it used ulid

* fix: keep only 50 agents for an orgId

* chore: explicitly specify text type

* chore: use valuer.uuid for orgid

* fix: linting complain

* chore: final fixes

* chore: minor changes

* fix: add not null

* fix: fe tests

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-06-16 20:07:16 +05:30
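
The "use uuid7" item in the PR above refers to moving agent identifiers to time-ordered UUIDv7 values. A minimal sketch, assuming a recent version of github.com/google/uuid, is shown below; the helper name is hypothetical and this is not the PR's actual code.

```go
package main

import (
	"fmt"

	"github.com/google/uuid"
)

// newAgentID is an illustrative helper, not SigNoz's implementation: it
// generates a UUIDv7, which embeds a millisecond timestamp so identifiers
// sort roughly by creation time.
func newAgentID() (string, error) {
	id, err := uuid.NewV7()
	if err != nil {
		return "", err
	}
	return id.String(), nil
}

func main() {
	id, err := newAgentID()
	if err != nil {
		panic(err)
	}
	fmt.Println("new agent id:", id)
}
```

Time-ordered IDs make "keep only the latest N agents per org" style cleanups cheap, since ordering by ID approximates ordering by creation time.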
aniketio-ctrl
c488a24d09 fix(prom-aggr): fix prom aggregation queries using utf-8 charset (#8262)
* fix(prom-aggr): added fix for prom aggregation

* fix(prom-aggr): added fix for prom aggregation
2025-06-16 19:42:17 +05:30
Vikrant Gupta
9091cf61fd chore(github): fix codeowners file (#8261) 2025-06-16 11:37:07 +00:00
Ekansh Gupta
eeb2ab3212 feat: added support for trace_operators in query range v5 (#8165)
* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

* feat: added support for trace_operators in query range v5

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-06-16 16:43:51 +05:30
Nageshbansal
3f128f0f1d fix: configs in multi-node docker-swarm cluster (#8239) 2025-06-16 11:42:02 +05:30
Vibhu Pandey
59ff7ed1e1 feat(licensing): add analytics (#8252) 2025-06-16 01:09:41 +05:30
Vibhu Pandey
d236b6ce1e feat(statsreporter): add stats for telemetry.*.last_observed.time (#8251)
## 📄 Summary

- add stats for telemetry.*.last_observed.time
2025-06-16 00:02:17 +05:30
Dimitris Mavrommatis
44b118a212 fix: span links tab to span details drawer (#7888)
* fix: span links tab to span details drawer

* Update LinkedSpans.styles.scss

---------

Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2025-06-15 16:22:36 +04:30
Dimitris Mavrommatis
3fc6f7ee63 feat(trace): add visuals for events on span waterfall and flamegraph (#7889)
* fix: add visuals on waterfall and flamegraph for span events

* fix: correct offsets for events

* fix: addressed comments

* chore: update the name of the import

* fix: interface change

* chore: formatting

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-06-15 10:39:39 +00:00
Sahil Khan
f1016baf03 chore: updated http-proxy-middleware to 3.0.5 from 3.0.3 (#8245) 2025-06-13 21:35:48 +05:30
Amlan Kumar Nandy
e5c0d9e44a chore: allow url as label value in alerts (#8244) 2025-06-13 17:47:01 +07:00
Yunus M
e51056c804 fix: use preference.name rather than preference.key (#8234) 2025-06-13 11:44:42 +05:30
Nityananda Gohain
7d8dad4550 Revert "fix: remove whitespace from sso cert (#8141)" (#8233)
This reverts commit 44ea237039.
2025-06-12 22:39:24 +05:30
Yunus M
c477e0ef16 feat: sidebar revamp (#8087)
* feat: sidebar revamp - initial commit

* feat: move billing and other isolated routes to settings

* feat: handle channel related routes

* feat: update account settings page

* feat: show dropdown for secondary items

* feat: handle reordering of pinned nav items

* feat: improve font load performance

* feat: update font reference

* feat: update page content styles

* feat: handle external links in sidebar

* feat: handle secondary nav item clicks

* feat: handle pinned nav items reordering

* feat: handle sidenav pinned state using preference, handle light mode

* feat: show sidenav items conditionally

* feat: show version diff indicator only to self hosted users

* feat: show billing to admins only and integrations to cloud and enterprise users

* feat: update fallback link

* feat: handle settings menu items

* fix: settings page reload on nav change

* feat: intercom to pylon

* feat: show invite user to admin only

* feat: handle review comments

* chore: remove react query dev tools

* feat: minor ui updates

* feat: update changes based on preference store changes

* feat: handle sidenav shortcut state

* feat: handle scroll for more

* feat: maintain shortcuts order

* feat: manage license ui updates

* feat: manage settings options based on license and roles

* feat: update types

* chore: add logEvents

* feat: update types

* chore: fix type errors

* chore: remove unused variable

* feat: update my settings page test cases

---------

Co-authored-by: makeavish <makeavish786@gmail.com>
2025-06-12 19:55:32 +05:30
Shaheer Kochai
fff7f8fc76 feat: add span scope filter to trace details page (#8005)
* feat: add span scope filter to trace details page

* chore: add tests for the span scope selector flows when onchange and query are provided

* refactor: remove the unnecessary queryName prop and infer it from query

* fix: fix the failing span scope selector tests
2025-06-12 11:03:28 +00:00
Shaheer Kochai
8cfeef4521 fix: fix sentries (#8003)
* fix: handle potential undefined values in groupBy calculation in TracesExplorer

* fix: add optional chaining for aggregateAttribute key check in Query component

* fix: add optional chaining for filters in SpanScopeSelector to handle potential undefined values

* fix: fix the warning in logs chart by adding the missing date-time format option

* fix: improve trace graph allDataPoints null check

* chore: remove the keys.length from null check
2025-06-12 15:28:06 +04:30
Sahil Khan
d85a1a21ac feat: generalised preferences framework (#7903)
* feat: preferences framework generalised scaffolded

* feat: preferences framework integrated logs & saved logs views

* feat: fixed bugs in saved views for traces

* fix: removed unused file

* fix: wrapped metric explorer inside preferences context as it uses useoptions hook

* feat: added tests for preferences framework alongside some minor bug fixes and improvements

* chore: added tests for traces loader and updater

* chore: fixed failing tests due to new context for preferences

* fix: minor saved views handling bug

* fix: minor pref fix

* fix: breaking tests

* fix: undo removal of pref context from live logs

* feat: split the logic of columns and formatting load in logs

* fix: breaking tests

* fix: pr comments

* fix: minor bug and pr comments regarding better resync

* fix: bugs in internal flows

* fix: url pref sync

* fix: minor bug fix

* fix: fixed failing tests

* fix: fixed failing tests
2025-06-11 10:06:01 +00:00
primus-bot[bot]
17f48d656d chore(release): bump to v0.87.0 (#8222)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-06-11 12:06:17 +05:30
Srikanth Chekuri
2d6774da68 fix: add missing denominator for reset case (#8180) 2025-06-11 11:32:50 +05:30
Vibhu Pandey
62a9d7e602 docs(contributing): add endpoint docs (#8215)
* docs(contributing): add endpoint docs

* docs(contributing): add endpoint docs

* docs(contributing): add endpoint docs

* docs(contributing): add endpoint docs

* docs(contributing): add endpoint docs
2025-06-10 17:25:07 +00:00
Vikrant Gupta
3a2c7a7a68 fix(dashboard): create dashboard panic for id (#8214) 2025-06-10 21:31:56 +05:30
Sahil Khan
33e70d1f37 fix: traces back button issue (#8041)
Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2025-06-10 13:25:59 +00:00
Srikanth Chekuri
85f04e4bae chore: add querier HTTP API endpoint and bucket cache implementation (#8178)
* chore: update types
1. add partial bool to indicate if the value covers the partial interval
2. add optional unit if present (ex: duration_nano, metrics with units)
3. use pointers wherever necessary
4. add format options for request and remove redundant name in query envelope

* chore: fix some gaps
1. make the range as [start, end)
2. provide the logs statement builder with the body column
3. skip the body filter on resource filter statement builder
4. remove unnecessary agg expr rewriter in metrics
5. add ability to skip full text in where clause visitor

* chore: add API endpoint for new query range

* chore: add bucket cache implementation

* chore: add fingerprinting impl and add bucket cache to querier

* chore: add provider factory
2025-06-10 12:56:28 +00:00
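
The commit above describes a bucket cache keyed by a query fingerprint over half-open [start, end) time ranges. A rough sketch of the idea follows, with hypothetical names and structure; it is not the actual querier code.

```go
package main

import "fmt"

// bucket covers the half-open interval [start, end).
type bucket struct {
	start, end int64
}

// bucketCache stores cached result buckets per query fingerprint.
// Buckets per fingerprint are assumed sorted and non-overlapping.
type bucketCache struct {
	buckets map[string][]bucket
}

func newBucketCache() *bucketCache {
	return &bucketCache{buckets: map[string][]bucket{}}
}

// missingRanges returns the portions of [start, end) not covered by cached
// buckets, i.e. the sub-ranges that still need to be fetched from storage.
func (c *bucketCache) missingRanges(fingerprint string, start, end int64) []bucket {
	var missing []bucket
	cursor := start
	for _, b := range c.buckets[fingerprint] {
		if b.end <= cursor || b.start >= end {
			continue // bucket entirely outside the requested range
		}
		if b.start > cursor {
			missing = append(missing, bucket{cursor, b.start}) // gap before this bucket
		}
		if b.end > cursor {
			cursor = b.end
		}
	}
	if cursor < end {
		missing = append(missing, bucket{cursor, end})
	}
	return missing
}

func (c *bucketCache) put(fingerprint string, b bucket) {
	c.buckets[fingerprint] = append(c.buckets[fingerprint], b)
}

func main() {
	c := newBucketCache()
	c.put("fp-123", bucket{0, 100})
	c.put("fp-123", bucket{150, 200})
	fmt.Println(c.missingRanges("fp-123", 50, 250)) // [{100 150} {200 250}]
}
```

The half-open convention keeps adjacent buckets such as [0, 100) and [100, 200) from double-counting the boundary point.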
Shaheer Kochai
53f9e7d811 chore: trace funnels bugfixes/improvements (#8114)
* fix: refetch funnel steps overview on clicking refresh

* chore: temporarily hide latency pointer from funnel steps

* chore: remove the existing filters of a step on clicking replace button

* fix(useLocalStorage): stabilize initialValue handling to prevent unnecessary re-renders

* chore: remove p99_latency references from funnel metrics

* fix(useFunnelMetrics): ensure latency type defaults to P99 when undefined
2025-06-10 12:45:25 +00:00
Vibhu Pandey
ad46e22561 docs(contributing): add provider docs (#8193) 2025-06-10 05:39:14 +00:00
Yunus M
e79195ccf1 fix: handle alert list updates (#8109) 2025-06-10 10:56:45 +05:30
Amlan Kumar Nandy
f77bb888a8 chore: add analytics for metrics explorer (#8108) 2025-06-10 04:42:49 +00:00
Amlan Kumar Nandy
baa15baea9 chore: metrics explorer summary view fixes (#8126) 2025-06-10 04:18:12 +00:00
Vibhu Pandey
316e6821f1 feat(statsreporter): report stats on stop (#8187) 2025-06-10 07:55:32 +05:30
Vibhu Pandey
a1fa2769e4 feat(statsreporter): build a statsreporter service (#8177)
- build a new statsreporter service
2025-06-09 16:43:29 +05:30
Vikrant Gupta
decb660992 chore(sqlmigration): drop the rule history and data migrations table (#8181) 2025-06-09 15:46:22 +05:30
Amlan Kumar Nandy
0acbcf8322 chore: remove critters-webpack-plugin (#8172) 2025-06-07 16:21:06 +07:00
dependabot[bot]
11eabdc2ac chore(deps): bump webpack-dev-server from 4.15.2 to 5.2.1 in /frontend (#8160)
* chore(deps): bump webpack-dev-server from 4.15.2 to 5.2.1 in /frontend

Bumps [webpack-dev-server](https://github.com/webpack/webpack-dev-server) from 4.15.2 to 5.2.1.
- [Release notes](https://github.com/webpack/webpack-dev-server/releases)
- [Changelog](https://github.com/webpack/webpack-dev-server/blob/master/CHANGELOG.md)
- [Commits](https://github.com/webpack/webpack-dev-server/compare/v4.15.2...v5.2.1)

---
updated-dependencies:
- dependency-name: webpack-dev-server
  dependency-version: 5.2.1
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>

* feat: upgraded webpack-cli for compatibility fix for webpack-dev-server upgrade

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com>
Co-authored-by: SagarRajput-7 <sagar@signoz.io>
2025-06-07 07:24:18 +00:00
Vibhu Pandey
eb94554f5a feat(preference): add support for objects and arrays (#8142)
* refactor(preference): better readability

* refactor: better readability

* refactor: better readability

* fix: change frontend contract

* refactor: change frontend

* refactor: change frontend

* refactor: change frontend

* refactor: change frontend

* chore: fix tsc

* chore: fix tsc

* chore: fix tsc

* chore: fix tsc
2025-06-06 22:38:28 +05:30
Piyush Singariya
e8280dbea4 feat: Adding ContainerInsights in ECS Integrations (AWS) (#8122)
* fix: adding ECS ContainerInsights

* chore: dashboard added

* chore: format integrations.json

* feat(7294): added _dot metrics for aws ecs

---------

Co-authored-by: aniket <aniket@signoz.io>
2025-06-06 09:27:35 +00:00
Nityananda Gohain
44ea237039 fix: remove whitespace from sso cert (#8141)
* fix: remove whitespace from sso cert

* fix: use trimspace instead

* fix: use replaceall
2025-06-06 09:03:46 +00:00
Srikanth Chekuri
72b0214d1d chore: add range query impl for promql (#8130) 2025-06-05 19:18:44 +00:00
Srikanth Chekuri
386a215324 chore: metric statement builder (#8104) 2025-06-06 00:38:48 +05:30
Vibhu Pandey
ba0ba4bbc9 build(go): upgrade purego to v0.8.4 (#8159) 2025-06-05 12:31:49 +00:00
SagarRajput-7
d60c9ab36b feat: handle unknown metric in panel query (#8083)
* feat: handle unknown metric in panel query

* feat: added handling with type empty and key present or not

* feat: added test cases

* feat: added comment to better explain the logic

* feat: fixed operator list for unknown metric

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-06-05 11:23:25 +00:00
Piyush Singariya
90770b90bd feat: Introducing EKS integration (AWS) (#8021)
* feat: introducing EKS integration (AWS)

* fix: update metrics and enable logs collection

* feat: eks Overview dashboard ready

* feat: containerinsights incoming

* chore: dashboard name update

* feat(7294): added _dot metrics for aws ecs

* feat(7274): added dot metrics for overview metrics in eks

* Update pkg/query-service/app/cloudintegrations/services/definitions/aws/eks/assets/dashboards/overview_dot.json

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

* Update pkg/query-service/app/cloudintegrations/services/definitions/aws/eks/assets/dashboards/containerinsights_dot.json

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

---------

Co-authored-by: aniket <aniket@signoz.io>
Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
2025-06-05 15:35:39 +05:30
Vikrant Gupta
a19874c1dd fix(dashboard): dashboards/alerts info telemetry fix (#8161) 2025-06-05 13:47:25 +05:30
Piyush Singariya
65ff460d63 fix: Enhance filter support for Pipeline Simulation (#8134)
* feat: enhance filter support for JSON log body

* test: added tests for exists and not exists

* test: remove the value
2025-06-05 05:05:39 +00:00
primus-bot[bot]
b9d542a294 chore(release): bump to v0.86.2 (#8154) 2025-06-04 14:53:32 +00:00
aniketio-ctrl
e75e5bdbdb feat(7294): added flag columns in query (#8153)
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-06-04 20:10:52 +05:30
Srikanth Chekuri
0d03203977 chore: add formula evaluator (#8112) 2025-06-04 13:40:42 +00:00
primus-bot[bot]
28f6f42ac4 chore(release): bump to v0.86.1 (#8152)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-06-04 13:16:42 +05:30
Vibhu Pandey
92f8e4d5b9 fix(alertmanager): fix legacy alertmanager injection (#8151) 2025-06-04 07:29:40 +00:00
primus-bot[bot]
037eea5262 chore(release): bump to v0.86.0 (#8150)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-06-04 12:23:01 +05:30
SagarRajput-7
cd4df6280f feat: fixed multiple sentry error around dashboards (#8148) 2025-06-04 13:20:51 +07:00
Vikrant Gupta
ad2d4ed56c chore(dashboard): mismatch in dashboard lock rbac (#8137) 2025-06-03 20:06:38 +05:30
aniketio-ctrl
7955497a8d Feat/7294 (#8139)
* feat(7294): updated dashboard uri for cloud integrations
2025-06-03 19:43:42 +05:30
aniketio-ctrl
6ed30318bd feat(7294): updated dashboard uri for cloud integrations (#8135) 2025-06-03 17:48:31 +05:30
Vikrant Gupta
c32dd9f17e chore(feature): drop the feature status table (#8124)
* chore(feature): drop the feature set table

* chore(feature): cleanup the types and remove unused flags

* chore(feature): some more cleanup

* chore(feature): add codeowners file

* chore(feature): init to basic plan for failed validations

* chore(feature): cleanup

* chore(feature): pkg handler cleanup

* chore(feature): pkg handler cleanup

* chore(feature): address review comments

* chore(feature): address review comments

* chore(feature): address review comments

---------

Co-authored-by: Vibhu Pandey <vibhupandey28@gmail.com>
2025-06-03 17:05:42 +05:30
Shaheer Kochai
c58cf67eb0 refactor: update funnel description endpoint from POST /save to PUT /{funnel_id} (#8080)
* refactor: update funnel description endpoint from POST /save to PUT /{funnel_id}

* feat: add timestamp to funnel description payload and update mutation type

---------

Co-authored-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-03 10:59:54 +00:00
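
The refactor above moves the funnel-description update from POST /save to PUT /{funnel_id} and adds a timestamp to the payload. A hedged sketch of such a call is below; the base URL, path prefix, and field names are assumptions for illustration, not the documented API.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// updateFunnelDescription issues a PUT to a hypothetical /{funnel_id} route
// with a timestamped JSON payload, mirroring the endpoint shape described in
// the commit message above.
func updateFunnelDescription(baseURL, funnelID, description string) error {
	payload, err := json.Marshal(map[string]any{
		"description": description,
		"timestamp":   time.Now().UnixMilli(),
	})
	if err != nil {
		return err
	}
	url := fmt.Sprintf("%s/api/v1/trace-funnels/%s", baseURL, funnelID) // path is an assumption
	req, err := http.NewRequest(http.MethodPut, url, bytes.NewReader(payload))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 300 {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	return nil
}

func main() {
	if err := updateFunnelDescription("http://localhost:8080", "funnel-id", "checkout flow"); err != nil {
		fmt.Println("update failed:", err)
	}
}
```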
Nageshbansal
440c3d8386 fix: Broken Docker Downloads Badge (#7954)
* Updated the Badge for Docker Downloads to use signoz/signoz repo
2025-06-03 16:01:28 +05:30
Aditya Singh
d683b94344 [Fix #8102] Logs Issues with context view (#8111)
* fix: add active log id to charts query

* refactor: remove comment

* fix: remove active log id on filter change

* test: update test for log explorer

* test: update test for log explorer

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-06-03 09:57:39 +00:00
Srikanth Chekuri
6a629623bc chore: port functions, reduce to, series limit support (#8105) 2025-06-03 11:37:47 +05:30
Srikanth Chekuri
982688ccc9 chore: add field mapper and condition builder for ts v4 (#8100) 2025-06-02 19:43:48 +00:00
aniketio-ctrl
74bbb26033 fix(metrics): exclude NoRecordedValue data points from aggregation (#7674) 2025-06-03 00:40:05 +05:30
Vikrant Gupta
3bb9e05681 chore(dashboard): make dashboard schema production ready (#8092)
* chore(dashboard): initial commit

* chore(dashboard): bring all the code in module

* chore(dashboard): remove lock unlock from ee codebase

* chore(dashboard): go deps

* chore(dashboard): fix lint

* chore(dashboard): implement the store

* chore(dashboard): add migration

* chore(dashboard): fix lint

* chore(dashboard): api and frontend changes

* chore(dashboard): frontend changes for new dashboards

* chore(dashboard): fix test cases

* chore(dashboard): add lock unlock APIs

* chore(dashboard): add lock unlock APIs

* chore(dashboard): move integrations controller out from module

* chore(dashboard): move integrations controller out from module

* chore(dashboard): move integrations controller out from module

* chore(dashboard): rename migration file

* chore(dashboard): surface errors for lock/unlock dashboard

* chore(dashboard): some testing cleanups

* chore(dashboard): fix postgres migrations

---------

Co-authored-by: Vibhu Pandey <vibhupandey28@gmail.com>
2025-06-02 22:41:38 +05:30
aniketio-ctrl
61b2f8cb31 fix(8082): removed unnecessary log lines (#8123) 2025-06-02 13:09:43 +00:00
Vikrant Gupta
9d397d0867 fix(license): fixes for license service (#8121)
* fix(license): fixes for license service

* fix(license): fixes for license service

* fix(license): add code comments
2025-06-02 17:09:19 +05:30
aniketio-ctrl
5fb4206a99 Feat/7294: Updated Dashboards for integrations (#8113) 2025-06-02 15:17:53 +05:30
Aditya Singh
dd11ba9f48 fix: remove create dashboard call before navigate (#8029)
* fix: remove create dashboard call before navigate

* feat: minor refactor

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-06-02 08:02:09 +00:00
Shivanshu Raj Shrivastava
f9cb9f10be feat: adds a part of trace funnel feature (APIs, module, handler, store, migrations) implementation (#7763)
* feat: adds server and handler changes

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* feat: add tracefunnel module and handler

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* feat: add required types for tracefunnels

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* feat: db operations, module and handler implementation

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* feat: add db migrations

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: add utility functions

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* test: add utility function tests

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* test: add handler tests

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* test: add trace funnel module tests

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: refactor handler and utils

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: add funnel validation while processing funnel steps

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* test: add more tests to utils

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: fix package naming

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: fix naming convention

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: update normalize funnel steps

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: added some improvements

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: optimize funnel creation by combining insert and update operations

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: fix error handling

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* feat: trace funnel state management

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: updated unit tests and mocks

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: review comments

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: minor fixes

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: update funnel migration number

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: review comments and some changes

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* fix: update modules

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

---------

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-06-02 07:00:49 +00:00
Aditya Singh
b6180f6957 fix: fix html escape and json string parsing in qb (#8039)
Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-06-02 06:42:02 +00:00
aniketio-ctrl
51d3ca16f7 fix(metric-explorer): case sensitivity in contains (#8103) 2025-06-02 04:35:32 +00:00
Vibhu Pandey
91cbd17275 feat(sharder): add simple and noop sharder (#8107) 2025-05-31 16:04:13 +05:30
aniketio-ctrl
68effaf232 chore: support for non-normalized metrics behind a feature flag (#7919)
feat(7294-services): added dot metrics boolean for services tab
2025-05-30 10:27:29 +00:00
Aditya Singh
c08d1bccaf FIX: Pipelines edit filter returns empty filter (#8055)
* fix: fix pipeline add and edit form flow

* test: update test cases

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2025-05-30 15:47:26 +05:30
Amlan Kumar Nandy
1d77780c70 feat: add views tab to metrics explorer (#8091) 2025-05-30 05:39:24 +00:00
primus-bot[bot]
80ded899c7 chore(release): bump to v0.85.3 (#8099)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-05-29 17:45:38 +05:30
Vikrant Gupta
4733af974e fix(license): return the active license even in case of suspended status (#8097)
* fix(license): return the active license even in case of suspended status

* fix(license): suspended check for side nav

* fix(license): suspended check for side nav

* fix(license): suspended check for side nav

* fix(license): suspended check for side nav
2025-05-29 12:05:27 +00:00
Amlan Kumar Nandy
1ab6c7177f chore: infra monitoring fixes (#8066) 2025-05-29 10:53:47 +07:00
primus-bot[bot]
c3123a4fa4 chore(release): bump to v0.85.2 (#8089)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-05-28 16:42:27 +00:00
SagarRajput-7
5a602bbeb7 fix: added safety checks for query data (#8088) 2025-05-28 21:41:24 +05:30
SagarRajput-7
f487f088bd Revert "feat: improved the alert rules list search functionality" (#8085)
* Revert "feat: improved the alert rules list search functionality (#8075)"

This reverts commit bec52c3d3e.

* feat: added search capability for labels

* feat: added test cases
2025-05-28 21:24:26 +05:30
Vikrant Gupta
1cb01e8dd2 fix(saml): do not fetch the claims and use orgID from domain (#8086)
* fix(saml): do not fetch the claims and use orgID from domain

* fix(saml): do not fetch the claims and use orgID from domain
2025-05-28 18:21:35 +05:30
primus-bot[bot]
595a500be4 chore(release): bump to v0.85.0 (#8078)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-05-28 12:17:24 +05:30
SagarRajput-7
bec52c3d3e feat: improved the alert rules list search functionality (#8075)
* feat: improved the alert rules list search functionality

* feat: improvements and tooltip added for more info

* feat: style improvement

* feat: style improvement

* feat: style improvement
2025-05-28 05:23:42 +00:00
Srikanth Chekuri
0a6a7ba729 chore: show migration info to all cloud regions (#8077) 2025-05-28 04:48:42 +00:00
Vishal Sharma
3d758d4358 feat: init pylon and deprecate intercom (#8059) 2025-05-28 07:11:11 +05:30
Vishal Sharma
9c8435119d feat: add appcues and remove customerio (#8045) 2025-05-27 19:49:55 +00:00
Ekansh Gupta
d732f8ba42 fix: updated the service name in exceptions filter (#8069)
* fix: updated the service name in exceptions filter

* fix: updated the service name in exceptions filter

* fix: updated the service name in exceptions filter
2025-05-27 17:42:08 +00:00
Vibhu Pandey
83b8eaf623 feat(pylon|appcues): add pylon and appcues (#8073) 2025-05-27 17:32:45 +00:00
Vikrant Gupta
ae7364f098 fix(login): fixed the interceptor to handle multiple failures (#8071)
* fix(login): fixed the interceptor to handle multiple failures

* fix(login): fixed the interceptor to handle multiple failures
2025-05-27 15:47:33 +00:00
Srikanth Chekuri
0ec1be1ddf chore: add querier base implementation (#8028) 2025-05-27 20:54:48 +05:30
Yunus M
93de4681a9 feat: oss - sso and api keys (#8068)
* feat: oss - sso and api keys

* feat: show to community and community enterprise

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-05-27 20:29:09 +05:30
Aditya Singh
69e94cbd38 Custom Quick Filters: Integration across other tabs (#8001)
* chore: added filters init

* chore: handle save and discard

* chore: search and api integrations

* feat: search on filters

* feat: style fix

* feat: style fix

* feat: signal to data source config

* feat: search styles

* feat: update drawer slide style

* chore: quick filters - added filters init (#7867)

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

* feat: no results state

* fix: minor fix

* feat: qf setting ui

* feat: add skeleton to dynamic qf

* fix: minor fix

* feat: announcement tooltip added

* feat: announcement tooltip added refactor

* feat: announcement tooltip styles

* feat: announcement tooltip integration

* fix: number vals in filter list

* feat: announcement tooltip show logic added

* feat: light mode styles

* feat: remove unwanted styles

* feat: remove filter disable when one filter added

* style: minor style

* fix: minor refactor

* test: added test cases

* feat: integrate custom quick filters in logs

* Custom quick filter: Other Filters | Search integration (#7939)

* chore: added filters init

* chore: handle save and discard

* chore: search and api integrations

* feat: search on filters

* feat: style fix

* feat: style fix

* feat: signal to data source config

* feat: search styles

* feat: update drawer slide style

* feat: no results state

* fix: minor fix

* Custom Quick Filters: UI fixes and Announcement Tooltip (#7950)

* feat: qf setting ui

* feat: add skeleton to dynamic qf

* fix: minor fix

* feat: announcement tooltip added

* feat: announcement tooltip added refactor

* feat: announcement tooltip styles

* feat: announcement tooltip integration

* fix: number vals in filter list

* feat: announcement tooltip show logic added

* feat: light mode styles

* feat: remove unwanted styles

* feat: remove filter disable when one filter added

* style: minor style

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

* feat: code refactor

* feat: debounce search

* feat: refactor

* feat: exceptions integrate

* feat: api monitoring qf integrate

* feat: handle query name show

* Custom quick filters: Tests and pr review comments (#7967)

* chore: added filters init

* chore: handle save and discard

* chore: search and api integrations

* feat: search on filters

* feat: style fix

* feat: style fix

* feat: signal to data source config

* feat: search styles

* feat: update drawer slide style

* feat: no results state

* fix: minor fix

* feat: qf setting ui

* feat: add skeleton to dynamic qf

* fix: minor fix

* feat: announcement tooltip added

* feat: announcement tooltip added refactor

* feat: announcement tooltip styles

* feat: announcement tooltip integration

* fix: number vals in filter list

* feat: announcement tooltip show logic added

* feat: light mode styles

* feat: remove unwanted styles

* feat: remove filter disable when one filter added

* style: minor style

* fix: minor refactor

* test: added test cases

* feat: integrate custom quick filters in logs

* feat: code refactor

* feat: debounce search

* feat: refactor

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

* feat: integrate traces data source to settings

* feat: duration nano traces filter in qf

* fix: allow only admins to change qf  settings

* feat: has error handling

* feat: fix existing tests

* feat: update test cases

* feat: update test cases

* feat: minor refactor

* feat: minor refactor

* feat: log quick filter settings changes

* feat: log quick filter settings changes

* feat: log quick filter settings changes

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-05-27 20:04:57 +05:30
Srikanth Chekuri
62810428d8 chore: add logs statement builder base (#8024) 2025-05-27 13:51:38 +00:00
Yunus M
b921a2280b Update pull_request_template.md (#8065) 2025-05-27 16:32:25 +05:30
SagarRajput-7
8990fb7a73 feat: allow custom color palette in panel for legends (#8063) 2025-05-27 16:19:35 +07:00
SagarRajput-7
aaeffae1bd feat: added enhancements to legends in panel (#8035)
* feat: added enhancements to legends in panel

* feat: added option for right side legends

* feat: created the legend marker as checkboxes

* feat: removed histogram and pie from enhanced legends

* feat: row num adjustment

* feat: added graph visibility in panel edit mode also

* feat: alignment and fixes

* feat: added test cases
2025-05-27 13:50:40 +05:30
Vikrant Gupta
d1d7da6c9b chore(preference): add sidenav pinned preference (#8062) 2025-05-27 13:29:31 +05:30
Piyush Singariya
28a01bf042 feat: Introducing DynamoDB integration (#8012)
* feat: introducing DynamoDB integration

* fix: allow non-expirable API key

* fix: clean up pat to API key middleware

* fix: address comments

* fix: update response of create api key

* feat: adding dashboard

* fix: adding dynamodb icon

* Update pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/assets/dashboards/overview.json

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

---------

Co-authored-by: nityanandagohain <nityanandagohain@gmail.com>
Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
2025-05-27 07:18:20 +00:00
SagarRajput-7
fb1f320346 feat: added custom stepIntervals to bar chart for better visibility (#8023)
* feat: added custom stepIntervals to bar chart for better visibility

* feat: added test cases

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-05-27 11:48:38 +05:30
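
This PR, like the later "chore: fix step interval auto" commit near the top of the list, deals with choosing a chart step interval automatically. A minimal sketch of one common approach is below, assuming an illustrative list of candidate steps and a 300-point budget; it is not the repository's actual logic.

```go
package main

import (
	"fmt"
	"time"
)

// niceSteps is an illustrative ladder of candidate step intervals.
var niceSteps = []time.Duration{
	15 * time.Second, 30 * time.Second,
	time.Minute, 5 * time.Minute, 15 * time.Minute, 30 * time.Minute,
	time.Hour, 3 * time.Hour, 6 * time.Hour, 12 * time.Hour, 24 * time.Hour,
}

// autoStepInterval picks the smallest candidate step that keeps the number of
// points for the [start, end] window within maxPoints.
func autoStepInterval(start, end time.Time, maxPoints int) time.Duration {
	window := end.Sub(start)
	for _, step := range niceSteps {
		if window/step <= time.Duration(maxPoints) {
			return step
		}
	}
	return niceSteps[len(niceSteps)-1]
}

func main() {
	end := time.Now()
	start := end.Add(-6 * time.Hour)
	// A 6h window with a 300-point budget resolves to a 5m step here.
	fmt.Println("auto step:", autoStepInterval(start, end, 300))
}
```

Bar charts in particular benefit from a coarser, fixed step so individual bars stay wide enough to be visible.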
Shaheer Kochai
4d484b225f feat(error): build generic error component (#8038)
* feat: build generic error component

* chore: test error component in DataSourceInfo component

* feat: get version from API + minor improvements

* feat: enhance error notifications with ErrorV2 support and integrate ErrorModal

* feat: implement ErrorModalContext + directly display error modal in create channel if request fails

* chore: write tests for the generic error modal

* chore: add optional chaining + __blank to _blank

* test: add trigger component tests for ErrorModal component

* test: fix the failing tests by wrapping in ErrorModalProvider

* chore: address review comments

* test: fix the failing tests

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-05-26 22:20:24 +05:30
Amlan Kumar Nandy
3a396602a8 chore: persist the state selection in the URL for all entities and filters in Infra Monitoring (#7991) 2025-05-26 06:58:55 +00:00
Amlan Kumar Nandy
650cf81329 chore: metrics explorer minor fixes (#8042) 2025-05-26 06:43:06 +00:00
Amlan Kumar Nandy
cdbf23d053 chore: infra monitoring improvements (#8002) 2025-05-26 06:03:01 +00:00
Amlan Kumar Nandy
3ca3db2567 chore: add to alerts/dashboard improvements for one chart per query mode in metrics explorer (#8014) 2025-05-26 05:45:21 +00:00
Srikanth Chekuri
0925ae73a9 chore: add traces statement builder base (#8020) 2025-05-25 22:14:47 +05:30
Vikrant Gupta
cffa511cf3 feat(user): support sso and api key (#8030)
* feat(user): support sso and api key

* feat(user): remove ee references from pkg

* feat(user): remove ee references from pkg

* feat(user): related client changes

* feat(user): remove the sso available check

* feat(user): fix go tests

* feat(user): move the middleware from ee to pkg

* feat(user): some more error code cleanup

* feat(user): some more error code cleanup

* feat(user): skip flaky UI tests

* feat(user): some more error code cleanup
2025-05-25 14:16:42 +05:30
Vibhu Pandey
2ba693f040 chore(linter): add more linters and deprecate zap (#8034)
* chore(linter): add more linters and deprecate zap

* chore(linter): add more linters and deprecate zap

* chore(linter): add more linters and deprecate zap

* chore(linter): add more linters and deprecate zap
2025-05-25 11:40:39 +05:30
Vibhu Pandey
403630ad31 feat(signoz): compile time check for dependency injection (#8033) 2025-05-24 23:53:54 +05:30
Vibhu Pandey
93ca3fee33 fix(quickfilter): fix injection of quickfilter (#8031)
## 📄 Summary

fix injection of quickfilter
2025-05-24 22:00:12 +05:30
Vikrant Gupta
b1c78c2f12 feat(license): build license service (#7969)
* feat(license): base setup for license service

* feat(license): delete old manager and import to new

* feat(license): deal with features

* feat(license): complete the license service in ee

* feat(license): add sqlmigration for licenses

* feat(license): remove feature flags

* feat(license): refactor into provider pattern

* feat(license): remove the ff lookup interface

* feat(license): add logging to the validator functions

* feat(license): implement features for OSS build

* feat(license): fix the OSS build

* feat(license): lets blast frontend

* feat(license): fix the EE OSS build without license

* feat(license): remove the hardcoded testing configs

* feat(license): upgrade migration to 34

* feat(license): better naming and structure

* feat(license): better naming and structure

* feat(license): better naming and structure

* feat(license): better naming and structure

* feat(license): better naming and structure

* feat(license): better naming and structure

* feat(license): better naming and structure

* feat(license): integration tests

* feat(license): integration tests

* feat(license): refactor frontend

* feat(license): make frontend api structure changes

* feat(license): fix integration tests

* feat(license): revert hardcoded configs

* feat(license): fix integration tests

* feat(license): address review comments

* feat(license): address review comments

* feat(license): address review comments

* feat(license): address review comments

* feat(license): update migration

* feat(license): update migration

* feat(license): update migration

* feat(license): fixed logging

* feat(license): use the unmarshaller for postable subscription

* feat(license): correct the error message

* feat(license): fix license test

* feat(license): fix lint issues

* feat(user): do not kill the service if upstream is down
2025-05-24 19:14:29 +05:30
Piyush Singariya
7feb94e5eb feat: Introducing SNS integration (AWS) (#7996)
* feat: introducing SNS integrations

* fix: panel title updated
2025-05-23 11:38:27 +00:00
Vibhu Pandey
47dc2b98f1 chore(go-lint): enable go-lint (#8022) 2025-05-23 15:52:58 +05:30
Srikanth Chekuri
f4dc2a8fb8 chore: remove telemetrytests package and add generic type for aggregation (#8019) 2025-05-23 09:29:15 +00:00
Nityananda Gohain
77d1492aac fix: allow non-expirable API key (#7327)
* fix: allow non-expirable API key

* fix: clean up pat to API key middleware

* fix: address comments

* fix: update response of create api key

* fix: gettable struct

* fix(api-key): frontend changes for api key refactor

---------

Co-authored-by: vikrantgupta25 <vikrant@signoz.io>
2025-05-23 07:47:20 +00:00
Piyush Singariya
6090a6be6e feat: ElastiCache AWS Integration (#7923)
* feat: adding elastiCache

* chore: removing AWS unnecessary prefix

* chore: update in units in 2 panels
2025-05-23 06:07:29 +00:00
Srikanth Chekuri
eabddf87d2 fix: time shift not working with fill gaps (#7999) 2025-05-23 09:33:47 +05:30
Vibhu Pandey
9e13245d1b feat(emailing): add smtp and emailing (#7993)
* feat(emailing): initial commit for emailing

* feat(emailing): implement emailing

* test(integration): fix tests

* fix(emailing): fix directory path

* fix(emailing): fix email template path

* fix(emailing): copy from go-gomail

* fix(emailing): copy from go-gomail

* fix(emailing): fix smtp bugs

* test(integration): fix tests

* feat(emailing): let missing templates passthrough

* feat(emailing): let missing templates passthrough

* feat(smtp): refactor and beautify

* test(integration): fix tests

* docs(smtp): fix incorrect grammar

* feat(smtp): add to header

* feat(smtp): remove comments

* chore(smtp): address comments

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-05-22 18:31:52 +00:00
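
The commits above mention copying from go-gomail for the SMTP-backed emailing service. A minimal sketch of sending a message with gopkg.in/gomail.v2 is shown below; the host, credentials, and body are placeholders, and this is not SigNoz's emailing package.

```go
package main

import (
	"log"

	gomail "gopkg.in/gomail.v2"
)

func main() {
	// Compose a simple HTML message; headers and body here are illustrative.
	m := gomail.NewMessage()
	m.SetHeader("From", "noreply@example.com")
	m.SetHeader("To", "user@example.com")
	m.SetHeader("Subject", "You have been invited")
	m.SetBody("text/html", "<p>Click the link to accept your invite.</p>")

	// Dial the SMTP server and send; host, port, and credentials are placeholders.
	d := gomail.NewDialer("smtp.example.com", 587, "smtp-user", "smtp-password")
	if err := d.DialAndSend(m); err != nil {
		log.Fatalf("send failed: %v", err)
	}
}
```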
Nityananda Gohain
a1c7a948fa fix: add error message in login (#8010)
* fix: add error message in login

* fix: use newf
2025-05-22 16:23:54 +00:00
Vikrant Gupta
0bfe53a93c chore(cache): use sensible defaults for caching (#8011)
* chore(cache): use sensible defaults for caching

* chore(cache): initialize the cache for http handlers

* chore(cache): revert cache DI
2025-05-22 11:46:09 +00:00
Shaheer Kochai
16140991be refactor: update logs explorer pagination logic (#7010)
* refactor: pagination changes in query range custom hook

* refactor: handle pagination changes in k8s logs and host logs

* refactor: logs panel component pagination changes

* fix: handle resetting offset on changing page size

* fix: optimize context log rendering and prevent duplicate logs

* chore: revert pagination handling changes for logs context view

* chore: remove unused function and prop

* refactor: handle the updated pagination logic in k8s entity events

* refactor: refactor queryPayload generation in logs pagination custom hook

* fix: remove handling for last log timestamp being sent with query_range

* chore: remove the unnecessary last log related changes from LogsExplorerViews

* Revert "fix: optimize context log rendering and prevent duplicate logs"

This reverts commit ad9ef188651e5106cbc84fe40fc36061c2b9fd40.

* fix: prevent recalculating the start/end timestamps while fetching next/prev pages

* chore: logs explorer pagination tests

* fix: rewrite test and mock scroll

* chore: use real timers to detect the start/end timestamps mismatch + enhance tests

* refactor: extract filters and order by into a separate function

* chore: add tests for logs context pagination

* chore: write tests for host logs pagination

* chore: overall improvements to host logs tests

* chore: k8s entity logs pagination tests

* chore: reset capturedQueryRangePayloads in beforeEach

* chore: dashboard logs panel component pagination tests

* fix: fix the breaking logs pagination test by change /v3 to /v4

* chore: remove the unused prop from HostMetricsLogs
2025-05-22 09:42:51 +00:00
Nityananda Gohain
aadf2a3ac7 fix: logs window based pagination to pageSize offset instead of using… (#6830)
* fix: logs window based pagination to pageSize offset instead of using id filter

* fix: add support for asc

* fix: remove unwanted code

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-05-22 15:05:41 +05:30
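
The fix above moves log pagination from an id-based filter to pageSize/offset within the selected time window. The contrast is sketched below with hypothetical table and column names; it is not the actual query builder output.

```go
package main

import "fmt"

// cursorQuery illustrates the older style: page forward by filtering on the
// id of the last log seen within a fixed [start, end) window.
func cursorQuery(lastID string, pageSize int) string {
	return fmt.Sprintf(
		"SELECT * FROM logs WHERE timestamp >= ? AND timestamp < ? AND id < '%s' ORDER BY timestamp DESC LIMIT %d",
		lastID, pageSize)
}

// offsetQuery illustrates window-based pagination: keep the window fixed and
// advance with LIMIT/OFFSET derived from the page size and page number.
func offsetQuery(pageSize, page int) string {
	return fmt.Sprintf(
		"SELECT * FROM logs WHERE timestamp >= ? AND timestamp < ? ORDER BY timestamp DESC LIMIT %d OFFSET %d",
		pageSize, pageSize*page)
}

func main() {
	fmt.Println(cursorQuery("01HZXAMPLEID", 100))
	fmt.Println(offsetQuery(100, 2)) // third page of 100 logs in the same window
}
```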
Nityananda Gohain
824302be38 chore: update API key (#7959)
* chore: update API key

* fix: delete api key on user delete

* fix: migration

* fix: api

* fix: address comments

* fix: address comments

* fix: update structs

* fix: minor changes

* fix: error message

* fix: address comments

* fix: integration tests

* fix: minor issues

* fix: integration tests
2025-05-21 17:21:19 +05:30
primus-bot[bot]
8d4c4dc5f2 chore(release): bump to v0.84.1 (#7998)
#### Summary
 - Release SigNoz v0.84.1
2025-05-21 13:49:38 +05:30
Ekansh Gupta
91fae8c0f3 fix: changed the get request access from admin to view (#7997) 2025-05-21 08:01:34 +00:00
primus-bot[bot]
4bbe8c0ee7 chore(release): bump to v0.84.0 (#7995)
#### Summary
 - Release SigNoz v0.84.0
2025-05-21 12:18:14 +05:30
Nityananda Gohain
0f7d226b9b Fix: exists clause in logs QB (#7987)
* fix: exists in logs QB

* fix: exists in logs json qb
2025-05-21 10:22:42 +05:30
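
As a rough illustration of what an "exists" clause means for log attributes stored in ClickHouse Map columns, a filter can be rendered as a key-presence check rather than a value comparison. The column name below is an assumption, not the actual schema used by the logs query builder.

```go
package main

import "fmt"

// existsCondition renders an exists / not-exists operator for a log attribute
// key against a hypothetical Map column using ClickHouse's mapContains.
func existsCondition(key string, negate bool) string {
	cond := fmt.Sprintf("mapContains(attributes_string, '%s')", key)
	if negate {
		return "NOT " + cond
	}
	return cond
}

func main() {
	fmt.Println(existsCondition("http.method", false)) // mapContains(attributes_string, 'http.method')
	fmt.Println(existsCondition("http.method", true))  // NOT mapContains(attributes_string, 'http.method')
}
```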
Shaheer Kochai
e03342e001 feat: add support for request integrations in aws integrations page (#7968)
* feat: add support for request integrations in aws integrations page

* chore: write test for request aws integrations
2025-05-20 21:39:40 +05:30
Shaheer Kochai
57f96574ff fix: trace funnel bugfixes and improvements (#7922)
* fix: display the inter-step latency type in step metrics table

* chore: send latency type with n+1th step + make latency type optional

* fix: fetch and format get funnel steps overview metrics

* chore: remove dev env check

* fix: overall fixes

* fix: don't cache validate query + trigger validate on changing error and where clause as well

* fix: display the latency type in step overview metrics table + p99_latency to latency

* chore: revert dev env check removal (remove after BE changes are merged)

* fix: adjust create API response

* chore: useLocalStorage custom hook

* feat: improve the run funnel flow

- for the initial fetch of funnel results, require the user to run the funnel
- subsequently change the run funnel button to a refresh button
- display loading state while any of the funnel results APIs are being fetched

* fix: fix the issue of add step details breaking

* fix: refetch funnel details on rename success

* fix: redirect 'learn more' to trace funnels docs

* fix: handle potential undefined step in latency type calculation

* fix: properly handle incomplete steps state

* fix: fix the edge case of stale validation state on transitioning from invalid steps to valid steps

* fix: remove the side effect from render and move to useEffect
2025-05-20 19:44:05 +04:30
Yunus M
354e4b4b8f feat: show pricing update banner in home page (#7990)
* feat: show pricing update banner in  home page
2025-05-20 19:40:01 +05:30
Shaheer Kochai
d7102f69a9 feat: add support for S3 region buckets syncing (#7874)
* feat: add support for S3 region buckets syncing

* fix: hide the dropdown icon and not found dropdown for s3 buckets selector

* fix: display s3 buckets selector only for s3 sync service

* chore: tests for configure service s3 sync

---------

Co-authored-by: Piyush Singariya <piyushsingariya@gmail.com>
2025-05-20 13:32:32 +00:00
Aditya Singh
040c45b144 Custom Quick Filters: Logs (#7986)
* chore: quick filters - added filters init (#7867)

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

* Custom quick filter: Other Filters | Search integration (#7939)

* chore: added filters init

* chore: handle save and discard

* chore: search and api integrations

* feat: search on filters

* feat: style fix

* feat: style fix

* feat: signal to data source config

* feat: search styles

* feat: update drawer slide style

* feat: no results state

* fix: minor fix

* Custom Quick Filters: UI fixes and Announcement Tooltip (#7950)

* feat: qf setting ui

* feat: add skeleton to dynamic qf

* fix: minor fix

* feat: announcement tooltip added

* feat: announcement tooltip added refactor

* feat: announcement tooltip styles

* feat: announcement tooltip integration

* fix: number vals in filter list

* feat: announcement tooltip show logic added

* feat: light mode styles

* feat: remove unwanted styles

* feat: remove filter disable when one filter added

* style: minor style

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

* Custom quick filters: Tests and pr review comments (#7967)

* chore: added filters init

* chore: handle save and discard

* chore: search and api integrations

* feat: search on filters

* feat: style fix

* feat: style fix

* feat: signal to data source config

* feat: search styles

* feat: update drawer slide style

* feat: no results state

* fix: minor fix

* feat: qf setting ui

* feat: add skeleton to dynamic qf

* fix: minor fix

* feat: announcement tooltip added

* feat: announcement tooltip added refactor

* feat: announcement tooltip styles

* feat: announcement tooltip integration

* fix: number vals in filter list

* feat: announcement tooltip show logic added

* feat: light mode styles

* feat: remove unwanted styles

* feat: remove filter disable when one filter added

* style: minor style

* fix: minor refactor

* test: added test cases

* feat: integrate custom quick filters in logs

* feat: code refactor

* feat: debounce search

* feat: refactor

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-05-20 12:39:49 +00:00
Sahil Khan
207d7602ab chore: changed name of api monitoring from third party apis to external apis (#7989) 2025-05-20 17:24:09 +05:30
Srikanth Chekuri
018346ca18 chore: add aggregation expr rewriter and exhaustive tests for logs filter (#7972) 2025-05-20 16:54:34 +05:30
Ekansh Gupta
7290ab3602 feat: added entry point operations api for the service overview page (#7957)
* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2025-05-20 05:58:40 +00:00
Shaheer Kochai
88239cec4d fix: AWS integration bugfixes (#7886)
* fix(AccountSettingsModal): add region deselect functionality to region selector

* fix(AWS integration): redirect help button to aws integration documentation

* style(Header): update button color on hover for improved visibility
2025-05-20 03:48:11 +00:00
SagarRajput-7
10ba0e6b4f feat: added error preview and warning text with info on cyclic dependency detected (#7893)
* feat: added error preview and warning text with info on cyclic dependency detected

* feat: removed console.log

* feat: restricted save when cycle found

* feat: added test cases for variableitem flow and update test cases

* feat: updated test cases

* feat: corrected the recent resolved title text
2025-05-19 17:28:26 +07:00
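
The PR above surfaces a warning when a cyclic dependency between dashboard variables is detected and blocks saving in that case. A minimal sketch of such a check, assuming a hypothetical variable-to-referenced-variables map, is a depth-first search for a back edge:

```go
package main

import "fmt"

// hasCycle reports whether the dependency graph (variable -> variables it
// references) contains a cycle, using the classic three-state DFS.
func hasCycle(deps map[string][]string) bool {
	const (
		unvisited = iota
		inProgress
		done
	)
	state := map[string]int{}
	var visit func(string) bool
	visit = func(v string) bool {
		switch state[v] {
		case inProgress:
			return true // back edge => cycle
		case done:
			return false
		}
		state[v] = inProgress
		for _, dep := range deps[v] {
			if visit(dep) {
				return true
			}
		}
		state[v] = done
		return false
	}
	for v := range deps {
		if visit(v) {
			return true
		}
	}
	return false
}

func main() {
	deps := map[string][]string{
		"namespace": {"cluster"},
		"pod":       {"namespace"},
		"cluster":   {"pod"}, // cluster -> pod -> namespace -> cluster
	}
	fmt.Println("cycle detected:", hasCycle(deps)) // true
}
```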
Shaheer Kochai
88e1e42bf0 chore: add analytics events for trace funnels (#7638) 2025-05-19 09:22:35 +00:00
Piyush Singariya
a0d896557e feat: introducing ECS + SQS integration (#7840)
* feat: introducing S3 sync as AWS integration

* chore: trying restructuring

* chore: in progress

* chore: restructuring looks ok

* chore: minor fix in tests

* feat: integration with Agent check-in complete

* chore: minor change in validation

* fix: removing validation and altering overview

* fix: aftermath of merge conflicts

* test: updating agent version

* test: updating agent version

* test: updating agent version 3

* test: updating agent version 11

* test: updating agent version 14

* chore: replace with newer error utility

* feat: introducing ECS integration (AWS Integrations)

* chore: adding metrics to ecs integration

* feat: adding base SQS files

* feat: adding metrics for SQS

* feat: adding ECS dashboard

* feat: adding dashboards for SQS

* fix: adding SentMessageSize metrics in SQS

* fix: for calculating log connection status for S3 Sync

* fix: adding check for svc type, fixing cw logs integration.json S3 Sync

* fix: in compiledCollectionStrat for servicetype s3sync

* test: testing agent version

* fix: change in data collected for S3 Sync logs

* test: testing agent 19

* chore: replace fmt.Errorf

* fix: tests and adding validation in S3 buckets

* fix: test TestAvailableServices

* chore: replacing fmt.Errorf

* chore: updating the agent version to latest

* chore: reverting some changes

* fix: remove services from Variables

* chore: change overview.png
2025-05-19 14:17:52 +05:30
Amlan Kumar Nandy
2b28c5f2e2 chore: persist the filters and time selection, modal open state in summary view (#7942) 2025-05-19 11:54:05 +07:00
Vibhu Pandey
6dbcc5fb9d fix(analytics): fix heartbeat event (#7975) 2025-05-19 08:04:33 +05:30
Vikrant Gupta
175e9a4c5e fix(apm): update the apdex to latest response structure (#7966)
* fix(apm): update the apdex to latest response structure

* fix(apm): update the apdex to latest response structure
2025-05-17 16:23:11 +05:30
SagarRajput-7
33506cafce fix: cover the title as reactNode case for useGetResolvedText (#7965)
* fix: cover the title as reactNode case for useResolvedText

* fix: added more test cases
2025-05-16 22:15:19 +00:00
SagarRajput-7
e34e61a20d feat: removed the allow-clear icon when the ALL option is selected (#7894) 2025-05-17 00:48:41 +05:30
Vibhu Pandey
da084b4686 chore(savedview|apdex|dashboard): create modules and handlers (#7960)
* chore(savedview): refactor into module and handler

* chore(rule): move telemetry inside telemetry

* chore(apdex): refactor apdex and delete dao

* chore(dashboard): create a dashboard module

* chore(ee): get rid of the init nonsense

* chore(dashboard): fix err and apierror confusion

* chore: address comments
2025-05-17 00:15:00 +05:30
Srikanth Chekuri
6821efeb99 chore: move visitor impl out of generated files (#7956) 2025-05-16 14:47:23 +00:00
Srikanth Chekuri
c5d5c84a0e chore: add fieldmapper implementation (#7955) 2025-05-16 20:09:57 +05:30
SagarRajput-7
9c298e83a5 feat: added user role restriction on crud for planned downtime feat (#7896) 2025-05-16 16:59:52 +05:30
SagarRajput-7
9383b6576d feat: added variable description icon and details on tooltip (#7897) 2025-05-16 16:46:19 +05:30
SagarRajput-7
f10f7a806f feat: suggest and allow variables in panel title (#7898)
* feat: suggest and allow variables in panel title

* feat: refined the logic for suggestion and addition with $

* feat: added logic for panel title resolved string and added test cases

* feat: added support to full view
2025-05-16 16:35:11 +05:30
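
The feature above resolves dashboard variables referenced with a "$" prefix inside panel titles. A small sketch of that substitution follows; the longest-name-first replacement rule is an illustrative assumption, not the PR's exact logic.

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// resolvePanelTitle replaces $variable references in a panel title with their
// selected values. Longer names are replaced first so "$service_name" is not
// clobbered by a shorter "$service" variable.
func resolvePanelTitle(title string, vars map[string]string) string {
	names := make([]string, 0, len(vars))
	for name := range vars {
		names = append(names, name)
	}
	sort.Slice(names, func(i, j int) bool { return len(names[i]) > len(names[j]) })
	for _, name := range names {
		title = strings.ReplaceAll(title, "$"+name, vars[name])
	}
	return title
}

func main() {
	title := "Latency for $service in $namespace"
	fmt.Println(resolvePanelTitle(title, map[string]string{
		"service":   "checkout",
		"namespace": "prod",
	}))
	// Output: Latency for checkout in prod
}
```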
Srikanth Chekuri
03600f4d6f chore: add query builder types (#7940) 2025-05-16 00:00:01 +05:30
Srikanth Chekuri
9fbf111976 chore: less strict context for fetching field values (#7807) 2025-05-15 19:59:40 +05:30
Nityananda Gohain
b8dff86a56 fix: refresh token to access token (#7949)
* fix: fix refresh token to access token

* fix: update the if condition
2025-05-15 12:52:14 +05:30
Vikrant Gupta
f525647b40 chore(auth): refactor the client handlers in preparation for multi tenant login (#7902)
* chore: update auth

* chore: password changes

* chore: make changes in oss code

* chore: login

* chore: get to a running state

* fix: migration initial commit

* fix: signoz cloud intgtn tests

* fix: minor fixes

* chore: sso code fixed with org domain

* fix: tests

* fix: ee auth api's

* fix: changes in name

* fix: return user in login api

* fix: address comments

* fix: validate password

* fix: handle get domain by email properly

* fix: move authomain to usermodule

* fix: use displayname instead of hname

* fix: rename back endpoints

* fix: update telemetry

* fix: correct errors

* fix: test and fix the invite endpoints

* fix: delete all things related to user in store

* fix: address issues

* fix: ee delete invite

* fix: rename func

* fix: update user and update role

* fix: update role

* chore(api): update the api folder structure according to rest principles

* fix: login and invite changes

* chore(api): update the api folder structure according to rest principles

* chore(login): update the frontend according to the new APIs

* fix: return org name in users response

* chore(login): update the frontend according to the new APIs

* fix: update user role

* fix: nil check

* chore(login): update the frontend according to the new API

* fix: getinvite and update role

* fix: sso

* fix: getinvite use sso ctx

* fix: use correct sourceurl

* fix: getsourceurl from req payload

* chore(login): update the frontend according to the new API

* fix: update created_at

* fix: fix reset password

* chore(login): fixed reset password and bulk invites

* fix: sso signup and token password change

* fix: don't delete last admin

* fix: reset password and migration

* fix: migration

* chore(login): fix the unwanted throw statement and tsconfig

* fix: reset password for sso users

* fix: clean up invite

* chore(login): delete last admin user and reset password

* fix: migration

* fix: update claims and store code

* fix: use correct error

* fix: proper nil checks

* fix: make migration multitenant

* fix: address comments

* fix: minor fixes

* fix: test

* fix: rename reset password

* fix: set self registration only when sso enabled

* chore(auth): update the invite user API

* fix: integration tests

* fix: integration tests

* fix: integration tests

* fix: integration tests

* fix: integration tests

* fix: integration tests

* fix: integration tests

* chore(auth): update integration test

* fix: telemetry

---------

Co-authored-by: nityanandagohain <nityanandagohain@gmail.com>
2025-05-14 18:23:41 +00:00
Nityananda Gohain
0a2b7ca1d8 chore(auth): refactor the auth modules and handler in preparation for multi tenant login (#7778)
* chore: update auth

* chore: password changes

* chore: make changes in oss code

* chore: login

* chore: get to a running state

* fix: migration initial commit

* fix: signoz cloud intgtn tests

* fix: minor fixes

* chore: sso code fixed with org domain

* fix: tests

* fix: ee auth api's

* fix: changes in name

* fix: return user in login api

* fix: address comments

* fix: validate password

* fix: handle get domain by email properly

* fix: move authomain to usermodule

* fix: use displayname instead of hname

* fix: rename back endpoints

* fix: update telemetry

* fix: correct errors

* fix: test and fix the invite endpoints

* fix: delete all things related to user in store

* fix: address issues

* fix: ee delete invite

* fix: rename func

* fix: update user and update role

* fix: update role

* fix: login and invite changes

* fix: return org name in users response

* fix: update user role

* fix: nil check

* fix: getinvite and update role

* fix: sso

* fix: getinvite use sso ctx

* fix: use correct sourceurl

* fix: getsourceurl from req payload

* fix: update created_at

* fix: fix reset password

* fix: sso signup and token password change

* fix: don't delete last admin

* fix: reset password and migration

* fix: migration

* fix: reset password for sso users

* fix: clean up invite

* fix: migration

* fix: update claims and store code

* fix: use correct error

* fix: proper nil checks

* fix: make migration multitenant

* fix: address comments

* fix: minor fixes

* fix: test

* fix: rename reset password

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-05-14 23:12:55 +05:30
Sahil Khan
16938c6cc0 feat: added 10 minute time range for traces correlation in api monitoring (#7815)
* feat: added 10 minute time range for traces correlation in api monitoring

* feat: url sharing for domain details drawer w/o filters for endpoint stats

* feat: added endpoint details persistence

* feat: external services to api monitoring correlation - 0

* feat: added api correlations to other panels on external services

* fix: cosmetic fix

* feat: added tests for url sharing utils

* feat: minor cosmetic changes

* fix: changed traces correlation window from 10 minutes to 5 minutes

* fix: minor copy changes

* fix: pr comments

* fix: minor bug fix

* fix: pr comments improvements
2025-05-14 11:42:45 +00:00
Piyush Singariya
81b8f93177 chore: remove unnecessary dependency (#7938) 2025-05-14 15:40:25 +05:30
Vibhu Pandey
96cfb607d1 chore(go): add go-deps workflow (#7936)
* feat(go): add go-deps workflow

* chore(go): fix dependencies
2025-05-14 09:00:53 +00:00
primus-bot[bot]
f526e887cc chore(release): bump to v0.83.0 (#7931)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-05-14 12:18:04 +05:30
Piyush Singariya
03ab6e704b feat: S3 Sync (AWS Integrations) (#7718) 2025-05-14 05:12:41 +05:30
Vishal Sharma
9c0134da54 feat: user pilot reload (#7905)
* feat: user pilot reload

* feat: added test cases

---------

Co-authored-by: SagarRajput-7 <sagar@signoz.io>
2025-05-13 23:28:01 +05:30
Ekansh Gupta
175b059268 fix: quick filter dependency injection issue in CE (#7918)
* fix: quick filter dependency injection issue in CE

* fix: quick filter dependency injection issue in CE
2025-05-13 19:45:48 +05:30
Nageshbansal
dfca5b13c0 fix: OSS telemetry for the number of services (#7908)
Fixes the OSS telemetry for sending the `Number of services` events.
2025-05-13 16:18:59 +05:30
Aditya Singh
ad392e81ff Fix: Update query_range api from v3 to v4 (Logs and Traces) (#7906)
* fix: change query range api from v3 to v4

* test: update test cases

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-05-13 09:54:37 +00:00
Yunus M
92ceefccee Update pull_request_template.md (#7865) 2025-05-13 09:41:16 +00:00
Vikrant Gupta
9cc4e1b56f chore(frontend): update the api folder structure (#7901)
* chore(api): update the api folder structure according to rest principles

* chore(api): update the api folder structure according to rest principles
2025-05-12 18:14:58 +05:30
Ekansh Gupta
3758ee7451 fix: changed the keys in the default quick filters to actual keys in … (#7863)
* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys

* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys

* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys

* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys

* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys
2025-05-12 16:20:47 +05:30
Vibhu Pandey
02b605d109 feat(analytics): add analytics package (#7808)
- add analytics package
2025-05-12 14:32:13 +05:30
Amlan Kumar Nandy
eb86aabf3e chore: improvements to the all attributes section in metric details (#7879) 2025-05-12 05:24:02 +00:00
Amlan Kumar Nandy
8810693bda chore: persist one chart per query toggle across refreshes and exports (#7884) 2025-05-12 05:16:13 +00:00
Shaheer Kochai
6334e09a60 feat: Funnel Details Page Base Structure (#7364)
* feat: funnels list page basic UI

* feat: get funnels list data from mock API, and handle data, loading and empty states

* feat: implement funnel rename

* chore: move useFunnels to hooks/TracesFunnels

* feat: create traces funnels details basic page + funnel -> details redirection

* fix: properly display created at in funnels list item + preventDefault

* chore: add tab bar to trace funnel details page

* chore: traces funnel details page overall skeleton

* chore: traces funnel details results skeleton

* fix: hide step count for add button only

* feat: funnel details page steps and configuration (#7424)

* chore: add a new tab for traces funnels

* feat: funnels list page basic UI

* feat: get funnels list data from mock API, and handle data, loading and empty states

* feat: implement funnel rename

* refactor: overall improvements

* feat: implement sorting in traces funnels list page

* feat: add sort column key and order to url params

* chore: move useFunnels to hooks/TracesFunnels

* feat: implement traces funnels search and refactor search and sort by extracting to custom hooks

* chore: overall improvements to rename trace funnel modal

* chore: make the rename input auto-focusable

* feat: handle create funnel modal

* feat: delete funnel modal and functionality

* fix: fix the layout shift in funnel item caused by getContainer={false}

* chore: overall improvements and use live api in traces funnels

* feat: create traces funnels details basic page + funnel -> details redirection

* fix: funnels traces light mode UI

* fix: properly display created at in funnels list item + preventDefault

* refactor: extract FunnelItemPopover into a separate component

* chore: hide funnel tab from traces explorer

* chore: add check to display trace funnels tab only in dev environment

* chore: improve funnels modals light mode

* chore: overall improvements

* fix: properly pass funnel details link

* chore: address PR review changes

* chore: add tab bar to trace funnel details page

* feat: funnel step UI with service, span, and where filters

* feat: build radio button component

* refactor: use the SignozRadioButton in funnel results -> step transitions radio buttons

* feat: inter step config (i.e. latency type) UI

* chore: improve steps header styles by removing divider width

* feat: funnel steps title, description, popover UI + pass data from API

* chore: update FilterSelect component to conditionally add url params and accept on change

* fix: fix funnel step where clause and update the state variables for filters

* chore: add support for isMultiple and fix the type in FilterSelect

* feat: centralize the steps state management in StepsContent

* fix: move steps state up + pass steps count from state

* feat: implement auto save for updating the steps whenever any step changes

* feat: implement auto save for validating steps if service name or span names change

* feat: implement funnel step removal

* feat: implement add details modal for funnel steps

* fix: fix the overflowing time range picker

* feat: funnel details empty state

* feat: add support for saving funnel description

* chore: overall improvements

* fix: fix the light mode styles

* fix: fix the failing build + broken search UI

* refactor: remove the reference of useLocation from traceFunnel item in TraceModulePage constant

* fix: fix the issue of update steps getting triggered on initial render if we have filters

* fix: fix the edge case of stale state causing filters to be re-added after removing

* feat: funnel details page results (#7451)

* feat: funnel metrics table component

* feat: funnel metrics and steps transition metrics components UI

* feat: funnel table component

* feat: slowest traces and traces with error components

* fix: overall light theme fixes

* fix: fix the warning

* chore: add empty and loading states to FunnelMetricsTable

* feat: get overall funnel metrics from the API

* fix: fix the empty state of funnel metrics table

* feat: get data for slowest traces and traces with errors

* fix: link trace id to trace details page

* fix: get data for funnel step transition metrics and refactor the existing data fetching logic

* refactor: add funnel context + overall refactoring and optimizations

* refactor: move steps states to funnel context + handle empty and run funnel disabled states

* feat: handle run funnel

* fix: improve empty state

* chore: rename isValidateStepsMutationLoading -> isValidateStepsLoading

* chore: improve query key

* fix: display loading state if funnel results are fetching

* refactor: move steps validation fetching and states to the context API

* fix: display loading state in funnel results while steps validation is fetching

* fix: call validate steps API only on changing the service name or span name of any step

* refactor: move validateStepsQuery key out of useEffect and update the dependencies

* chore: centralize hasIncompleteSteps and run validate only if steps have service and spans

* fix: handle all empty fields state + overall improvements

* fix: handle long where query tags

* feat: build the funnel result graph component

* feat: build the funnel result graph component

* feat: handle loading, error, empty states in funnel graph

* fix: don't display change percentage if % is 0

* refactor: overall improvements

* feat: get funnel steps graph data from API + move logic to custom hook

* fix: improve empty and error states

* fix: handle funnel graph legends width using css

* fix: redirect to trace funnels list page on clicking delete from funnel details

* fix: update the query cache while updating steps

* fix: implement debounced search for funnel list search

* fix: refetch steps graph data query on clicking run funnel / sync button

* fix: improve the step footer spacing

* chore: add gap between divider to inter-step-config

* fix: handle loading state while fetching

* feat: add span to funnel flow (from trace details page) (#7477)

* chore: display add to funnel icon on hovering any span in trace details page

* chore: add className to funnel item actions popover

* feat: add funnels tab to trace details v2 tab bar

* feat: add span to funnel flow

* chore: hide actions popover button from funnel item in span -> funnel flows

* chore: improve the funnel details UI in add span to funnel modal

* fix: display empty state + don't redirect to funnels list on delete success + overall improvements

* chore: add null check

* fix: display add to funnel button based on feature flag

* fix: display funnels tab in trace details based on feature flag

* fix: remove maxTagCount

* feat: change ms to ns

* chore: address review comments

* chore: remove feature flag and display trace funnels only in dev environment

* fix: handle restoring steps if updating funnel steps fail

* refactor: update the get and delete funnel endpoints to adjust to the BE changes (#7697)

* refactor: address review comments

* fix: handle nested funnel response structure to fix missing funnel_id… (#7740)

* fix: handle nested funnel response structure to fix missing funnel_id in updates

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: remove console.log

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: revert explicitly passing funnelId to updateFunnelSteps

---------

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
Co-authored-by: ahmadshaheer <ashaheerki@gmail.com>

* chore: fix api endpoint

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* refactor: incorporate the recent funnel details API changes (#7760)

* chore: trace funnels feedback changes (#7772)

* chore: change the copy from x traces to valid traces found / not found

* chore: add open funnel button in add span to funnel modal

* feat: display buttons for adding step details and funnel description + copy to clipboard

* feat: highlight funnel graph column based on selected (total / error span) from the legend items

* chore: trace funnel changes (#7780)

* refactor: handle funnels list search on frontend

* refactor: use funnel steps update API for adding / updating step title and description

* feat: allow selecting user's typed option in trace funnel service and span name dropdowns

* chore: properly render the -> between steps in funnel results

* fix: sync funnel step name with add details modal text fields

---------

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
Co-authored-by: Yunus M <myounis.ar@live.com>
Co-authored-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-05-12 10:16:26 +05:30
Vikrant Gupta
1d379931b2 chore(integration-test): remove the outdated-setup (#7887)
* chore(integration-test): remove the outdated-setup

* chore(integration-test): remove the outdated-setup
2025-05-11 17:10:55 +05:30
dependabot[bot]
815a6d13c5 chore(deps): bump @babel/helpers from 7.21.0 to 7.26.10 in /frontend (#7272)
Bumps [@babel/helpers](https://github.com/babel/babel/tree/HEAD/packages/babel-helpers) from 7.21.0 to 7.26.10.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.26.10/packages/babel-helpers)

---
updated-dependencies:
- dependency-name: "@babel/helpers"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-11 16:38:45 +05:30
Vibhu Pandey
59af9d1c2f chore(go): upgrade to 1.23 (#7885) 2025-05-11 14:09:24 +05:30
dependabot[bot]
19d24da147 chore(deps): bump @babel/runtime from 7.21.0 to 7.26.10 in /frontend (#7289)
Bumps [@babel/runtime](https://github.com/babel/babel/tree/HEAD/packages/babel-runtime) from 7.21.0 to 7.26.10.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.26.10/packages/babel-runtime)

---
updated-dependencies:
- dependency-name: "@babel/runtime"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-09 23:28:01 +05:30
Vibhu Pandey
cd1c9ddf11 fix(dashboards): fix lock/unlock functionality (#7880)
Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-05-09 16:44:57 +00:00
Sahil Khan
7ff3286c9c fix: added prismjs 1.30.0 in resolutions (#7872) 2025-05-09 21:47:22 +05:30
Amlan Kumar Nandy
27830742f9 fix: resolve typescript issues in metrics explorer (#7878) 2025-05-09 18:55:43 +05:30
Vikrant Gupta
39f07e7477 chore(error): update the channels module to use the new api errors (#7856)
* chore(error): integrate new errors for channels create and test

* chore(error): update all the channel APIs

* chore(error): update the edit org http issue

* chore(error): fix create channel test

* chore(error): fix create channel test

* chore(error): fix create channel test

* chore(error): fix create channel test

* chore(error): remove console logs
2025-05-09 07:56:47 +00:00
Amlan Kumar Nandy
0ab50da7b0 chore: change graph list layout to grid in explorer view (#7852) 2025-05-09 05:38:51 +00:00
Amlan Kumar Nandy
c03541cd6c chore: improve error handling and loading states in summary view of metrics explorer (#7862) 2025-05-09 04:41:41 +00:00
Amlan Kumar Nandy
727a039eb9 fix: fix 'open in explorer' functionality in metrics explorer (#7873) 2025-05-09 11:34:32 +07:00
Yunus M
c7db85f44c Update CODEOWNERS (#7861) 2025-05-08 08:22:41 +00:00
Vikrant Gupta
08d9a74055 fix(api-key): make the expires in human readable in api keys (#7864)
* fix(api-key): human readable expires in

* fix(api-key): human readable expires in
2025-05-08 08:14:02 +00:00
Ekansh Gupta
503e4cdf00 Feat trace ordering on the basis of span_count or Trace_duration (#7842)
* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2025-05-08 06:38:41 +00:00
Srikanth Chekuri
224f952da7 chore: add notification for upcoming migration for cloud region IN users (#7848) 2025-05-07 13:41:41 +00:00
Vikrant Gupta
0c28067f89 feat(error): base setup for error handling in frontend (#7851)
* feat(login): add error response v2 and error handler v2

* feat(error): added the base error class

* feat(error): added the base error class

* feat(error): remove unnecessary code

* feat(error): fix types

* feat(error): add http status code helper
2025-05-07 16:31:20 +05:30
1446 changed files with 166796 additions and 38375 deletions

View File

@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.111.41
image: signoz/signoz-schema-migrator:v0.111.42
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.111.41
image: signoz/signoz-schema-migrator:v0.111.42
container_name: schema-migrator-async
command:
- async

.github/CODEOWNERS vendored
View File

@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.
/frontend/ @YounixM
/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/deploy/ @SigNoz/devops
@@ -11,4 +11,10 @@
/pkg/errors/ @grandwizard28
/pkg/factory/ @grandwizard28
/pkg/types/ @grandwizard28
.golangci.yml @grandwizard28
/pkg/zeus/ @vikrantgupta25
/pkg/licensing/ @vikrantgupta25
/pkg/sqlmigration/ @vikrantgupta25
/ee/zeus/ @vikrantgupta25
/ee/licensing/ @vikrantgupta25
/ee/sqlmigration/ @vikrantgupta25

View File

@@ -1,17 +1,72 @@
### Summary
## 📄 Summary
<!-- ✍️ A clear and concise description...-->
<!-- Describe the purpose of the PR in a few sentences. What does it fix/add/update? -->
#### Related Issues / PR's
---
<!-- ✍️ Add the issues being resolved here and related PR's where applicable -->
## ✅ Changes
#### Screenshots
- [ ] Feature: Brief description
- [ ] Bug fix: Brief description
NA
---
<!-- ✍️ Add screenshots of before and after changes where applicable-->
## 🏷️ Required: Add Relevant Labels
#### Affected Areas and Manually Tested Areas
> ⚠️ **Manually add appropriate labels in the PR sidebar**
Please select one or more labels (as applicable):
<!-- ✍️ Add details of blast radius and dev testing areas where applicable-->
ex:
- `frontend`
- `backend`
- `devops`
- `bug`
- `enhancement`
- `ui`
- `test`
---
## 👥 Reviewers
> Tag the relevant teams for review:
- frontend / backend / devops
---
## 🧪 How to Test
<!-- Describe how reviewers can test this PR -->
1. ...
2. ...
3. ...
---
## 🔍 Related Issues
<!-- Reference any related issues (e.g. Fixes #123, Closes #456) -->
Closes #
---
## 📸 Screenshots / Screen Recording (if applicable / mandatory for UI related changes)
<!-- Add screenshots or GIFs to help visualize changes -->
---
## 📋 Checklist
- [ ] Dev Review
- [ ] Test cases added (Unit/ Integration / E2E)
- [ ] Manually tested the changes
---
## 👀 Notes for Reviewers
<!-- Anything reviewers should keep in mind while reviewing -->

View File

@@ -62,6 +62,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_NAME: signoz-community
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
@@ -73,7 +74,8 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.variant=community
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}'
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch

View File

@@ -67,9 +67,8 @@ jobs:
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -94,6 +93,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
@@ -108,7 +108,8 @@ jobs:
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch

View File

@@ -66,7 +66,8 @@ jobs:
echo 'CI=1' > frontend/.env
echo 'TUNNEL_URL="${{ secrets.NP_TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'USERPILOT_KEY="${{ secrets.NP_USERPILOT_KEY }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -91,6 +92,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
@@ -105,7 +107,8 @@ jobs:
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch

View File

@@ -18,6 +18,7 @@ jobs:
with:
PRIMUS_REF: main
GO_TEST_CONTEXT: ./...
GO_VERSION: 1.23
fmt:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -26,6 +27,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
lint:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -34,6 +36,16 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
deps:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
uses: signoz/primus.workflows/.github/workflows/go-deps.yaml@main
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
build:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -45,7 +57,7 @@ jobs:
- name: go-install
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: qemu-install
uses: docker/setup-qemu-action@v3
- name: aarch64-install

View File

@@ -58,7 +58,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |
@@ -122,7 +122,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
# copy the caches from build
- name: get-sha

View File

@@ -33,9 +33,8 @@ jobs:
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> .env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> .env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env
echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> .env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
- name: build-frontend
run: make js-build
- name: upload-frontend-artifact
@@ -73,7 +72,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |
@@ -136,7 +135,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
# copy the caches from build
- name: get-sha

.gitignore vendored
View File

@@ -60,14 +60,13 @@ ee/query-service/db
e2e/node_modules/
e2e/test-results/
e2e/playwright-report/
e2e/blob-report/
e2e/playwright/.cache/
e2e/.auth
# go
vendor/
**/main/**
__debug_bin**
# git-town
.git-branches.toml

.golangci.yml Normal file
View File

@@ -0,0 +1,33 @@
linters:
default: standard
enable:
- bodyclose
- misspell
- nilnil
- sloglint
- depguard
- iface
linters-settings:
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
msg-style: lowercased
key-naming-case: snake
depguard:
rules:
nozap:
deny:
- pkg: "go.uber.org/zap"
desc: "Do not use zap logger. Use slog instead."
iface:
enable:
- identical
issues:
exclude-dirs:
- "pkg/query-service"
- "ee/query-service"
- "scripts/"

View File

@@ -8,7 +8,7 @@
<p align="center">All your logs, metrics, and traces in one place. Monitor your application, spot issues before they occur and troubleshoot downtime quickly with rich context. SigNoz is a cost-effective open-source alternative to Datadog and New Relic. Visit <a href="https://signoz.io" target="_blank">signoz.io</a> for the full documentation, tutorials, and guide.</p>
<p align="center">
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/query-service?label=Docker Downloads"> </a>
<img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/signoz.svg?label=Docker%20Downloads"> </a>
<img alt="GitHub issues" src="https://img.shields.io/github/issues/signoz/signoz"> </a>
<a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNozHQ&hashtags=opensource,signoz,observability">
<img alt="tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"> </a>

View File

@@ -164,3 +164,63 @@ alertmanager:
maintenance_interval: 15m
# Retention of the notification logs.
retention: 120h
##################### Emailing #####################
emailing:
# Whether to enable emailing.
enabled: false
templates:
# The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
directory: /opt/signoz/conf/templates/email
smtp:
# The SMTP server address.
address: localhost:25
# The email address to use for the SMTP server.
from:
# The hello message to use for the SMTP server.
hello:
# The static headers to send with the email.
headers: {}
auth:
# The username to use for the SMTP server.
username:
# The password to use for the SMTP server.
password:
# The secret to use for the SMTP server.
secret:
# The identity to use for the SMTP server.
identity:
tls:
# Whether to enable TLS. It should be false in most cases since the authentication mechanism should use the STARTTLS extension instead.
enabled: false
# Whether to skip TLS verification.
insecure_skip_verify: false
# The path to the CA file.
ca_file_path:
# The path to the key file.
key_file_path:
# The path to the certificate file.
cert_file_path:
##################### Sharder (experimental) #####################
sharder:
# Specifies the sharder provider to use.
provider: noop
single:
# The org id to which this instance belongs to.
org_id: org_id
##################### Analytics #####################
analytics:
# Whether to enable analytics.
enabled: false
segment:
# The key to use for segment.
key: ""
##################### StatsReporter #####################
statsreporter:
# Whether to enable stats reporter. This is used to provide valuable insights to the SigNoz team. It does not collect any sensitive/PII data.
enabled: true
# The interval at which the stats are collected.
interval: 6h

View File

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.82.0
image: signoz/signoz:v0.87.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -206,7 +206,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.41
image: signoz/signoz-otel-collector:v0.111.42
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -230,7 +230,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.41
image: signoz/signoz-schema-migrator:v0.111.42
deploy:
restart_policy:
condition: on-failure

View File

@@ -100,26 +100,33 @@ services:
# - "9000:9000"
# - "8123:8123"
# - "9181:9181"
configs:
- source: clickhouse-config
target: /etc/clickhouse-server/config.xml
- source: clickhouse-users
target: /etc/clickhouse-server/users.xml
- source: clickhouse-custom-function
target: /etc/clickhouse-server/custom-function.xml
- source: clickhouse-cluster
target: /etc/clickhouse-server/config.d/cluster.xml
volumes:
- ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
- ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
- ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
- ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
- ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
- clickhouse:/var/lib/clickhouse/
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.82.0
image: signoz/signoz:v0.87.0
command:
- --config=/root/config/prometheus.yml
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
volumes:
- ../common/signoz/prometheus.yml:/root/config/prometheus.yml
- ../common/dashboards:/root/config/dashboards
- sqlite:/var/lib/signoz/
configs:
- source: signoz-prometheus-config
target: /root/config/prometheus.yml
environment:
- SIGNOZ_ALERTMANAGER_PROVIDER=signoz
- SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
@@ -141,15 +148,17 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.41
image: signoz/signoz-otel-collector:v0.111.42
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
- --copy-path=/var/tmp/collector-config.yaml
- --feature-gates=-pkg.translator.prometheus.NormalizeName
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
configs:
- source: otel-collector-config
target: /etc/otel-collector-config.yaml
- source: otel-manager-config
target: /etc/manager-config.yaml
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
- LOW_CARDINAL_EXCEPTION_GROUPING=false
@@ -165,7 +174,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.41
image: signoz/signoz-schema-migrator:v0.111.42
deploy:
restart_policy:
condition: on-failure
@@ -186,3 +195,26 @@ volumes:
name: signoz-sqlite
zookeeper-1:
name: signoz-zookeeper-1
configs:
clickhouse-config:
file: ../common/clickhouse/config.xml
clickhouse-users:
file: ../common/clickhouse/users.xml
clickhouse-custom-function:
file: ../common/clickhouse/custom-function.xml
clickhouse-cluster:
file: ../common/clickhouse/cluster.xml
signoz-prometheus-config:
file: ../common/signoz/prometheus.yml
# If you have multiple dashboard files, you can list them individually:
# dashboard-foo:
# file: ../common/dashboards/foo.json
# dashboard-bar:
# file: ../common/dashboards/bar.json
otel-collector-config:
file: ./otel-collector-config.yaml
otel-manager-config:
file: ../common/signoz/otel-collector-opamp-config.yaml

View File

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.82.0}
image: signoz/signoz:${VERSION:-v0.87.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -210,7 +210,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -236,7 +236,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-sync
command:
- sync
@@ -247,7 +247,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.82.0}
image: signoz/signoz:${VERSION:-v0.87.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -142,7 +142,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -164,7 +164,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-sync
command:
- sync
@@ -176,7 +176,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-async
command:
- async

View File

@@ -0,0 +1,51 @@
# Endpoint
This guide outlines the recommended approach for designing endpoints, with a focus on entity relationships, RESTful structure, and examples from the codebase.
## How do we design an endpoint?
### Understand the core entities and their relationships
Start with understanding the core entities and their relationships. For example:
- **Organization**: an organization can have multiple users
### Structure Endpoints RESTfully
Endpoints should reflect the resource hierarchy and follow RESTful conventions. Use clear, **pluralized resource names** and versioning. For example:
- `POST /v1/organizations` — Create an organization
- `GET /v1/organizations/:id` — Get an organization by id
- `DELETE /v1/organizations/:id` — Delete an organization by id
- `PUT /v1/organizations/:id` — Update an organization by id
- `GET /v1/organizations/:id/users` — Get all users in an organization
- `GET /v1/organizations/me/users` — Get all users in my organization
Think in terms of resource navigation in a file system. For example, to find your organization, you would navigate to the root of the file system and then to the `organizations` directory. To find a user in an organization, you would navigate to the `organizations` directory and then to the `id` directory.
```bash
v1/
├── organizations/
│   └── 123/
│       └── users/
```
`me` endpoints are special. They are used to determine the actual id via some auth/external mechanism. For `me` endpoints, think of the `me` directory being symlinked to your organization directory. For example, if you are a part of the organization `123`, the `me` directory will be symlinked to `/v1/organizations/123`:
```bash
v1/
├── organizations/
│   ├── me/ -> symlink to /v1/organizations/123
│   │   └── users/
│   └── 123/
│       └── users/
```
> 💡 **Note**: There are various ways to structure endpoints. Some prefer to use singular resource names instead of `me`. Others prefer to use singular resource names for all endpoints. We have, however, chosen to standardize our endpoints in the manner described above.
## What should I remember?
- Use clear, **plural resource names**
- Use `me` endpoints for determining the actual id via some auth mechanism
> 💡 **Note**: When in doubt, diagram the relationships and walk through the user flows as if navigating a file system. This will help you design endpoints that are both logical and user-friendly.
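A minimal sketch of how the routes above could be registered, using gorilla/mux-style routing purely for illustration (handler bodies, the `:8080` address, and all names are placeholders rather than code from this repository):

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/gorilla/mux"
)

// Placeholder handlers that only echo the resolved resource; real handlers
// would delegate to the corresponding module.
func createOrganization(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusCreated)
}

func getOrganization(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "organization %s\n", mux.Vars(r)["id"])
}

func listOrganizationUsers(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "users of organization %s\n", mux.Vars(r)["id"])
}

func listMyOrganizationUsers(w http.ResponseWriter, r *http.Request) {
	// "me" would be resolved to the caller's organization via auth claims
	// (middleware not shown); it then behaves exactly like the {id} variant.
	fmt.Fprintln(w, "users of the caller's organization")
}

func main() {
	router := mux.NewRouter()
	v1 := router.PathPrefix("/v1").Subrouter()

	// Plural, versioned resource names, mirroring the examples above.
	v1.HandleFunc("/organizations", createOrganization).Methods(http.MethodPost)

	// Register the "me" route before the parameterized routes so that gorilla/mux,
	// which matches routes in registration order, does not capture "me" as an {id}.
	v1.HandleFunc("/organizations/me/users", listMyOrganizationUsers).Methods(http.MethodGet)
	v1.HandleFunc("/organizations/{id}", getOrganization).Methods(http.MethodGet)
	v1.HandleFunc("/organizations/{id}/users", listOrganizationUsers).Methods(http.MethodGet)

	_ = http.ListenAndServe(":8080", router)
}
```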

View File

@@ -0,0 +1,106 @@
# Provider
SigNoz is built on the provider pattern, a design approach where code is organized into providers that handle specific application responsibilities. Providers act as adapter components that integrate with external services and deliver required functionality to the application.
> 💡 **Note**: Coming from a DDD background? Providers are similar (not exactly the same) to adapter/infrastructure services.
## How to create a new provider?
To create a new provider, create a directory in the `pkg/` directory named after your provider. The provider package consists of four key components:
- **Interface** (`pkg/<name>/<name>.go`): Defines the provider's interface. Other packages should import this interface to use the provider.
- **Config** (`pkg/<name>/config.go`): Contains provider configuration, implementing the `factory.Config` interface from [factory/config.go](/pkg/factory/config.go).
- **Implementation** (`pkg/<name>/<implname><name>/provider.go`): Contains the provider implementation, including a `NewProvider` function that returns a `factory.Provider` interface from [factory/provider.go](/pkg/factory/provider.go).
- **Mock** (`pkg/<name>/<name>test.go`): Provides mocks for the provider, typically used by dependent packages for unit testing.
For example, the [prometheus](/pkg/prometheus) provider delivers a prometheus engine to the application:
- `pkg/prometheus/prometheus.go` - Interface definition
- `pkg/prometheus/config.go` - Configuration
- `pkg/prometheus/clickhouseprometheus/provider.go` - Clickhouse-powered implementation
- `pkg/prometheus/prometheustest/provider.go` - Mock implementation
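To make this four-file layout concrete, below is a minimal sketch for a hypothetical `greeter` provider. The `factory.*` signatures mirror the `httplicensing` provider added further down in this change set; everything greeter-specific (package names, the `Prefix` field, the `Validate` body, the `noop` implementation) is invented for the example.

```go
// pkg/greeter/greeter.go and pkg/greeter/config.go (same package, shown together).
package greeter

import "context"

// Greeter is the interface that dependent packages import.
type Greeter interface {
	Greet(ctx context.Context, name string) (string, error)
}

// Config carries the provider configuration; Validate mirrors the pattern used
// by the licensing config elsewhere in this change set.
type Config struct {
	Prefix string `mapstructure:"prefix"`
}

func (c Config) Validate() error { return nil }
```

```go
// pkg/greeter/noopgreeter/provider.go - one concrete implementation.
package noopgreeter

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/greeter"
)

type provider struct {
	config greeter.Config
}

// NewFactory mirrors httplicensing.NewProviderFactory: a named factory that the
// signoz container can instantiate from configuration.
func NewFactory() factory.ProviderFactory[greeter.Greeter, greeter.Config] {
	return factory.NewProviderFactory(
		factory.MustNewName("noop"),
		func(_ context.Context, _ factory.ProviderSettings, config greeter.Config) (greeter.Greeter, error) {
			return &provider{config: config}, nil
		},
	)
}

func (p *provider) Greet(_ context.Context, name string) (string, error) {
	return p.config.Prefix + " " + name, nil
}
```

The mock in `pkg/greeter/greetertest/` would follow the same shape, returning a canned implementation of `greeter.Greeter` for use in unit tests.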
## How to wire it up?
The `pkg/signoz` package contains the inversion of control container responsible for wiring providers. It handles instantiation, configuration, and assembly of providers based on configuration metadata.
> 💡 **Note**: Coming from a Java background? Providers are similar to Spring beans.
Wiring up a provider involves three steps:
1. Wiring up the configuration
Add your config from `pkg/<name>/config.go` to the `pkg/signoz/config.Config` struct and in new factories:
```go
type Config struct {
...
MyProvider myprovider.Config `mapstructure:"myprovider"`
...
}
func NewConfig(ctx context.Context, resolverConfig config.ResolverConfig, ....) (Config, error) {
...
configFactories := []factory.ConfigFactory{
myprovider.NewConfigFactory(),
}
...
}
```
2. Wiring up the provider
Add available provider implementations in `pkg/signoz/provider.go`:
```go
func NewMyProviderFactories() factory.NamedMap[factory.ProviderFactory[myprovider.MyProvider, myprovider.Config]] {
return factory.MustNewNamedMap(
myproviderone.NewFactory(),
myprovidertwo.NewFactory(),
)
}
```
3. Instantiate the provider by adding it to the `SigNoz` struct in `pkg/signoz/signoz.go`:
```go
type SigNoz struct {
...
MyProvider myprovider.MyProvider
...
}
func New(...) (*SigNoz, error) {
...
myprovider, err := myproviderone.New(ctx, settings, config.MyProvider, "one/two")
if err != nil {
return nil, err
}
...
}
```
## How to use it?
To use a provider, import its interface. For example, to use the prometheus provider, import `pkg/prometheus/prometheus.go`:
```go
import "github.com/SigNoz/signoz/pkg/prometheus/prometheus"
func CreateSomething(ctx context.Context, prometheus prometheus.Prometheus) {
...
prometheus.DoSomething()
...
}
```
## Why do we need this?
Like any dependency injection framework, providers decouple the codebase from implementation details. This is especially valuable in SigNoz's large codebase, where we need to swap implementations without changing dependent code. The provider pattern offers several benefits apart from the obvious one of decoupling:
- Configuration is **defined with each provider and centralized in one place**, making it easier to understand and manage through various methods (environment variables, config files, etc.)
- Provider mocking is **straightforward for unit testing**, with a consistent pattern for locating mocks (see the test sketch after this list)
- **Multiple implementations** of the same provider are **supported**, as demonstrated by our sqlstore provider
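To illustrate the mocking point above with the hypothetical `greeter` provider sketched earlier, a dependent package's unit test could look like the following; the `greetertest.New` constructor and its prefix-based behaviour are assumptions made for this example, not an existing API:

```go
package greetingcard_test

import (
	"context"
	"testing"

	"github.com/SigNoz/signoz/pkg/greeter"
	"github.com/SigNoz/signoz/pkg/greeter/greetertest" // hypothetical mock package
)

// compose stands in for production code that depends only on the greeter.Greeter interface.
func compose(ctx context.Context, g greeter.Greeter, name string) (string, error) {
	greeting, err := g.Greet(ctx, name)
	if err != nil {
		return "", err
	}
	return "card: " + greeting, nil
}

func TestCompose(t *testing.T) {
	// greetertest.New is assumed to return a canned Greeter that prefixes names
	// with the given string, so no real provider has to be wired up.
	got, err := compose(context.Background(), greetertest.New("hello"), "signoz")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if want := "card: hello signoz"; got != want {
		t.Fatalf("got %q, want %q", got, want)
	}
}
```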
## What should I remember?
- Use the provider pattern wherever applicable.
- Always create a provider **irrespective of the number of implementations**. This makes it easier to add new implementations in the future.

View File

@@ -1,91 +0,0 @@
package middleware
import (
"net/http"
"time"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"go.uber.org/zap"
)
type Pat struct {
store sqlstore.SQLStore
uuid *authtypes.UUID
headers []string
}
func NewPat(store sqlstore.SQLStore, headers []string) *Pat {
return &Pat{store: store, uuid: authtypes.NewUUID(), headers: headers}
}
func (p *Pat) Wrap(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
var values []string
var patToken string
var pat eeTypes.StorablePersonalAccessToken
for _, header := range p.headers {
values = append(values, r.Header.Get(header))
}
ctx, err := p.uuid.ContextFromRequest(r.Context(), values...)
if err != nil {
next.ServeHTTP(w, r)
return
}
patToken, ok := authtypes.UUIDFromContext(ctx)
if !ok {
next.ServeHTTP(w, r)
return
}
err = p.store.BunDB().NewSelect().Model(&pat).Where("token = ?", patToken).Scan(r.Context())
if err != nil {
next.ServeHTTP(w, r)
return
}
if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
next.ServeHTTP(w, r)
return
}
// get user from db
user := types.User{}
err = p.store.BunDB().NewSelect().Model(&user).Where("id = ?", pat.UserID).Scan(r.Context())
if err != nil {
next.ServeHTTP(w, r)
return
}
role, err := authtypes.NewRole(user.Role)
if err != nil {
next.ServeHTTP(w, r)
return
}
jwt := authtypes.Claims{
UserID: user.ID,
Role: role,
Email: user.Email,
OrgID: user.OrgID,
}
ctx = authtypes.NewContextWithClaims(ctx, jwt)
r = r.WithContext(ctx)
next.ServeHTTP(w, r)
pat.LastUsed = time.Now().Unix()
_, err = p.store.BunDB().NewUpdate().Model(&pat).Column("last_used").Where("token = ?", patToken).Where("revoked = false").Exec(r.Context())
if err != nil {
zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
}
})
}

ee/licensing/config.go Normal file
View File

@@ -0,0 +1,26 @@
package licensing
import (
"fmt"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/licensing"
)
var (
config licensing.Config
once sync.Once
)
// initializes the licensing configuration
func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
once.Do(func() {
config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
if err := config.Validate(); err != nil {
panic(fmt.Errorf("invalid licensing config: %w", err))
}
})
return config
}

View File

@@ -0,0 +1,168 @@
package httplicensing
import (
"context"
"encoding/json"
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type licensingAPI struct {
licensing licensing.Licensing
}
func NewLicensingAPI(licensing licensing.Licensing) licensing.API {
return &licensingAPI{licensing: licensing}
}
func (api *licensingAPI) Activate(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
req := new(licensetypes.PostableLicense)
err = json.NewDecoder(r.Body).Decode(&req)
if err != nil {
render.Error(rw, err)
return
}
err = api.licensing.Activate(r.Context(), orgID, req.Key)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusAccepted, nil)
}
func (api *licensingAPI) GetActive(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
license, err := api.licensing.GetActive(r.Context(), orgID)
if err != nil {
render.Error(rw, err)
return
}
gettableLicense := licensetypes.NewGettableLicense(license.Data, license.Key)
render.Success(rw, http.StatusOK, gettableLicense)
}
func (api *licensingAPI) Refresh(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
err = api.licensing.Refresh(r.Context(), orgID)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusNoContent, nil)
}
func (api *licensingAPI) Checkout(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
req := new(licensetypes.PostableSubscription)
if err := json.NewDecoder(r.Body).Decode(req); err != nil {
render.Error(rw, err)
return
}
gettableSubscription, err := api.licensing.Checkout(ctx, orgID, req)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusCreated, gettableSubscription)
}
func (api *licensingAPI) Portal(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
req := new(licensetypes.PostableSubscription)
if err := json.NewDecoder(r.Body).Decode(req); err != nil {
render.Error(rw, err)
return
}
gettableSubscription, err := api.licensing.Portal(ctx, orgID, req)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusCreated, gettableSubscription)
}

View File

@@ -0,0 +1,249 @@
package httplicensing
import (
"context"
"encoding/json"
"time"
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/analyticstypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
type provider struct {
store licensetypes.Store
zeus zeus.Zeus
config licensing.Config
settings factory.ScopedProviderSettings
orgGetter organization.Getter
analytics analytics.Analytics
stopChan chan struct{}
}
func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
return New(ctx, providerSettings, config, store, zeus, orgGetter, analytics)
})
}
func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) (licensing.Licensing, error) {
settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing")
licensestore := sqllicensingstore.New(sqlstore)
return &provider{
store: licensestore,
zeus: zeus,
config: config,
settings: settings,
orgGetter: orgGetter,
stopChan: make(chan struct{}),
analytics: analytics,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
tick := time.NewTicker(provider.config.PollInterval)
defer tick.Stop()
err := provider.Validate(ctx)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
}
for {
select {
case <-provider.stopChan:
return nil
case <-tick.C:
err := provider.Validate(ctx)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
}
}
}
}
func (provider *provider) Stop(ctx context.Context) error {
provider.settings.Logger().DebugContext(ctx, "license validation stopped")
close(provider.stopChan)
return nil
}
func (provider *provider) Validate(ctx context.Context) error {
organizations, err := provider.orgGetter.ListByOwnedKeyRange(ctx)
if err != nil {
return err
}
for _, organization := range organizations {
err := provider.Refresh(ctx, organization.ID)
if err != nil {
return err
}
}
return nil
}
func (provider *provider) Activate(ctx context.Context, organizationID valuer.UUID, key string) error {
data, err := provider.zeus.GetLicense(ctx, key)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to fetch license data with upstream server")
}
license, err := licensetypes.NewLicense(data, organizationID)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity")
}
storableLicense := licensetypes.NewStorableLicenseFromLicense(license)
err = provider.store.Create(ctx, storableLicense)
if err != nil {
return err
}
return nil
}
func (provider *provider) GetActive(ctx context.Context, organizationID valuer.UUID) (*licensetypes.License, error) {
storableLicenses, err := provider.store.GetAll(ctx, organizationID)
if err != nil {
return nil, err
}
activeLicense, err := licensetypes.GetActiveLicenseFromStorableLicenses(storableLicenses, organizationID)
if err != nil {
return nil, err
}
return activeLicense, nil
}
func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUID) error {
activeLicense, err := provider.GetActive(ctx, organizationID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil
}
provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue())
return err
}
data, err := provider.zeus.GetLicense(ctx, activeLicense.Key)
if err != nil {
if time.Since(activeLicense.LastValidatedAt) > time.Duration(provider.config.FailureThreshold)*provider.config.PollInterval {
activeLicense.UpdateFeatures(licensetypes.BasicPlan)
updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
if err != nil {
return err
}
return nil
}
return err
}
err = activeLicense.Update(data)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity from license data")
}
updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense)
err = provider.store.Update(ctx, organizationID, updatedStorableLicense)
if err != nil {
return err
}
stats := licensetypes.NewStatsFromLicense(activeLicense)
provider.analytics.Send(ctx,
analyticstypes.Track{
UserId: "stats_" + organizationID.String(),
Event: "License Updated",
Properties: analyticstypes.NewPropertiesFromMap(stats),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: organizationID.String(),
},
},
},
analyticstypes.Group{
UserId: "stats_" + organizationID.String(),
GroupId: organizationID.String(),
Traits: analyticstypes.NewTraitsFromMap(stats),
},
)
return nil
}
func (provider *provider) Checkout(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) {
activeLicense, err := provider.GetActive(ctx, organizationID)
if err != nil {
return nil, err
}
body, err := json.Marshal(postableSubscription)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal checkout payload")
}
response, err := provider.zeus.GetCheckoutURL(ctx, activeLicense.Key, body)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate checkout session")
}
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}
func (provider *provider) Portal(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) {
activeLicense, err := provider.GetActive(ctx, organizationID)
if err != nil {
return nil, err
}
body, err := json.Marshal(postableSubscription)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal portal payload")
}
response, err := provider.zeus.GetPortalURL(ctx, activeLicense.Key, body)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate portal session")
}
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
}
func (provider *provider) GetFeatureFlags(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.Feature, error) {
license, err := provider.GetActive(ctx, organizationID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return licensetypes.BasicPlan, nil
}
return nil, err
}
return license.Features, nil
}
func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
activeLicense, err := provider.GetActive(ctx, orgID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return map[string]any{}, nil
}
return nil, err
}
return licensetypes.NewStatsFromLicense(activeLicense), nil
}

View File

@@ -0,0 +1,81 @@
package sqllicensingstore
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type store struct {
sqlstore sqlstore.SQLStore
}
func New(sqlstore sqlstore.SQLStore) licensetypes.Store {
return &store{sqlstore}
}
func (store *store) Create(ctx context.Context, storableLicense *licensetypes.StorableLicense) error {
_, err := store.
sqlstore.
BunDB().
NewInsert().
Model(storableLicense).
Exec(ctx)
if err != nil {
return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "license with ID: %s already exists", storableLicense.ID)
}
return nil
}
func (store *store) Get(ctx context.Context, organizationID valuer.UUID, licenseID valuer.UUID) (*licensetypes.StorableLicense, error) {
storableLicense := new(licensetypes.StorableLicense)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(storableLicense).
Where("org_id = ?", organizationID).
Where("id = ?", licenseID).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "license with ID: %s does not exist", licenseID)
}
return storableLicense, nil
}
func (store *store) GetAll(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.StorableLicense, error) {
storableLicenses := make([]*licensetypes.StorableLicense, 0)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(&storableLicenses).
Where("org_id = ?", organizationID).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "licenses for organizationID: %s do not exist", organizationID)
}
return storableLicenses, nil
}
func (store *store) Update(ctx context.Context, organizationID valuer.UUID, storableLicense *licensetypes.StorableLicense) error {
_, err := store.
sqlstore.
BunDB().
NewUpdate().
Model(storableLicense).
WherePK().
Where("org_id = ?", organizationID).
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to update license with ID: %s", storableLicense.ID)
}
return nil
}
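Every query in this store is tenant-scoped: reads filter on org_id, and Update pairs WherePK with the same org_id predicate so a license row can only be touched by its own organization. A rough sketch of that bun pattern against an in-memory SQLite database (storableLicense below is a stand-in model, not the real licensetypes.StorableLicense):

package main

import (
	"context"
	"database/sql"
	"fmt"

	"github.com/uptrace/bun"
	"github.com/uptrace/bun/dialect/sqlitedialect"
	"github.com/uptrace/bun/driver/sqliteshim"
)

// storableLicense is a stand-in model for illustration only.
type storableLicense struct {
	bun.BaseModel `bun:"table:licenses"`

	ID    string `bun:"id,pk"`
	OrgID string `bun:"org_id"`
	Plan  string `bun:"plan"`
}

func main() {
	ctx := context.Background()

	sqldb, err := sql.Open(sqliteshim.ShimName, "file::memory:?cache=shared")
	if err != nil {
		panic(err)
	}
	db := bun.NewDB(sqldb, sqlitedialect.New())

	if _, err := db.NewCreateTable().Model((*storableLicense)(nil)).Exec(ctx); err != nil {
		panic(err)
	}

	lic := &storableLicense{ID: "lic-1", OrgID: "org-1", Plan: "basic"}
	if _, err := db.NewInsert().Model(lic).Exec(ctx); err != nil {
		panic(err)
	}

	// Update is keyed on the primary key *and* the org, mirroring the store above.
	lic.Plan = "enterprise"
	if _, err := db.NewUpdate().Model(lic).WherePK().Where("org_id = ?", "org-1").Exec(ctx); err != nil {
		panic(err)
	}

	got := new(storableLicense)
	if err := db.NewSelect().Model(got).Where("org_id = ?", "org-1").Where("id = ?", "lic-1").Scan(ctx); err != nil {
		panic(err)
	}
	fmt.Println(got.Plan) // enterprise
}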

View File

@@ -39,6 +39,7 @@ builds:
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
- >-
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
mod_timestamp: "{{ .CommitTimestamp }}"
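The -X flags above inject the zeus URLs and the analytics key into package-level string variables at link time. The same mechanism in miniature, under illustrative names (main.analyticsKey is not a real SigNoz variable):

package main

import "fmt"

// analyticsKey is overridden at build time with:
//
//	go build -ldflags "-X main.analyticsKey=my-real-key" .
//
// The target must be a plain (non-constant) package-level string variable.
var analyticsKey = "dev-placeholder"

func main() {
	fmt.Println("analytics key:", analyticsKey)
}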

View File

@@ -1,4 +1,4 @@
FROM golang:1.22-bullseye
FROM golang:1.23-bullseye
ARG OS="linux"
ARG TARGETARCH

View File

@@ -5,21 +5,18 @@ import (
"net/http/httputil"
"time"
"github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -31,11 +28,8 @@ import (
type APIHandlerOptions struct {
DataConnector interfaces.DataConnector
PreferSpanMetrics bool
AppDao dao.ModelDao
RulesManager *rules.Manager
UsageManager *usage.Manager
FeatureFlags baseint.FeatureLookup
LicenseManager *license.Manager
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
@@ -55,23 +49,19 @@ type APIHandler struct {
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
quickfiltermodule := quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(signoz.SQLStore))
quickFilter := quickfilter.NewAPI(quickfiltermodule)
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
PreferSpanMetrics: opts.PreferSpanMetrics,
AppDao: opts.AppDao,
RuleManager: opts.RulesManager,
FeatureFlags: opts.FeatureFlags,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
FieldsAPI: fields.NewAPI(signoz.TelemetryStore),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuickFilters: quickFilter,
QuickFilterModule: quickfiltermodule,
QuerierAPI: querierAPI.NewAPI(signoz.Querier),
})
if err != nil {
@@ -85,98 +75,39 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
return ah, nil
}
func (ah *APIHandler) FF() baseint.FeatureLookup {
return ah.opts.FeatureFlags
}
func (ah *APIHandler) RM() *rules.Manager {
return ah.opts.RulesManager
}
func (ah *APIHandler) LM() *license.Manager {
return ah.opts.LicenseManager
}
func (ah *APIHandler) UM() *usage.Manager {
return ah.opts.UsageManager
}
func (ah *APIHandler) AppDao() dao.ModelDao {
return ah.opts.AppDao
}
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
return ah.opts.Gateway
}
func (ah *APIHandler) CheckFeature(f string) bool {
err := ah.FF().CheckFeature(f)
return err == nil
}
// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// note: add ee override methods first
// routes available only in ee version
router.HandleFunc("/api/v1/featureFlags",
am.OpenAccess(ah.getFeatureFlags)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/loginPrecheck",
am.OpenAccess(ah.precheckLogin)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)
// paid plans specific routes
router.HandleFunc("/api/v1/complete/saml",
am.OpenAccess(ah.receiveSAML)).
Methods(http.MethodPost)
router.HandleFunc("/api/v1/complete/google",
am.OpenAccess(ah.receiveGoogleAuth)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/orgs/{orgId}/domains",
am.AdminAccess(ah.listDomainsByOrg)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/domains",
am.AdminAccess(ah.postDomain)).
Methods(http.MethodPost)
router.HandleFunc("/api/v1/domains/{id}",
am.AdminAccess(ah.putDomain)).
Methods(http.MethodPut)
router.HandleFunc("/api/v1/domains/{id}",
am.AdminAccess(ah.deleteDomain)).
Methods(http.MethodDelete)
router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)
// base overrides
router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
// PAT APIs
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.getPATs)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.updatePAT)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.revokePAT)).Methods(http.MethodDelete)
router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.LicensingAPI.Checkout)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.portalSession)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/dashboards/{uuid}/lock", am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)
// v3
router.HandleFunc("/api/v3/licenses", am.ViewAccess(ah.listLicensesV3)).Methods(http.MethodGet)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.applyLicenseV3)).Methods(http.MethodPost)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.refreshLicensesV3)).Methods(http.MethodPut)
router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.getActiveLicenseV3)).Methods(http.MethodGet)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)
router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.LicensingAPI.GetActive)).Methods(http.MethodGet)
// v4
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

View File

@@ -3,191 +3,16 @@ package api
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"github.com/gorilla/mux"
"go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseRequest(r *http.Request, req interface{}) error {
defer r.Body.Close()
requestBody, err := io.ReadAll(r.Body)
if err != nil {
return err
}
err = json.Unmarshal(requestBody, &req)
return err
}
// loginUser overrides base handler and considers SSO case.
func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
req := basemodel.LoginRequest{}
err := parseRequest(r, &req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
ctx := context.Background()
if req.Email != "" && ah.CheckFeature(model.SSO) {
var apierr basemodel.BaseApiError
_, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
if apierr != nil && !apierr.IsNil() {
RespondError(w, apierr, nil)
}
}
// if all looks good, call auth
resp, err := baseauth.Login(ctx, &req, ah.opts.JWT)
if ah.HandleError(w, err, http.StatusUnauthorized) {
return
}
ah.WriteJSON(w, r, resp)
}
// registerUser registers a user and responds with a precheck
// so the front-end can decide the login method
func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(model.SSO) {
ah.APIHandler.Register(w, r)
return
}
ctx := context.Background()
var req *baseauth.RegisterRequest
defer r.Body.Close()
requestBody, err := io.ReadAll(r.Body)
if err != nil {
zap.L().Error("received no input in api", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
err = json.Unmarshal(requestBody, &req)
if err != nil {
zap.L().Error("received invalid user registration request", zap.Error(err))
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
return
}
// get invite object
invite, err := baseauth.ValidateInvite(ctx, req)
if err != nil {
zap.L().Error("failed to validate invite token", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
if invite == nil {
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
return
}
// get auth domain from email domain
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
}
precheckResp := &basemodel.PrecheckResponse{
SSO: false,
IsUser: false,
}
if domain != nil && domain.SsoEnabled {
// sso is enabled, create user and respond precheck data
user, apierr := baseauth.RegisterInvitedUser(ctx, req, true)
if apierr != nil {
RespondError(w, apierr, nil)
return
}
var precheckError basemodel.BaseApiError
precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
if precheckError != nil {
RespondError(w, precheckError, precheckResp)
}
} else {
// no-sso, validate password
if err := baseauth.ValidatePassword(req.Password); err != nil {
RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
return
}
_, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager, ah.Signoz.Modules.Organization, ah.QuickFilterModule)
if !registerError.IsNil() {
RespondError(w, apierr, nil)
return
}
precheckResp.IsUser = true
}
ah.Respond(w, precheckResp)
}
// getInvite returns the invite object details for the given invite token. We do not need to
// protect this API because invite token itself is meant to be private.
func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
token := mux.Vars(r)["token"]
sourceUrl := r.URL.Query().Get("ref")
inviteObject, err := baseauth.GetInvite(r.Context(), token, ah.Signoz.Modules.Organization)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
resp := model.GettableInvitation{
InvitationResponseObject: inviteObject,
}
precheck, apierr := ah.AppDao().PrecheckLogin(r.Context(), inviteObject.Email, sourceUrl)
resp.Precheck = precheck
if apierr != nil {
RespondError(w, apierr, resp)
}
ah.WriteJSON(w, r, resp)
}
// PrecheckLogin enables browser login page to display appropriate
// login methods
func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
email := r.URL.Query().Get("email")
sourceUrl := r.URL.Query().Get("ref")
resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl)
if apierr != nil {
RespondError(w, apierr, resp)
}
ah.Respond(w, resp)
}
func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
ssoError := []byte("Login failed. Please contact your system administrator")
dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
@@ -196,84 +21,12 @@ func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string)
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther)
}
// receiveGoogleAuth completes google OAuth response and forwards a request
// to front-end to sign user in
func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) {
redirectUri := constants.GetDefaultSiteURL()
ctx := context.Background()
if !ah.CheckFeature(model.SSO) {
zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
q := r.URL.Query()
if errType := q.Get("error"); errType != "" {
zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
return
}
relayState := q.Get("state")
zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
// upgrade redirect url from the relay state for better accuracy
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
// fetch domain by parsing relay state.
domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
if err != nil {
handleSsoError(w, r, redirectUri)
return
}
// now that we have domain, use domain to fetch sso settings.
// prepare google callback handler using parsedState -
// which contains redirect URL (front-end endpoint)
callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState)
if err != nil {
zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
identity, err := callbackHandler.HandleCallback(r)
if err != nil {
zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT)
if err != nil {
zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
http.Redirect(w, r, nextPage, http.StatusSeeOther)
}
// receiveSAML completes a SAML request and gets user logged in
func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
// this is the source url that initiated the login request
redirectUri := constants.GetDefaultSiteURL()
ctx := context.Background()
if !ah.CheckFeature(model.SSO) {
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
err := r.ParseForm()
if err != nil {
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
@@ -297,12 +50,25 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
// fetch domain by parsing relay state.
domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState)
domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState)
if err != nil {
handleSsoError(w, r, redirectUri)
return
}
orgID, err := valuer.NewUUID(domain.OrgID)
if err != nil {
handleSsoError(w, r, redirectUri)
return
}
_, err = ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
sp, err := domain.PrepareSamlRequest(parsedState)
if err != nil {
zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
@@ -330,7 +96,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
if err != nil {
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)

View File

@@ -11,12 +11,12 @@ import (
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/auth"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/gorilla/mux"
"go.uber.org/zap"
@@ -36,6 +36,12 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
cloudProvider := mux.Vars(r)["cloudProvider"]
if cloudProvider != "aws" {
RespondError(w, basemodel.BadRequest(fmt.Errorf(
@@ -56,11 +62,9 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
SigNozAPIKey: apiKey,
}
license, apiErr := ah.LM().GetRepo().GetActiveLicense(r.Context())
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't look for active license",
), nil)
license, err := ah.Signoz.Licensing.GetActive(r.Context(), orgID)
if err != nil {
render.Error(w, err)
return
}
@@ -116,7 +120,14 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
return "", apiErr
}
allPats, err := ah.AppDao().ListPATs(ctx, orgId)
orgIdUUID, err := valuer.NewUUID(orgId)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't parse orgId: %w", err,
))
}
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err,
@@ -133,19 +144,25 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
zap.String("cloudProvider", cloudProvider),
)
newPAT := eeTypes.NewGettablePAT(
newPAT, err := types.NewStorableAPIKey(
integrationPATName,
authtypes.RoleViewer.String(),
integrationUser.ID,
types.RoleViewer,
0,
)
integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
return integrationPAT.Token, nil
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
return newPAT.Token, nil
}
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
@@ -154,10 +171,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
// TODO(nitya): there should be orgId here
integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err))
}
if integrationUserResult != nil {
@@ -169,29 +185,18 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
zap.String("cloudProvider", cloudProvider),
)
newUser := &types.User{
ID: uuid.New().String(),
Name: cloudIntegrationUser,
Email: email,
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},
OrgID: orgId,
}
newUser.Role = authtypes.RoleViewer.String()
passwordHash, err := auth.PasswordHash(uuid.NewString())
newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't hash random password for cloud integration user: %w", err,
"couldn't create cloud integration user: %w", err,
))
}
newUser.Password = passwordHash
integrationUser, apiErr := ah.AppDao().CreateUser(ctx, newUser, false)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't create cloud integration user")
password, err := types.NewFactorPassword(uuid.NewString())
integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}
return integrationUser, nil

View File

@@ -1,63 +0,0 @@
package api
import (
"net/http"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/gorilla/mux"
)
func (ah *APIHandler) lockDashboard(w http.ResponseWriter, r *http.Request) {
ah.lockUnlockDashboard(w, r, true)
}
func (ah *APIHandler) unlockDashboard(w http.ResponseWriter, r *http.Request) {
ah.lockUnlockDashboard(w, r, false)
}
func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request, lock bool) {
// Locking can only be done by the owner of the dashboard
// or an admin
// - Fetch the dashboard
// - Check if the user is the owner or an admin
// - If yes, lock/unlock the dashboard
// - If no, return 403
// Get the dashboard UUID from the request
uuid := mux.Vars(r)["uuid"]
if strings.HasPrefix(uuid, "integration") {
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "dashboards created by integrations cannot be modified"))
return
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated"))
return
}
dashboard, err := dashboards.GetDashboard(r.Context(), claims.OrgID, uuid)
if err != nil {
render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get dashboard"))
return
}
if err := claims.IsAdmin(); err != nil && (dashboard.CreatedBy != claims.Email) {
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "You are not authorized to lock/unlock this dashboard"))
return
}
// Lock/Unlock the dashboard
err = dashboards.LockUnlockDashboard(r.Context(), claims.OrgID, uuid, lock)
if err != nil {
render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to lock/unlock dashboard"))
return
}
ah.Respond(w, "Dashboard updated successfully")
}

View File

@@ -1,91 +0,0 @@
package api
import (
"context"
"encoding/json"
"fmt"
"net/http"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
"github.com/google/uuid"
"github.com/gorilla/mux"
)
func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) {
orgId := mux.Vars(r)["orgId"]
domains, apierr := ah.AppDao().ListDomains(context.Background(), orgId)
if apierr != nil {
RespondError(w, apierr, domains)
return
}
ah.Respond(w, domains)
}
func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := types.GettableOrgDomain{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
if err := req.ValidNew(); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
if apierr := ah.AppDao().CreateDomain(ctx, &req); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, &req)
}
func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
domainIdStr := mux.Vars(r)["id"]
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
req := types.GettableOrgDomain{StorableOrgDomain: types.StorableOrgDomain{ID: domainId}}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
req.ID = domainId
if err := req.Valid(nil); err != nil {
RespondError(w, model.BadRequest(err), nil)
}
if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, &req)
}
func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) {
domainIdStr := mux.Vars(r)["id"]
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil)
return
}
apierr := ah.AppDao().DeleteDomain(context.Background(), domainId)
if apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, nil)
}

View File

@@ -9,13 +9,29 @@ import (
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
pkgError "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
featureSet, err := ah.FF().GetFeatureFlags()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(w, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(w, pkgError.Newf(pkgError.TypeInvalidInput, pkgError.CodeInvalidInput, "orgId is invalid"))
return
}
featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context(), orgID)
if err != nil {
ah.HandleError(w, err, http.StatusInternalServerError)
return
@@ -23,7 +39,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
if constants.FetchFeatures == "true" {
zap.L().Debug("fetching license")
license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
zap.L().Error("failed to fetch license", zap.Error(err))
} else if license == nil {
@@ -44,9 +60,16 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
}
if ah.opts.PreferSpanMetrics {
for idx := range featureSet {
feature := &featureSet[idx]
if feature.Name == basemodel.UseSpanMetrics {
for idx, feature := range featureSet {
if feature.Name == licensetypes.UseSpanMetrics {
featureSet[idx].Active = true
}
}
}
if constants.IsDotMetricsEnabled {
for idx, feature := range featureSet {
if feature.Name == licensetypes.DotMetricsEnabled {
featureSet[idx].Active = true
}
}
@@ -57,7 +80,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
// fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint
// and returns the FeatureSet.
func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
func fetchZeusFeatures(url, licenseKey string) ([]*licensetypes.Feature, error) {
// Check if the URL is empty
if url == "" {
return nil, fmt.Errorf("url is empty")
@@ -116,28 +139,28 @@ func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
}
type ZeusFeaturesResponse struct {
Status string `json:"status"`
Data basemodel.FeatureSet `json:"data"`
Status string `json:"status"`
Data []*licensetypes.Feature `json:"data"`
}
// MergeFeatureSets merges two FeatureSet arrays with precedence to zeusFeatures.
func MergeFeatureSets(zeusFeatures, internalFeatures basemodel.FeatureSet) basemodel.FeatureSet {
func MergeFeatureSets(zeusFeatures, internalFeatures []*licensetypes.Feature) []*licensetypes.Feature {
// Create a map to store the merged features
featureMap := make(map[string]basemodel.Feature)
featureMap := make(map[string]*licensetypes.Feature)
// Add all features from the internalFeatures set to the map
for _, feature := range internalFeatures {
featureMap[feature.Name] = feature
featureMap[feature.Name.StringValue()] = feature
}
// Add all features from the zeusFeatures set to the map
// If a feature already exists (i.e., same name), the zeusFeature will overwrite it
for _, feature := range zeusFeatures {
featureMap[feature.Name] = feature
featureMap[feature.Name.StringValue()] = feature
}
// Convert the map back to a FeatureSet slice
var mergedFeatures basemodel.FeatureSet
var mergedFeatures []*licensetypes.Feature
for _, feature := range featureMap {
mergedFeatures = append(mergedFeatures, feature)
}

View File

@@ -3,78 +3,79 @@ package api
import (
"testing"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/assert"
)
func TestMergeFeatureSets(t *testing.T) {
tests := []struct {
name string
zeusFeatures basemodel.FeatureSet
internalFeatures basemodel.FeatureSet
expected basemodel.FeatureSet
zeusFeatures []*licensetypes.Feature
internalFeatures []*licensetypes.Feature
expected []*licensetypes.Feature
}{
{
name: "empty zeusFeatures and internalFeatures",
zeusFeatures: basemodel.FeatureSet{},
internalFeatures: basemodel.FeatureSet{},
expected: basemodel.FeatureSet{},
zeusFeatures: []*licensetypes.Feature{},
internalFeatures: []*licensetypes.Feature{},
expected: []*licensetypes.Feature{},
},
{
name: "non-empty zeusFeatures and empty internalFeatures",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
zeusFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
},
internalFeatures: basemodel.FeatureSet{},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
internalFeatures: []*licensetypes.Feature{},
expected: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
},
},
{
name: "empty zeusFeatures and non-empty internalFeatures",
zeusFeatures: basemodel.FeatureSet{},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
zeusFeatures: []*licensetypes.Feature{},
internalFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
expected: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
},
},
{
name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature3", Active: false},
zeusFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature3"), Active: false},
},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature2", Active: true},
{Name: "Feature4", Active: false},
internalFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature2"), Active: true},
{Name: valuer.NewString("Feature4"), Active: false},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: true},
{Name: "Feature3", Active: false},
{Name: "Feature4", Active: false},
expected: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: true},
{Name: valuer.NewString("Feature3"), Active: false},
{Name: valuer.NewString("Feature4"), Active: false},
},
},
{
name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
zeusFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: false},
{Name: "Feature3", Active: true},
internalFeatures: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: false},
{Name: valuer.NewString("Feature3"), Active: true},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
{Name: "Feature3", Active: true},
expected: []*licensetypes.Feature{
{Name: valuer.NewString("Feature1"), Active: true},
{Name: valuer.NewString("Feature2"), Active: false},
{Name: valuer.NewString("Feature3"), Active: true},
},
},
}

View File

@@ -5,10 +5,26 @@ import (
"strings"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
validPath := false
for _, allowedPrefix := range gateway.AllowedPrefix {
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
@@ -22,9 +38,9 @@ func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request
return
}
license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
RespondError(rw, err, nil)
render.Error(rw, err)
return
}

View File

@@ -6,11 +6,7 @@ import (
"net/http"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/types/authtypes"
)
type DayWiseBreakdown struct {
@@ -49,10 +45,6 @@ type details struct {
BillTotal float64 `json:"billTotal"`
}
type Redirect struct {
RedirectURL string `json:"redirectURL"`
}
type billingDetails struct {
Status string `json:"status"`
Data struct {
@@ -64,97 +56,6 @@ type billingDetails struct {
} `json:"data"`
}
type ApplyLicenseRequest struct {
LicenseKey string `json:"key"`
}
func (ah *APIHandler) listLicensesV3(w http.ResponseWriter, r *http.Request) {
ah.listLicensesV2(w, r)
}
func (ah *APIHandler) getActiveLicenseV3(w http.ResponseWriter, r *http.Request) {
activeLicense, err := ah.LM().GetRepo().GetActiveLicenseV3(r.Context())
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
// return 404 not found if there is no active license
if activeLicense == nil {
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no active license found")}, nil)
return
}
// TODO deprecate this when we move away from key for stripe
activeLicense.Data["key"] = activeLicense.Key
render.Success(w, http.StatusOK, activeLicense.Data)
}
// this function is called by zeus when inserting licenses in the query-service
func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
var licenseKey ApplyLicenseRequest
if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
if licenseKey.LicenseKey == "" {
RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
return
}
_, err = ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
if err != nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, map[string]interface{}{"err": err.Error()}, claims.Email, true, false)
render.Error(w, err)
return
}
render.Success(w, http.StatusAccepted, nil)
}
func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) {
err := ah.LM().RefreshLicense(r.Context())
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusNoContent, nil)
}
func getCheckoutPortalResponse(redirectURL string) *Redirect {
return &Redirect{RedirectURL: redirectURL}
}
func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
checkoutRequest := &model.CheckoutRequest{}
if err := json.NewDecoder(r.Body).Decode(checkoutRequest); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
license := ah.LM().GetActiveLicense()
if license == nil {
RespondError(w, model.BadRequestStr("cannot proceed with checkout without license key"), nil)
return
}
redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key, ah.Signoz.Zeus)
if err != nil {
render.Error(w, err)
return
}
ah.Respond(w, getCheckoutPortalResponse(redirectUrl))
}
func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
licenseKey := r.URL.Query().Get("licenseKey")
@@ -188,71 +89,3 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
// TODO(srikanthccv): Fetch the current day usage and add it to the response
ah.Respond(w, billingResponse.Data)
}
func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
licensesV2 := []model.License{}
for _, l := range licenses {
planKeyFromPlanName, ok := model.MapOldPlanKeyToNewPlanName[l.PlanName]
if !ok {
planKeyFromPlanName = model.Basic
}
licenseV2 := model.License{
Key: l.Key,
ActivationId: "",
PlanDetails: "",
FeatureSet: l.Features,
ValidationMessage: "",
IsCurrent: l.IsCurrent,
LicensePlan: model.LicensePlan{
PlanKey: planKeyFromPlanName,
ValidFrom: l.ValidFrom,
ValidUntil: l.ValidUntil,
Status: l.Status},
}
licensesV2 = append(licensesV2, licenseV2)
}
return licensesV2
}
func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
licensesV3, apierr := ah.LM().GetLicensesV3(r.Context())
if apierr != nil {
RespondError(w, apierr, nil)
return
}
licenses := convertLicenseV3ToLicenseV2(licensesV3)
resp := model.Licenses{
TrialStart: -1,
TrialEnd: -1,
OnTrial: false,
WorkSpaceBlock: false,
TrialConvertedToSubscription: false,
GracePeriodEnd: -1,
Licenses: licenses,
}
ah.Respond(w, resp)
}
func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
portalRequest := &model.PortalRequest{}
if err := json.NewDecoder(r.Body).Decode(portalRequest); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
license := ah.LM().GetActiveLicense()
if license == nil {
RespondError(w, model.BadRequestStr("cannot request the portal session without license key"), nil)
return
}
redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key, ah.Signoz.Zeus)
if err != nil {
render.Error(w, err)
return
}
ah.Respond(w, getCheckoutPortalResponse(redirectUrl))
}

View File

@@ -1,187 +0,0 @@
package api
import (
"encoding/json"
"fmt"
"net/http"
"slices"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/errors"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
req := model.CreatePATRequestBody{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
pat := eeTypes.NewGettablePAT(
req.Name,
req.Role,
claims.UserID,
req.ExpiresInDays,
)
err = validatePATRequest(pat)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
var apierr basemodel.BaseApiError
if pat, apierr = ah.AppDao().CreatePAT(r.Context(), claims.OrgID, pat); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, &pat)
}
func validatePATRequest(req eeTypes.GettablePAT) error {
_, err := authtypes.NewRole(req.Role)
if err != nil {
return err
}
if req.ExpiresAt < 0 {
return fmt.Errorf("valid expiresAt is required")
}
if req.Name == "" {
return fmt.Errorf("valid name is required")
}
return nil
}
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
req := eeTypes.GettablePAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(r.Context(), claims.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(r.Context(), existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
err = validatePATRequest(req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
req.UpdatedByUserID = claims.UserID
req.UpdatedAt = time.Now()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
if apierr = ah.AppDao().UpdatePAT(r.Context(), claims.OrgID, req, id); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, map[string]string{"data": "pat updated successfully"})
}
func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
pats, apierr := ah.AppDao().ListPATs(r.Context(), claims.OrgID)
if apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, pats)
}
func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(r.Context(), claims.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(r.Context(), existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
if apierr := ah.AppDao().RevokePAT(r.Context(), claims.OrgID, id, claims.UserID); apierr != nil {
RespondError(w, apierr, nil)
return
}
ah.Respond(w, map[string]string{"data": "pat revoked successfully"})
}

View File

@@ -33,3 +33,7 @@ func NewDataConnector(
ClickHouseReader: chReader,
}
}
func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
return r.appdb
}

View File

@@ -11,16 +11,16 @@ import (
"github.com/gorilla/handlers"
"github.com/jmoiron/sqlx"
eemiddleware "github.com/SigNoz/signoz/ee/http/middleware"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -30,14 +30,9 @@ import (
"github.com/rs/cors"
"github.com/soheilhy/cmux"
licensepkg "github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
baseexplorer "github.com/SigNoz/signoz/pkg/query-service/app/explorer"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
@@ -92,33 +87,11 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
// NewServer creates and initializes Server
func NewServer(serverOptions *ServerOptions) (*Server, error) {
modelDao, err := dao.InitDao(serverOptions.SigNoz.SQLStore)
if err != nil {
return nil, err
}
if err := baseexplorer.InitWithDSN(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil {
return nil, err
}
// initiate license manager
lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Zeus)
if err != nil {
return nil, err
}
// set license manager as feature flag provider in dao
modelDao.SetFlagProvider(lm)
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
if err != nil {
return nil, err
@@ -141,6 +114,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
serverOptions.SigNoz.Modules.OrgGetter,
)
if err != nil {
@@ -148,10 +122,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
// initiate opamp
_, err = opAmpModel.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB())
if err != nil {
return nil, err
}
opAmpModel.InitDB(serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Instrumentation.Logger(), serverOptions.SigNoz.Modules.OrgGetter)
integrationsController, err := integrations.NewController(serverOptions.SigNoz.SQLStore)
if err != nil {
@@ -169,7 +140,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
serverOptions.SigNoz.SQLStore,
integrationsController.GetPipelinesForInstalledIntegrations,
)
if err != nil {
return nil, err
@@ -177,7 +149,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
// initiate agent config handler
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
DB: serverOptions.SigNoz.SQLStore.SQLxDB(),
Store: serverOptions.SigNoz.SQLStore,
AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController},
})
if err != nil {
@@ -185,17 +157,23 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
// start the usagemanager
usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus)
usageManager, err := usage.New(serverOptions.SigNoz.Licensing, serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus, serverOptions.SigNoz.Modules.OrgGetter)
if err != nil {
return nil, err
}
err = usageManager.Start()
err = usageManager.Start(context.Background())
if err != nil {
return nil, err
}
telemetry.GetInstance().SetReader(reader)
telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
if err != nil {
@@ -205,11 +183,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
apiOpts := api.APIHandlerOptions{
DataConnector: reader,
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
AppDao: modelDao,
RulesManager: rm,
UsageManager: usageManager,
FeatureFlags: lm,
LicenseManager: lm,
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
@@ -250,7 +225,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
&opAmpModel.AllAgents, agentConfMgr,
)
orgs, err := apiHandler.Signoz.Modules.Organization.GetAll(context.Background())
orgs, err := apiHandler.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
if err != nil {
return nil, err
}
@@ -265,18 +240,17 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter()
r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
r.Use(middleware.NewTimeout(zap.L(),
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewAnalytics(zap.L()).Wrap)
r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewAnalytics().Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterPrivateRoutes(r)
@@ -300,15 +274,15 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger())
r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap)
r.Use(eemiddleware.NewPat(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap)
r.Use(middleware.NewTimeout(zap.L(),
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewAnalytics(zap.L()).Wrap)
r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewAnalytics().Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
@@ -318,10 +292,12 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)
apiHandler.RegisterQueryRangeV5Routes(r, am)
apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am)
apiHandler.MetricExplorerRoutes(r, am)
apiHandler.RegisterTraceFunnelsRoutes(r, am)
c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
@@ -442,15 +418,15 @@ func (s *Server) Start(ctx context.Context) error {
return nil
}
func (s *Server) Stop() error {
func (s *Server) Stop(ctx context.Context) error {
if s.httpServer != nil {
if err := s.httpServer.Shutdown(context.Background()); err != nil {
if err := s.httpServer.Shutdown(ctx); err != nil {
return err
}
}
if s.privateHTTP != nil {
if err := s.privateHTTP.Shutdown(context.Background()); err != nil {
if err := s.privateHTTP.Shutdown(ctx); err != nil {
return err
}
}
@@ -458,11 +434,11 @@ func (s *Server) Stop() error {
s.opampServer.Stop()
if s.ruleManager != nil {
s.ruleManager.Stop(context.Background())
s.ruleManager.Stop(ctx)
}
// stop usage manager
s.usageManager.Stop()
s.usageManager.Stop(ctx)
return nil
}
@@ -475,6 +451,7 @@ func makeRulesManager(
sqlstore sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
orgGetter organization.Getter,
) (*baserules.Manager, error) {
// create manager opts
managerOpts := &baserules.ManagerOptions{
@@ -490,6 +467,7 @@ func makeRulesManager(
PrepareTestRuleFunc: rules.TestNotification,
Alertmanager: alertmanager,
SQLStore: sqlstore,
OrgGetter: orgGetter,
}
// create Manager
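The Stop(ctx) change above lets callers bound shutdown with a single deadline instead of context.Background() at each call site. A minimal, self-contained sketch of that pattern (the server and timeout below are illustrative, not taken from the diff):

package main

import (
    "context"
    "log"
    "net/http"
    "time"
)

func main() {
    srv := &http.Server{Addr: ":8080"}
    go func() {
        if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
            log.Println(err)
        }
    }()

    // Bound the shutdown with a deadline, as a caller of Stop(ctx) now can.
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()
    if err := srv.Shutdown(ctx); err != nil {
        log.Printf("shutdown: %v", err)
    }
}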

View File

@@ -33,3 +33,13 @@ func GetOrDefaultEnv(key string, fallback string) string {
func GetDefaultSiteURL() string {
return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL)
}
const DotMetricsEnabled = "DOT_METRICS_ENABLED"
var IsDotMetricsEnabled = false
func init() {
if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
IsDotMetricsEnabled = true
}
}
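The flag above is a plain env-driven toggle resolved once at init. A self-contained sketch of the same pattern; getOrDefaultEnv here is a local stand-in for the package helper:

package main

import (
    "fmt"
    "os"
)

// getOrDefaultEnv returns the env value if set, otherwise the fallback.
func getOrDefaultEnv(key, fallback string) string {
    v := os.Getenv(key)
    if len(v) == 0 {
        return fallback
    }
    return v
}

func main() {
    // DOT_METRICS_ENABLED defaults to "false", matching the init() above.
    enabled := getOrDefaultEnv("DOT_METRICS_ENABLED", "false") == "true"
    fmt.Println("dot metrics enabled:", enabled)
}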

View File

@@ -1,10 +0,0 @@
package dao
import (
"github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
"github.com/SigNoz/signoz/pkg/sqlstore"
)
func InitDao(sqlStore sqlstore.SQLStore) (ModelDao, error) {
return sqlite.InitDB(sqlStore)
}

View File

@@ -1,44 +0,0 @@
package dao
import (
"context"
"net/url"
"github.com/SigNoz/signoz/ee/types"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/uptrace/bun"
)
type ModelDao interface {
basedao.ModelDao
// SetFlagProvider sets the feature lookup provider
SetFlagProvider(flags baseint.FeatureLookup)
DB() *bun.DB
// auth methods
CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError)
GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error)
// org domain (auth domains) CRUD ops
ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError)
GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError)
CreateDomain(ctx context.Context, d *types.GettableOrgDomain) basemodel.BaseApiError
UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)
CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
}

View File

@@ -1,191 +0,0 @@
package sqlite
import (
"context"
"fmt"
"net/url"
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/google/uuid"
"go.uber.org/zap"
)
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, basemodel.BaseApiError) {
// get auth domain from email domain
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, model.InternalErrorStr("failed to get domain from email")
}
if domain == nil {
zap.L().Error("email domain does not match any authenticated domain", zap.String("email", email))
return nil, model.InternalErrorStr("email domain does not match any authenticated domain")
}
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
return nil, model.InternalErrorStr("failed to generate password hash")
}
user := &types.User{
ID: uuid.New().String(),
Name: "",
Email: email,
Password: hash,
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},
ProfilePictureURL: "", // Currently unused
Role: authtypes.RoleViewer.String(),
OrgID: domain.OrgID,
}
user, apiErr := m.CreateUser(ctx, user, false)
if apiErr != nil {
zap.L().Error("CreateUser failed", zap.Error(apiErr))
return nil, apiErr
}
return user, nil
}
// PrepareSsoRedirect prepares redirect page link after SSO response
// is successfully parsed (i.e. valid email is available)
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError) {
userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
return "", model.BadRequestStr("invalid user email received from the auth provider")
}
user := &types.User{}
if userPayload == nil {
newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
user = newUser
if apiErr != nil {
zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
return "", apiErr
}
} else {
user = &userPayload.User
}
tokenStore, err := baseauth.GenerateJWTForUser(user, jwt)
if err != nil {
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", model.InternalErrorStr("failed to generate token for the user")
}
return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
redirectUri,
tokenStore.AccessJwt,
user.ID,
tokenStore.RefreshJwt), nil
}
func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
return false, apierr
}
if domain != nil && domain.SsoEnabled {
// sso is enabled, check if the user has admin role
userPayload, baseapierr := m.GetUserByEmail(ctx, email)
if baseapierr != nil || userPayload == nil {
return false, baseapierr
}
if userPayload.Role != authtypes.RoleAdmin.String() {
return false, model.BadRequest(fmt.Errorf("auth method not supported"))
}
}
return true, nil
}
// PrecheckLogin is called when the login or signup page is loaded
// to check whether SSO login should be prompted
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) {
// assume user is valid unless proven otherwise
resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false}
// check if email is a valid user
userPayload, baseApiErr := m.GetUserByEmail(ctx, email)
if baseApiErr != nil {
return resp, baseApiErr
}
if userPayload == nil {
resp.IsUser = false
}
ssoAvailable := true
err := m.checkFeature(model.SSO)
if err != nil {
switch err.(type) {
case basemodel.ErrFeatureUnavailable:
// do nothing, just skip sso
ssoAvailable = false
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, model.BadRequestStr(err.Error())
}
}
if ssoAvailable {
resp.IsUser = true
// find domain from email
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
return resp, apierr
}
if orgDomain != nil && orgDomain.SsoEnabled {
// SAML is enabled for this domain, let's prepare the SSO URL
if sourceUrl == "" {
sourceUrl = constants.GetDefaultSiteURL()
}
// parse source url that generated the login request
var err error
escapedUrl, _ := url.QueryUnescape(sourceUrl)
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
zap.L().Error("failed to parse referer", zap.Error(err))
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}
// build the IdP URL that will authenticate the user
// the front-end will redirect user to this url
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
if err != nil {
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return resp, model.InternalError(err)
}
// set SSO to true, as the url is generated correctly
resp.SSO = true
}
}
return resp, nil
}
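PrepareSsoRedirect above assembles the redirect target as redirectUri plus jwt, usr and refreshjwt query parameters. A hypothetical standalone variant that builds the same shape of URL, with net/url escaping the values (buildSSORedirect is not a function from the codebase):

package main

import (
    "fmt"
    "net/url"
)

// buildSSORedirect assembles the post-SSO redirect; the parameter names
// mirror the removed code, the helper itself is illustrative only.
func buildSSORedirect(redirectURI, accessJWT, userID, refreshJWT string) string {
    q := url.Values{}
    q.Set("jwt", accessJWT)
    q.Set("usr", userID)
    q.Set("refreshjwt", refreshJWT)
    return fmt.Sprintf("%s?%s", redirectURI, q.Encode())
}

func main() {
    fmt.Println(buildSSORedirect("https://example.com/login", "a.b.c", "user-1", "d.e.f"))
}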

View File

@@ -1,272 +0,0 @@
package sqlite
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"net/url"
"strings"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"go.uber.org/zap"
)
// GetDomainFromSsoResponse uses relay state received from IdP to fetch
// the user domain. The domain is then used to validate the response.
// When sending a login request to the IdP we send the relay state as a URL (site URL)
// with domainId or domainName as a query parameter.
func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error) {
// derive domain id from relay state now
var domainIdStr string
var domainNameStr string
var domain *types.GettableOrgDomain
for k, v := range relayState.Query() {
if k == "domainId" && len(v) > 0 {
domainIdStr = strings.Replace(v[0], ":", "-", -1)
}
if k == "domainName" && len(v) > 0 {
domainNameStr = v[0]
}
}
if domainIdStr != "" {
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
zap.L().Error("failed to parse domainId from relay state", zap.Error(err))
return nil, fmt.Errorf("failed to parse domainId from IdP response")
}
domain, err = m.GetDomain(ctx, domainId)
if (err != nil) || domain == nil {
zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
}
if domainNameStr != "" {
domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
domain = domainFromDB
if (err != nil) || domain == nil {
zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
}
if domain != nil {
return domain, nil
}
return nil, fmt.Errorf("failed to find domain received in IdP response")
}
// GetDomainByName returns org domain for a given domain name
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, basemodel.BaseApiError) {
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
Model(&stored).
Where("name = ?", name).
Limit(1).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return nil, model.BadRequest(fmt.Errorf("invalid domain name"))
}
return nil, model.InternalError(err)
}
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, model.InternalError(err)
}
return domain, nil
}
// GetDomain returns org domain for a given domain id
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError) {
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
Model(&stored).
Where("id = ?", id).
Limit(1).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return nil, model.BadRequest(fmt.Errorf("invalid domain id"))
}
return nil, model.InternalError(err)
}
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, model.InternalError(err)
}
return domain, nil
}
// ListDomains gets the list of auth domains by org id
func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError) {
domains := []types.GettableOrgDomain{}
stored := []types.StorableOrgDomain{}
err := m.DB().NewSelect().
Model(&stored).
Where("org_id = ?", orgId).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return domains, nil
}
return nil, model.InternalError(err)
}
for _, s := range stored {
domain := types.GettableOrgDomain{StorableOrgDomain: s}
if err := domain.LoadConfig(s.Data); err != nil {
zap.L().Error("ListDomains() failed", zap.Error(err))
}
domains = append(domains, domain)
}
return domains, nil
}
// CreateDomain creates a new auth domain
func (m *modelDao) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError {
if domain.ID == uuid.Nil {
domain.ID = uuid.New()
}
if domain.OrgID == "" || domain.Name == "" {
return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgID, Name "))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.L().Error("failed to unmarshal domain config", zap.Error(err))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
storableDomain := types.StorableOrgDomain{
ID: domain.ID,
Name: domain.Name,
OrgID: domain.OrgID,
Data: string(configJson),
TimeAuditable: ossTypes.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
}
_, err = m.DB().NewInsert().
Model(&storableDomain).
Exec(ctx)
if err != nil {
zap.L().Error("failed to insert domain in db", zap.Error(err))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
return nil
}
// UpdateDomain updates stored config params for a domain
func (m *modelDao) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError {
if domain.ID == uuid.Nil {
zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return model.InternalError(fmt.Errorf("domain update failed"))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.L().Error("domain update failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain update failed"))
}
storableDomain := &types.StorableOrgDomain{
ID: domain.ID,
Name: domain.Name,
OrgID: domain.OrgID,
Data: string(configJson),
TimeAuditable: ossTypes.TimeAuditable{UpdatedAt: time.Now()},
}
_, err = m.DB().NewUpdate().
Model(storableDomain).
Column("data", "updated_at").
WherePK().
Exec(ctx)
if err != nil {
zap.L().Error("domain update failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain update failed"))
}
return nil
}
// DeleteDomain deletes an org domain
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
if id == uuid.Nil {
zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
storableDomain := &types.StorableOrgDomain{ID: id}
_, err := m.DB().NewDelete().
Model(storableDomain).
WherePK().
Exec(ctx)
if err != nil {
zap.L().Error("domain delete failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
return nil
}
func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError) {
if email == "" {
return nil, model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email "))
}
components := strings.Split(email, "@")
if len(components) < 2 {
return nil, model.BadRequest(fmt.Errorf("invalid email address"))
}
parsedDomain := components[1]
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
Model(&stored).
Where("name = ?", parsedDomain).
Limit(1).
Scan(ctx)
if err != nil {
if err == sql.ErrNoRows {
return nil, nil
}
return nil, model.InternalError(err)
}
domain := &types.GettableOrgDomain{StorableOrgDomain: stored}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, model.InternalError(err)
}
return domain, nil
}
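GetDomainFromSsoResponse above reads domainId or domainName from the relay-state query string, normalising ':' to '-' in the id. A self-contained sketch of just that parsing step (parseRelayState is illustrative, not the DAO method):

package main

import (
    "fmt"
    "net/url"
    "strings"
)

// parseRelayState extracts domainId / domainName the way the removed
// GetDomainFromSsoResponse does, including the ":" to "-" normalisation.
func parseRelayState(relayState *url.URL) (domainID, domainName string) {
    for k, v := range relayState.Query() {
        if k == "domainId" && len(v) > 0 {
            domainID = strings.ReplaceAll(v[0], ":", "-")
        }
        if k == "domainName" && len(v) > 0 {
            domainName = v[0]
        }
    }
    return domainID, domainName
}

func main() {
    u, _ := url.Parse("https://signoz.example.com/login?domainId=0194:abcd")
    id, name := parseRelayState(u)
    fmt.Println(id, name)
}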

View File

@@ -1,46 +0,0 @@
package sqlite
import (
"fmt"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
basedsql "github.com/SigNoz/signoz/pkg/query-service/dao/sqlite"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)
type modelDao struct {
*basedsql.ModelDaoSqlite
flags baseint.FeatureLookup
}
// SetFlagProvider sets the feature lookup provider
func (m *modelDao) SetFlagProvider(flags baseint.FeatureLookup) {
m.flags = flags
}
// CheckFeature confirms if a feature is available
func (m *modelDao) checkFeature(key string) error {
if m.flags == nil {
return fmt.Errorf("flag provider not set")
}
return m.flags.CheckFeature(key)
}
// InitDB creates and extends base model DB repository
func InitDB(sqlStore sqlstore.SQLStore) (*modelDao, error) {
dao, err := basedsql.InitDB(sqlStore)
if err != nil {
return nil, err
}
// set package variable so dependent base methods (e.g. AuthCache) will work
basedao.SetDB(dao)
m := &modelDao{ModelDaoSqlite: dao}
return m, nil
}
func (m *modelDao) DB() *bun.DB {
return m.ModelDaoSqlite.DB()
}

View File

@@ -1,198 +0,0 @@
package sqlite
import (
"context"
"fmt"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
p.StorablePersonalAccessToken.OrgID = orgID
p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
_, err := m.DB().NewInsert().
Model(&p.StorablePersonalAccessToken).
Exec(ctx)
if err != nil {
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
createdByUser, _ := m.GetUser(ctx, p.UserID)
if createdByUser == nil {
p.CreatedByUser = types.PatUser{
NotFound: true,
}
} else {
p.CreatedByUser = types.PatUser{
User: ossTypes.User{
ID: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: createdByUser.CreatedAt,
UpdatedAt: createdByUser.UpdatedAt,
},
ProfilePictureURL: createdByUser.ProfilePictureURL,
},
NotFound: false,
}
}
return p, nil
}
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
_, err := m.DB().NewUpdate().
Model(&p.StorablePersonalAccessToken).
Column("role", "name", "updated_at", "updated_by_user_id").
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Where("revoked = false").
Exec(ctx)
if err != nil {
zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT update failed"))
}
return nil
}
func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
Model(&pats).
Where("revoked = false").
Where("org_id = ?", orgID).
Order("updated_at DESC").
Scan(ctx); err != nil {
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}
patsWithUsers := []types.GettablePAT{}
for i := range pats {
patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[i],
}
createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
if createdByUser == nil {
patWithUser.CreatedByUser = types.PatUser{
NotFound: true,
}
} else {
patWithUser.CreatedByUser = types.PatUser{
User: ossTypes.User{
ID: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: createdByUser.CreatedAt,
UpdatedAt: createdByUser.UpdatedAt,
},
ProfilePictureURL: createdByUser.ProfilePictureURL,
},
NotFound: false,
}
}
updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
if updatedByUser == nil {
patWithUser.UpdatedByUser = types.PatUser{
NotFound: true,
}
} else {
patWithUser.UpdatedByUser = types.PatUser{
User: ossTypes.User{
ID: updatedByUser.ID,
Name: updatedByUser.Name,
Email: updatedByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: updatedByUser.CreatedAt,
UpdatedAt: updatedByUser.UpdatedAt,
},
ProfilePictureURL: updatedByUser.ProfilePictureURL,
},
NotFound: false,
}
}
patsWithUsers = append(patsWithUsers, patWithUser)
}
return patsWithUsers, nil
}
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().NewUpdate().
Model(&types.StorablePersonalAccessToken{}).
Set("revoked = ?", true).
Set("updated_by_user_id = ?", userID).
Set("updated_at = ?", updatedAt).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Exec(ctx)
if err != nil {
zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT revoke failed"))
}
return nil
}
func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
Model(&pats).
Where("token = ?", token).
Where("revoked = false").
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
if len(pats) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token, %s", token),
}
}
patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[0],
}
return &patWithUser, nil
}
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
Model(&pats).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).
Where("revoked = false").
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
if len(pats) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token"),
}
}
patWithUser := types.GettablePAT{
StorablePersonalAccessToken: pats[0],
}
return &patWithUser, nil
}

View File

@@ -1,67 +0,0 @@
package signozio
import (
"context"
"encoding/json"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
func ValidateLicenseV3(ctx context.Context, licenseKey string, zeus zeus.Zeus) (*model.LicenseV3, error) {
data, err := zeus.GetLicense(ctx, licenseKey)
if err != nil {
return nil, err
}
var m map[string]any
if err = json.Unmarshal(data, &m); err != nil {
return nil, err
}
license, err := model.NewLicenseV3(m)
if err != nil {
return nil, err
}
return license, nil
}
// SendUsage reports the usage of signoz to license server
func SendUsage(ctx context.Context, usage model.UsagePayload, zeus zeus.Zeus) error {
body, err := json.Marshal(usage)
if err != nil {
return err
}
return zeus.PutMeters(ctx, usage.LicenseKey.String(), body)
}
func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
body, err := json.Marshal(checkoutRequest)
if err != nil {
return "", err
}
response, err := zeus.GetCheckoutURL(ctx, licenseKey, body)
if err != nil {
return "", err
}
return gjson.GetBytes(response, "url").String(), nil
}
func PortalSession(ctx context.Context, portalRequest *model.PortalRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
body, err := json.Marshal(portalRequest)
if err != nil {
return "", err
}
response, err := zeus.GetPortalURL(ctx, licenseKey, body)
if err != nil {
return "", err
}
return gjson.GetBytes(response, "url").String(), nil
}
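The checkout and portal helpers above only need the url field from the upstream response, which is why gjson is used instead of a typed struct. A runnable illustration with made-up response data:

package main

import (
    "fmt"

    "github.com/tidwall/gjson"
)

func main() {
    // Pull a single field out of an untyped JSON payload, as the helpers above do.
    response := []byte(`{"status":"success","url":"https://billing.example.com/session/123"}`)
    fmt.Println(gjson.GetBytes(response, "url").String())
}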

View File

@@ -1,248 +0,0 @@
package license
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"time"
"github.com/jmoiron/sqlx"
"github.com/mattn/go-sqlite3"
"github.com/SigNoz/signoz/ee/query-service/model"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"go.uber.org/zap"
)
// Repo is the license repo. It stores license keys in a secured DB
type Repo struct {
db *sqlx.DB
store sqlstore.SQLStore
}
// NewLicenseRepo initiates a new license repo
func NewLicenseRepo(db *sqlx.DB, store sqlstore.SQLStore) Repo {
return Repo{
db: db,
store: store,
}
}
func (r *Repo) GetLicensesV3(ctx context.Context) ([]*model.LicenseV3, error) {
licensesData := []model.LicenseDB{}
licenseV3Data := []*model.LicenseV3{}
query := "SELECT id,key,data FROM licenses_v3"
err := r.db.Select(&licensesData, query)
if err != nil {
return nil, fmt.Errorf("failed to get licenses from db: %v", err)
}
for _, l := range licensesData {
var licenseData map[string]interface{}
err := json.Unmarshal([]byte(l.Data), &licenseData)
if err != nil {
return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
}
license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
if err != nil {
return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
}
licenseV3Data = append(licenseV3Data, license)
}
return licenseV3Data, nil
}
// GetActiveLicense fetches the latest active license from DB.
// If the license is not present, expect a nil license and a nil error in the output.
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
activeLicenseV3, err := r.GetActiveLicenseV3(ctx)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
}
if activeLicenseV3 == nil {
return nil, nil
}
activeLicenseV2 := model.ConvertLicenseV3ToLicenseV2(activeLicenseV3)
return activeLicenseV2, nil
}
func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) {
var err error
licenses := []model.LicenseDB{}
query := "SELECT id,key,data FROM licenses_v3"
err = r.db.Select(&licenses, query)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
}
var active *model.LicenseV3
for _, l := range licenses {
var licenseData map[string]interface{}
err := json.Unmarshal([]byte(l.Data), &licenseData)
if err != nil {
return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
}
license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
if err != nil {
return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
}
if active == nil &&
(license.ValidFrom != 0) &&
(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
active = license
}
if active != nil &&
license.ValidFrom > active.ValidFrom &&
(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
active = license
}
}
return active, nil
}
// InsertLicenseV3 inserts a new license v3 in db
func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) *model.ApiError {
query := `INSERT INTO licenses_v3 (id, key, data) VALUES ($1, $2, $3)`
// license is an entity owned by zeus, so the entire license is stored here without defining a schema
licenseData, err := json.Marshal(l.Data)
if err != nil {
return &model.ApiError{Typ: basemodel.ErrorBadData, Err: err}
}
_, err = r.db.ExecContext(ctx,
query,
l.ID,
l.Key,
string(licenseData),
)
if err != nil {
if sqliteErr, ok := err.(sqlite3.Error); ok {
if sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique {
zap.L().Error("error in inserting license data: ", zap.Error(sqliteErr))
return &model.ApiError{Typ: model.ErrorConflict, Err: sqliteErr}
}
}
zap.L().Error("error in inserting license data: ", zap.Error(err))
return &model.ApiError{Typ: basemodel.ErrorExec, Err: err}
}
return nil
}
// UpdateLicenseV3 updates an existing license v3 in db
func (r *Repo) UpdateLicenseV3(ctx context.Context, l *model.LicenseV3) error {
// the key and id for the license can't change so only update the data here!
query := `UPDATE licenses_v3 SET data=$1 WHERE id=$2;`
license, err := json.Marshal(l.Data)
if err != nil {
return fmt.Errorf("insert license failed: license marshal error")
}
_, err = r.db.ExecContext(ctx,
query,
license,
l.ID,
)
if err != nil {
zap.L().Error("error in updating license data: ", zap.Error(err))
return fmt.Errorf("failed to update license in db: %v", err)
}
return nil
}
func (r *Repo) CreateFeature(req *types.FeatureStatus) *basemodel.ApiError {
_, err := r.store.BunDB().NewInsert().
Model(req).
Exec(context.Background())
if err != nil {
return &basemodel.ApiError{Typ: basemodel.ErrorInternal, Err: err}
}
return nil
}
func (r *Repo) GetFeature(featureName string) (types.FeatureStatus, error) {
var feature types.FeatureStatus
err := r.store.BunDB().NewSelect().
Model(&feature).
Where("name = ?", featureName).
Scan(context.Background())
if err != nil {
return feature, err
}
if feature.Name == "" {
return feature, basemodel.ErrFeatureUnavailable{Key: featureName}
}
return feature, nil
}
func (r *Repo) GetAllFeatures() ([]basemodel.Feature, error) {
var feature []basemodel.Feature
err := r.db.Select(&feature,
`SELECT * FROM feature_status;`)
if err != nil {
return feature, err
}
return feature, nil
}
func (r *Repo) UpdateFeature(req types.FeatureStatus) error {
_, err := r.store.BunDB().NewUpdate().
Model(&req).
Where("name = ?", req.Name).
Exec(context.Background())
if err != nil {
return err
}
return nil
}
func (r *Repo) InitFeatures(req []types.FeatureStatus) error {
// get a feature by name, if it doesn't exist, create it. If it does exist, update it.
for _, feature := range req {
currentFeature, err := r.GetFeature(feature.Name)
if err != nil && err == sql.ErrNoRows {
err := r.CreateFeature(&feature)
if err != nil {
return err
}
continue
} else if err != nil {
return err
}
feature.Usage = int(currentFeature.Usage)
if feature.Usage >= feature.UsageLimit && feature.UsageLimit != -1 {
feature.Active = false
}
err = r.UpdateFeature(feature)
if err != nil {
return err
}
}
return nil
}
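GetActiveLicenseV3 above picks, among non-expired licenses, the one with the latest ValidFrom, with a ValidUntil of -1 meaning no expiry. A simplified standalone sketch of that selection rule (pickActive is illustrative, not the repo method):

package main

import (
    "fmt"
    "time"
)

// license carries just the fields the selection logic looks at.
type license struct {
    ID         string
    ValidFrom  int64
    ValidUntil int64 // -1 means "no expiry"
}

// pickActive keeps the non-expired license with the latest ValidFrom.
func pickActive(licenses []license, now int64) *license {
    var active *license
    for i := range licenses {
        l := licenses[i]
        if l.ValidUntil != -1 && l.ValidUntil <= now {
            continue // expired
        }
        if active == nil || l.ValidFrom > active.ValidFrom {
            active = &licenses[i]
        }
    }
    return active
}

func main() {
    now := time.Now().Unix()
    got := pickActive([]license{
        {ID: "old", ValidFrom: now - 7200, ValidUntil: -1},
        {ID: "new", ValidFrom: now - 3600, ValidUntil: -1},
    }, now)
    fmt.Println(got.ID) // new
}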

View File

@@ -1,318 +0,0 @@
package license
import (
"context"
"sync/atomic"
"time"
"github.com/jmoiron/sqlx"
"sync"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/zeus"
validate "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/model"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"go.uber.org/zap"
)
var LM *Manager
// validate and update license every 24 hours
var validationFrequency = 24 * 60 * time.Minute
type Manager struct {
repo *Repo
zeus zeus.Zeus
mutex sync.Mutex
validatorRunning bool
// end the license validation; this is important to gracefully
// stop validation and protect against inconsistent updates
done chan struct{}
// terminated waits for the validation goroutine to end
terminated chan struct{}
// last time the license was validated
lastValidated int64
// keep track of validation failure attempts
failedAttempts uint64
// keep track of active license and features
activeLicenseV3 *model.LicenseV3
activeFeatures basemodel.FeatureSet
}
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, zeus zeus.Zeus, features ...basemodel.Feature) (*Manager, error) {
if LM != nil {
return LM, nil
}
repo := NewLicenseRepo(db, store)
m := &Manager{
repo: &repo,
zeus: zeus,
}
if err := m.start(features...); err != nil {
return m, err
}
LM = m
return m, nil
}
// start loads active license in memory and initiates validator
func (lm *Manager) start(features ...basemodel.Feature) error {
return lm.LoadActiveLicenseV3(features...)
}
func (lm *Manager) Stop() {
close(lm.done)
<-lm.terminated
}
func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) {
lm.mutex.Lock()
defer lm.mutex.Unlock()
if l == nil {
return
}
lm.activeLicenseV3 = l
lm.activeFeatures = append(l.Features, features...)
// set default features
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.L().Panic("Couldn't activate features", zap.Error(err))
}
if !lm.validatorRunning {
// we want to make sure only one validator runs,
// we already have lock() so good to go
lm.validatorRunning = true
go lm.ValidatorV3(context.Background())
}
}
func setDefaultFeatures(lm *Manager) {
lm.activeFeatures = append(lm.activeFeatures, baseconstants.DEFAULT_FEATURE_SET...)
}
func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error {
active, err := lm.repo.GetActiveLicenseV3(context.Background())
if err != nil {
return err
}
if active != nil {
lm.SetActiveV3(active, features...)
} else {
zap.L().Info("No active license found, defaulting to basic plan")
// if no active license is found, we default to basic(free) plan with all default features
lm.activeFeatures = model.BasicPlan
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.L().Error("Couldn't initialize features", zap.Error(err))
return err
}
}
return nil
}
func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.LicenseV3, apiError *model.ApiError) {
licenses, err := lm.repo.GetLicensesV3(ctx)
if err != nil {
return nil, model.InternalError(err)
}
for _, l := range licenses {
if lm.activeLicenseV3 != nil && l.Key == lm.activeLicenseV3.Key {
l.IsCurrent = true
}
if l.ValidUntil == -1 {
// for subscriptions, there is no end date as such,
// but to show the user some validity we default to a one-year timespan
l.ValidUntil = l.ValidFrom + 31556926
}
response = append(response, l)
}
return response, nil
}
// ValidatorV3 re-validates the license on every validation tick
func (lm *Manager) ValidatorV3(ctx context.Context) {
zap.L().Info("ValidatorV3 started!")
defer close(lm.terminated)
tick := time.NewTicker(validationFrequency)
defer tick.Stop()
_ = lm.ValidateV3(ctx)
for {
select {
case <-lm.done:
return
default:
select {
case <-lm.done:
return
case <-tick.C:
_ = lm.ValidateV3(ctx)
}
}
}
}
func (lm *Manager) RefreshLicense(ctx context.Context) error {
license, err := validate.ValidateLicenseV3(ctx, lm.activeLicenseV3.Key, lm.zeus)
if err != nil {
return err
}
err = lm.repo.UpdateLicenseV3(ctx, license)
if err != nil {
return err
}
lm.SetActiveV3(license)
return nil
}
func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
if lm.activeLicenseV3 == nil {
return nil
}
defer func() {
lm.mutex.Lock()
lm.lastValidated = time.Now().Unix()
if reterr != nil {
zap.L().Error("License validation completed with error", zap.Error(reterr))
atomic.AddUint64(&lm.failedAttempts, 1)
// default to basic plan if validation fails for three consecutive times
if atomic.LoadUint64(&lm.failedAttempts) > 3 {
zap.L().Error("License validation completed with error for three consecutive times, defaulting to basic plan", zap.String("license_id", lm.activeLicenseV3.ID), zap.Bool("license_validation", false))
lm.activeLicenseV3 = nil
lm.activeFeatures = model.BasicPlan
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.L().Error("Couldn't initialize features", zap.Error(err))
}
lm.done <- struct{}{}
lm.validatorRunning = false
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
map[string]interface{}{"err": reterr.Error()}, "", true, false)
} else {
// reset the failed attempts counter
atomic.StoreUint64(&lm.failedAttempts, 0)
zap.L().Info("License validation completed with no errors")
}
lm.mutex.Unlock()
}()
err := lm.RefreshLicense(ctx)
if err != nil {
return err
}
return nil
}
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (*model.LicenseV3, error) {
license, err := validate.ValidateLicenseV3(ctx, licenseKey, lm.zeus)
if err != nil {
return nil, err
}
// insert the new license to the sqlite db
modelErr := lm.repo.InsertLicenseV3(ctx, license)
if modelErr != nil {
zap.L().Error("failed to activate license", zap.Error(modelErr))
return nil, modelErr
}
// license is valid, activate it
lm.SetActiveV3(license)
return license, nil
}
func (lm *Manager) GetActiveLicense() *model.LicenseV3 {
return lm.activeLicenseV3
}
// CheckFeature will be internally used by backend routines
// for feature gating
func (lm *Manager) CheckFeature(featureKey string) error {
feature, err := lm.repo.GetFeature(featureKey)
if err != nil {
return err
}
if feature.Active {
return nil
}
return basemodel.ErrFeatureUnavailable{Key: featureKey}
}
// GetFeatureFlags returns current active features
func (lm *Manager) GetFeatureFlags() (basemodel.FeatureSet, error) {
return lm.repo.GetAllFeatures()
}
func (lm *Manager) InitFeatures(features basemodel.FeatureSet) error {
featureStatus := make([]types.FeatureStatus, len(features))
for i, f := range features {
featureStatus[i] = types.FeatureStatus{
Name: f.Name,
Active: f.Active,
Usage: int(f.Usage),
UsageLimit: int(f.UsageLimit),
Route: f.Route,
}
}
return lm.repo.InitFeatures(featureStatus)
}
func (lm *Manager) UpdateFeatureFlag(feature basemodel.Feature) error {
return lm.repo.UpdateFeature(types.FeatureStatus{
Name: feature.Name,
Active: feature.Active,
Usage: int(feature.Usage),
UsageLimit: int(feature.UsageLimit),
Route: feature.Route,
})
}
func (lm *Manager) GetFeatureFlag(key string) (basemodel.Feature, error) {
featureStatus, err := lm.repo.GetFeature(key)
if err != nil {
return basemodel.Feature{}, err
}
return basemodel.Feature{
Name: featureStatus.Name,
Active: featureStatus.Active,
Usage: int64(featureStatus.Usage),
UsageLimit: int64(featureStatus.UsageLimit),
Route: featureStatus.Route,
}, nil
}
// GetRepo returns the license repo
func (lm *Manager) GetRepo() *Repo {
return lm.repo
}
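ValidatorV3 above validates once up front and then on every tick, exiting when the done channel is closed. A minimal sketch of that loop shape with the license check stubbed out (runValidator is illustrative only):

package main

import (
    "context"
    "fmt"
    "time"
)

// runValidator validates once, then on every tick, and stops cleanly when
// done is closed. validate stands in for the real license check.
func runValidator(ctx context.Context, every time.Duration, done <-chan struct{}, validate func(context.Context) error) {
    tick := time.NewTicker(every)
    defer tick.Stop()

    _ = validate(ctx)
    for {
        select {
        case <-done:
            return
        case <-tick.C:
            _ = validate(ctx)
        }
    }
}

func main() {
    done := make(chan struct{})
    go runValidator(context.Background(), 50*time.Millisecond, done, func(context.Context) error {
        fmt.Println("validated")
        return nil
    })
    time.Sleep(120 * time.Millisecond)
    close(done)
}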

View File

@@ -6,18 +6,26 @@ import (
"os"
"time"
"github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
"github.com/SigNoz/signoz/pkg/factory"
pkglicensing "github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
pkgzeus "github.com/SigNoz/signoz/pkg/zeus"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
@@ -85,8 +93,9 @@ func main() {
loggerMgr := initZapLog()
zap.ReplaceGlobals(loggerMgr)
defer loggerMgr.Sync() // flushes buffer, if any
ctx := context.Background()
config, err := signoz.NewConfig(context.Background(), config.ResolverConfig{
config, err := signoz.NewConfig(ctx, config.ResolverConfig{
Uris: []string{"env:"},
ProviderFactories: []config.ProviderFactory{
envprovider.NewFactory(),
@@ -109,20 +118,6 @@ func main() {
zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
}
signoz, err := signoz.New(
context.Background(),
config,
zeus.Config(),
httpzeus.NewProviderFactory(),
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
)
if err != nil {
zap.L().Fatal("Failed to create signoz", zap.Error(err))
}
jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
if len(jwtSecret) == 0 {
@@ -133,6 +128,26 @@ func main() {
jwt := authtypes.NewJWT(jwtSecret, 30*time.Minute, 30*24*time.Hour)
signoz, err := signoz.New(
context.Background(),
config,
jwt,
zeus.Config(),
httpzeus.NewProviderFactory(),
licensing.Config(24*time.Hour, 3),
func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics)
},
signoz.NewEmailingProviderFactories(),
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
)
if err != nil {
zap.L().Fatal("Failed to create signoz", zap.Error(err))
}
serverOptions := &app.ServerOptions{
Config: config,
SigNoz: signoz,
@@ -151,22 +166,22 @@ func main() {
zap.L().Fatal("Failed to create server", zap.Error(err))
}
if err := server.Start(context.Background()); err != nil {
if err := server.Start(ctx); err != nil {
zap.L().Fatal("Could not start server", zap.Error(err))
}
signoz.Start(context.Background())
signoz.Start(ctx)
if err := signoz.Wait(context.Background()); err != nil {
if err := signoz.Wait(ctx); err != nil {
zap.L().Fatal("Failed to start signoz", zap.Error(err))
}
err = server.Stop()
err = server.Stop(ctx)
if err != nil {
zap.L().Fatal("Failed to stop server", zap.Error(err))
}
err = signoz.Stop(context.Background())
err = signoz.Stop(ctx)
if err != nil {
zap.L().Fatal("Failed to stop signoz", zap.Error(err))
}
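The diff above threads one ctx through Start, Wait and Stop instead of creating context.Background() at each call site. A possible extension, shown only as a sketch and not something the diff does, is to make that shared context cancel on SIGINT/SIGTERM:

package main

import (
    "context"
    "fmt"
    "os/signal"
    "syscall"
)

func main() {
    // A single context that is cancelled as soon as a termination signal arrives.
    ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
    defer stop()

    <-ctx.Done()
    fmt.Println("shutting down:", ctx.Err())
}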

View File

@@ -1,12 +0,0 @@
package model
import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
// GettableInvitation overrides the base object and adds a precheck into the
// response
type GettableInvitation struct {
*basemodel.InvitationResponseObject
Precheck *basemodel.PrecheckResponse `json:"precheck"`
}

View File

@@ -1,244 +0,0 @@
package model
import (
"encoding/json"
"fmt"
"reflect"
"time"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/pkg/errors"
)
type License struct {
Key string `json:"key" db:"key"`
ActivationId string `json:"activationId" db:"activationId"`
CreatedAt time.Time `db:"created_at"`
// PlanDetails contains the encrypted plan info
PlanDetails string `json:"planDetails" db:"planDetails"`
// stores parsed license details
LicensePlan
FeatureSet basemodel.FeatureSet
// populated in case license has any errors
ValidationMessage string `db:"validationMessage"`
// used only for sending details to front-end
IsCurrent bool `json:"isCurrent"`
}
func (l *License) MarshalJSON() ([]byte, error) {
return json.Marshal(&struct {
Key string `json:"key" db:"key"`
ActivationId string `json:"activationId" db:"activationId"`
ValidationMessage string `db:"validationMessage"`
IsCurrent bool `json:"isCurrent"`
PlanKey string `json:"planKey"`
ValidFrom time.Time `json:"ValidFrom"`
ValidUntil time.Time `json:"ValidUntil"`
Status string `json:"status"`
}{
Key: l.Key,
ActivationId: l.ActivationId,
IsCurrent: l.IsCurrent,
PlanKey: l.PlanKey,
ValidFrom: time.Unix(l.ValidFrom, 0),
ValidUntil: time.Unix(l.ValidUntil, 0),
Status: l.Status,
ValidationMessage: l.ValidationMessage,
})
}
type LicensePlan struct {
PlanKey string `json:"planKey"`
ValidFrom int64 `json:"validFrom"`
ValidUntil int64 `json:"validUntil"`
Status string `json:"status"`
}
type Licenses struct {
TrialStart int64 `json:"trialStart"`
TrialEnd int64 `json:"trialEnd"`
OnTrial bool `json:"onTrial"`
WorkSpaceBlock bool `json:"workSpaceBlock"`
TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"`
GracePeriodEnd int64 `json:"gracePeriodEnd"`
Licenses []License `json:"licenses"`
}
type SubscriptionServerResp struct {
Status string `json:"status"`
Data Licenses `json:"data"`
}
type Plan struct {
Name string `json:"name"`
}
type LicenseDB struct {
ID string `json:"id"`
Key string `json:"key"`
Data string `json:"data"`
}
type LicenseV3 struct {
ID string
Key string
Data map[string]interface{}
PlanName string
Features basemodel.FeatureSet
Status string
IsCurrent bool
ValidFrom int64
ValidUntil int64
}
func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) {
var zeroValue T
if val, ok := data[key]; ok {
if value, ok := val.(T); ok {
return value, nil
}
return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue))
}
return zeroValue, fmt.Errorf("%s key is missing", key)
}
func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
var features basemodel.FeatureSet
// extract id from data
licenseID, err := extractKeyFromMapStringInterface[string](data, "id")
if err != nil {
return nil, err
}
delete(data, "id")
// extract key from data
licenseKey, err := extractKeyFromMapStringInterface[string](data, "key")
if err != nil {
return nil, err
}
delete(data, "key")
// extract status from data
status, err := extractKeyFromMapStringInterface[string](data, "status")
if err != nil {
return nil, err
}
planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan")
if err != nil {
return nil, err
}
planName, err := extractKeyFromMapStringInterface[string](planMap, "name")
if err != nil {
return nil, err
}
// if license status is invalid then default it to basic
if status == LicenseStatusInvalid {
planName = PlanNameBasic
}
featuresFromZeus := basemodel.FeatureSet{}
if _features, ok := data["features"]; ok {
featuresData, err := json.Marshal(_features)
if err != nil {
return nil, errors.Wrap(err, "failed to marshal features data")
}
if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil {
return nil, errors.Wrap(err, "failed to unmarshal features data")
}
}
switch planName {
case PlanNameEnterprise:
features = append(features, EnterprisePlan...)
case PlanNameBasic:
features = append(features, BasicPlan...)
default:
features = append(features, BasicPlan...)
}
if len(featuresFromZeus) > 0 {
for _, feature := range featuresFromZeus {
exists := false
for i, existingFeature := range features {
if existingFeature.Name == feature.Name {
features[i] = feature // Replace existing feature
exists = true
break
}
}
if !exists {
features = append(features, feature) // Append if it doesn't exist
}
}
}
data["features"] = features
_validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from")
if err != nil {
_validFrom = 0
}
validFrom := int64(_validFrom)
_validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until")
if err != nil {
_validUntil = 0
}
validUntil := int64(_validUntil)
return &LicenseV3{
ID: licenseID,
Key: licenseKey,
Data: data,
PlanName: planName,
Features: features,
ValidFrom: validFrom,
ValidUntil: validUntil,
Status: status,
}, nil
}
func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) {
licenseDataWithIdAndKey := data
licenseDataWithIdAndKey["id"] = id
licenseDataWithIdAndKey["key"] = key
return NewLicenseV3(licenseDataWithIdAndKey)
}
func ConvertLicenseV3ToLicenseV2(l *LicenseV3) *License {
planKeyFromPlanName, ok := MapOldPlanKeyToNewPlanName[l.PlanName]
if !ok {
planKeyFromPlanName = Basic
}
return &License{
Key: l.Key,
ActivationId: "",
PlanDetails: "",
FeatureSet: l.Features,
ValidationMessage: "",
IsCurrent: l.IsCurrent,
LicensePlan: LicensePlan{
PlanKey: planKeyFromPlanName,
ValidFrom: l.ValidFrom,
ValidUntil: l.ValidUntil,
Status: l.Status},
}
}
type CheckoutRequest struct {
SuccessURL string `json:"url"`
}
type PortalRequest struct {
SuccessURL string `json:"url"`
}
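extractKeyFromMapStringInterface above uses a type parameter to pull typed values out of the decoded license map. A standalone sketch of the same idea (extractKey is a local rename, not the package function):

package main

import (
    "fmt"
    "reflect"
)

// extractKey reads a key from a decoded JSON map and asserts it to the
// expected type, returning a descriptive error otherwise.
func extractKey[T any](data map[string]interface{}, key string) (T, error) {
    var zero T
    val, ok := data[key]
    if !ok {
        return zero, fmt.Errorf("%s key is missing", key)
    }
    typed, ok := val.(T)
    if !ok {
        return zero, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zero))
    }
    return typed, nil
}

func main() {
    data := map[string]interface{}{"id": "lic-1", "valid_from": float64(1730899309)}
    id, _ := extractKey[string](data, "id")
    from, _ := extractKey[float64](data, "valid_from") // JSON numbers decode to float64
    fmt.Println(id, int64(from))
}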

View File

@@ -1,170 +0,0 @@
package model
import (
"encoding/json"
"testing"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewLicenseV3(t *testing.T) {
testCases := []struct {
name string
data []byte
pass bool
expected *LicenseV3
error error
}{
{
name: "Error for missing license id",
data: []byte(`{}`),
pass: false,
error: errors.New("id key is missing"),
},
{
name: "Error for license id not being a valid string",
data: []byte(`{"id": 10}`),
pass: false,
error: errors.New("id key is not a valid string"),
},
{
name: "Error for missing license key",
data: []byte(`{"id":"does-not-matter"}`),
pass: false,
error: errors.New("key key is missing"),
},
{
name: "Error for invalid string license key",
data: []byte(`{"id":"does-not-matter","key":10}`),
pass: false,
error: errors.New("key key is not a valid string"),
},
{
name: "Error for missing license status",
data: []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`),
pass: false,
error: errors.New("status key is missing"),
},
{
name: "Error for invalid string license status",
data: []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`),
pass: false,
error: errors.New("status key is not a valid string"),
},
{
name: "Error for missing license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`),
pass: false,
error: errors.New("plan key is missing"),
},
{
name: "Error for invalid json license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`),
pass: false,
error: errors.New("plan key is not a valid map[string]interface {}"),
},
{
name: "Error for invalid license plan",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`),
pass: false,
error: errors.New("name key is missing"),
},
{
name: "Parse the entire license properly",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
},
"category": "FREE",
"status": "ACTIVE",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameEnterprise,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "ACTIVE",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
{
name: "Fallback to basic plan if license status is invalid",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
},
"category": "FREE",
"status": "INVALID",
"valid_from": float64(1730899309),
"valid_until": float64(-1),
},
PlanName: PlanNameBasic,
ValidFrom: 1730899309,
ValidUntil: -1,
Status: "INVALID",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
{
name: "fallback states for validFrom and validUntil",
data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`),
pass: true,
expected: &LicenseV3{
ID: "does-not-matter",
Key: "does-not-matter-key",
Data: map[string]interface{}{
"plan": map[string]interface{}{
"name": "ENTERPRISE",
},
"valid_from": 1234.456,
"valid_until": 5678.567,
"category": "FREE",
"status": "ACTIVE",
},
PlanName: PlanNameEnterprise,
ValidFrom: 1234,
ValidUntil: 5678,
Status: "ACTIVE",
IsCurrent: false,
Features: model.FeatureSet{},
},
},
}
for _, tc := range testCases {
var licensePayload map[string]interface{}
err := json.Unmarshal(tc.data, &licensePayload)
require.NoError(t, err)
license, err := NewLicenseV3(licensePayload)
if license != nil {
license.Features = make(model.FeatureSet, 0)
delete(license.Data, "features")
}
if tc.pass {
require.NoError(t, err)
require.NotNil(t, license)
assert.Equal(t, tc.expected, license)
} else {
require.Error(t, err)
assert.EqualError(t, err, tc.error.Error())
require.Nil(t, license)
}
}
}

View File

@@ -1,7 +0,0 @@
package model
type CreatePATRequestBody struct {
Name string `json:"name"`
Role string `json:"role"`
ExpiresInDays int64 `json:"expiresInDays"`
}

View File

@@ -1,131 +0,0 @@
package model
import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
const SSO = "SSO"
const Basic = "BASIC_PLAN"
const Enterprise = "ENTERPRISE_PLAN"
var (
PlanNameEnterprise = "ENTERPRISE"
PlanNameBasic = "BASIC"
)
var (
MapOldPlanKeyToNewPlanName map[string]string = map[string]string{PlanNameBasic: Basic, PlanNameEnterprise: Enterprise}
)
var (
LicenseStatusInvalid = "INVALID"
)
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
const PremiumSupport = "PREMIUM_SUPPORT"
var BasicPlan = basemodel.FeatureSet{
basemodel.Feature{
Name: SSO,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var EnterprisePlan = basemodel.FeatureSet{
basemodel.Feature{
Name: SSO,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.UseSpanMetrics,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Onboarding,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: ChatSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.AnomalyDetection,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: basemodel.TraceFunnels,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}

View File

@@ -1,31 +0,0 @@
package sso
import (
"net/http"
)
// SSOIdentity contains details of user received from SSO provider
type SSOIdentity struct {
UserID string
Username string
PreferredUsername string
Email string
EmailVerified bool
ConnectorData []byte
}
// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth
// style redirect flow to determine user information.
type OAuthCallbackProvider interface {
// The initial URL the user would be redirected to.
// OAuth2 implementations support various scopes but we only need profile and user as
// the roles are still being managed in SigNoz.
BuildAuthURL(state string) (string, error)
// Handle the callback to the server (after login at oauth provider site)
// and return an email identity.
// At the moment we don't support an auto signup flow (based on domain), so
// the full identity (including name, group etc) is not required outside of the
// connector
HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}
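OAuthCallbackProvider above is a two-method contract: build the redirect URL for the IdP, then turn the callback request into an identity. A toy, self-contained sketch of a type satisfying that shape (the local types restate the contract and are not the ee/sso ones):

package main

import (
    "fmt"
    "net/http"
)

type ssoIdentity struct {
    Email         string
    EmailVerified bool
}

type oauthCallbackProvider interface {
    BuildAuthURL(state string) (string, error)
    HandleCallback(r *http.Request) (*ssoIdentity, error)
}

// staticProvider only shows how the two methods fit together; a real
// connector would talk to the OAuth provider here.
type staticProvider struct{ authURL string }

func (p staticProvider) BuildAuthURL(state string) (string, error) {
    return fmt.Sprintf("%s?state=%s", p.authURL, state), nil
}

func (p staticProvider) HandleCallback(r *http.Request) (*ssoIdentity, error) {
    return &ssoIdentity{Email: r.URL.Query().Get("email"), EmailVerified: true}, nil
}

func main() {
    var p oauthCallbackProvider = staticProvider{authURL: "https://idp.example.com/auth"}
    u, _ := p.BuildAuthURL("abc123")
    fmt.Println(u)
}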

View File

@@ -14,9 +14,9 @@ import (
"go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
"github.com/SigNoz/signoz/pkg/zeus"
)
@@ -35,64 +35,68 @@ var (
type Manager struct {
clickhouseConn clickhouse.Conn
licenseRepo *license.Repo
licenseService licensing.Licensing
scheduler *gocron.Scheduler
modelDao dao.ModelDao
zeus zeus.Zeus
orgGetter organization.Getter
}
func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, zeus zeus.Zeus) (*Manager, error) {
func New(licenseService licensing.Licensing, clickhouseConn clickhouse.Conn, zeus zeus.Zeus, orgGetter organization.Getter) (*Manager, error) {
m := &Manager{
clickhouseConn: clickhouseConn,
licenseRepo: licenseRepo,
licenseService: licenseService,
scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every day at 00:00 UTC
modelDao: modelDao,
zeus: zeus,
orgGetter: orgGetter,
}
return m, nil
}
// Start uploads any collected usage snapshots and starts the exporter scheduler
func (lm *Manager) Start() error {
func (lm *Manager) Start(ctx context.Context) error {
// compare locker with stateUnlocked; if both are the same the lock is applied, else return an error
if !atomic.CompareAndSwapUint32(&locker, stateUnlocked, stateLocked) {
return fmt.Errorf("usage exporter is locked")
}
_, err := lm.scheduler.Do(func() { lm.UploadUsage() })
// upload usage once when starting the service
_, err := lm.scheduler.Do(func() { lm.UploadUsage(ctx) })
if err != nil {
return err
}
// upload usage once when starting the service
lm.UploadUsage()
lm.UploadUsage(ctx)
lm.scheduler.StartAsync()
return nil
}
func (lm *Manager) UploadUsage() {
ctx := context.Background()
// check if license is present or not
license, err := lm.licenseRepo.GetActiveLicense(ctx)
func (lm *Manager) UploadUsage(ctx context.Context) {
organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
if err != nil {
zap.L().Error("failed to get active license", zap.Error(err))
return
}
if license == nil {
// we will not start the usage reporting if license is not present.
zap.L().Info("no license present, skipping usage reporting")
zap.L().Error("failed to get organizations", zap.Error(err))
return
}
for _, organization := range organizations {
// check if license is present or not
license, err := lm.licenseService.GetActive(ctx, organization.ID)
if err != nil {
zap.L().Error("failed to get active license", zap.Error(err))
return
}
if license == nil {
// we will not start the usage reporting if license is not present.
zap.L().Info("no license present, skipping usage reporting")
return
}
usages := []model.UsageDB{}
usages := []model.UsageDB{}
// get usage from clickhouse
dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"}
query := `
// get usage from clickhouse
dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"}
query := `
SELECT tenant, collector_id, exporter_id, timestamp, data
FROM %s.distributed_usage as u1
GLOBAL INNER JOIN
@@ -107,76 +111,76 @@ func (lm *Manager) UploadUsage() {
order by timestamp
`
for _, db := range dbs {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
return
for _, db := range dbs {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
return
}
for _, u := range dbusages {
u.Type = db
usages = append(usages, u)
}
}
for _, u := range dbusages {
u.Type = db
usages = append(usages, u)
}
}
if len(usages) <= 0 {
zap.L().Info("no snapshots to upload, skipping.")
return
}
zap.L().Info("uploading usage data")
usagesPayload := []model.Usage{}
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
if len(usages) <= 0 {
zap.L().Info("no snapshots to upload, skipping.")
return
}
usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
zap.L().Info("uploading usage data")
usagesPayload := []model.Usage{}
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
return
}
usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
return
}
usageData.CollectorID = usage.CollectorID
usageData.ExporterID = usage.ExporterID
usageData.Type = usage.Type
usageData.Tenant = "default"
usageData.OrgName = "default"
usageData.TenantId = "default"
usagesPayload = append(usagesPayload, usageData)
}
key, _ := uuid.Parse(license.Key)
payload := model.UsagePayload{
LicenseKey: key,
Usage: usagesPayload,
}
body, errv2 := json.Marshal(payload)
if errv2 != nil {
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
return
}
usageData.CollectorID = usage.CollectorID
usageData.ExporterID = usage.ExporterID
usageData.Type = usage.Type
usageData.Tenant = "default"
usageData.OrgName = "default"
usageData.TenantId = "default"
usagesPayload = append(usagesPayload, usageData)
}
key, _ := uuid.Parse(license.Key)
payload := model.UsagePayload{
LicenseKey: key,
Usage: usagesPayload,
}
body, errv2 := json.Marshal(payload)
if errv2 != nil {
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
return
}
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
if errv2 != nil {
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
// not returning error here since it is captured in the failed count
return
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
if errv2 != nil {
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
// not returning error here since it is captured in the failed count
return
}
}
}
func (lm *Manager) Stop() {
func (lm *Manager) Stop(ctx context.Context) {
lm.scheduler.Stop()
zap.L().Info("sending usage data before shutting down")
// send usage before shutting down
lm.UploadUsage()
lm.UploadUsage(ctx)
atomic.StoreUint32(&locker, stateUnlocked)
}
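A condensed sketch of the reworked flow above: usage reporting now walks the organizations returned by the org getter and resolves each org's active license before uploading. The interfaces below are trimmed stand-ins for organization.Getter and licensing.Licensing (reduced to the single methods used here), and report stands in for the ClickHouse collection plus zeus.PutMeters step.

package usageexample

import (
	"context"
	"log"
)

// Stand-ins for the real organization.Getter and licensing.Licensing
// interfaces, trimmed to the methods used by UploadUsage in the diff above.
type Org struct{ ID string }

type OrgGetter interface {
	ListByOwnedKeyRange(ctx context.Context) ([]Org, error)
}

type License struct{ Key string }

type Licensing interface {
	GetActive(ctx context.Context, orgID string) (*License, error)
}

// uploadUsage mirrors the per-organization loop: list orgs, resolve each
// org's active license, and report usage; as in the diff, an error or a
// missing license stops the whole run.
func uploadUsage(ctx context.Context, orgs OrgGetter, lic Licensing, report func(ctx context.Context, licenseKey string) error) {
	list, err := orgs.ListByOwnedKeyRange(ctx)
	if err != nil {
		log.Printf("failed to get organizations: %v", err)
		return
	}
	for _, org := range list {
		license, err := lic.GetActive(ctx, org.ID)
		if err != nil {
			log.Printf("failed to get active license: %v", err)
			return
		}
		if license == nil {
			log.Printf("no license present, skipping usage reporting")
			return
		}
		if err := report(ctx, license.Key); err != nil {
			log.Printf("failed to upload usage: %v", err)
			return
		}
	}
}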

View File

@@ -17,15 +17,21 @@ var (
)
var (
Org = "org"
User = "user"
CloudIntegration = "cloud_integration"
Org = "org"
User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
AgentConfigVersion = "agent_config_version"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
AgentConfigVersionReference = `("version_id") REFERENCES "agent_config_version" ("id")`
)
type dialect struct{}
@@ -264,8 +270,14 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
} else if reference == UserNoCascade && !slices.Contains(fkReferences, UserReferenceNoCascade) {
fkReferences = append(fkReferences, UserReferenceNoCascade)
} else if reference == FactorPassword && !slices.Contains(fkReferences, FactorPasswordReference) {
fkReferences = append(fkReferences, FactorPasswordReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
} else if reference == AgentConfigVersion && !slices.Contains(fkReferences, AgentConfigVersionReference) {
fkReferences = append(fkReferences, AgentConfigVersionReference)
}
}
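The added reference names pair one-to-one with the FK clauses above. As a sketch only (the dialect itself keeps the if/else chain shown in this hunk), the same lookup could be written as a map, using the constants and clauses from the diff:

package dialectexample

// fkClauseFor maps a logical reference name to the SQL foreign-key clause it
// expands to, mirroring the constants and clauses shown above.
var fkClauseFor = map[string]string{
	"org":                  `("org_id") REFERENCES "organizations" ("id")`,
	"user":                 `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`,
	"user_no_cascade":      `("user_id") REFERENCES "users" ("id")`,
	"factor_password":      `("password_id") REFERENCES "factor_password" ("id")`,
	"cloud_integration":    `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`,
	"agent_config_version": `("version_id") REFERENCES "agent_config_version" ("id")`,
}

// fkReferencesFor resolves reference names to clauses, deduplicating while
// preserving input order, like the chain above.
func fkReferencesFor(references []string) []string {
	seen := map[string]bool{}
	out := []string{}
	for _, ref := range references {
		if clause, ok := fkClauseFor[ref]; ok && !seen[clause] {
			seen[clause] = true
			out = append(out, clause)
		}
	}
	return out
}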

View File

@@ -1,76 +0,0 @@
package types
import (
"crypto/rand"
"encoding/base64"
"time"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
type GettablePAT struct {
CreatedByUser PatUser `json:"createdByUser"`
UpdatedByUser PatUser `json:"updatedByUser"`
StorablePersonalAccessToken
}
type PatUser struct {
types.User
NotFound bool `json:"notFound"`
}
func NewGettablePAT(name, role, userID string, expiresAt int64) GettablePAT {
return GettablePAT{
StorablePersonalAccessToken: NewStorablePersonalAccessToken(name, role, userID, expiresAt),
}
}
type StorablePersonalAccessToken struct {
bun.BaseModel `bun:"table:personal_access_token"`
types.Identifiable
types.TimeAuditable
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
Token string `json:"token" bun:"token,type:text,notnull,unique"`
Name string `json:"name" bun:"name,type:text,notnull"`
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
}
func NewStorablePersonalAccessToken(name, role, userID string, expiresAt int64) StorablePersonalAccessToken {
now := time.Now()
if expiresAt != 0 {
// convert expiresAt to unix timestamp from days
expiresAt = now.Unix() + (expiresAt * 24 * 60 * 60)
}
// Generate a 32-byte random token.
token := make([]byte, 32)
rand.Read(token)
// Encode the token in base64.
encodedToken := base64.StdEncoding.EncodeToString(token)
return StorablePersonalAccessToken{
Token: encodedToken,
Name: name,
Role: role,
UserID: userID,
ExpiresAt: expiresAt,
LastUsed: 0,
Revoked: false,
UpdatedByUserID: "",
TimeAuditable: types.TimeAuditable{
CreatedAt: now,
UpdatedAt: now,
},
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
}
}

View File

@@ -110,6 +110,8 @@ module.exports = {
// eslint rules need to remove
'@typescript-eslint/no-shadow': 'off',
'import/no-cycle': 'off',
// https://typescript-eslint.io/rules/consistent-return/ check the warning for details
'consistent-return': 'off',
'prettier/prettier': [
'error',
{},

View File

@@ -1,6 +1,10 @@
# Ignore artifacts:
build
coverage
public/
# Ignore all MD files:
**/*.md
**/*.md
# Ignore all JSON files:
**/*.json

View File

@@ -1,7 +1,7 @@
NODE_ENV="development"
BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/"
INTERCOM_APP_ID="intercom-app-id"
PYLON_APP_ID="pylon-app-id"
APPCUES_APP_ID="appcess-app-id"
PLAYWRIGHT_TEST_BASE_URL="http://localhost:8080"
CI="1"

View File

@@ -15,6 +15,7 @@ const config: Config.InitialOptions = {
extensionsToTreatAsEsm: ['.ts'],
'ts-jest': {
useESM: true,
isolatedModules: true,
},
},
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
@@ -30,11 +31,6 @@ const config: Config.InitialOptions = {
testPathIgnorePatterns: ['/node_modules/', '/public/'],
moduleDirectories: ['node_modules', 'src'],
testEnvironment: 'jest-environment-jsdom',
testEnvironmentOptions: {
'jest-playwright': {
browsers: ['chromium', 'firefox', 'webkit'],
},
},
coverageThreshold: {
global: {
statements: 80,

View File

@@ -15,10 +15,6 @@
"jest:coverage": "jest --coverage",
"jest:watch": "jest --watch",
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
"playwright": "NODE_ENV=testing playwright test --config=./playwright.config.ts",
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
"playwright:codegen:local": "playwright codegen http://localhost:3301",
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
"commitlint": "commitlint --edit $1",
"test": "jest",
@@ -35,6 +31,7 @@
"@dnd-kit/core": "6.1.0",
"@dnd-kit/modifiers": "7.0.0",
"@dnd-kit/sortable": "8.0.0",
"@dnd-kit/utilities": "3.2.2",
"@grafana/data": "^11.2.3",
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",
@@ -81,7 +78,8 @@
"fontfaceobserver": "2.3.0",
"history": "4.10.1",
"html-webpack-plugin": "5.5.0",
"http-proxy-middleware": "3.0.3",
"http-proxy-middleware": "3.0.5",
"http-status-codes": "2.3.0",
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
"i18next-http-backend": "^1.3.2",
@@ -90,7 +88,7 @@
"less": "^4.1.2",
"less-loader": "^10.2.0",
"lodash-es": "^4.17.21",
"lucide-react": "0.427.0",
"lucide-react": "0.498.0",
"mini-css-extract-plugin": "2.4.5",
"motion": "12.4.13",
"overlayscrollbars": "^2.8.1",
@@ -136,7 +134,7 @@
"uuid": "^8.3.2",
"web-vitals": "^0.2.4",
"webpack": "5.94.0",
"webpack-dev-server": "^4.15.2",
"webpack-dev-server": "^5.2.1",
"webpack-retry-chunk-load-plugin": "3.1.1",
"xstate": "^4.31.0"
},
@@ -163,7 +161,6 @@
"@commitlint/config-conventional": "^16.2.4",
"@faker-js/faker": "9.3.0",
"@jest/globals": "^27.5.1",
"@playwright/test": "^1.22.0",
"@testing-library/jest-dom": "5.16.5",
"@testing-library/react": "13.4.0",
"@testing-library/user-event": "14.4.3",
@@ -200,7 +197,6 @@
"babel-plugin-styled-components": "^1.12.0",
"compression-webpack-plugin": "9.0.0",
"copy-webpack-plugin": "^11.0.0",
"critters-webpack-plugin": "^3.0.1",
"eslint": "^7.32.0",
"eslint-config-airbnb": "^19.0.4",
"eslint-config-airbnb-typescript": "^16.1.4",
@@ -238,7 +234,7 @@
"ts-node": "^10.2.1",
"typescript-plugin-css-modules": "5.0.1",
"webpack-bundle-analyzer": "^4.5.0",
"webpack-cli": "^4.9.2"
"webpack-cli": "^5.1.4"
},
"lint-staged": {
"*.(js|jsx|ts|tsx)": [
@@ -254,9 +250,10 @@
"xml2js": "0.5.0",
"phin": "^3.7.1",
"body-parser": "1.20.3",
"http-proxy-middleware": "3.0.3",
"http-proxy-middleware": "3.0.5",
"cross-spawn": "7.0.5",
"cookie": "^0.7.1",
"serialize-javascript": "6.0.2"
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0"
}
}

View File

@@ -1,23 +0,0 @@
import { PlaywrightTestConfig } from '@playwright/test';
import dotenv from 'dotenv';
dotenv.config();
const config: PlaywrightTestConfig = {
forbidOnly: !!process.env.CI,
retries: process.env.CI ? 2 : 0,
preserveOutput: 'always',
name: 'Signoz',
testDir: './tests',
use: {
trace: 'retain-on-failure',
baseURL: process.env.PLAYWRIGHT_TEST_BASE_URL || 'http://localhost:3301',
},
updateSnapshots: 'all',
fullyParallel: !!process.env.CI,
quiet: false,
testMatch: ['**/*.spec.ts'],
reporter: process.env.CI ? 'github' : 'list',
};
export default config;

File diff suppressed because one or more lines are too long

(image file; preview size 5.9 KiB)

View File

@@ -0,0 +1 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="m12.192 3.18-1.167 2.33-.583 1.165M7.31 12.74a.583.583 0 0 1-.835-.24L1.808 3.179"/><path d="M7 1.167c2.9 0 5.25.783 5.25 1.75 0 .966-2.35 1.75-5.25 1.75s-5.25-.784-5.25-1.75c0-.967 2.35-1.75 5.25-1.75ZM8.75 10.5h3.5M10.5 12.25v-3.5"/></g></svg>


View File

@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#a)" stroke-linecap="round" stroke-linejoin="round"><path d="M8 14.666A6.667 6.667 0 1 0 8 1.333a6.667 6.667 0 0 0 0 13.333Z" fill="#C0C1C3" stroke="#C0C1C3" stroke-width="2"/><path d="M8 11.333v-4H6.333M8 4.667h.007" stroke="#121317" stroke-width="1.333"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>


View File

@@ -9,8 +9,8 @@
"tooltip_notification_channels": "More details on how to setting notification channels",
"sending_channels_note": "The alerts will be sent to all the configured channels.",
"loading_channels_message": "Loading Channels..",
"page_title_create": "New Notification Channels",
"page_title_edit": "Edit Notification Channels",
"page_title_create": "New Notification Channel",
"page_title_edit": "Edit Notification Channel",
"button_save_channel": "Save",
"button_test_channel": "Test",
"button_return": "Back",

View File

@@ -1,3 +1,3 @@
{
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via chat support or "
}

View File

@@ -9,8 +9,8 @@
"tooltip_notification_channels": "More details on how to setting notification channels",
"sending_channels_note": "The alerts will be sent to all the configured channels.",
"loading_channels_message": "Loading Channels..",
"page_title_create": "New Notification Channels",
"page_title_edit": "Edit Notification Channels",
"page_title_create": "New Notification Channel",
"page_title_edit": "Edit Notification Channel",
"button_save_channel": "Save",
"button_test_channel": "Test",
"button_return": "Back",

View File

@@ -1,3 +1,3 @@
{
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support or "
"rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via chat support or "
}

View File

@@ -69,5 +69,5 @@
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
"API_MONITORING": "SigNoz | API Monitoring"
"API_MONITORING": "SigNoz | External APIs"
}

View File

@@ -9,7 +9,7 @@ done
# create temporary tsconfig which includes only passed files
str="{
\"extends\": \"./tsconfig.json\",
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./conf/default.conf\",\"./public\",\"./tests\",\"./playwright.config.ts\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./public\",\"./tests\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
}"
echo $str > tsconfig.tmp

View File

@@ -1,8 +1,9 @@
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getOrgUser from 'api/user/getOrgUser';
import getAll from 'api/v1/user/get';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ORG_PREFERENCES } from 'constants/orgPreferences';
import ROUTES from 'constants/routes';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import history from 'lib/history';
@@ -11,8 +12,12 @@ import { useAppContext } from 'providers/App/App';
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { matchPath, useLocation } from 'react-router-dom';
import { SuccessResponseV2 } from 'types/api';
import APIError from 'types/api/error';
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
import { OrgPreference } from 'types/api/preferences/preference';
import { Organization } from 'types/api/user/getOrganization';
import { UserResponse } from 'types/api/user/getUser';
import { USER_ROLES } from 'types/roles';
import { routePermission } from 'utils/permission';
@@ -33,8 +38,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
user,
isLoggedIn: isLoggedInState,
isFetchingOrgPreferences,
activeLicenseV3,
isFetchingActiveLicenseV3,
activeLicense,
isFetchingActiveLicense,
trialInfo,
featureFlags,
} = useAppContext();
@@ -58,12 +63,13 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const [orgData, setOrgData] = useState<Organization | undefined>(undefined);
const { data: orgUsers, isFetching: isFetchingOrgUsers } = useQuery({
const { data: usersData, isFetching: isFetchingUsers } = useQuery<
SuccessResponseV2<UserResponse[]> | undefined,
APIError
>({
queryFn: () => {
if (orgData && orgData.id !== undefined) {
return getOrgUser({
orgId: orgData.id,
});
return getAll();
}
return undefined;
},
@@ -72,26 +78,27 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
});
const checkFirstTimeUser = useCallback((): boolean => {
const users = orgUsers?.payload || [];
const users = usersData?.data || [];
const remainingUsers = users.filter(
const remainingUsers = (Array.isArray(users) ? users : []).filter(
(user) => user.email !== 'admin@signoz.cloud',
);
return remainingUsers.length === 1;
}, [orgUsers?.payload]);
}, [usersData?.data]);
useEffect(() => {
if (
isCloudUserVal &&
!isFetchingOrgPreferences &&
orgPreferences &&
!isFetchingOrgUsers &&
orgUsers &&
orgUsers.payload
!isFetchingUsers &&
usersData &&
usersData.data
) {
const isOnboardingComplete = orgPreferences?.find(
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
(preference: OrgPreference) =>
preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
)?.value;
const isFirstUser = checkFirstTimeUser();
@@ -108,9 +115,9 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
checkFirstTimeUser,
isCloudUserVal,
isFetchingOrgPreferences,
isFetchingOrgUsers,
isFetchingUsers,
orgPreferences,
orgUsers,
usersData,
pathname,
]);
@@ -141,16 +148,16 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
};
useEffect(() => {
if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
if (!isFetchingActiveLicense && activeLicense) {
const currentRoute = mapRoutes.get('current');
const isTerminated = activeLicenseV3.state === LicenseState.TERMINATED;
const isExpired = activeLicenseV3.state === LicenseState.EXPIRED;
const isCancelled = activeLicenseV3.state === LicenseState.CANCELLED;
const isTerminated = activeLicense.state === LicenseState.TERMINATED;
const isExpired = activeLicense.state === LicenseState.EXPIRED;
const isCancelled = activeLicense.state === LicenseState.CANCELLED;
const isWorkspaceAccessRestricted = isTerminated || isExpired || isCancelled;
const { platform } = activeLicenseV3;
const { platform } = activeLicense;
if (
isWorkspaceAccessRestricted &&
@@ -160,26 +167,26 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
navigateToWorkSpaceAccessRestricted(currentRoute);
}
}
}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
}, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]);
useEffect(() => {
if (!isFetchingActiveLicenseV3) {
if (!isFetchingActiveLicense) {
const currentRoute = mapRoutes.get('current');
const shouldBlockWorkspace = trialInfo?.workSpaceBlock;
if (
shouldBlockWorkspace &&
currentRoute &&
activeLicenseV3?.platform === LicensePlatform.CLOUD
activeLicense?.platform === LicensePlatform.CLOUD
) {
navigateToWorkSpaceBlocked(currentRoute);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
isFetchingActiveLicenseV3,
isFetchingActiveLicense,
trialInfo?.workSpaceBlock,
activeLicenseV3?.platform,
activeLicense?.platform,
mapRoutes,
pathname,
]);
@@ -193,20 +200,20 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
};
useEffect(() => {
if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
if (!isFetchingActiveLicense && activeLicense) {
const currentRoute = mapRoutes.get('current');
const shouldSuspendWorkspace =
activeLicenseV3.state === LicenseState.DEFAULTED;
activeLicense.state === LicenseState.DEFAULTED;
if (
shouldSuspendWorkspace &&
currentRoute &&
activeLicenseV3.platform === LicensePlatform.CLOUD
activeLicense.platform === LicensePlatform.CLOUD
) {
navigateToWorkSpaceSuspended(currentRoute);
}
}
}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
}, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]);
useEffect(() => {
if (org && org.length > 0 && org[0].id !== undefined) {

View File

@@ -5,6 +5,7 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
@@ -12,9 +13,9 @@ import AppLayout from 'container/AppLayout';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import { StatusCodes } from 'http-status-codes';
import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js';
@@ -22,10 +23,12 @@ import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { LicenseStatus } from 'types/api/licensesV3/getActive';
import { Userpilot } from 'userpilot';
import { extractDomain } from 'utils/app';
@@ -40,14 +43,13 @@ import defaultRoutes, {
function App(): JSX.Element {
const themeConfig = useThemeConfig();
const {
licenses,
user,
isFetchingUser,
isFetchingLicenses,
isFetchingFeatureFlags,
trialInfo,
activeLicenseV3,
isFetchingActiveLicenseV3,
activeLicense,
isFetchingActiveLicense,
activeLicenseFetchError,
userFetchError,
featureFlagsFetchError,
isLoggedIn: isLoggedInState,
@@ -65,18 +67,18 @@ function App(): JSX.Element {
const enableAnalytics = useCallback(
(user: IUser): void => {
// wait for the required data to be loaded before doing init for anything!
if (!isFetchingActiveLicenseV3 && activeLicenseV3 && org) {
if (!isFetchingActiveLicense && activeLicense && org) {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].displayName : '';
const { name, email, role } = user;
const { displayName, email, role } = user;
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
const identifyPayload = {
email,
name,
name: displayName,
company_name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -102,10 +104,24 @@ function App(): JSX.Element {
if (domain) {
logEvent('Domain Identified', groupTraits, 'group');
}
if (window && window.Appcues) {
window.Appcues.identify(email, {
name: displayName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
companyName: orgName,
email,
paidUser: !!trialInfo?.trialConvertedToSubscription,
});
}
Userpilot.identify(email, {
email,
name,
name: displayName,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -117,7 +133,7 @@ function App(): JSX.Element {
posthog?.identify(email, {
email,
name,
name: displayName,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -136,24 +152,12 @@ function App(): JSX.Element {
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
if (
window.cioanalytics &&
typeof window.cioanalytics.identify === 'function'
) {
window.cioanalytics.reset();
window.cioanalytics.identify(email, {
name: user.name,
email,
role: user.role,
});
}
}
},
[
hostname,
isFetchingActiveLicenseV3,
activeLicenseV3,
isFetchingActiveLicense,
activeLicense,
org,
trialInfo?.trialConvertedToSubscription,
],
@@ -162,18 +166,19 @@ function App(): JSX.Element {
// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
if (
!isFetchingLicenses &&
licenses &&
!isFetchingActiveLicense &&
(activeLicense || activeLicenseFetchError) &&
!isFetchingUser &&
user &&
!!user.email
) {
// either the active-license API returns a 404 or 501 error, or it returns a terminated license, which means the org is on the basic plan
const isOnBasicPlan =
licenses.licenses?.some(
(license) =>
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
) || licenses.licenses === null;
(activeLicenseFetchError &&
[StatusCodes.NOT_FOUND, StatusCodes.NOT_IMPLEMENTED].includes(
activeLicenseFetchError?.getHttpStatusCode(),
)) ||
(activeLicense?.status && activeLicense.status === LicenseStatus.INVALID);
const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER);
if (isLoggedInState && user && user.id && user.email && !isIdentifiedUser) {
@@ -189,6 +194,11 @@ function App(): JSX.Element {
(route) => route?.path !== ROUTES.BILLING,
);
}
if (isEnterpriseSelfHostedUser) {
updatedRoutes.push(LIST_LICENSES);
}
// always add support route for cloud users
updatedRoutes = [...updatedRoutes, SUPPORT_ROUTE];
} else {
@@ -203,22 +213,23 @@ function App(): JSX.Element {
}, [
isLoggedInState,
user,
licenses,
isCloudUser,
isEnterpriseSelfHostedUser,
isFetchingLicenses,
isFetchingActiveLicense,
isFetchingUser,
activeLicense,
activeLicenseFetchError,
]);
useEffect(() => {
if (pathname === ROUTES.ONBOARDING) {
window.Intercom('update', {
hide_default_launcher: true,
});
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.Pylon('hideChatBubble');
} else {
window.Intercom('update', {
hide_default_launcher: false,
});
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.Pylon('showChatBubble');
}
}, [pathname]);
@@ -230,8 +241,7 @@ function App(): JSX.Element {
if (
!isFetchingFeatureFlags &&
(featureFlags || featureFlagsFetchError) &&
licenses &&
activeLicenseV3 &&
activeLicense &&
trialInfo
) {
let isChatSupportEnabled = false;
@@ -254,11 +264,13 @@ function App(): JSX.Element {
!showAddCreditCardModal &&
(isCloudUser || isEnterpriseSelfHostedUser)
) {
window.Intercom('boot', {
app_id: process.env.INTERCOM_APP_ID,
email: user?.email || '',
name: user?.name || '',
});
window.pylon = {
chat_settings: {
app_id: process.env.PYLON_APP_ID,
email: user.email,
name: user.displayName,
},
};
}
}
}, [
@@ -269,8 +281,7 @@ function App(): JSX.Element {
featureFlags,
isFetchingFeatureFlags,
featureFlagsFetchError,
licenses,
activeLicenseV3,
activeLicense,
trialInfo,
isCloudUser,
isEnterpriseSelfHostedUser,
@@ -321,10 +332,6 @@ function App(): JSX.Element {
} else {
posthog.reset();
Sentry.close();
if (window.cioanalytics && typeof window.cioanalytics.reset === 'function') {
window.cioanalytics.reset();
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isCloudUser, isEnterpriseSelfHostedUser]);
@@ -332,7 +339,7 @@ function App(): JSX.Element {
// if the user is in logged in state
if (isLoggedInState) {
// if the setup calls are loading then return a spinner
if (isFetchingLicenses || isFetchingUser || isFetchingFeatureFlags) {
if (isFetchingActiveLicense || isFetchingUser || isFetchingFeatureFlags) {
return <Spinner tip="Loading..." />;
}
@@ -344,7 +351,11 @@ function App(): JSX.Element {
}
// if all of the data is not set then return a spinner; this is required because there is a gap between the loading states and the data being set
if ((!licenses || !user.email || !featureFlags) && !userFetchError) {
if (
(!activeLicense || !user.email || !featureFlags) &&
!userFetchError &&
!activeLicenseFetchError
) {
return <Spinner tip="Loading..." />;
}
}
@@ -354,35 +365,38 @@ function App(): JSX.Element {
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<UserpilotRouteTracker />
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
<ErrorModalProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</ErrorModalProvider>
</NotificationProvider>
</CompatRouter>
</Router>

View File

@@ -47,9 +47,10 @@ export const TracesFunnels = Loadable(
import(/* webpackChunkName: "Traces Funnels" */ 'pages/TracesModulePage'),
);
export const TracesFunnelDetails = Loadable(
// eslint-disable-next-line sonarjs/no-identical-functions
() =>
import(
/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesFunnelDetails'
/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesModulePage'
),
);
@@ -127,12 +128,11 @@ export const AlertOverview = Loadable(
);
export const CreateAlertChannelAlerts = Loadable(
() =>
import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'),
() => import(/* webpackChunkName: "Create Channels" */ 'pages/Settings'),
);
export const EditAlertChannelsAlerts = Loadable(
() => import(/* webpackChunkName: "Edit Channels" */ 'pages/ChannelsEdit'),
() => import(/* webpackChunkName: "Edit Channels" */ 'pages/Settings'),
);
export const AllAlertChannels = Loadable(
@@ -164,7 +164,7 @@ export const APIKeys = Loadable(
);
export const MySettings = Loadable(
() => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'),
() => import(/* webpackChunkName: "All MySettings" */ 'pages/Settings'),
);
export const CustomDomainSettings = Loadable(
@@ -221,7 +221,7 @@ export const LogsIndexToFields = Loadable(
);
export const BillingPage = Loadable(
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Billing'),
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Settings'),
);
export const SupportPage = Loadable(
@@ -248,7 +248,7 @@ export const WorkspaceAccessRestricted = Loadable(
);
export const ShortcutsPage = Loadable(
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'),
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Settings'),
);
export const InstalledIntegrations = Loadable(

View File

@@ -7,12 +7,9 @@ import {
AlertOverview,
AllAlertChannels,
AllErrors,
APIKeys,
ApiMonitoring,
BillingPage,
CreateAlertChannelAlerts,
CreateNewAlerts,
CustomDomainSettings,
DashboardPage,
DashboardWidget,
EditAlertChannelsAlerts,
@@ -20,7 +17,6 @@ import {
ErrorDetails,
Home,
InfrastructureMonitoring,
IngestionSettings,
InstalledIntegrations,
LicensePage,
ListAllALertsPage,
@@ -31,12 +27,10 @@ import {
LogsIndexToFields,
LogsSaveViews,
MetricsExplorer,
MySettings,
NewDashboardPage,
OldLogsExplorer,
Onboarding,
OnboardingV2,
OrganizationSettings,
OrgOnboarding,
PasswordReset,
PipelinePage,
@@ -45,7 +39,6 @@ import {
ServicesTablePage,
ServiceTopLevelOperationsPage,
SettingsPage,
ShortcutsPage,
SignupPage,
SomethingWentWrong,
StatusPage,
@@ -150,7 +143,7 @@ const routes: AppRoutes[] = [
},
{
path: ROUTES.SETTINGS,
exact: true,
exact: false,
component: SettingsPage,
isPrivate: true,
key: 'SETTINGS',
@@ -295,41 +288,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'VERSION',
},
{
path: ROUTES.ORG_SETTINGS,
exact: true,
component: OrganizationSettings,
isPrivate: true,
key: 'ORG_SETTINGS',
},
{
path: ROUTES.INGESTION_SETTINGS,
exact: true,
component: IngestionSettings,
isPrivate: true,
key: 'INGESTION_SETTINGS',
},
{
path: ROUTES.API_KEYS,
exact: true,
component: APIKeys,
isPrivate: true,
key: 'API_KEYS',
},
{
path: ROUTES.MY_SETTINGS,
exact: true,
component: MySettings,
isPrivate: true,
key: 'MY_SETTINGS',
},
{
path: ROUTES.CUSTOM_DOMAIN_SETTINGS,
exact: true,
component: CustomDomainSettings,
isPrivate: true,
key: 'CUSTOM_DOMAIN_SETTINGS',
},
{
path: ROUTES.LOGS,
exact: true,
@@ -393,13 +351,6 @@ const routes: AppRoutes[] = [
key: 'SOMETHING_WENT_WRONG',
isPrivate: false,
},
{
path: ROUTES.BILLING,
exact: true,
component: BillingPage,
key: 'BILLING',
isPrivate: true,
},
{
path: ROUTES.WORKSPACE_LOCKED,
exact: true,
@@ -421,13 +372,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'WORKSPACE_ACCESS_RESTRICTED',
},
{
path: ROUTES.SHORTCUTS,
exact: true,
component: ShortcutsPage,
isPrivate: true,
key: 'SHORTCUTS',
},
{
path: ROUTES.INTEGRATIONS,
exact: true,

View File

@@ -1,26 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { APIKeyProps, CreateAPIKeyProps } from 'types/api/pat/types';
const createAPIKey = async (
props: CreateAPIKeyProps,
): Promise<SuccessResponse<APIKeyProps> | ErrorResponse> => {
try {
const response = await axios.post('/pats', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createAPIKey;

View File

@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllAPIKeyProps } from 'types/api/pat/types';
const deleteAPIKey = async (
id: string,
): Promise<SuccessResponse<AllAPIKeyProps> | ErrorResponse> => {
try {
const response = await axios.delete(`/pats/${id}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteAPIKey;

View File

@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/get';
const get = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(`/pats/${props.id}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default get;

View File

@@ -1,6 +0,0 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import { AllAPIKeyProps } from 'types/api/pat/types';
export const getAllAPIKeys = (): Promise<AxiosResponse<AllAPIKeyProps>> =>
axios.get(`/pats`);

View File

@@ -1,26 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, UpdateAPIKeyProps } from 'types/api/pat/types';
const updateAPIKey = async (
props: UpdateAPIKeyProps,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/pats/${props.id}`, {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateAPIKey;

View File

@@ -0,0 +1,46 @@
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import APIError from 'types/api/error';
// reference - https://axios-http.com/docs/handling_errors
export function ErrorResponseHandlerV2(error: AxiosError<ErrorV2Resp>): never {
const { response, request } = error;
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
if (response) {
throw new APIError({
httpStatusCode: response.status || 500,
error: {
code: response.data.error.code,
message: response.data.error.message,
url: response.data.error.url,
errors: response.data.error.errors,
},
});
}
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
if (request) {
throw new APIError({
httpStatusCode: error.status || 500,
error: {
code: error.code || error.name,
message: error.message,
url: '',
errors: [],
},
});
}
// Something happened in setting up the request that triggered an Error
throw new APIError({
httpStatusCode: error.status || 500,
error: {
code: error.name,
message: error.message,
url: '',
errors: [],
},
});
}

View File

@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/SAML/deleteDomain';
const deleteDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.delete(`/domains/${props.id}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteDomain;

View File

@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/SAML/listDomain';
const listAllDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(`/orgs/${props.orgId}/domains`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default listAllDomain;

View File

@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/SAML/updateDomain';
const updateDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/domains/${props.id}`, props);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateDomain;

View File

@@ -1,29 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CheckoutRequestPayloadProps,
CheckoutSuccessPayloadProps,
} from 'types/api/billing/checkout';
const updateCreditCardApi = async (
props: CheckoutRequestPayloadProps,
): Promise<SuccessResponse<CheckoutSuccessPayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/checkout', {
url: props.url,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateCreditCardApi;

View File

@@ -1,29 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CheckoutRequestPayloadProps,
CheckoutSuccessPayloadProps,
} from 'types/api/billing/checkout';
const manageCreditCardApi = async (
props: CheckoutRequestPayloadProps,
): Promise<SuccessResponse<CheckoutSuccessPayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/portal', {
url: props.url,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default manageCreditCardApi;

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createEmail';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
email_configs: [
{
@@ -21,13 +21,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createMsTeams';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
msteamsv2_configs: [
{
@@ -21,13 +21,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createOpsgenie';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
opsgenie_configs: [
{
@@ -24,13 +24,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createPager';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
pagerduty_configs: [
{
@@ -29,13 +29,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createSlack';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
slack_configs: [
{
@@ -22,13 +22,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,12 +1,12 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createWebhook';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
let httpConfig = {};
const username = props.username ? props.username.trim() : '';
@@ -28,7 +28,7 @@ const create = async (
};
}
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
webhook_configs: [
{
@@ -40,13 +40,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,23 +1,22 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/delete';
const deleteChannel = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.delete(`/channels/${props.id}`);
const response = await axios.delete<PayloadProps>(`/channels/${props.id}`);
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editEmail';
const editEmail = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
email_configs: [
{
@@ -21,13 +21,12 @@ const editEmail = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editMsTeams';
const editMsTeams = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
msteamsv2_configs: [
{
@@ -21,13 +21,12 @@ const editMsTeams = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorResponse, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editOpsgenie';
const editOpsgenie = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
opsgenie_configs: [
{
@@ -25,13 +25,12 @@ const editOpsgenie = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editPager';
const editPager = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
pagerduty_configs: [
{
@@ -29,13 +29,12 @@ const editPager = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editSlack';
const editSlack = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
slack_configs: [
{
@@ -22,13 +22,12 @@ const editSlack = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

Some files were not shown because too many files have changed in this diff.