Compare commits

...

83 Commits

Author SHA1 Message Date
primus-bot[bot]
8d4c4dc5f2 chore(release): bump to v0.84.1 (#7998)
#### Summary
 - Release SigNoz v0.84.1
2025-05-21 13:49:38 +05:30
Ekansh Gupta
91fae8c0f3 fix: changed the get request access from admin to view (#7997) 2025-05-21 08:01:34 +00:00
primus-bot[bot]
4bbe8c0ee7 chore(release): bump to v0.84.0 (#7995)
#### Summary
 - Release SigNoz v0.84.0
2025-05-21 12:18:14 +05:30
Nityananda Gohain
0f7d226b9b Fix: exists clause in logs QB (#7987)
* fix: exists in logs QB

* fix: exists in logs json qb
2025-05-21 10:22:42 +05:30
Shaheer Kochai
e03342e001 feat: add support for request integrations in aws integrations page (#7968)
* feat: add support for request integrations in aws integrations page

* chore: write test for request aws integrations
2025-05-20 21:39:40 +05:30
Shaheer Kochai
57f96574ff fix: trace funnel bugfixes and improvements (#7922)
* fix: display the inter-step latency type in step metrics table

* chore: send latency type with n+1th step + make latency type optional

* fix: fetch and format get funnel steps overview metrics

* chore: remove dev env check

* fix: overall fixes

* fix: don't cache validate query + trigger validate on changing error and where clause as well

* fix: display the latency type in step overview metrics table + p99_latency to latency

* chore: revert dev env check removal (remove after BE changes are merged)

* fix: adjust create API response

* chore: useLocalStorage custom hook

* feat: improve the run funnel flow

- for the initial fetch of funnel results, require the user to run the funnel
- subsequently change the run funnel button to a refresh button
- display loading state while any of the funnel results APIs are being fetched

* fix: fix the issue of add step details breaking

* fix: refetch funnel details on rename success

* fix: redirect 'learn more' to trace funnels docs

* fix: handle potential undefined step in latency type calculation

* fix: properly handle incomplete steps state

* fix: fix the edge case of stale validation state on transitioning from invalid steps to valid steps

* fix: remove the side effect from render and move to useEffect
2025-05-20 19:44:05 +04:30
Yunus M
354e4b4b8f feat: show pricing update banner in home page (#7990)
* feat: show pricing update banner in  home page
2025-05-20 19:40:01 +05:30
Shaheer Kochai
d7102f69a9 feat: add support for S3 region buckets syncing (#7874)
* feat: add support for S3 region buckets syncing

* fix: hide the dropdown icon and not found dropdown for s3 buckets selector

* fix: display s3 buckets selector only for s3 sync service

* chore: tests for configure service s3 sync

---------

Co-authored-by: Piyush Singariya <piyushsingariya@gmail.com>
2025-05-20 13:32:32 +00:00
Aditya Singh
040c45b144 Custom Quick Filters: Logs (#7986)
* chore: quick filters - added filters init (#7867)

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

* Custom quick filter: Other Filters | Search integration (#7939)

* chore: added filters init

* chore: handle save and discard

* chore: search and api integrations

* feat: search on filters

* feat: style fix

* feat: style fix

* feat: signal to data source config

* feat: search styles

* feat: update drawer slide style

* feat: no results state

* fix: minor fix

* Custom Quick Filters: UI fixes and Announcement Tooltip (#7950)

* feat: qf setting ui

* feat: add skeleton to dynamic qf

* fix: minor fix

* feat: announcement tooltip added

* feat: announcement tooltip added refactor

* feat: announcement tooltip styles

* feat: announcement tooltip integration

* fix: number vals in filter list

* feat: announcement tooltip show logic added

* feat: light mode styles

* feat: remove unwanted styles

* feat: remove filter disable when one filter added

* style: minor style

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

* Custom quick filters: Tests and pr review comments (#7967)

* chore: added filters init

* chore: handle save and discard

* chore: search and api integrations

* feat: search on filters

* feat: style fix

* feat: style fix

* feat: signal to data source config

* feat: search styles

* feat: update drawer slide style

* feat: no results state

* fix: minor fix

* feat: qf setting ui

* feat: add skeleton to dynamic qf

* fix: minor fix

* feat: announcement tooltip added

* feat: announcement tooltip added refactor

* feat: announcement tooltip styles

* feat: announcement tooltip integration

* fix: number vals in filter list

* feat: announcement tooltip show logic added

* feat: light mode styles

* feat: remove unwanted styles

* feat: remove filter disable when one filter added

* style: minor style

* fix: minor refactor

* test: added test cases

* feat: integrate custom quick filters in logs

* feat: code refactor

* feat: debounce search

* feat: refactor

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-05-20 12:39:49 +00:00
Sahil Khan
207d7602ab chore: changed name of api monitoring from third party apis to external apis (#7989) 2025-05-20 17:24:09 +05:30
Srikanth Chekuri
018346ca18 chore: add aggregation expr rewriter and exhaustive tests for logs filter (#7972) 2025-05-20 16:54:34 +05:30
Ekansh Gupta
7290ab3602 feat: added entry point operations api for the service overview page (#7957)
* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

* feat: added entry point operations api for the service overview page

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2025-05-20 05:58:40 +00:00
Shaheer Kochai
88239cec4d fix: AWS integration bugfixes (#7886)
* fix(AccountSettingsModal): add region deselect functionality to region selector

* fix(AWS integration): redirect help button to aws integration documentation

* style(Header): update button color on hover for improved visibility
2025-05-20 03:48:11 +00:00
SagarRajput-7
10ba0e6b4f feat: added error preview and warning text with info on cyclic dependency detected (#7893)
* feat: added error preview and warning text with info on cyclic dependency detected

* feat: removed console.log

* feat: restricted save when cycle found

* feat: added test cases for variableitem flow and update test cases

* feat: updated test cases

* feat: corrected the recent resolved title text
2025-05-19 17:28:26 +07:00
Shaheer Kochai
88e1e42bf0 chore: add analytics events for trace funnels (#7638) 2025-05-19 09:22:35 +00:00
Piyush Singariya
a0d896557e feat: introducing ECS + SQS integration (#7840)
* feat: introducing S3 sync as AWS integration

* chore: trying restructuring

* chore: in progress

* chore: restructuring looks ok

* chore: minor fix in tests

* feat: integration with Agent check-in complete

* chore: minor change in validation

* fix: removing validation and altering overview

* fix: aftermath of merge conflicts

* test: updating agent version

* test: updating agent version

* test: updating agent version 3

* test: updating agent version 11

* test: updating agent version 14

* chore: replace with newer error utility

* feat: introducing ECS integration (AWS Integrations)

* chore: adding metrics to ecs integration

* feat: adding base SQS files

* feat: adding metrics for SQS

* feat: adding ECS dashboard

* feat: adding dashboards for SQS

* fix: adding SentMessageSize metrics in SQS

* fix: for calculating log connection status for S3 Sync

* fix: adding check for svc type, fixing cw logs integration.json S3 Sync

* fix: in compiledCollectionStrat for servicetype s3sync

* test: testing agent version

* fix: change in data collected for S3 Sync logs

* test: testing agent 19

* chore: replace fmt.Errorf

* fix: tests and adding validation in S3 buckets

* fix: test TestAvailableServices

* chore: replacing fmt.Errorf

* chore: updating the agent version to latest

* chore: reverting some changes

* fix: remove services from Variables

* chore: change overview.png
2025-05-19 14:17:52 +05:30
Amlan Kumar Nandy
2b28c5f2e2 chore: persist the filters and time selection, modal open state in summary view (#7942) 2025-05-19 11:54:05 +07:00
Vibhu Pandey
6dbcc5fb9d fix(analytics): fix heartbeat event (#7975) 2025-05-19 08:04:33 +05:30
Vikrant Gupta
175e9a4c5e fix(apm): update the apdex to latest response structure (#7966)
* fix(apm): update the apdex to latest response structure

* fix(apm): update the apdex to latest response structure
2025-05-17 16:23:11 +05:30
SagarRajput-7
33506cafce fix: cover the title as reactNode case for useGetResolvedText (#7965)
* fix: cover the title as reactNode case for useResolvedText

* fix: added more test cases
2025-05-16 22:15:19 +00:00
SagarRajput-7
e34e61a20d feat: removed allow clear icon from when ALL option is selected (#7894) 2025-05-17 00:48:41 +05:30
Vibhu Pandey
da084b4686 chore(savedview|apdex|dashboard): create modules and handlers (#7960)
* chore(savedview): refactor into module and handler

* chore(rule): move telemetry inside telemetry

* chore(apdex): refactor apdex and delete dao

* chore(dashboard): create a dashboard module

* chore(ee): get rid of the init nonsense

* chore(dashboard): fix err and apierror confusion

* chore: address comments
2025-05-17 00:15:00 +05:30
Srikanth Chekuri
6821efeb99 chore: move visitor impl out of generated files (#7956) 2025-05-16 14:47:23 +00:00
Srikanth Chekuri
c5d5c84a0e chore: add fieldmapper implementation (#7955) 2025-05-16 20:09:57 +05:30
SagarRajput-7
9c298e83a5 feat: added user role restriction on crud for planned downtime feat (#7896) 2025-05-16 16:59:52 +05:30
SagarRajput-7
9383b6576d feat: added variable description icon and details on tooltip (#7897) 2025-05-16 16:46:19 +05:30
SagarRajput-7
f10f7a806f feat: suggest and allow variables in panel title (#7898)
* feat: suggest and allow variables in panel title

* feat: refined the logic for suggestion and addition with $

* feat: added logic for panel title resolved string and added test cases

* feat: added support to full view
2025-05-16 16:35:11 +05:30
Srikanth Chekuri
03600f4d6f chore: add query builder types (#7940) 2025-05-16 00:00:01 +05:30
Srikanth Chekuri
9fbf111976 chore: less strict context for fetching field values (#7807) 2025-05-15 19:59:40 +05:30
Nityananda Gohain
b8dff86a56 fix: refresh token to access token (#7949)
* fix: fix refresh token to access token

* fix: update the if condition
2025-05-15 12:52:14 +05:30
Vikrant Gupta
f525647b40 chore(auth): refactor the client handlers in preparation for multi tenant login (#7902)
* chore: update auth

* chore: password changes

* chore: make changes in oss code

* chore: login

* chore: get to a running state

* fix: migration initial commit

* fix: signoz cloud intgtn tests

* fix: minor fixes

* chore: sso code fixed with org domain

* fix: tests

* fix: ee auth api's

* fix: changes in name

* fix: return user in login api

* fix: address comments

* fix: validate password

* fix: handle get domain by email properly

* fix: move authomain to usermodule

* fix: use displayname instead of hname

* fix: rename back endpoints

* fix: update telemetry

* fix: correct errors

* fix: test and fix the invite endpoints

* fix: delete all things related to user in store

* fix: address issues

* fix: ee delete invite

* fix: rename func

* fix: update user and update role

* fix: update role

* chore(api): update the api folder structure according to rest principles

* fix: login and invite changes

* chore(api): update the api folder structure according to rest principles

* chore(login): update the frontend according to the new APIs

* fix: return org name in users response

* chore(login): update the frontend according to the new APIs

* fix: update user role

* fix: nil check

* chore(login): update the frontend according to the new API

* fix: getinvite and update role

* fix: sso

* fix: getinvite use sso ctx

* fix: use correct sourceurl

* fix: getsourceurl from req payload

* chore(login): update the frontend according to the new API

* fix: update created_at

* fix: fix reset password

* chore(login): fixed reset password and bulk invites

* fix: sso signup and token password change

* fix: don't delete last admin

* fix: reset password and migration

* fix: migration

* chore(login): fix the unwanted throw statement and tsconfig

* fix: reset password for sso users

* fix: clean up invite

* chore(login): delete last admin user and reset password

* fix: migration

* fix: update claims and store code

* fix: use correct error

* fix: proper nil checks

* fix: make migration multitenant

* fix: address comments

* fix: minor fixes

* fix: test

* fix: rename reset password

* fix: set self registration only when sso enabled

* chore(auth): update the invite user API

* fix: integration tests

* fix: integration tests

* fix: integration tests

* fix: integration tests

* fix: integration tests

* fix: integration tests

* fix: integration tests

* chore(auth): update integration test

* fix: telemetry

---------

Co-authored-by: nityanandagohain <nityanandagohain@gmail.com>
2025-05-14 18:23:41 +00:00
Nityananda Gohain
0a2b7ca1d8 chore(auth): refactor the auth modules and handler in preparation for multi tenant login (#7778)
* chore: update auth

* chore: password changes

* chore: make changes in oss code

* chore: login

* chore: get to a running state

* fix: migration initial commit

* fix: signoz cloud intgtn tests

* fix: minor fixes

* chore: sso code fixed with org domain

* fix: tests

* fix: ee auth api's

* fix: changes in name

* fix: return user in login api

* fix: address comments

* fix: validate password

* fix: handle get domain by email properly

* fix: move authomain to usermodule

* fix: use displayname instead of hname

* fix: rename back endpoints

* fix: update telemetry

* fix: correct errors

* fix: test and fix the invite endpoints

* fix: delete all things related to user in store

* fix: address issues

* fix: ee delete invite

* fix: rename func

* fix: update user and update role

* fix: update role

* fix: login and invite changes

* fix: return org name in users response

* fix: update user role

* fix: nil check

* fix: getinvite and update role

* fix: sso

* fix: getinvite use sso ctx

* fix: use correct sourceurl

* fix: getsourceurl from req payload

* fix: update created_at

* fix: fix reset password

* fix: sso signup and token password change

* fix: don't delete last admin

* fix: reset password and migration

* fix: migration

* fix: reset password for sso users

* fix: clean up invite

* fix: migration

* fix: update claims and store code

* fix: use correct error

* fix: proper nil checks

* fix: make migration multitenant

* fix: address comments

* fix: minor fixes

* fix: test

* fix: rename reset password

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-05-14 23:12:55 +05:30
Sahil Khan
16938c6cc0 feat: added 10 minute time range for traces co-relation in api monitoring (#7815)
* feat: added 10 minute time range for traces co-relation in api monitoring

* feat: url sharing for domain details drawer w/o filters for endpoint stats

* feat: added endpoint details persistence

* feat: external services to api monitoring co relation - 0

* feat: added api co relations to other panels on external services

* fix: cosmetic fix

* feat: added tests for url sharing utils

* feat: minor cosmetic changes

* fix: changed traces co relation window from 10 minutes to 5 minutes

* fix: minor copy changes

* fix: pr comments

* fix: minor bug fix

* fix: pr comments improvements
2025-05-14 11:42:45 +00:00
Piyush Singariya
81b8f93177 chore: remove unnecessary dependency (#7938) 2025-05-14 15:40:25 +05:30
Vibhu Pandey
96cfb607d1 chore(go): add go-deps workflow (#7936)
* feat(go): add go-deps workflow

* chore(go): fix dependencies
2025-05-14 09:00:53 +00:00
primus-bot[bot]
f526e887cc chore(release): bump to v0.83.0 (#7931)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-05-14 12:18:04 +05:30
Piyush Singariya
03ab6e704b feat: S3 Sync (AWS Integrations) (#7718) 2025-05-14 05:12:41 +05:30
Vishal Sharma
9c0134da54 feat: user pilot reload (#7905)
* feat: user pilot reload

* feat: added test cases

---------

Co-authored-by: SagarRajput-7 <sagar@signoz.io>
2025-05-13 23:28:01 +05:30
Ekansh Gupta
175b059268 fix: quick filter dependency injection issue in CE (#7918)
* fix: quick filter dependency injection issue in CE

* fix: quick filter dependency injection issue in CE
2025-05-13 19:45:48 +05:30
Nageshbansal
dfca5b13c0 fix: OSS telemetry for the number of services (#7908)
Fixes the OSS telemetry for sending the `Number of services` events.
2025-05-13 16:18:59 +05:30
Aditya Singh
ad392e81ff Fix: Update query_range api from v3 to v4 (Logs and Traces) (#7906)
* fix: change query range api from v3 to v4

* test: update test cases

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-05-13 09:54:37 +00:00
Yunus M
92ceefccee Update pull_request_template.md (#7865) 2025-05-13 09:41:16 +00:00
Vikrant Gupta
9cc4e1b56f chore(frontend): update the api folder structure (#7901)
* chore(api): update the api folder structure according to rest principles

* chore(api): update the api folder structure according to rest principles
2025-05-12 18:14:58 +05:30
Ekansh Gupta
3758ee7451 fix: changed the keys in the default quick filters to actual keys in … (#7863)
* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys

* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys

* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys

* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys

* fix: changed the keys in the default quick filters to actual keys in the v3.attributekeys
2025-05-12 16:20:47 +05:30
Vibhu Pandey
02b605d109 feat(analytics): add analytics package (#7808)
- add analytics package
2025-05-12 14:32:13 +05:30
Amlan Kumar Nandy
eb86aabf3e chore: improvements to the all attributes section in metric details (#7879) 2025-05-12 05:24:02 +00:00
Amlan Kumar Nandy
8810693bda chore: persist one chart per query toggle across refreshes and exports (#7884) 2025-05-12 05:16:13 +00:00
Shaheer Kochai
6334e09a60 feat: Funnel Details Page Base Structure (#7364)
* feat: funnels list page basic UI

* feat: get funnels list data from mock API, and handle data, loading and empty states

* feat: implement funnel rename

* chore: move useFunnels to hooks/TracesFunnels

* feat: create traces funnels details basic page + funnel -> details redirection

* fix: properly display created at in funnels list item + preventDefault

* chore: add tab bar to trace funnel details page

* chore: traces funnel details page overall skeleton

* chore: traces funnel details results skeleton

* fix: hide step count for add button only

* feat: funnel details page steps and configuration (#7424)

* chore: add a new tab for traces funnels

* feat: funnels list page basic UI

* feat: get funnels list data from mock API, and handle data, loading and empty states

* feat: implement funnel rename

* refactor: overall improvements

* feat: implement sorting in traces funnels list page

* feat: add sort column key and order to url params

* chore: move useFunnels to hooks/TracesFunnels

* feat: implement traces funnels search and refactor search and sort by extracting to custom hooks

* chore: overall improvements to rename trace funnel modal

* chore: make the rename input auto-focusable

* feat: handle create funnel modal

* feat: delete funnel modal and functionality

* fix: fix the layout shift in funnel item caused by getContainer={false}

* chore: overall improvements and use live api in traces funnels

* feat: create traces funnels details basic page + funnel -> details redirection

* fix: funnels traces light mode UI

* fix: properly display created at in funnels list item + preventDefault

* refactor: extract FunnelItemPopover into a separate component

* chore: hide funnel tab from traces explorer

* chore: add check to display trace funnels tab only in dev environment

* chore: improve funnels modals light mode

* chore: overall improvements

* fix: properly pass funnel details link

* chore: address PR review changes

* chore: add tab bar to trace funnel details page

* feat: funnel step UI with service, span, and where filters

* feat: build radio button component

* refactor: use the SignozRadioButton in funnel results -> step transitions radio buttons

* feat: inter step config (i.e. latency type) UI

* chore: improve steps header styles by removing divider width

* feat: funnel steps title, description, popover UI + pass data from API

* chore: update FilterSelect component to conditionally add url params and accept on change

* fix: fix funnel step where clause and update the state variables for filters

* chore: add support for isMultiple and fix the type in FilterSelect

* feat: centralize the steps state management in StepsContent

* fix: move steps state up + pass steps count from state

* feat: implement auto save for updating the steps whenever any step changes

* feat: implement auto save for validating steps if service name or span names change

* feat: implement funnel step removal

* feat: implement add details modal for funnel steps

* fix: fix the overflowing time range picker

* feat: funnel details empty state

* feat: add support for saving funnel description

* chore: overall improvements

* fix: fix the light mode styles

* fix: fix the failing build + broken search UI

* refactor: remove the reference of useLocation from traceFunnel item in TraceModulePage constant

* fix: fix the issue of update steps getting triggered on initial render if we have filters

* fix: fix the edge case of stale state causing filters to be re-added after removing

* feat: funnel details page results (#7451)

* feat: funnel metrics table component

* feat: funnel metrics and steps transition metrics components UI

* feat: funnel table component

* feat: slowest traces and traces with error components

* fix: overall light theme fixes

* fix: fix the warning

* chore: add empty and loading states to FunnelMetricsTable

* feat: get overall funnel metrics from the API

* fix: fix the empty state of funnel metrics table

* feat: get data for slowest traces and traces with errors

* fix: link trace id to trace details page

* fix: get data for funnel step transition metrics and refactor the existing data fetching logic

* refactor: add funnel context + overall refactoring and optimizations

* refactor: move steps states to funnel context + handle empty and run funnel disabled states

* feat: handle run funnel

* fix: improve empty state

* chore: rename isValidateStepsMutationLoading -> isValidateStepsLoading

* chore: improve query key

* fix: display loading state if funnel results are fetching

* refactor: move steps validation fetching and states to the context API

* fix: display loading state in funnel results while steps validation is fetching

* fix: call validate steps API only on changing the service name or span name of any step

* refactor: move validateStepsQuery key out of useEffect and update the dependencies

* chore: centralize hasIncompleteSteps and run validate only if steps have service and spans

* fix: handle all empty fields state + overall improvements

* fix: handle long where query tags

* feat: build the funnel result graph component

* feat: build the funnel result graph component

* feat: handle loading, error, empty states in funnel graph

* fix: don't display change percentage if % is 0

* refactor: overall improvements

* feat: get funnel steps graph data from API + move logic to custom hook

* fix: improve empty and error states

* fix: handle funnel graph legends width using css

* fix: redirect to trace funnels list page on clicking delete from funnel details

* fix: update the query cache while updating steps

* fix: implement debounced search for funnel list search

* fix: refetch steps graph data query on clicking run funnel / sync button

* fix: improve the step footer spacing

* chore: add gap between divider to inter-step-config

* fix: handle loading state while fetching

* feat: add span to funnel flow (from trace details page) (#7477)

* chore: display add to funnel icon on hovering any span in trace details page

* chore: add className to funnel item actions popover

* feat: add funnels tab to trace details v2 tab bar

* feat: add span to funnel flow

* chore: hide actions popover button from funnel item in span -> funnel flows

* chore: improve the funnel details UI in add span to funnel modal

* fix: display empty state + don't redirect to funnels list on delete success + overall improvements

* chore: add null check

* fix: display add to funnel button based on feature flag

* fix: display funnels tab in trace details based on feature flag

* fix: remove maxTagCount

* feat: change ms to ns

* chore: address review comments

* chore: remove feature flag and display trace funnels only in dev environment

* fix: handle restoring steps if updating funnel steps fail

* refactor: update the get and delete funnel endpoints to adjust to the BE changes (#7697)

* refactor: address review comments

* fix: handle nested funnel response structure to fix missing funnel_id… (#7740)

* fix: handle nested funnel response structure to fix missing funnel_id in updates

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: remove console.log

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* chore: revert explicitly passing funnelId to updateFunnelSteps

---------

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
Co-authored-by: ahmadshaheer <ashaheerki@gmail.com>

* chore: fix api endpoint

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>

* refactor: incorporate the recent funnel details API changes (#7760)

* chore: trace funnels feedback changes (#7772)

* chore: change the copy from x traces to valid traces found / not found

* chore: add open funnel button in add span to funnel modal

* feat: display buttons for adding step details and funnel description + copy to clipboard

* feat: highlight funnel graph column based on selected (total / error span) from the legend items

* chore: trace funnel changes (#7780)

* refactor: handle funnels list search on frontend

* refactor: use funnel steps update API for adding / updating step title and description

* feat: allow selecting user's typed option in trace funnel service and span name dropdowns

* chore: properly render the -> between steps in funnel results

* fix: sync funnel step name with add details modal text fields

---------

Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
Co-authored-by: Yunus M <myounis.ar@live.com>
Co-authored-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-05-12 10:16:26 +05:30
Vikrant Gupta
1d379931b2 chore(integration-test): remove the outdated-setup (#7887)
* chore(integration-test): remove the outdated-setup

* chore(integration-test): remove the outdated-setup
2025-05-11 17:10:55 +05:30
dependabot[bot]
815a6d13c5 chore(deps): bump @babel/helpers from 7.21.0 to 7.26.10 in /frontend (#7272)
Bumps [@babel/helpers](https://github.com/babel/babel/tree/HEAD/packages/babel-helpers) from 7.21.0 to 7.26.10.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.26.10/packages/babel-helpers)

---
updated-dependencies:
- dependency-name: "@babel/helpers"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-11 16:38:45 +05:30
Vibhu Pandey
59af9d1c2f chore(go): upgrade to 1.23 (#7885) 2025-05-11 14:09:24 +05:30
dependabot[bot]
19d24da147 chore(deps): bump @babel/runtime from 7.21.0 to 7.26.10 in /frontend (#7289)
Bumps [@babel/runtime](https://github.com/babel/babel/tree/HEAD/packages/babel-runtime) from 7.21.0 to 7.26.10.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.26.10/packages/babel-runtime)

---
updated-dependencies:
- dependency-name: "@babel/runtime"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-09 23:28:01 +05:30
Vibhu Pandey
cd1c9ddf11 fix(dashboards): fix lock/unlock functionality (#7880)
Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-05-09 16:44:57 +00:00
Sahil Khan
7ff3286c9c fix: added prismjs 1.30.0 in resolutions (#7872) 2025-05-09 21:47:22 +05:30
Amlan Kumar Nandy
27830742f9 fix: resolve typescript issues in metrics explorer (#7878) 2025-05-09 18:55:43 +05:30
Vikrant Gupta
39f07e7477 chore(error): update the channels module to use the new api errors (#7856)
* chore(error): integrate new errors for channels create and test

* chore(error): update all the channel APIs

* chore(error): update the edit org http issue

* chore(error): fix create channel test

* chore(error): fix create channel test

* chore(error): fix create channel test

* chore(error): fix create channel test

* chore(error): remove console logs
2025-05-09 07:56:47 +00:00
Amlan Kumar Nandy
0ab50da7b0 chore: change graph list layout to grid in explorer view (#7852) 2025-05-09 05:38:51 +00:00
Amlan Kumar Nandy
c03541cd6c chore: improve error handling and loading states in summary view of metrics explorer (#7862) 2025-05-09 04:41:41 +00:00
Amlan Kumar Nandy
727a039eb9 fix: fix 'open in explorer' functionality in metrics explorer (#7873) 2025-05-09 11:34:32 +07:00
Yunus M
c7db85f44c Update CODEOWNERS (#7861) 2025-05-08 08:22:41 +00:00
Vikrant Gupta
08d9a74055 fix(api-key): make the expires in human readable in api keys (#7864)
* fix(api-key): human readable expires in

* fix(api-key): human readable expires in
2025-05-08 08:14:02 +00:00
Ekansh Gupta
503e4cdf00 Feat trace ordering on the basis of span_count or Trace_duration (#7842)
* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

* feat: added order by span_count in traces tab

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2025-05-08 06:38:41 +00:00
Srikanth Chekuri
224f952da7 chore: add notification for upcoming migration for cloud region IN users (#7848) 2025-05-07 13:41:41 +00:00
Vikrant Gupta
0c28067f89 feat(error): base setup for error handling in frontend (#7851)
* feat(login): add error response v2 and error handler v2

* feat(error): added the base error class

* feat(error): added the base error class

* feat(error): remove unnecessary code

* feat(error): fix types

* feat(error): add http status code helper
2025-05-07 16:31:20 +05:30
Vibhu Pandey
8dc749b9dd fix(migration): fix cascading drops in sqlite (#7844)
* fix(foreign-key): fix cascading drops in sqlite

* fix(foreign-key): fix comments

* fix(foreign-key): fix function names

* fix(foreign-key): fix order of migration

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-05-07 08:18:13 +00:00
Prashant Shahi
82a111e5b1 chore(signoz): remove deprecated signoz arguments (#7849)
### Summary

- remove deprecated signoz arguments

---------

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2025-05-07 07:15:37 +00:00
primus-bot[bot]
e2e6c65b4d chore(release): bump to v0.82.0 (#7847)
#### Summary
 - Release SigNoz v0.82.0
 - Bump SigNoz OTel Collector to v0.111.41

 Created by [Primus-Bot](https://github.com/apps/primus-bot)
2025-05-07 06:46:59 +00:00
Amlan Kumar Nandy
f01d21cbf2 feat: implement inspect feature for metrics explorer (#7549) 2025-05-07 05:18:56 +00:00
aniketio-ctrl
36886135d1 chore: disable writing to v2 tables and add signozclickhousemetrics in signozspanmetrics
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-05-07 03:26:01 +00:00
Vikrant Gupta
3648027576 fix(ruler): improve the user experience for rule id migration (#7841)
* fix(ruler): improve the user experience for rule id migration

* fix(ruler): improve the user experience for rule id migration
2025-05-06 22:37:59 +05:30
Shaheer Kochai
b80626f5e2 fix: add dark class to the elements when dark mode is enabled to support components library modes (#7607) 2025-05-06 15:44:26 +00:00
Shaheer Kochai
08579242eb fix: add hideSpanScopeSelector prop to QueryBuilderSearchV2 and hide from non qb consumers (#7716)
* feat: add hideSpanScopeSelector prop to QueryBuilderSearchV2 and hide from non qb consumers

* fix: update the tests to check rendering based on hideSpanScopeSelector

* feat: display span selector in exceptions page
2025-05-06 19:02:57 +04:30
aniketio-ctrl
6e0b50dd60 fix(7832): added filters in inspect metrics api (#7833)
* fix(7842): added filters in inspect metrics api

* fix(metrics-explorer): added check for 40 time series only
2025-05-06 07:06:12 +00:00
Aditya Singh
76ed58c481 Fix/logs issues main (#7758)
* fix: context log data fix in list view

* fix: fix query builder and quick filters in light mode

* chore: add desc

* chore: added test case

* fix: fix redirect url when not in logs view

* chore: minor fix

* chore: minor fix

* chore: minor test fix

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-05-06 11:02:40 +05:30
Shivanshu Raj Shrivastava
f4d029bd12 fix: correctly populate response_status (#7822)
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-05-05 13:57:09 +05:30
Amlan Kumar Nandy
b66af786e6 fix: description tooltip coming up twice in metrics list table (#7823) 2025-05-05 05:58:56 +00:00
Vibhu Pandey
5ad68a3310 docs(contributing): add sql docs (#7819)
### Summary

add sql docs
2025-05-04 02:23:44 +05:30
Vikrant Gupta
0f0693f6eb fix(ruler): scan orgIDs in string slice instead of valuer struct (#7818) 2025-05-04 00:04:20 +05:30
Ekansh Gupta
16e3c185e9 feat: quick_filter_fix (#7816)
* feat: quick_filter_fix

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters
2025-05-03 17:09:20 +00:00
Vibhu Pandey
8d6671e362 docs(contributing): add docs/contributing/go/readme (#7814)
* docs(readme): add docs/contributing/go/readme

* docs(readme): add docs/contributing/go/readme

* docs(readme): add errors package

* docs(readme): add errors package

* docs(readme): add errors package

* docs(readme): add errors package

* docs(readme): add errors package

* docs(readme): add errors package

* docs(readme): add errors package

* docs(readme): add errors package

* docs(readme): add errors package

* docs(readme): add errors package

* Update docs/contributing/go/errors.md

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

* Update docs/contributing/go/errors.md

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

---------

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
2025-05-03 13:07:18 +00:00
Vikrant Gupta
5b237ee628 feat(cache): multi-tenant cache (#7805)
* feat(cache): remove the references of old cache

* feat(cache): add orgID in query range modules pt1

* feat(cache): add orgID in query range modules pt2

* feat(cache): add orgID in query range modules pt3

* feat(cache): preload metrics for all orgs

* feat(cache): fix ruler

* feat(cache): fix go build

* feat(cache): add orgID to rule

* feat(cache): fix tests

* feat(cache): address review comments

* feat(cache): use correct errors

* feat(cache): fix tests

* feat(cache): add the cache test package
2025-05-03 18:30:07 +05:30
Nageshbansal
cb08ce5e5d chore: updates os for Docker Engine Installation for redhat (#7809) 2025-05-03 10:06:16 +00:00
Ekansh Gupta
3fbc3dec48 feat: added changes related to custom options for quick filters (#7712)
* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added support for custom quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters

* feat: added changes related to custom options for quick filters
2025-05-02 22:39:26 +05:30
768 changed files with 37766 additions and 14265 deletions


@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.111.40
image: signoz/signoz-schema-migrator:v0.111.41
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.111.40
image: signoz/signoz-schema-migrator:v0.111.41
container_name: schema-migrator-async
command:
- async

.github/CODEOWNERS

@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.
/frontend/ @YounixM
/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/deploy/ @SigNoz/devops


@@ -1,17 +1,74 @@
-### Summary
+## 📄 Summary
-<!-- ✍️ A clear and concise description...-->
+<!-- Describe the purpose of the PR in a few sentences. What does it fix/add/update? -->
-#### Related Issues / PR's
+---
-<!-- ✍️ Add the issues being resolved here and related PR's where applicable -->
+## ✅ Changes
-#### Screenshots
+- [ ] Feature: Brief description
+- [ ] Bug fix: Brief description
-NA
+---
-<!-- ✍️ Add screenshots of before and after changes where applicable-->
+## 🏷️ Required: Add Relevant Labels
-#### Affected Areas and Manually Tested Areas
+> ⚠️ **Manually add appropriate labels in the PR sidebar**
+Please select one or more labels (as applicable):
-<!-- ✍️ Add details of blast radius and dev testing areas where applicable-->
+ex:
+- `frontend`
+- `backend`
+- `devops`
+- `bug`
+- `enhancement`
+- `ui`
+- `test`
+---
+## 👥 Reviewers
+> Tag the relevant teams for review:
+- [ ] @SigNoz/frontend
+- [ ] @SigNoz/backend
+- [ ] @SigNoz/devops
+---
+## 🧪 How to Test
+<!-- Describe how reviewers can test this PR -->
+1. ...
+2. ...
+3. ...
+---
+## 🔍 Related Issues
+<!-- Reference any related issues (e.g. Fixes #123, Closes #456) -->
+Closes #
+---
+## 📸 Screenshots / Screen Recording (if applicable / mandatory for UI related changes)
+<!-- Add screenshots or GIFs to help visualize changes -->
+---
+## 📋 Checklist
+- [ ] Dev Review
+- [ ] Test cases added (Unit/ Integration / E2E)
+- [ ] Manually tested the changes
+---
+## 👀 Notes for Reviewers
+<!-- Anything reviewers should keep in mind while reviewing -->


@@ -62,6 +62,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_NAME: signoz-community
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build


@@ -94,6 +94,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service


@@ -91,6 +91,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service


@@ -18,6 +18,7 @@ jobs:
with:
PRIMUS_REF: main
GO_TEST_CONTEXT: ./...
GO_VERSION: 1.23
fmt:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -26,6 +27,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
lint:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -34,6 +36,16 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
deps:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
uses: signoz/primus.workflows/.github/workflows/go-deps.yaml@main
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.23
build:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -45,7 +57,7 @@ jobs:
- name: go-install
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: qemu-install
uses: docker/setup-qemu-action@v3
- name: aarch64-install


@@ -58,7 +58,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |
@@ -122,7 +122,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
# copy the caches from build
- name: get-sha


@@ -73,7 +73,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |
@@ -136,7 +136,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.22"
go-version: "1.23"
# copy the caches from build
- name: get-sha

.gitignore

@@ -60,9 +60,7 @@ ee/query-service/db
e2e/node_modules/
e2e/test-results/
e2e/playwright-report/
e2e/blob-report/
e2e/playwright/.cache/
e2e/.auth
# go


@@ -76,9 +76,7 @@ go-run-enterprise: ## Runs the enterprise go backend server
go run -race \
$(GO_BUILD_CONTEXT_ENTERPRISE)/main.go \
--config ./conf/prometheus.yml \
--cluster cluster \
--use-logs-new-schema true \
--use-trace-new-schema true
--cluster cluster
.PHONY: go-test
go-test: ## Runs go unit tests
@@ -96,9 +94,7 @@ go-run-community: ## Runs the community go backend server
go run -race \
$(GO_BUILD_CONTEXT_COMMUNITY)/main.go \
--config ./conf/prometheus.yml \
--cluster cluster \
--use-logs-new-schema true \
--use-trace-new-schema true
--cluster cluster
.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community


@@ -50,7 +50,7 @@ cache:
# Time-to-live for cache entries in memory. Specify the duration in ns
ttl: 60000000000
# The interval at which the cache will be cleaned up
cleanupInterval: 1m
cleanup_interval: 1m
# redis: Uses Redis as the caching backend.
redis:
# The hostname or IP address of the Redis server.
@@ -164,3 +164,9 @@ alertmanager:
maintenance_interval: 15m
# Retention of the notification logs.
retention: 120h
##################### Analytics #####################
analytics:
# Whether to enable analytics.
enabled: false


@@ -174,11 +174,9 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.81.0
image: signoz/signoz:v0.84.1
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
- --use-trace-new-schema=true
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
@@ -208,7 +206,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.40
image: signoz/signoz-otel-collector:v0.111.41
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -232,7 +230,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.40
image: signoz/signoz-schema-migrator:v0.111.41
deploy:
restart_policy:
condition: on-failure


@@ -110,11 +110,9 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.81.0
image: signoz/signoz:v0.84.1
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
- --use-trace-new-schema=true
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
@@ -143,7 +141,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.40
image: signoz/signoz-otel-collector:v0.111.41
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -167,7 +165,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.40
image: signoz/signoz-schema-migrator:v0.111.41
deploy:
restart_policy:
condition: on-failure


@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: clickhousemetricswrite
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -64,8 +64,10 @@ exporters:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
enabled: true
disable_v2: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:


@@ -177,12 +177,10 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.81.0}
image: signoz/signoz:${VERSION:-v0.84.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
- --use-trace-new-schema=true
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
@@ -212,7 +210,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.40}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -238,7 +236,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
container_name: schema-migrator-sync
command:
- sync
@@ -249,7 +247,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
container_name: schema-migrator-async
command:
- async


@@ -110,12 +110,10 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.81.0}
image: signoz/signoz:${VERSION:-v0.84.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
- --use-trace-new-schema=true
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
@@ -144,7 +142,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.40}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.41}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +164,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +176,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.41}
container_name: schema-migrator-async
command:
- async


@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: clickhousemetricswrite
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -62,10 +62,12 @@ exporters:
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:


@@ -93,7 +93,7 @@ check_os() {
;;
Red\ Hat*)
desired_os=1
os="red hat"
os="rhel"
package_manager="yum"
;;
CentOS*)


@@ -0,0 +1,103 @@
# Errors
SigNoz includes its own structured [errors](/pkg/errors/errors.go) package. It is built on top of Go's `error` interface, extending it with additional context that yields more meaningful error messages throughout the application.
## How to use it?
To use the SigNoz structured errors package, use these functions instead of the standard library alternatives:
```go
// Instead of errors.New()
errors.New(typ, code, message)
// Instead of fmt.Errorf()
errors.Newf(typ, code, message, args...)
```
### Typ
The Typ (read as Type, defined as `typ`) is used to categorize errors across the codebase and is loosely coupled with HTTP/GRPC status codes. All predefined types can be found in [pkg/errors/type.go](/pkg/errors/type.go). For example:
- `TypeInvalidInput` - Indicates invalid input was provided
- `TypeNotFound` - Indicates a resource was not found
By design, `typ` is unexported and cannot be declared outside of the [errors](/pkg/errors/errors.go) package. This ensures that error types stay consistent across the codebase and are used meaningfully.
### Code
Codes are used to provide more granular categorization within types. For instance, a type of `TypeInvalidInput` might have codes like `CodeInvalidEmail` or `CodeInvalidPassword`.
To create new error codes, use the `errors.MustNewCode` function:
```go
var (
	CodeThingAlreadyExists = errors.MustNewCode("thing_already_exists")
	CodeThingNotFound      = errors.MustNewCode("thing_not_found")
)
```
> 💡 **Note**: Error codes must match the regex `^[a-z_]+$`; otherwise `errors.MustNewCode` will panic.
## Show me some examples
### Using the error
A basic example of using the error:
```go
var (
	CodeThingAlreadyExists = errors.MustNewCode("thing_already_exists")
)

func CreateThing(id string) error {
	_, err := thing.GetFromStore(id)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			// thing was not found, create it
			return thing.Create(id)
		}
		// something else went wrong, wrap the error with more context
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeUnknown, "failed to get thing from store")
	}
	return errors.Newf(errors.TypeAlreadyExists, CodeThingAlreadyExists, "thing with id %s already exists", id)
}
```
### Changing the error
Sometimes you may want to change the error while preserving the message:
```go
func GetUserSecurely(id string) (*User, error) {
	user, err := repository.GetUser(id)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			// Convert NotFound to Forbidden for security reasons
			return nil, errors.New(errors.TypeForbidden, errors.CodeAccessDenied, "access denied to requested resource")
		}
		return nil, err
	}
	return user, nil
}
```
## Why do we need this?
In a large codebase like SigNoz, error handling is critical for maintaining reliability, debuggability, and a good user experience. We believe that it is the **responsibility of a function** to return **well-defined** errors that **accurately describe what went wrong**. With our structured error system:
- Functions can create precise errors with appropriate additional context
- Callers can make informed decisions based on the additional context
- Error context is preserved and enhanced as it moves up the call stack
The caller (which can be another function, an HTTP/gRPC handler, or something else entirely) can then choose to use this error to take appropriate actions, such as:
- A function can branch into different paths based on the context
- An HTTP/gRPC handler can derive the correct status code and message from the error and send it to the client
- Logging systems can capture structured error information for better diagnostics
Although this might seem too verbose in some cases, it makes the code more maintainable and consistent. Slightly verbose code is better than clever code that doesn't provide enough context.
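To make the HTTP/gRPC handler bullet above concrete, here is a minimal, hypothetical sketch of mapping an error's type to an HTTP status code. It uses only the `Ast` helper and the predefined types shown in this document, assumes `net/http` and this errors package are imported, and is not the actual helper used by SigNoz handlers.
```go
// Hypothetical mapping from error type to HTTP status; a real handler may
// extract the type and code differently.
func statusCodeFor(err error) int {
	switch {
	case errors.Ast(err, errors.TypeNotFound):
		return http.StatusNotFound
	case errors.Ast(err, errors.TypeInvalidInput):
		return http.StatusBadRequest
	default:
		return http.StatusInternalServerError
	}
}
```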
## What should I remember?
- Think about error handling as you write your code, not as an afterthought.
- Always use the [errors](/pkg/errors/errors.go) package instead of the standard library's `errors.New()` or `fmt.Errorf()`.
- Always assign appropriate codes to errors when creating them instead of using the "catch all" error codes defined in [pkg/errors/code.go](/pkg/errors/code.go).
- Use `errors.Wrapf()` to add context to errors while preserving the original when appropriate.


@@ -0,0 +1,11 @@
# Go
This document provides an overview of contributing to the SigNoz backend. The backend is written in Go, with a focus on performance, maintainability, and developer experience. We strive for clean, idiomatic code that follows established Go practices while addressing the unique needs of an observability platform.
We adhere to three primary style guides as our foundation:
- [Effective Go](https://go.dev/doc/effective_go) - For writing idiomatic Go code
- [Code Review Comments](https://go.dev/wiki/CodeReviewComments) - For understanding common comments in code reviews
- [Google Style Guide](https://google.github.io/styleguide/go/) - Additional practices from Google
We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development. In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package.


@@ -0,0 +1,94 @@
# SQL
SigNoz utilizes a relational database to store metadata including organization information, user data and other settings.
## How to use it?
The database interface is defined in [SQLStore](/pkg/sqlstore/sqlstore.go). SigNoz leverages the Bun ORM to interact with the underlying database. To access the database instance, use the `BunDBCtx` function. For operations that require transactions across multiple database operations, use the `RunInTxCtx` function. This function embeds a transaction in the context, which propagates through various functions in the callback.
```go
type Thing struct {
	bun.BaseModel
	ID            types.Identifiable  `bun:",embed"`
	SomeColumn    string              `bun:"some_column"`
	TimeAuditable types.TimeAuditable `bun:",embed"`
	OrgID         string              `bun:"org_id"`
}

func GetThing(ctx context.Context, id string) (*Thing, error) {
	thing := new(Thing)
	err := sqlstore.
		BunDBCtx(ctx).
		NewSelect().
		Model(thing).
		Where("id = ?", id).
		Scan(ctx)
	return thing, err
}

func CreateThing(ctx context.Context, thing *Thing) error {
	// Exec returns (sql.Result, error); discard the result and return the error.
	_, err := sqlstore.
		BunDBCtx(ctx).
		NewInsert().
		Model(thing).
		Exec(ctx)
	return err
}
```
> 💡 **Note**: Always use line breaks when building SQL queries to enhance code readability.
> 💡 **Note**: Always use the `new` function to create new instances of structs.
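When several writes must succeed or fail together, wrap them in `RunInTxCtx`. The sketch below builds on the `Thing` example above and is hedged: the exact `RunInTxCtx` signature is an assumption (it may also accept transaction options), and `AuditEntry` is a hypothetical second model; the key point is that the callback's context carries the transaction, so `BunDBCtx` inside `CreateThing` resolves to that same transaction.
```go
// AuditEntry is a hypothetical model used only for this sketch.
type AuditEntry struct {
	bun.BaseModel

	ID      types.Identifiable `bun:",embed"`
	ThingID string             `bun:"thing_id"`
	OrgID   string             `bun:"org_id"`
}

func CreateThingWithAudit(ctx context.Context, thing *Thing, audit *AuditEntry) error {
	// assumed signature: RunInTxCtx(ctx, func(ctx context.Context) error) error
	return sqlstore.RunInTxCtx(ctx, func(ctx context.Context) error {
		// uses the transaction embedded in ctx
		if err := CreateThing(ctx, thing); err != nil {
			return err
		}
		// second write in the same transaction: both commit or both roll back
		_, err := sqlstore.
			BunDBCtx(ctx).
			NewInsert().
			Model(audit).
			Exec(ctx)
		return err
	})
}
```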
## What are hooks?
Hooks are user-defined functions that execute before and/or after specific database operations. These hooks are particularly useful for generating telemetry data such as logs, traces, and metrics, providing visibility into database interactions. Hooks are defined in the [SQLStoreHook](/pkg/sqlstore/sqlstore.go) interface.
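For illustration, here is a minimal query hook built on Bun's public `bun.QueryHook` interface that logs each query and its duration. This is a sketch only; SigNoz's own `SQLStoreHook` interface in [pkg/sqlstore](/pkg/sqlstore/sqlstore.go) is the source of truth and may differ in shape.
```go
package example

import (
	"context"
	"time"

	"github.com/uptrace/bun"
	"go.uber.org/zap"
)

type loggingHook struct{}

// compile-time check that loggingHook satisfies bun.QueryHook
var _ bun.QueryHook = (*loggingHook)(nil)

func (*loggingHook) BeforeQuery(ctx context.Context, _ *bun.QueryEvent) context.Context {
	// nothing to do before the query in this sketch
	return ctx
}

func (*loggingHook) AfterQuery(_ context.Context, event *bun.QueryEvent) {
	zap.L().Debug(
		"sql query",
		zap.String("query", event.Query),
		zap.Duration("duration", time.Since(event.StartTime)),
		zap.Error(event.Err),
	)
}

// Registration is a single call on the *bun.DB instance:
//
//	db.AddQueryHook(&loggingHook{})
```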
## How is the schema designed?
SigNoz implements a star schema design with the organizations table as the central entity. All other tables link to the organizations table via foreign key constraints on the `org_id` column. This design ensures that every entity within the system is either directly or indirectly associated with an organization.
```mermaid
erDiagram
    ORGANIZATIONS {
        string id PK
        timestamp created_at
        timestamp updated_at
    }
    ENTITY_A {
        string id PK
        timestamp created_at
        timestamp updated_at
        string org_id FK
    }
    ENTITY_B {
        string id PK
        timestamp created_at
        timestamp updated_at
        string org_id FK
    }
    ORGANIZATIONS ||--o{ ENTITY_A : contains
    ORGANIZATIONS ||--o{ ENTITY_B : contains
```
> 💡 **Note**: There are rare exceptions to the above star schema design. Consult with the maintainers before deviating from the above design.
All tables follow a consistent primary key pattern using an `id` column (provided by the `types.Identifiable` struct) and include `created_at` and `updated_at` columns (provided by the `types.TimeAuditable` struct) for audit purposes.
## How to write migrations?
For schema migrations, use the [SQLMigration](/pkg/sqlmigration/sqlmigration.go) interface and write the migration in the same package. When creating migrations, adhere to these guidelines:
- Do not implement **`ON CASCADE` foreign key constraints**. Deletion operations should be handled explicitly in application logic rather than delegated to the database.
- Do not **import types from the types package** in the `sqlmigration` package. Instead, define the required types within the migration package itself. This practice ensures migration stability as the core types evolve over time.
- Do not implement **`Down` migrations**. As the codebase matures, we may introduce this capability, but for now, the `Down` function should remain empty.
- Always write **idempotent** migrations: running a migration multiple times must not cause an error (a minimal sketch follows this list).
- A migration that **depends on the underlying dialect** (sqlite, postgres, etc.) should be written as part of the [SQLDialect](/pkg/sqlstore/sqlstore.go) interface. The implementation goes in the dialect-specific package of the respective database.
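As an example of the idempotency guideline, here is a hedged sketch of an `Up` step. The `Up`/`Down` signatures and the `addThing` migration struct are assumptions (the [SQLMigration](/pkg/sqlmigration/sqlmigration.go) interface is the source of truth), and the `thing` type is defined locally rather than imported from the types package, per the guideline above.
```go
package sqlmigration

import (
	"context"
	"time"

	"github.com/uptrace/bun"
)

// thing is a local copy of the model, deliberately not imported from pkg/types
// so the migration stays stable as the core types evolve.
type thing struct {
	bun.BaseModel `bun:"table:thing"`

	ID        string    `bun:"id,pk,type:text"`
	CreatedAt time.Time `bun:"created_at"`
	UpdatedAt time.Time `bun:"updated_at"`
	OrgID     string    `bun:"org_id,type:text"`
}

// addThing is a hypothetical migration.
type addThing struct{}

// Up is idempotent: IfNotExists makes it safe to run more than once.
// The signature is an assumption based on bun's migration functions.
func (*addThing) Up(ctx context.Context, db *bun.DB) error {
	_, err := db.
		NewCreateTable().
		Model(new(thing)).
		IfNotExists().
		ForeignKey(`("org_id") REFERENCES "organizations" ("id")`).
		Exec(ctx)
	return err
}

// Down stays empty per the guideline above.
func (*addThing) Down(ctx context.Context, db *bun.DB) error {
	return nil
}
```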
## What should I remember?
- Use `BunDBCtx` and `RunInTxCtx` to access the database instance and execute transactions respectively.
- While designing new tables, consistently include `id`, `created_at`, and `updated_at` columns, plus an `org_id` column with a foreign key constraint to the `organizations` table (unless the table is a transitive entity that is only indirectly associated with an organization).
- Implement deletion logic in the application rather than relying on cascading deletes in the database.
- While writing migrations, adhere to the guidelines mentioned above.

View File

@@ -61,14 +61,14 @@ func (p *Pat) Wrap(next http.Handler) http.Handler {
return
}
role, err := authtypes.NewRole(user.Role)
role, err := types.NewRole(user.Role)
if err != nil {
next.ServeHTTP(w, r)
return
}
jwt := authtypes.Claims{
UserID: user.ID,
UserID: user.ID.String(),
Role: role,
Email: user.Email,
OrgID: user.OrgID,

View File

@@ -0,0 +1,203 @@
package impluser
import (
"context"
"encoding/json"
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/types"
"github.com/gorilla/mux"
)
// EnterpriseHandler embeds the base handler implementation
type Handler struct {
user.Handler // Embed the base handler interface
module user.Module
}
func NewHandler(module user.Module) user.Handler {
baseHandler := impluser.NewHandler(module)
return &Handler{
Handler: baseHandler,
module: module,
}
}
func (h *Handler) Login(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
var req types.PostableLoginRequest
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
render.Error(w, err)
return
}
if req.RefreshToken == "" {
// the EE handler wrapper passes the feature flag value in context
ssoAvailable, ok := ctx.Value(types.SSOAvailable).(bool)
if !ok {
render.Error(w, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to retrieve SSO availability"))
return
}
if ssoAvailable {
_, err := h.module.CanUsePassword(ctx, req.Email)
if err != nil {
render.Error(w, err)
return
}
}
}
user, err := h.module.GetAuthenticatedUser(ctx, req.OrgID, req.Email, req.Password, req.RefreshToken)
if err != nil {
render.Error(w, err)
return
}
jwt, err := h.module.GetJWTForUser(ctx, user)
if err != nil {
render.Error(w, err)
return
}
gettableLoginResponse := &types.GettableLoginResponse{
GettableUserJwt: jwt,
UserID: user.ID.String(),
}
render.Success(w, http.StatusOK, gettableLoginResponse)
}
// Override only the methods you need with enterprise-specific implementations
func (h *Handler) LoginPrecheck(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
// assume user is valid unless proven otherwise and assign default values for rest of the fields
email := r.URL.Query().Get("email")
sourceUrl := r.URL.Query().Get("ref")
orgID := r.URL.Query().Get("orgID")
resp, err := h.module.LoginPrecheck(ctx, orgID, email, sourceUrl)
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
}
func (h *Handler) AcceptInvite(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
req := new(types.PostableAcceptInvite)
if err := json.NewDecoder(r.Body).Decode(req); err != nil {
render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode user"))
return
}
// get invite object
invite, err := h.module.GetInviteByToken(ctx, req.InviteToken)
if err != nil {
render.Error(w, err)
return
}
orgDomain, err := h.module.GetAuthDomainByEmail(ctx, invite.Email)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
render.Error(w, err)
return
}
precheckResp := &types.GettableLoginPrecheck{
SSO: false,
IsUser: false,
}
if invite.Name == "" && req.DisplayName != "" {
invite.Name = req.DisplayName
}
user, err := types.NewUser(invite.Name, invite.Email, invite.Role, invite.OrgID)
if err != nil {
render.Error(w, err)
return
}
if orgDomain != nil && orgDomain.SsoEnabled {
// sso is enabled, create user and respond precheck data
err = h.module.CreateUser(ctx, user)
if err != nil {
render.Error(w, err)
return
}
// check if sso is enforced for the org
precheckResp, err = h.module.LoginPrecheck(ctx, invite.OrgID, user.Email, req.SourceURL)
if err != nil {
render.Error(w, err)
return
}
} else {
password, err := types.NewFactorPassword(req.Password)
if err != nil {
render.Error(w, err)
return
}
user, err = h.module.CreateUserWithPassword(ctx, user, password)
if err != nil {
render.Error(w, err)
return
}
precheckResp.IsUser = true
}
// delete the invite
if err := h.module.DeleteInvite(ctx, invite.OrgID, invite.ID); err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, precheckResp)
}
func (h *Handler) GetInvite(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
token := mux.Vars(r)["token"]
sourceUrl := r.URL.Query().Get("ref")
invite, err := h.module.GetInviteByToken(ctx, token)
if err != nil {
render.Error(w, err)
return
}
// precheck the user
precheckResp, err := h.module.LoginPrecheck(ctx, invite.OrgID, invite.Email, sourceUrl)
if err != nil {
render.Error(w, err)
return
}
gettableInvite := &types.GettableEEInvite{
GettableInvite: *invite,
PreCheck: precheckResp,
}
render.Success(w, http.StatusOK, gettableInvite)
return
}

View File

@@ -0,0 +1,229 @@
package impluser
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/user"
baseimpl "github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"go.uber.org/zap"
)
// EnterpriseModule embeds the base module implementation
type Module struct {
*baseimpl.Module // Embed the base module implementation
store types.UserStore
}
func NewModule(store types.UserStore) user.Module {
baseModule := baseimpl.NewModule(store).(*baseimpl.Module)
return &Module{
Module: baseModule,
store: store,
}
}
func (m *Module) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, error) {
// get auth domain from email domain
_, err := m.GetAuthDomainByEmail(ctx, email)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
// get name from email
parts := strings.Split(email, "@")
if len(parts) < 2 {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format")
}
name := parts[0]
defaultOrgID, err := m.store.GetDefaultOrgID(ctx)
if err != nil {
return nil, err
}
user, err := types.NewUser(name, email, types.RoleViewer.String(), defaultOrgID)
if err != nil {
return nil, err
}
err = m.CreateUser(ctx, user)
if err != nil {
return nil, err
}
return user, nil
}
func (m *Module) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (string, error) {
users, err := m.GetUsersByEmail(ctx, email)
if err != nil {
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", err.Error()))
return "", err
}
user := &types.User{}
if len(users) == 0 {
newUser, err := m.createUserForSAMLRequest(ctx, email)
user = newUser
if err != nil {
zap.L().Error("failed to create user with email received from auth provider", zap.Error(err))
return "", err
}
} else {
user = &users[0].User
}
tokenStore, err := m.GetJWTForUser(ctx, user)
if err != nil {
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", err
}
return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
redirectUri,
tokenStore.AccessJwt,
user.ID,
tokenStore.RefreshJwt), nil
}
func (m *Module) CanUsePassword(ctx context.Context, email string) (bool, error) {
domain, err := m.GetAuthDomainByEmail(ctx, email)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return false, err
}
if domain != nil && domain.SsoEnabled {
// sso is enabled, check if the user has admin role
users, err := m.GetUsersByEmail(ctx, email)
if err != nil {
return false, err
}
if len(users) == 0 {
return false, errors.New(errors.TypeNotFound, errors.CodeNotFound, "user not found")
}
if users[0].Role != types.RoleAdmin.String() {
return false, errors.New(errors.TypeForbidden, errors.CodeForbidden, "auth method not supported")
}
}
return true, nil
}
func (m *Module) LoginPrecheck(ctx context.Context, orgID, email, sourceUrl string) (*types.GettableLoginPrecheck, error) {
resp := &types.GettableLoginPrecheck{IsUser: true, CanSelfRegister: false}
// check if email is a valid user
users, err := m.GetUsersByEmail(ctx, email)
if err != nil {
return nil, err
}
if len(users) == 0 {
resp.IsUser = false
}
// give them an option to select an org
if orgID == "" && len(users) > 1 {
resp.SelectOrg = true
resp.Orgs = make([]string, len(users))
for i, user := range users {
resp.Orgs[i] = user.OrgID
}
return resp, nil
}
// select the user with the corresponding orgID
if len(users) > 1 {
found := false
for _, tuser := range users {
if tuser.OrgID == orgID {
// user = tuser
found = true
break
}
}
if !found {
resp.IsUser = false
return resp, nil
}
}
// the EE handler wrapper passes the feature flag value in context
ssoAvailable, ok := ctx.Value(types.SSOAvailable).(bool)
if !ok {
zap.L().Error("failed to retrieve ssoAvailable from context")
return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to retrieve SSO availability")
}
if ssoAvailable {
// TODO(Nitya): in multitenancy this should use orgId as well.
orgDomain, err := m.GetAuthDomainByEmail(ctx, email)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
if orgDomain != nil && orgDomain.SsoEnabled {
// this is to allow self registration
resp.IsUser = true
// saml is enabled for this domain, lets prepare sso url
if sourceUrl == "" {
sourceUrl = constants.GetDefaultSiteURL()
}
// parse source url that generated the login request
var err error
escapedUrl, _ := url.QueryUnescape(sourceUrl)
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse referer")
}
// build Idp URL that will authenticat the user
// the front-end will redirect user to this url
resp.SSOUrl, err = orgDomain.BuildSsoUrl(siteUrl)
if err != nil {
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to prepare saml request for domain")
}
// set SSO to true, as the url is generated correctly
resp.SSO = true
}
}
return resp, nil
}
func (m *Module) GetAuthDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, error) {
if email == "" {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
}
components := strings.Split(email, "@")
if len(components) < 2 {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format")
}
domain, err := m.store.GetDomainByName(ctx, components[1])
if err != nil {
return nil, err
}
gettableDomain := &types.GettableOrgDomain{StorableOrgDomain: *domain}
if err := gettableDomain.LoadConfig(domain.Data); err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to load domain config")
}
return gettableDomain, nil
}

View File

@@ -0,0 +1,37 @@
package impluser
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
baseimpl "github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
)
type store struct {
*baseimpl.Store
sqlstore sqlstore.SQLStore
}
func NewStore(sqlstore sqlstore.SQLStore) types.UserStore {
baseStore := baseimpl.NewStore(sqlstore).(*baseimpl.Store)
return &store{
Store: baseStore,
sqlstore: sqlstore,
}
}
func (s *store) GetDomainByName(ctx context.Context, name string) (*types.StorableOrgDomain, error) {
domain := new(types.StorableOrgDomain)
err := s.sqlstore.BunDB().NewSelect().
Model(domain).
Where("name = ?", name).
Limit(1).
Scan(ctx)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeNotFound, errors.CodeNotFound, "failed to get domain from name")
}
return domain, nil
}

View File

@@ -1,4 +1,4 @@
FROM golang:1.22-bullseye
FROM golang:1.23-bullseye
ARG OS="linux"
ARG TARGETARCH

View File

@@ -5,6 +5,7 @@ import (
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/valuer"
)
type DailyProvider struct {
@@ -37,7 +38,7 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
return dp
}
func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
func (p *DailyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityDaily
return p.getAnomalies(ctx, req)
return p.getAnomalies(ctx, orgID, req)
}

View File

@@ -5,6 +5,7 @@ import (
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/valuer"
)
type HourlyProvider struct {
@@ -37,7 +38,7 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
return hp
}
func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
func (p *HourlyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityHourly
return p.getAnomalies(ctx, req)
return p.getAnomalies(ctx, orgID, req)
}

View File

@@ -2,8 +2,10 @@ package anomaly
import (
"context"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Provider interface {
GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
}

View File

@@ -5,11 +5,12 @@ import (
"math"
"time"
"github.com/SigNoz/signoz/pkg/query-service/cache"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
@@ -59,9 +60,9 @@ func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomaly
return prepareAnomalyQueryParams(req.Params, req.Seasonality)
}
func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID, params *anomalyQueryParams) (*anomalyQueryResults, error) {
zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery)
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery)
if err != nil {
return nil, err
}
@@ -72,7 +73,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
}
zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery)
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery)
if err != nil {
return nil, err
}
@@ -83,7 +84,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
}
zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery)
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery)
if err != nil {
return nil, err
}
@@ -94,7 +95,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
}
zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery)
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery)
if err != nil {
return nil, err
}
@@ -105,7 +106,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
}
zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past2SeasonQuery)
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery)
if err != nil {
return nil, err
}
@@ -116,7 +117,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
}
zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past3SeasonQuery)
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery)
if err != nil {
return nil, err
}
@@ -335,9 +336,9 @@ func (p *BaseSeasonalProvider) getAnomalyScores(
return anomalyScoreSeries
}
func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
anomalyParams := p.getQueryParams(req)
anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
anomalyQueryResults, err := p.getResults(ctx, orgID, anomalyParams)
if err != nil {
return nil, err
}

View File

@@ -5,6 +5,7 @@ import (
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/valuer"
)
type WeeklyProvider struct {
@@ -36,7 +37,7 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
return wp
}
func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
func (p *WeeklyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
req.Seasonality = SeasonalityWeekly
return p.getAnomalies(ctx, req)
return p.getAnomalies(ctx, orgID, req)
}

View File

@@ -1,6 +1,7 @@
package api
import (
"context"
"net/http"
"net/http/httputil"
"time"
@@ -9,22 +10,28 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
type APIHandlerOptions struct {
@@ -38,7 +45,6 @@ type APIHandlerOptions struct {
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Cache cache.Cache
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
@@ -55,20 +61,22 @@ type APIHandler struct {
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
quickfiltermodule := quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(signoz.SQLStore))
quickFilter := quickfilter.NewAPI(quickfiltermodule)
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
PreferSpanMetrics: opts.PreferSpanMetrics,
AppDao: opts.AppDao,
RuleManager: opts.RulesManager,
FeatureFlags: opts.FeatureFlags,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
Cache: opts.Cache,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
FieldsAPI: fields.NewAPI(signoz.TelemetryStore),
Signoz: signoz,
QuickFilters: quickFilter,
QuickFilterModule: quickfiltermodule,
})
if err != nil {
@@ -117,43 +125,24 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// routes available only in ee version
router.HandleFunc("/api/v1/featureFlags",
am.OpenAccess(ah.getFeatureFlags)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/featureFlags", am.OpenAccess(ah.getFeatureFlags)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/loginPrecheck", am.OpenAccess(ah.loginPrecheck)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/loginPrecheck",
am.OpenAccess(ah.precheckLogin)).
Methods(http.MethodGet)
// invite
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/invite/accept", am.OpenAccess(ah.acceptInvite)).Methods(http.MethodPost)
// paid plans specific routes
router.HandleFunc("/api/v1/complete/saml",
am.OpenAccess(ah.receiveSAML)).
Methods(http.MethodPost)
router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/complete/google", am.OpenAccess(ah.receiveGoogleAuth)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/orgs/{orgId}/domains", am.AdminAccess(ah.listDomainsByOrg)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/complete/google",
am.OpenAccess(ah.receiveGoogleAuth)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/orgs/{orgId}/domains",
am.AdminAccess(ah.listDomainsByOrg)).
Methods(http.MethodGet)
router.HandleFunc("/api/v1/domains",
am.AdminAccess(ah.postDomain)).
Methods(http.MethodPost)
router.HandleFunc("/api/v1/domains/{id}",
am.AdminAccess(ah.putDomain)).
Methods(http.MethodPut)
router.HandleFunc("/api/v1/domains/{id}",
am.AdminAccess(ah.deleteDomain)).
Methods(http.MethodDelete)
router.HandleFunc("/api/v1/domains", am.AdminAccess(ah.postDomain)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(ah.putDomain)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(ah.deleteDomain)).Methods(http.MethodDelete)
// base overrides
router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
// PAT APIs
@@ -185,6 +174,54 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
}
// TODO(nitya): remove this once we know how to get the FF's
func (ah *APIHandler) updateRequestContext(w http.ResponseWriter, r *http.Request) (*http.Request, error) {
ssoAvailable := true
err := ah.FF().CheckFeature(model.SSO)
if err != nil {
switch err.(type) {
case basemodel.ErrFeatureUnavailable:
// do nothing, just skip sso
ssoAvailable = false
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return r, errors.New(errors.TypeInternal, errors.CodeInternal, "error checking SSO feature")
}
}
ctx := context.WithValue(r.Context(), types.SSOAvailable, ssoAvailable)
return r.WithContext(ctx), nil
}
func (ah *APIHandler) loginPrecheck(w http.ResponseWriter, r *http.Request) {
r, err := ah.updateRequestContext(w, r)
if err != nil {
render.Error(w, err)
return
}
ah.Signoz.Handlers.User.LoginPrecheck(w, r)
return
}
func (ah *APIHandler) acceptInvite(w http.ResponseWriter, r *http.Request) {
r, err := ah.updateRequestContext(w, r)
if err != nil {
render.Error(w, err)
return
}
ah.Signoz.Handlers.User.AcceptInvite(w, r)
return
}
func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
r, err := ah.updateRequestContext(w, r)
if err != nil {
render.Error(w, err)
return
}
ah.Signoz.Handlers.User.GetInvite(w, r)
return
}
func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {
ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am)

View File

@@ -9,13 +9,11 @@ import (
"net/http"
"net/url"
"github.com/gorilla/mux"
"go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/http/render"
)
func parseRequest(r *http.Request, req interface{}) error {
@@ -31,161 +29,13 @@ func parseRequest(r *http.Request, req interface{}) error {
// loginUser overrides base handler and considers SSO case.
func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
req := basemodel.LoginRequest{}
err := parseRequest(r, &req)
r, err := ah.updateRequestContext(w, r)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
render.Error(w, err)
return
}
ctx := context.Background()
if req.Email != "" && ah.CheckFeature(model.SSO) {
var apierr basemodel.BaseApiError
_, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
if apierr != nil && !apierr.IsNil() {
RespondError(w, apierr, nil)
}
}
// if all looks good, call auth
resp, err := baseauth.Login(ctx, &req, ah.opts.JWT)
if ah.HandleError(w, err, http.StatusUnauthorized) {
return
}
ah.WriteJSON(w, r, resp)
}
// registerUser registers a user and responds with a precheck
// so the front-end can decide the login method
func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(model.SSO) {
ah.APIHandler.Register(w, r)
return
}
ctx := context.Background()
var req *baseauth.RegisterRequest
defer r.Body.Close()
requestBody, err := io.ReadAll(r.Body)
if err != nil {
zap.L().Error("received no input in api", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
err = json.Unmarshal(requestBody, &req)
if err != nil {
zap.L().Error("received invalid user registration request", zap.Error(err))
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
return
}
// get invite object
invite, err := baseauth.ValidateInvite(ctx, req)
if err != nil {
zap.L().Error("failed to validate invite token", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
if invite == nil {
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
return
}
// get auth domain from email domain
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
}
precheckResp := &basemodel.PrecheckResponse{
SSO: false,
IsUser: false,
}
if domain != nil && domain.SsoEnabled {
// sso is enabled, create user and respond precheck data
user, apierr := baseauth.RegisterInvitedUser(ctx, req, true)
if apierr != nil {
RespondError(w, apierr, nil)
return
}
var precheckError basemodel.BaseApiError
precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
if precheckError != nil {
RespondError(w, precheckError, precheckResp)
}
} else {
// no-sso, validate password
if err := baseauth.ValidatePassword(req.Password); err != nil {
RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
return
}
_, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager, ah.Signoz.Modules.Organization)
if !registerError.IsNil() {
RespondError(w, apierr, nil)
return
}
precheckResp.IsUser = true
}
ah.Respond(w, precheckResp)
}
// getInvite returns the invite object details for the given invite token. We do not need to
// protect this API because invite token itself is meant to be private.
func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
token := mux.Vars(r)["token"]
sourceUrl := r.URL.Query().Get("ref")
inviteObject, err := baseauth.GetInvite(r.Context(), token, ah.Signoz.Modules.Organization)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
resp := model.GettableInvitation{
InvitationResponseObject: inviteObject,
}
precheck, apierr := ah.AppDao().PrecheckLogin(r.Context(), inviteObject.Email, sourceUrl)
resp.Precheck = precheck
if apierr != nil {
RespondError(w, apierr, resp)
}
ah.WriteJSON(w, r, resp)
}
// PrecheckLogin enables browser login page to display appropriate
// login methods
func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
email := r.URL.Query().Get("email")
sourceUrl := r.URL.Query().Get("ref")
resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl)
if apierr != nil {
RespondError(w, apierr, resp)
}
ah.Respond(w, resp)
ah.Signoz.Handlers.User.Login(w, r)
return
}
func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
@@ -252,7 +102,7 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT)
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT)
if err != nil {
zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
@@ -330,7 +180,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
if err != nil {
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)

View File

@@ -12,8 +12,8 @@ import (
"github.com/SigNoz/signoz/ee/query-service/constants"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/auth"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -123,7 +123,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
))
}
for _, p := range allPats {
if p.UserID == integrationUser.ID && p.Name == integrationPATName {
if p.UserID == integrationUser.ID.String() && p.Name == integrationPATName {
return p.Token, nil
}
}
@@ -135,8 +135,8 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
newPAT := eeTypes.NewGettablePAT(
integrationPATName,
authtypes.RoleViewer.String(),
integrationUser.ID,
types.RoleViewer.String(),
integrationUser.ID.String(),
0,
)
integrationPAT, err := ah.AppDao().CreatePAT(ctx, orgId, newPAT)
@@ -154,10 +154,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
// TODO(nitya): there should be orgId here
integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err))
}
if integrationUserResult != nil {
@@ -169,29 +168,18 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
zap.String("cloudProvider", cloudProvider),
)
newUser := &types.User{
ID: uuid.New().String(),
Name: cloudIntegrationUser,
Email: email,
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},
OrgID: orgId,
}
newUser.Role = authtypes.RoleViewer.String()
passwordHash, err := auth.PasswordHash(uuid.NewString())
newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't hash random password for cloud integration user: %w", err,
"couldn't create cloud integration user: %w", err,
))
}
newUser.Password = passwordHash
integrationUser, apiErr := ah.AppDao().CreateUser(ctx, newUser, false)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't create cloud integration user")
password, err := types.NewFactorPassword(uuid.NewString())
integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}
return integrationUser, nil

View File

@@ -6,7 +6,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/gorilla/mux"
)
@@ -41,9 +40,9 @@ func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request
return
}
dashboard, err := dashboards.GetDashboard(r.Context(), claims.OrgID, uuid)
dashboard, err := ah.Signoz.Modules.Dashboard.Get(r.Context(), claims.OrgID, uuid)
if err != nil {
render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get dashboard"))
render.Error(w, err)
return
}
@@ -53,9 +52,9 @@ func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request
}
// Lock/Unlock the dashboard
err = dashboards.LockUnlockDashboard(r.Context(), claims.OrgID, uuid, lock)
err = ah.Signoz.Modules.Dashboard.LockUnlock(r.Context(), claims.OrgID, uuid, lock)
if err != nil {
render.Error(w, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to lock/unlock dashboard"))
render.Error(w, err)
return
}

View File

@@ -7,7 +7,7 @@ import (
"net/http"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"github.com/gorilla/mux"
)

View File

@@ -56,7 +56,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
}
func validatePATRequest(req eeTypes.GettablePAT) error {
_, err := authtypes.NewRole(req.Role)
_, err := types.NewRole(req.Role)
if err != nil {
return err
}
@@ -93,16 +93,16 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
}
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(r.Context(), claims.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
existingPAT, err := ah.AppDao().GetPATByID(r.Context(), claims.OrgID, id)
if err != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(r.Context(), existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
createdByUser, err := ah.Signoz.Modules.User.GetUserByID(r.Context(), claims.OrgID, existingPAT.UserID)
if err != nil {
render.Error(w, err)
return
}
@@ -119,7 +119,6 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
req.UpdatedByUserID = claims.UserID
req.UpdatedAt = time.Now()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
if apierr = ah.AppDao().UpdatePAT(r.Context(), claims.OrgID, req, id); apierr != nil {
RespondError(w, apierr, nil)
@@ -167,9 +166,9 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(r.Context(), existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
createdByUser, err := ah.Signoz.Modules.User.GetUserByID(r.Context(), claims.OrgID, existingPAT.UserID)
if err != nil {
render.Error(w, err)
return
}

View File

@@ -7,14 +7,27 @@ import (
"net/http"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/http/render"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(w, err)
return
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
@@ -29,7 +42,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
queryRangeParams.Version = "v4"
// add temporality for each metric
temporalityErr := aH.PopulateTemporality(r.Context(), queryRangeParams)
temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
if temporalityErr != nil {
zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
@@ -85,30 +98,30 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
switch seasonality {
case anomaly.SeasonalityWeekly:
provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
anomaly.WithCache[*anomaly.WeeklyProvider](aH.Signoz.Cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
)
case anomaly.SeasonalityDaily:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
)
case anomaly.SeasonalityHourly:
provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
anomaly.WithCache[*anomaly.HourlyProvider](aH.Signoz.Cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
)
default:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
)
}
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
anomalies, err := provider.GetAnomalies(r.Context(), orgID, &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return

View File

@@ -33,3 +33,7 @@ func NewDataConnector(
ClickHouseReader: chReader,
}
}
func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
return r.appdb
}

View File

@@ -15,10 +15,11 @@ import (
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/dao"
"github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -35,13 +36,10 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
baseexplorer "github.com/SigNoz/signoz/pkg/query-service/app/explorer"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -57,7 +55,6 @@ type ServerOptions struct {
HTTPHostPort string
PrivateHostPort string
PreferSpanMetrics bool
CacheConfigPath string
FluxInterval string
FluxIntervalForTraceDetail string
Cluster string
@@ -93,19 +90,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
// NewServer creates and initializes Server
func NewServer(serverOptions *ServerOptions) (*Server, error) {
modelDao, err := dao.InitDao(serverOptions.SigNoz.SQLStore)
if err != nil {
return nil, err
}
if err := baseexplorer.InitWithDSN(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
return nil, err
}
modelDao := sqlite.NewModelDao(serverOptions.SigNoz.SQLStore)
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil {
return nil, err
@@ -117,9 +102,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
return nil, err
}
// set license manager as feature flag provider in dao
modelDao.SetFlagProvider(lm)
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
if err != nil {
return nil, err
@@ -134,19 +116,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
serverOptions.SigNoz.Cache,
)
var c cache.Cache
if serverOptions.CacheConfigPath != "" {
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
if err != nil {
return nil, err
}
c = cache.NewCache(cacheOpts)
}
rm, err := makeRulesManager(
serverOptions.SigNoz.SQLStore.SQLxDB(),
reader,
c,
serverOptions.SigNoz.Cache,
serverOptions.SigNoz.Alertmanager,
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
@@ -205,7 +178,13 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
telemetry.GetInstance().SetReader(reader)
telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
if err != nil {
@@ -223,7 +202,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
Cache: c,
FluxInterval: fluxInterval,
Gateway: gatewayProxy,
GatewayUrl: serverOptions.GatewayUrl,
@@ -261,9 +239,15 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
&opAmpModel.AllAgents, agentConfMgr,
)
errorList := reader.PreloadMetricsMetadata(context.Background())
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
orgs, err := apiHandler.Signoz.Modules.Organization.GetAll(context.Background())
if err != nil {
return nil, err
}
for _, org := range orgs {
errorList := reader.PreloadMetricsMetadata(context.Background(), org.ID)
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
}
return s, nil

View File

@@ -1,10 +0,0 @@
package dao
import (
"github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
"github.com/SigNoz/signoz/pkg/sqlstore"
)
func InitDao(sqlStore sqlstore.SQLStore) (ModelDao, error) {
return sqlite.InitDB(sqlStore)
}

View File

@@ -4,27 +4,15 @@ import (
"context"
"net/url"
"github.com/SigNoz/signoz/ee/types"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
eeTypes "github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/uptrace/bun"
)
type ModelDao interface {
basedao.ModelDao
// SetFlagProvider sets the feature lookup provider
SetFlagProvider(flags baseint.FeatureLookup)
DB() *bun.DB
// auth methods
CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError)
GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error)
// org domain (auth domains) CRUD ops
@@ -35,10 +23,10 @@ type ModelDao interface {
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError)
CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
CreatePAT(ctx context.Context, orgID string, p eeTypes.GettablePAT) (eeTypes.GettablePAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, orgID string, p eeTypes.GettablePAT, id valuer.UUID) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*eeTypes.GettablePAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*eeTypes.GettablePAT, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]eeTypes.GettablePAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
}

View File

@@ -1,191 +0,0 @@
package sqlite
import (
"context"
"fmt"
"net/url"
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
baseauth "github.com/SigNoz/signoz/pkg/query-service/auth"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/google/uuid"
"go.uber.org/zap"
)
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, basemodel.BaseApiError) {
// get auth domain from email domain
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, model.InternalErrorStr("failed to get domain from email")
}
if domain == nil {
zap.L().Error("email domain does not match any authenticated domain", zap.String("email", email))
return nil, model.InternalErrorStr("email domain does not match any authenticated domain")
}
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
return nil, model.InternalErrorStr("failed to generate password hash")
}
user := &types.User{
ID: uuid.New().String(),
Name: "",
Email: email,
Password: hash,
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},
ProfilePictureURL: "", // Currently unused
Role: authtypes.RoleViewer.String(),
OrgID: domain.OrgID,
}
user, apiErr := m.CreateUser(ctx, user, false)
if apiErr != nil {
zap.L().Error("CreateUser failed", zap.Error(apiErr))
return nil, apiErr
}
return user, nil
}
// PrepareSsoRedirect prepares redirect page link after SSO response
// is successfully parsed (i.e. valid email is available)
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (redirectURL string, apierr basemodel.BaseApiError) {
userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
return "", model.BadRequestStr("invalid user email received from the auth provider")
}
user := &types.User{}
if userPayload == nil {
newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
user = newUser
if apiErr != nil {
zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
return "", apiErr
}
} else {
user = &userPayload.User
}
tokenStore, err := baseauth.GenerateJWTForUser(user, jwt)
if err != nil {
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", model.InternalErrorStr("failed to generate token for the user")
}
return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
redirectUri,
tokenStore.AccessJwt,
user.ID,
tokenStore.RefreshJwt), nil
}
func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
return false, apierr
}
if domain != nil && domain.SsoEnabled {
// sso is enabled, check if the user has admin role
userPayload, baseapierr := m.GetUserByEmail(ctx, email)
if baseapierr != nil || userPayload == nil {
return false, baseapierr
}
if userPayload.Role != authtypes.RoleAdmin.String() {
return false, model.BadRequest(fmt.Errorf("auth method not supported"))
}
}
return true, nil
}
// PrecheckLogin is called when the login or signup page is loaded
// to check sso login is to be prompted
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) {
// assume user is valid unless proven otherwise
resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false}
// check if email is a valid user
userPayload, baseApiErr := m.GetUserByEmail(ctx, email)
if baseApiErr != nil {
return resp, baseApiErr
}
if userPayload == nil {
resp.IsUser = false
}
ssoAvailable := true
err := m.checkFeature(model.SSO)
if err != nil {
switch err.(type) {
case basemodel.ErrFeatureUnavailable:
// do nothing, just skip sso
ssoAvailable = false
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, model.BadRequestStr(err.Error())
}
}
if ssoAvailable {
resp.IsUser = true
// find domain from email
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
return resp, apierr
}
if orgDomain != nil && orgDomain.SsoEnabled {
// saml is enabled for this domain, lets prepare sso url
if sourceUrl == "" {
sourceUrl = constants.GetDefaultSiteURL()
}
// parse source url that generated the login request
var err error
escapedUrl, _ := url.QueryUnescape(sourceUrl)
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
zap.L().Error("failed to parse referer", zap.Error(err))
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}
// build Idp URL that will authenticat the user
// the front-end will redirect user to this url
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
if err != nil {
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return resp, model.InternalError(err)
}
// set SSO to true, as the url is generated correctly
resp.SSO = true
}
}
return resp, nil
}
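
PrecheckLogin therefore gives the login page everything it needs to pick a flow: SsoUrl/SSO when the domain enforces SSO, IsUser for the password path, and CanSelfRegister otherwise. Below is a small, self-contained sketch of that branching, using a local struct that mirrors the PrecheckResponse fields referenced above; the helper name and its return values are hypothetical, not SigNoz code:

package main

import "fmt"

// precheckResponse mirrors the basemodel.PrecheckResponse fields used above
// (IsUser, CanSelfRegister, SSO, SsoUrl); it is a local copy for this sketch.
type precheckResponse struct {
	IsUser          bool
	CanSelfRegister bool
	SSO             bool
	SsoUrl          string
}

// nextLoginStep is a hypothetical helper showing how a login page could
// branch on the precheck result.
func nextLoginStep(resp precheckResponse) string {
	switch {
	case resp.SSO && resp.SsoUrl != "":
		return "redirect:" + resp.SsoUrl // send the browser to the IdP
	case resp.IsUser:
		return "show:password-login" // known user, SSO not enforced
	case resp.CanSelfRegister:
		return "show:self-signup"
	default:
		return "show:request-invite"
	}
}

func main() {
	fmt.Println(nextLoginStep(precheckResponse{SSO: true, SsoUrl: "https://idp.example.com/sso"}))
}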

View File

@@ -10,8 +10,8 @@ import (
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"go.uber.org/zap"
@@ -44,7 +44,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
}
domain, err = m.GetDomain(ctx, domainId)
if (err != nil) || domain == nil {
if err != nil {
zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
@@ -54,7 +54,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
domain = domainFromDB
if (err != nil) || domain == nil {
if err != nil {
zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
@@ -70,7 +70,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, basemodel.BaseApiError) {
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
err := m.sqlStore.BunDB().NewSelect().
Model(&stored).
Where("name = ?", name).
Limit(1).
@@ -94,7 +94,7 @@ func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.Get
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError) {
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
err := m.sqlStore.BunDB().NewSelect().
Model(&stored).
Where("id = ?", id).
Limit(1).
@@ -119,7 +119,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]types.Getta
domains := []types.GettableOrgDomain{}
stored := []types.StorableOrgDomain{}
err := m.DB().NewSelect().
err := m.sqlStore.BunDB().NewSelect().
Model(&stored).
Where("org_id = ?", orgId).
Scan(ctx)
@@ -167,7 +167,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *types.GettableOrgDo
TimeAuditable: ossTypes.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
}
_, err = m.DB().NewInsert().
_, err = m.sqlStore.BunDB().NewInsert().
Model(&storableDomain).
Exec(ctx)
@@ -201,7 +201,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *types.GettableOrgDo
TimeAuditable: ossTypes.TimeAuditable{UpdatedAt: time.Now()},
}
_, err = m.DB().NewUpdate().
_, err = m.sqlStore.BunDB().NewUpdate().
Model(storableDomain).
Column("data", "updated_at").
WherePK().
@@ -224,7 +224,7 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.Bas
}
storableDomain := &types.StorableOrgDomain{ID: id}
_, err := m.DB().NewDelete().
_, err := m.sqlStore.BunDB().NewDelete().
Model(storableDomain).
WherePK().
Exec(ctx)
@@ -251,7 +251,7 @@ func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*types.G
parsedDomain := components[1]
stored := types.StorableOrgDomain{}
err := m.DB().NewSelect().
err := m.sqlStore.BunDB().NewSelect().
Model(&stored).
Where("name = ?", parsedDomain).
Limit(1).

View File

@@ -1,46 +1,18 @@
package sqlite
import (
"fmt"
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
basedsql "github.com/SigNoz/signoz/pkg/query-service/dao/sqlite"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)
type modelDao struct {
*basedsql.ModelDaoSqlite
flags baseint.FeatureLookup
}
// SetFlagProvider sets the feature lookup provider
func (m *modelDao) SetFlagProvider(flags baseint.FeatureLookup) {
m.flags = flags
}
// CheckFeature confirms if a feature is available
func (m *modelDao) checkFeature(key string) error {
if m.flags == nil {
return fmt.Errorf("flag provider not set")
}
return m.flags.CheckFeature(key)
userModule user.Module
sqlStore sqlstore.SQLStore
}
// InitDB creates and extends base model DB repository
func InitDB(sqlStore sqlstore.SQLStore) (*modelDao, error) {
dao, err := basedsql.InitDB(sqlStore)
if err != nil {
return nil, err
}
// set package variable so dependent base methods (e.g. AuthCache) will work
basedao.SetDB(dao)
m := &modelDao{ModelDaoSqlite: dao}
return m, nil
}
func (m *modelDao) DB() *bun.DB {
return m.ModelDaoSqlite.DB()
func NewModelDao(sqlStore sqlstore.SQLStore) *modelDao {
userModule := impluser.NewModule(impluser.NewStore(sqlStore))
return &modelDao{userModule: userModule, sqlStore: sqlStore}
}
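
With this change the EE DAO drops the feature-flag plumbing and the package-level DB handle: it is built directly from a sqlstore.SQLStore, and the user module is wired inside NewModelDao via impluser.NewModule(impluser.NewStore(...)). A rough sketch of how a caller might use the new constructor; the ee import path and the NewEEDao wrapper are assumptions, not taken from this diff:

package wiring

import (
	// assumed import path for the ee "sqlite" package shown above
	eesqlite "github.com/SigNoz/signoz/ee/query-service/dao/sqlite"
	"github.com/SigNoz/signoz/pkg/sqlstore"
)

// NewEEDao is a hypothetical wrapper: the DAO now needs only a SQLStore.
// The concrete *modelDao type is unexported, so it is returned as interface{}
// here purely for illustration.
func NewEEDao(store sqlstore.SQLStore) interface{} {
	return eesqlite.NewModelDao(store)
}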

View File

@@ -17,7 +17,7 @@ import (
func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.GettablePAT) (types.GettablePAT, basemodel.BaseApiError) {
p.StorablePersonalAccessToken.OrgID = orgID
p.StorablePersonalAccessToken.ID = valuer.GenerateUUID()
_, err := m.DB().NewInsert().
_, err := m.sqlStore.BunDB().NewInsert().
Model(&p.StorablePersonalAccessToken).
Exec(ctx)
if err != nil {
@@ -25,7 +25,7 @@ func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.Gettable
return types.GettablePAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
createdByUser, _ := m.GetUser(ctx, p.UserID)
createdByUser, _ := m.userModule.GetUserByID(ctx, orgID, p.UserID)
if createdByUser == nil {
p.CreatedByUser = types.PatUser{
NotFound: true,
@@ -33,14 +33,15 @@ func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.Gettable
} else {
p.CreatedByUser = types.PatUser{
User: ossTypes.User{
ID: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
Identifiable: ossTypes.Identifiable{
ID: createdByUser.ID,
},
DisplayName: createdByUser.DisplayName,
Email: createdByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: createdByUser.CreatedAt,
UpdatedAt: createdByUser.UpdatedAt,
},
ProfilePictureURL: createdByUser.ProfilePictureURL,
},
NotFound: false,
}
@@ -49,7 +50,7 @@ func (m *modelDao) CreatePAT(ctx context.Context, orgID string, p types.Gettable
}
func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError {
_, err := m.DB().NewUpdate().
_, err := m.sqlStore.BunDB().NewUpdate().
Model(&p.StorablePersonalAccessToken).
Column("role", "name", "updated_at", "updated_by_user_id").
Where("id = ?", id.StringValue()).
@@ -66,7 +67,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, orgID string, p types.Gettable
func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
if err := m.sqlStore.BunDB().NewSelect().
Model(&pats).
Where("revoked = false").
Where("org_id = ?", orgID).
@@ -82,7 +83,7 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
StorablePersonalAccessToken: pats[i],
}
createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
createdByUser, _ := m.userModule.GetUserByID(ctx, orgID, pats[i].UserID)
if createdByUser == nil {
patWithUser.CreatedByUser = types.PatUser{
NotFound: true,
@@ -90,20 +91,21 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
} else {
patWithUser.CreatedByUser = types.PatUser{
User: ossTypes.User{
ID: createdByUser.ID,
Name: createdByUser.Name,
Email: createdByUser.Email,
Identifiable: ossTypes.Identifiable{
ID: createdByUser.ID,
},
DisplayName: createdByUser.DisplayName,
Email: createdByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: createdByUser.CreatedAt,
UpdatedAt: createdByUser.UpdatedAt,
},
ProfilePictureURL: createdByUser.ProfilePictureURL,
},
NotFound: false,
}
}
updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID)
updatedByUser, _ := m.userModule.GetUserByID(ctx, orgID, pats[i].UpdatedByUserID)
if updatedByUser == nil {
patWithUser.UpdatedByUser = types.PatUser{
NotFound: true,
@@ -111,14 +113,15 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
} else {
patWithUser.UpdatedByUser = types.PatUser{
User: ossTypes.User{
ID: updatedByUser.ID,
Name: updatedByUser.Name,
Email: updatedByUser.Email,
Identifiable: ossTypes.Identifiable{
ID: updatedByUser.ID,
},
DisplayName: updatedByUser.DisplayName,
Email: updatedByUser.Email,
TimeAuditable: ossTypes.TimeAuditable{
CreatedAt: updatedByUser.CreatedAt,
UpdatedAt: updatedByUser.UpdatedAt,
},
ProfilePictureURL: updatedByUser.ProfilePictureURL,
},
NotFound: false,
}
@@ -131,7 +134,7 @@ func (m *modelDao) ListPATs(ctx context.Context, orgID string) ([]types.Gettable
func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().NewUpdate().
_, err := m.sqlStore.BunDB().NewUpdate().
Model(&types.StorablePersonalAccessToken{}).
Set("revoked = ?", true).
Set("updated_by_user_id = ?", userID).
@@ -149,7 +152,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, orgID string, id valuer.UUID,
func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
if err := m.sqlStore.BunDB().NewSelect().
Model(&pats).
Where("token = ?", token).
Where("revoked = false").
@@ -174,7 +177,7 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*types.GettablePAT
func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError) {
pats := []types.StorablePersonalAccessToken{}
if err := m.DB().NewSelect().
if err := m.sqlStore.BunDB().NewSelect().
Model(&pats).
Where("id = ?", id.StringValue()).
Where("org_id = ?", orgID).

View File

@@ -6,6 +6,7 @@ import (
"os"
"time"
eeuserimpl "github.com/SigNoz/signoz/ee/modules/user/impluser"
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/ee/zeus"
@@ -13,8 +14,10 @@ import (
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
"github.com/SigNoz/signoz/pkg/modules/user"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
@@ -72,6 +75,7 @@ func main() {
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
// Deprecated
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
// Deprecated
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)")
@@ -117,6 +121,12 @@ func main() {
signoz.NewWebProviderFactories(),
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
func(sqlstore sqlstore.SQLStore) user.Module {
return eeuserimpl.NewModule(eeuserimpl.NewStore(sqlstore))
},
func(userModule user.Module) user.Handler {
return eeuserimpl.NewHandler(userModule)
},
)
if err != nil {
zap.L().Fatal("Failed to create signoz", zap.Error(err))
@@ -138,7 +148,6 @@ func main() {
HTTPHostPort: baseconst.HTTPHostPort,
PreferSpanMetrics: preferSpanMetrics,
PrivateHostPort: baseconst.PrivateHostPort,
CacheConfigPath: cacheConfigPath,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,

View File

@@ -1,12 +0,0 @@
package model
import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
// GettableInvitation overrides the base object and adds precheck information
// to the response
type GettableInvitation struct {
*basemodel.InvitationResponseObject
Precheck *basemodel.PrecheckResponse `json:"precheck"`
}

View File

@@ -12,10 +12,11 @@ import (
"go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/query-service/cache"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -53,6 +54,7 @@ type AnomalyRule struct {
func NewAnomalyRule(
id string,
orgID valuer.UUID,
p *ruletypes.PostableRule,
reader interfaces.Reader,
cache cache.Cache,
@@ -66,7 +68,7 @@ func NewAnomalyRule(
p.RuleCondition.Target = &target
}
baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
if err != nil {
return nil, err
}
@@ -158,18 +160,18 @@ func (r *AnomalyRule) GetSelectedQuery() string {
return r.Condition().GetSelectedQueryName()
}
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRange(ts)
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, params)
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
anomalies, err := r.provider.GetAnomalies(ctx, &anomaly.GetAnomaliesRequest{
anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
@@ -204,7 +206,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
prevState := r.State()
valueFormatter := formatter.FromUnit(r.Unit())
res, err := r.buildAndRunQuery(ctx, ts)
res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
if err != nil {
return nil, err

View File

@@ -9,6 +9,7 @@ import (
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"go.uber.org/zap"
)
@@ -23,6 +24,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
// create a threshold rule
tr, err := baserules.NewThresholdRule(
ruleId,
opts.OrgID,
opts.Rule,
opts.Reader,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
@@ -43,6 +45,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
// create promql rule
pr, err := baserules.NewPromRule(
ruleId,
opts.OrgID,
opts.Rule,
opts.Logger,
opts.Reader,
@@ -63,6 +66,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
// create anomaly rule
ar, err := NewAnomalyRule(
ruleId,
opts.OrgID,
opts.Rule,
opts.Reader,
opts.Cache,
@@ -119,6 +123,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
// create a threshold rule
rule, err = baserules.NewThresholdRule(
alertname,
opts.OrgID,
parsedRule,
opts.Reader,
baserules.WithSendAlways(),
@@ -136,6 +141,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
// create promql rule
rule, err = baserules.NewPromRule(
alertname,
opts.OrgID,
parsedRule,
opts.Logger,
opts.Reader,
@@ -153,6 +159,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
// create anomaly rule
rule, err = NewAnomalyRule(
alertname,
opts.OrgID,
parsedRule,
opts.Reader,
opts.Cache,
@@ -187,7 +194,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
// newTask returns an appropriate task group for the given rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID string) baserules.Task {
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
}

View File

@@ -1,31 +0,0 @@
package sso
import (
"net/http"
)
// SSOIdentity contains details of user received from SSO provider
type SSOIdentity struct {
UserID string
Username string
PreferredUsername string
Email string
EmailVerified bool
ConnectorData []byte
}
// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth
// style redirect flow to determine user information.
type OAuthCallbackProvider interface {
// The initial URL the user would be redirected to.
// OAuth2 implementations support various scopes but we only need profile and user as
// the roles are still being managed in SigNoz.
BuildAuthURL(state string) (string, error)
// Handle the callback to the server (after login at oauth provider site)
// and return an email identity.
// At the moment we don't support an auto signup flow (based on domain), so
// the full identity (including name, group, etc.) is not required outside of the
// connector
HandleCallback(r *http.Request) (identity *SSOIdentity, err error)
}

View File

@@ -19,12 +19,14 @@ var (
var (
Org = "org"
User = "user"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
)
@@ -264,6 +266,8 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
} else if reference == FactorPassword && !slices.Contains(fkReferences, FactorPasswordReference) {
fkReferences = append(fkReferences, FactorPasswordReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
}

View File

@@ -106,3 +106,7 @@ func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, for
return err
}
func (dialect *dialect) ToggleForeignKeyConstraint(ctx context.Context, bun *bun.DB, enable bool) error {
return nil
}

View File

@@ -110,6 +110,8 @@ module.exports = {
// eslint rules that need to be removed
'@typescript-eslint/no-shadow': 'off',
'import/no-cycle': 'off',
// https://typescript-eslint.io/rules/consistent-return/ check the warning for details
'consistent-return': 'off',
'prettier/prettier': [
'error',
{},

View File

@@ -1,6 +1,10 @@
# Ignore artifacts:
build
coverage
public/
# Ignore all MD files:
**/*.md
**/*.md
# Ignore all JSON files:
**/*.json

View File

@@ -3,5 +3,4 @@ BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/"
INTERCOM_APP_ID="intercom-app-id"
PLAYWRIGHT_TEST_BASE_URL="http://localhost:8080"
CI="1"

View File

@@ -30,11 +30,6 @@ const config: Config.InitialOptions = {
testPathIgnorePatterns: ['/node_modules/', '/public/'],
moduleDirectories: ['node_modules', 'src'],
testEnvironment: 'jest-environment-jsdom',
testEnvironmentOptions: {
'jest-playwright': {
browsers: ['chromium', 'firefox', 'webkit'],
},
},
coverageThreshold: {
global: {
statements: 80,

View File

@@ -15,10 +15,6 @@
"jest:coverage": "jest --coverage",
"jest:watch": "jest --watch",
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
"playwright": "NODE_ENV=testing playwright test --config=./playwright.config.ts",
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
"playwright:codegen:local": "playwright codegen http://localhost:3301",
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
"commitlint": "commitlint --edit $1",
"test": "jest",
@@ -35,6 +31,7 @@
"@dnd-kit/core": "6.1.0",
"@dnd-kit/modifiers": "7.0.0",
"@dnd-kit/sortable": "8.0.0",
"@dnd-kit/utilities": "3.2.2",
"@grafana/data": "^11.2.3",
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",
@@ -82,6 +79,7 @@
"history": "4.10.1",
"html-webpack-plugin": "5.5.0",
"http-proxy-middleware": "3.0.3",
"http-status-codes": "2.3.0",
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
"i18next-http-backend": "^1.3.2",
@@ -90,7 +88,7 @@
"less": "^4.1.2",
"less-loader": "^10.2.0",
"lodash-es": "^4.17.21",
"lucide-react": "0.427.0",
"lucide-react": "0.498.0",
"mini-css-extract-plugin": "2.4.5",
"motion": "12.4.13",
"overlayscrollbars": "^2.8.1",
@@ -163,7 +161,6 @@
"@commitlint/config-conventional": "^16.2.4",
"@faker-js/faker": "9.3.0",
"@jest/globals": "^27.5.1",
"@playwright/test": "^1.22.0",
"@testing-library/jest-dom": "5.16.5",
"@testing-library/react": "13.4.0",
"@testing-library/user-event": "14.4.3",
@@ -257,6 +254,7 @@
"http-proxy-middleware": "3.0.3",
"cross-spawn": "7.0.5",
"cookie": "^0.7.1",
"serialize-javascript": "6.0.2"
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0"
}
}

View File

@@ -1,23 +0,0 @@
import { PlaywrightTestConfig } from '@playwright/test';
import dotenv from 'dotenv';
dotenv.config();
const config: PlaywrightTestConfig = {
forbidOnly: !!process.env.CI,
retries: process.env.CI ? 2 : 0,
preserveOutput: 'always',
name: 'Signoz',
testDir: './tests',
use: {
trace: 'retain-on-failure',
baseURL: process.env.PLAYWRIGHT_TEST_BASE_URL || 'http://localhost:3301',
},
updateSnapshots: 'all',
fullyParallel: !!process.env.CI,
quiet: false,
testMatch: ['**/*.spec.ts'],
reporter: process.env.CI ? 'github' : 'list',
};
export default config;

File diff suppressed because one or more lines are too long (added image, 5.9 KiB; preview omitted)

View File

@@ -0,0 +1 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="m12.192 3.18-1.167 2.33-.583 1.165M7.31 12.74a.583.583 0 0 1-.835-.24L1.808 3.179"/><path d="M7 1.167c2.9 0 5.25.783 5.25 1.75 0 .966-2.35 1.75-5.25 1.75s-5.25-.784-5.25-1.75c0-.967 2.35-1.75 5.25-1.75ZM8.75 10.5h3.5M10.5 12.25v-3.5"/></g></svg>


View File

@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#a)" stroke-linecap="round" stroke-linejoin="round"><path d="M8 14.666A6.667 6.667 0 1 0 8 1.333a6.667 6.667 0 0 0 0 13.333Z" fill="#C0C1C3" stroke="#C0C1C3" stroke-width="2"/><path d="M8 11.333v-4H6.333M8 4.667h.007" stroke="#121317" stroke-width="1.333"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>


View File

@@ -69,5 +69,5 @@
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
"API_MONITORING": "SigNoz | API Monitoring"
"API_MONITORING": "SigNoz | External APIs"
}

View File

@@ -9,7 +9,7 @@ done
# create temporary tsconfig which includes only passed files
str="{
\"extends\": \"./tsconfig.json\",
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./conf/default.conf\",\"./public\",\"./tests\",\"./playwright.config.ts\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
\"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./public\",\"./tests\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files]
}"
echo $str > tsconfig.tmp

View File

@@ -1,6 +1,6 @@
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getOrgUser from 'api/user/getOrgUser';
import getAll from 'api/v1/user/get';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
@@ -11,8 +11,11 @@ import { useAppContext } from 'providers/App/App';
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { matchPath, useLocation } from 'react-router-dom';
import { SuccessResponseV2 } from 'types/api';
import APIError from 'types/api/error';
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
import { Organization } from 'types/api/user/getOrganization';
import { UserResponse } from 'types/api/user/getUser';
import { USER_ROLES } from 'types/roles';
import { routePermission } from 'utils/permission';
@@ -58,12 +61,13 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const [orgData, setOrgData] = useState<Organization | undefined>(undefined);
const { data: orgUsers, isFetching: isFetchingOrgUsers } = useQuery({
const { data: usersData, isFetching: isFetchingUsers } = useQuery<
SuccessResponseV2<UserResponse[]> | undefined,
APIError
>({
queryFn: () => {
if (orgData && orgData.id !== undefined) {
return getOrgUser({
orgId: orgData.id,
});
return getAll();
}
return undefined;
},
@@ -72,23 +76,23 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
});
const checkFirstTimeUser = useCallback((): boolean => {
const users = orgUsers?.payload || [];
const users = usersData?.data || [];
const remainingUsers = users.filter(
(user) => user.email !== 'admin@signoz.cloud',
);
return remainingUsers.length === 1;
}, [orgUsers?.payload]);
}, [usersData?.data]);
useEffect(() => {
if (
isCloudUserVal &&
!isFetchingOrgPreferences &&
orgPreferences &&
!isFetchingOrgUsers &&
orgUsers &&
orgUsers.payload
!isFetchingUsers &&
usersData &&
usersData.data
) {
const isOnboardingComplete = orgPreferences?.find(
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
@@ -108,9 +112,9 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
checkFirstTimeUser,
isCloudUserVal,
isFetchingOrgPreferences,
isFetchingOrgUsers,
isFetchingUsers,
orgPreferences,
orgUsers,
usersData,
pathname,
]);

View File

@@ -5,6 +5,7 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
@@ -69,14 +70,14 @@ function App(): JSX.Element {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].displayName : '';
const { name, email, role } = user;
const { displayName, email, role } = user;
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
const identifyPayload = {
email,
name,
name: displayName,
company_name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -105,7 +106,7 @@ function App(): JSX.Element {
Userpilot.identify(email, {
email,
name,
name: displayName,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -117,7 +118,7 @@ function App(): JSX.Element {
posthog?.identify(email, {
email,
name,
name: displayName,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -143,7 +144,7 @@ function App(): JSX.Element {
) {
window.cioanalytics.reset();
window.cioanalytics.identify(email, {
name: user.name,
name: user.displayName,
email,
role: user.role,
});
@@ -257,7 +258,7 @@ function App(): JSX.Element {
window.Intercom('boot', {
app_id: process.env.INTERCOM_APP_ID,
email: user?.email || '',
name: user?.name || '',
name: user?.displayName || '',
});
}
}
@@ -354,6 +355,7 @@ function App(): JSX.Element {
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<UserpilotRouteTracker />
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>

View File

@@ -47,9 +47,10 @@ export const TracesFunnels = Loadable(
import(/* webpackChunkName: "Traces Funnels" */ 'pages/TracesModulePage'),
);
export const TracesFunnelDetails = Loadable(
// eslint-disable-next-line sonarjs/no-identical-functions
() =>
import(
/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesFunnelDetails'
/* webpackChunkName: "Traces Funnel Details" */ 'pages/TracesModulePage'
),
);

View File

@@ -531,6 +531,7 @@ export const oldRoutes = [
'/traces-save-views',
'/settings/access-tokens',
'/messaging-queues',
'/alerts/edit',
];
export const oldNewRoutesMapping: Record<string, string> = {
@@ -541,6 +542,7 @@ export const oldNewRoutesMapping: Record<string, string> = {
'/traces-save-views': '/traces/saved-views',
'/settings/access-tokens': '/settings/api-keys',
'/messaging-queues': '/messaging-queues/overview',
'/alerts/edit': '/alerts/overview',
};
export const ROUTES_NOT_TO_BE_OVERRIDEN: string[] = [

View File

@@ -0,0 +1,46 @@
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import APIError from 'types/api/error';
// reference - https://axios-http.com/docs/handling_errors
export function ErrorResponseHandlerV2(error: AxiosError<ErrorV2Resp>): never {
const { response, request } = error;
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
if (response) {
throw new APIError({
httpStatusCode: response.status || 500,
error: {
code: response.data.error.code,
message: response.data.error.message,
url: response.data.error.url,
errors: response.data.error.errors,
},
});
}
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
if (request) {
throw new APIError({
httpStatusCode: error.status || 500,
error: {
code: error.code || error.name,
message: error.message,
url: '',
errors: [],
},
});
}
// Something happened in setting up the request that triggered an Error
throw new APIError({
httpStatusCode: error.status || 500,
error: {
code: error.name,
message: error.message,
url: '',
errors: [],
},
});
}

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createEmail';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
email_configs: [
{
@@ -21,13 +21,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createMsTeams';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
msteamsv2_configs: [
{
@@ -21,13 +21,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createOpsgenie';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
opsgenie_configs: [
{
@@ -24,13 +24,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createPager';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
pagerduty_configs: [
{
@@ -29,13 +29,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createSlack';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
slack_configs: [
{
@@ -22,13 +22,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,12 +1,12 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createWebhook';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
let httpConfig = {};
const username = props.username ? props.username.trim() : '';
@@ -28,7 +28,7 @@ const create = async (
};
}
const response = await axios.post('/channels', {
const response = await axios.post<PayloadProps>('/channels', {
name: props.name,
webhook_configs: [
{
@@ -40,13 +40,12 @@ const create = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,23 +1,22 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/delete';
const deleteChannel = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.delete(`/channels/${props.id}`);
const response = await axios.delete<PayloadProps>(`/channels/${props.id}`);
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editEmail';
const editEmail = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
email_configs: [
{
@@ -21,13 +21,12 @@ const editEmail = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editMsTeams';
const editMsTeams = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
msteamsv2_configs: [
{
@@ -21,13 +21,12 @@ const editMsTeams = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorResponse, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editOpsgenie';
const editOpsgenie = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
opsgenie_configs: [
{
@@ -25,13 +25,12 @@ const editOpsgenie = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editPager';
const editPager = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
pagerduty_configs: [
{
@@ -29,13 +29,12 @@ const editPager = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editSlack';
const editSlack = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
slack_configs: [
{
@@ -22,13 +22,12 @@ const editSlack = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,12 +1,12 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/editWebhook';
const editWebhook = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
let httpConfig = {};
const username = props.username ? props.username.trim() : '';
@@ -28,7 +28,7 @@ const editWebhook = async (
};
}
const response = await axios.put(`/channels/${props.id}`, {
const response = await axios.put<PayloadProps>(`/channels/${props.id}`, {
name: props.name,
webhook_configs: [
{
@@ -40,13 +40,12 @@ const editWebhook = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,23 +1,21 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/get';
import { Channels } from 'types/api/channels/getAll';
const get = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const get = async (props: Props): Promise<SuccessResponseV2<Channels>> => {
try {
const response = await axios.get(`/channels/${props.id}`);
const response = await axios.get<PayloadProps>(`/channels/${props.id}`);
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,23 +1,20 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps } from 'types/api/channels/getAll';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Channels, PayloadProps } from 'types/api/channels/getAll';
const getAll = async (): Promise<
SuccessResponse<PayloadProps> | ErrorResponse
> => {
const getAll = async (): Promise<SuccessResponseV2<Channels[]>> => {
try {
const response = await axios.get('/channels');
const response = await axios.get<PayloadProps>('/channels');
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createEmail';
const testEmail = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/testChannel', {
const response = await axios.post<PayloadProps>('/testChannel', {
name: props.name,
email_configs: [
{
@@ -21,13 +21,12 @@ const testEmail = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createMsTeams';
const testMsTeams = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/testChannel', {
const response = await axios.post<PayloadProps>('/testChannel', {
name: props.name,
msteamsv2_configs: [
{
@@ -21,13 +21,12 @@ const testMsTeams = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createOpsgenie';
const testOpsgenie = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/testChannel', {
const response = await axios.post<PayloadProps>('/testChannel', {
name: props.name,
opsgenie_configs: [
{
@@ -24,13 +24,12 @@ const testOpsgenie = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createPager';
const testPager = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/testChannel', {
const response = await axios.post<PayloadProps>('/testChannel', {
name: props.name,
pagerduty_configs: [
{
@@ -29,13 +29,12 @@ const testPager = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createSlack';
const testSlack = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.post('/testChannel', {
const response = await axios.post<PayloadProps>('/testChannel', {
name: props.name,
slack_configs: [
{
@@ -22,13 +22,12 @@ const testSlack = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -1,12 +1,12 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/channels/createWebhook';
const testWebhook = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
let httpConfig = {};
const username = props.username ? props.username.trim() : '';
@@ -28,7 +28,7 @@ const testWebhook = async (
};
}
const response = await axios.post('/testChannel', {
const response = await axios.post<PayloadProps>('/testChannel', {
name: props.name,
webhook_configs: [
{
@@ -40,13 +40,12 @@ const testWebhook = async (
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data.data,
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

View File

@@ -2,7 +2,7 @@
/* eslint-disable no-param-reassign */
/* eslint-disable @typescript-eslint/no-explicit-any */
import getLocalStorageApi from 'api/browser/localstorage/get';
import loginApi from 'api/user/login';
import loginApi from 'api/v1/login/login';
import afterLogin from 'AppRoutes/utils';
import axios, { AxiosResponse, InternalAxiosRequestConfig } from 'axios';
import { ENVIRONMENT } from 'constants/env';
@@ -71,15 +71,15 @@ const interceptorRejected = async (
const { response } = value;
// reject the refresh token error
if (response.status === 401 && response.config.url !== '/login') {
const response = await loginApi({
refreshToken: getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN) || '',
});
try {
const response = await loginApi({
refreshToken: getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN) || '',
});
if (response.statusCode === 200) {
afterLogin(
response.payload.userId,
response.payload.accessJwt,
response.payload.refreshJwt,
response.data.userId,
response.data.accessJwt,
response.data.refreshJwt,
true,
);
@@ -89,23 +89,22 @@ const interceptorRejected = async (
method: value.config.method,
headers: {
...value.config.headers,
Authorization: `Bearer ${response.payload.accessJwt}`,
Authorization: `Bearer ${response.data.accessJwt}`,
},
data: {
...JSON.parse(value.config.data || '{}'),
},
},
);
if (reResponse.status === 200) {
return await Promise.resolve(reResponse);
}
Logout();
return await Promise.reject(reResponse);
} catch (error) {
Logout();
}
Logout();
}
// when refresh token is expired
if (response.status === 401 && response.config.url === '/login') {
Logout();

View File

@@ -1,8 +0,0 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import { ApDexPayloadAndSettingsProps } from 'types/api/metrics/getApDex';
export const getApDexSettings = (
servicename: string,
): Promise<AxiosResponse<ApDexPayloadAndSettingsProps[]>> =>
axios.get(`/settings/apdex?services=${servicename}`);

View File

@@ -0,0 +1,54 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
export interface InspectMetricsRequest {
metricName: string;
start: number;
end: number;
filters: TagFilter;
}
export interface InspectMetricsResponse {
status: string;
data: {
series: InspectMetricsSeries[];
};
}
export interface InspectMetricsSeries {
title?: string;
strokeColor?: string;
labels: Record<string, string>;
labelsArray: Array<Record<string, string>>;
values: InspectMetricsTimestampValue[];
}
interface InspectMetricsTimestampValue {
timestamp: number;
value: string;
}
export const getInspectMetricsDetails = async (
request: InspectMetricsRequest,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<InspectMetricsResponse> | ErrorResponse> => {
try {
const response = await axios.post(`/metrics/inspect`, request, {
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
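
A small usage sketch for the new inspect endpoint, assuming the import path; the request carries a metric name, a start/end range and a query-builder TagFilter, and the call keeps the older SuccessResponse | ErrorResponse convention, so callers still branch on statusCode:

import {
  getInspectMetricsDetails,
  InspectMetricsRequest,
  InspectMetricsResponse,
} from 'api/metrics/getInspectMetricsDetails'; // assumed path

async function inspectMetric(request: InspectMetricsRequest): Promise<void> {
  const controller = new AbortController();
  const response = await getInspectMetricsDetails(request, controller.signal);

  if (response.statusCode === 200) {
    // On success, the payload follows InspectMetricsResponse: { status, data: { series } }.
    const payload = response.payload as InspectMetricsResponse;
    console.log('series returned:', payload.data.series.length);
  } else {
    console.error('metric inspect failed:', response.error);
  }
}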


@@ -1,21 +1,20 @@
- import axios from 'api';
+ import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
- import { PayloadProps, Props } from 'types/api/user/updateRole';
+ import { PayloadProps, Props } from 'types/api/quickFilters/getCustomFilters';
- const updateRole = async (
+ const getCustomFilters = async (
  props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
+   const { signal } = props;
  try {
-     const response = await axios.put(`/rbac/role/${props.userId}`, {
-       group_name: props.group_name,
-     });
+     const response = await ApiBaseInstance.get(`orgs/me/filters/${signal}`);
    return {
      statusCode: 200,
      error: null,
-       message: response.data.status,
+       message: 'Success',
      payload: response.data.data,
    };
  } catch (error) {
@@ -23,4 +22,4 @@ const updateRole = async (
  }
};
- export default updateRole;
+ export default getCustomFilters;
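
A matching usage sketch, assuming the import path; note that `signal` in this module is a URL path segment (the telemetry signal the filters belong to), not an AbortSignal:

import getCustomFilters from 'api/quickFilters/getCustomFilters'; // assumed path
import { Props } from 'types/api/quickFilters/getCustomFilters';

// `props.signal` is appended to orgs/me/filters/ — i.e. the data source (such as logs)
// whose quick filters should be fetched; the exact Props shape is not shown here.
async function loadQuickFilters(props: Props): Promise<void> {
  const response = await getCustomFilters(props);

  if (response.statusCode === 200) {
    console.log('custom quick filters:', response.payload);
  } else {
    console.error('failed to load quick filters:', response.error);
  }
}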


@@ -0,0 +1,13 @@
import { ApiBaseInstance } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
const updateCustomFiltersAPI = async (
props: UpdateCustomFiltersProps,
): Promise<SuccessResponse<void> | AxiosError> =>
ApiBaseInstance.put(`orgs/me/filters`, {
...props.data,
});
export default updateCustomFiltersAPI;
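
And the corresponding write path; the concrete shape of UpdateCustomFiltersProps is not shown here, so the sketch only forwards it and relies on the promise rejecting with an AxiosError on failure:

import updateCustomFiltersAPI from 'api/quickFilters/updateCustomFilters'; // assumed path
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';

async function saveQuickFilters(props: UpdateCustomFiltersProps): Promise<void> {
  try {
    // The PUT body is simply the spread of props.data (see the module above).
    await updateCustomFiltersAPI(props);
    console.log('quick filters saved');
  } catch (err) {
    console.error('failed to save quick filters:', err);
  }
}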


@@ -5,6 +5,7 @@ import {
  CreateFunnelPayload,
  CreateFunnelResponse,
  FunnelData,
+   FunnelStepData,
} from 'types/api/traceFunnels';
const FUNNELS_BASE_PATH = '/trace-funnels';
@@ -13,7 +14,7 @@ export const createFunnel = async (
  payload: CreateFunnelPayload,
): Promise<SuccessResponse<CreateFunnelResponse> | ErrorResponse> => {
  const response: AxiosResponse = await axios.post(
-     `${FUNNELS_BASE_PATH}/new-funnel`,
+     `${FUNNELS_BASE_PATH}/new`,
    payload,
  );
@@ -21,64 +22,49 @@ export const createFunnel = async (
    statusCode: 200,
    error: null,
    message: 'Funnel created successfully',
-     payload: response.data,
+     payload: response.data.data,
  };
};
- interface GetFunnelsListParams {
-   search?: string;
- }
- export const getFunnelsList = async ({
-   search = '',
- }: GetFunnelsListParams = {}): Promise<
+ export const getFunnelsList = async (): Promise<
  SuccessResponse<FunnelData[]> | ErrorResponse
> => {
-   const params = new URLSearchParams();
-   if (search.length) {
-     params.set('search', search);
-   }
-   const response: AxiosResponse = await axios.get(
-     `${FUNNELS_BASE_PATH}/list${
-       params.toString() ? `?${params.toString()}` : ''
-     }`,
-   );
+   const response: AxiosResponse = await axios.get(`${FUNNELS_BASE_PATH}/list`);
  return {
    statusCode: 200,
    error: null,
    message: '',
-     payload: response.data,
+     payload: response.data.data,
  };
};
export const getFunnelById = async (
-   funnelId: string,
+   funnelId?: string,
): Promise<SuccessResponse<FunnelData> | ErrorResponse> => {
  const response: AxiosResponse = await axios.get(
-     `${FUNNELS_BASE_PATH}/get/${funnelId}`,
+     `${FUNNELS_BASE_PATH}/${funnelId}`,
  );
  return {
    statusCode: 200,
    error: null,
    message: '',
-     payload: response.data,
+     payload: response.data.data,
  };
};
- interface RenameFunnelPayload {
-   id: string;
+ export interface RenameFunnelPayload {
+   funnel_id: string;
  funnel_name: string;
  timestamp: number;
}
export const renameFunnel = async (
  payload: RenameFunnelPayload,
): Promise<SuccessResponse<FunnelData> | ErrorResponse> => {
  const response: AxiosResponse = await axios.put(
-     `${FUNNELS_BASE_PATH}/${payload.id}/update`,
-     { funnel_name: payload.funnel_name },
+     `${FUNNELS_BASE_PATH}/${payload.funnel_id}`,
+     payload,
  );
  return {
@@ -97,7 +83,7 @@ export const deleteFunnel = async (
  payload: DeleteFunnelPayload,
): Promise<SuccessResponse<FunnelData> | ErrorResponse> => {
  const response: AxiosResponse = await axios.delete(
-     `${FUNNELS_BASE_PATH}/delete/${payload.id}`,
+     `${FUNNELS_BASE_PATH}/${payload.id}`,
  );
  return {
@@ -107,3 +93,270 @@ export const deleteFunnel = async (
    payload: response.data,
  };
};
export interface UpdateFunnelStepsPayload {
funnel_id: string;
steps: FunnelStepData[];
timestamp: number;
}
export const updateFunnelSteps = async (
payload: UpdateFunnelStepsPayload,
): Promise<SuccessResponse<FunnelData> | ErrorResponse> => {
const response: AxiosResponse = await axios.put(
`${FUNNELS_BASE_PATH}/steps/update`,
payload,
);
return {
statusCode: 200,
error: null,
message: 'Funnel steps updated successfully',
payload: response.data.data,
};
};
export interface ValidateFunnelPayload {
start_time: number;
end_time: number;
}
export interface ValidateFunnelResponse {
status: string;
data: Array<{
timestamp: string;
data: {
trace_id: string;
};
}> | null;
}
export const validateFunnelSteps = async (
funnelId: string,
payload: ValidateFunnelPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<ValidateFunnelResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/validate`,
payload,
{ signal },
);
return {
statusCode: 200,
error: null,
message: '',
payload: response.data,
};
};
export interface UpdateFunnelStepDetailsPayload {
funnel_id: string;
steps: Array<{
step_name: string;
description: string;
}>;
updated_at: number;
}
interface UpdateFunnelDescriptionPayload {
funnel_id: string;
description: string;
}
export const saveFunnelDescription = async (
payload: UpdateFunnelDescriptionPayload,
): Promise<SuccessResponse<FunnelData> | ErrorResponse> => {
const response: AxiosResponse = await axios.post(
`${FUNNELS_BASE_PATH}/save`,
payload,
);
return {
statusCode: 200,
error: null,
message: 'Funnel description updated successfully',
payload: response.data,
};
};
export interface FunnelOverviewPayload {
start_time: number;
end_time: number;
step_start?: number;
step_end?: number;
}
export interface FunnelOverviewResponse {
status: string;
data: Array<{
timestamp: string;
data: {
avg_duration: number;
avg_rate: number;
conversion_rate: number | null;
errors: number;
// TODO(shaheer): remove p99_latency once we have support for latency
p99_latency: number;
latency: number;
};
}>;
}
export const getFunnelOverview = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelOverviewResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/overview`,
payload,
{
signal,
},
);
return {
statusCode: 200,
error: null,
message: '',
payload: response.data,
};
};
export interface SlowTraceData {
status: string;
data: Array<{
timestamp: string;
data: {
duration_ms: string;
span_count: number;
trace_id: string;
};
}>;
}
export const getFunnelSlowTraces = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<SlowTraceData> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/slow-traces`,
payload,
{
signal,
},
);
return {
statusCode: 200,
error: null,
message: '',
payload: response.data,
};
};
export interface ErrorTraceData {
status: string;
data: Array<{
timestamp: string;
data: {
duration_ms: string;
span_count: number;
trace_id: string;
};
}>;
}
export const getFunnelErrorTraces = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<ErrorTraceData> | ErrorResponse> => {
const response: AxiosResponse = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/error-traces`,
payload,
{
signal,
},
);
return {
statusCode: 200,
error: null,
message: '',
payload: response.data,
};
};
export interface FunnelStepsPayload {
start_time: number;
end_time: number;
}
export interface FunnelStepGraphMetrics {
[key: `total_s${number}_spans`]: number;
[key: `total_s${number}_errored_spans`]: number;
}
export interface FunnelStepsResponse {
status: string;
data: Array<{
timestamp: string;
data: FunnelStepGraphMetrics;
}>;
}
export const getFunnelSteps = async (
funnelId: string,
payload: FunnelStepsPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelStepsResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps`,
payload,
{ signal },
);
return {
statusCode: 200,
error: null,
message: '',
payload: response.data,
};
};
export interface FunnelStepsOverviewPayload {
start_time: number;
end_time: number;
step_start?: number;
step_end?: number;
}
export interface FunnelStepsOverviewResponse {
status: string;
data: Array<{
timestamp: string;
data: Record<string, number>;
}>;
}
export const getFunnelStepsOverview = async (
funnelId: string,
payload: FunnelStepsOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelStepsOverviewResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps/overview`,
payload,
{ signal },
);
return {
statusCode: 200,
error: null,
message: '',
payload: response.data,
};
};
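
The new analytics endpoints all share one calling convention: a POST to /trace-funnels/{funnelId}/analytics/... with a start_time/end_time window (plus optional step bounds) and an optional AbortSignal. A hedged orchestration sketch — the function names and payload shapes come from the code above, while the import path and the way results are combined are assumptions:

import {
  getFunnelOverview,
  getFunnelSteps,
  validateFunnelSteps,
} from 'api/traceFunnels'; // assumed path

async function loadFunnelAnalytics(
  funnelId: string,
  start_time: number, // same unit as the backend expects for start_time/end_time
  end_time: number,
): Promise<void> {
  const controller = new AbortController();

  // Validation reports the matching trace_ids (or null) for the configured steps.
  const validation = await validateFunnelSteps(
    funnelId,
    { start_time, end_time },
    controller.signal,
  );
  if (validation.statusCode !== 200) {
    console.warn('funnel validation failed:', validation.error);
    return;
  }

  // The remaining analytics endpoints share the same POST-with-time-range shape,
  // so they can be fetched in parallel and cancelled together via the controller.
  const [overview, steps] = await Promise.all([
    getFunnelOverview(funnelId, { start_time, end_time }, controller.signal),
    getFunnelSteps(funnelId, { start_time, end_time }, controller.signal),
  ]);
  console.log('overview:', overview.payload, 'steps:', steps.payload);
}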


@@ -1,26 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/user/changeMyPassword';
const changeMyPassword = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post(`/changePassword/${props.userId}`, {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default changeMyPassword;

Some files were not shown because too many files have changed in this diff.