Compare commits
21 Commits
v0.80.0-7b ... remove-dea

| Author | SHA1 | Date |
|---|---|---|
| | 807aa60906 | |
| | 208a5603a9 | |
| | 940313d28b | |
| | 0de779a866 | |
| | 9815ec7d81 | |
| | a7cad0f1a5 | |
| | a624b4758d | |
| | 5cc833b73f | |
| | 3eee3bfec1 | |
| | 01b308d507 | |
| | ee5684b130 | |
| | dcf627a683 | |
| | 2f8da5957b | |
| | 3f6f77d0e2 | |
| | 5bceffbeaa | |
| | 49c04eb9d9 | |
| | c89a8cbb0c | |
| | b6bb71f650 | |
| | af135aa068 | |
| | fc604915ed | |
42  .github/workflows/README.md  vendored

@@ -1,42 +0,0 @@
# Github actions

## Testing the UI manually on each PR

First, we need to make sure the UI is ready:
* Check the `Start tunnel` step in the `e2e-k8s/deploy-on-k3s-cluster` job and make sure you see `your url is: https://pull-<number>-signoz.loca.lt`
* This job will run until the PR is merged or closed to keep the local tunnel alive
  - GitHub will cancel this job if the PR hasn't been merged after 6h
  - if the job was cancelled, go to the action and press `Re-run all jobs`

Now you can open your browser at https://pull-<number>-signoz.loca.lt and check the UI.

## Environment Variables

To run the GitHub workflows, a few environment variables need to be added to GitHub secrets

<table>
<tr>
<th> Variables </th>
<th> Description </th>
<th> Example </th>
</tr>
<tr>
<td> REPONAME </td>
<td> Provide the DockerHub user/organisation name of the image. </td>
<td> signoz</td>
</tr>
<tr>
<td> DOCKERHUB_USERNAME </td>
<td> Docker hub username </td>
<td> signoz</td>
</tr>
<tr>
<td> DOCKERHUB_TOKEN </td>
<td> Docker hub password/token with push permission </td>
<td> **** </td>
</tr>
<tr>
<td> SONAR_TOKEN </td>
<td> <a href="https://sonarcloud.io">SonarCloud</a> token </td>
<td> **** </td>
</tr>
16  .github/workflows/remove-label.yaml  vendored

@@ -1,16 +0,0 @@
name: remove-label

on:
  pull_request_target:
    types: [synchronize]

jobs:
  remove:
    runs-on: ubuntu-latest
    steps:
      - name: Remove label testing-deploy from PR
        uses: buildsville/add-remove-label@v2.0.0
        with:
          label: testing-deploy
          type: remove
          token: ${{ secrets.GITHUB_TOKEN }}
@@ -103,6 +103,13 @@ telemetrystore:
  clickhouse:
    # The DSN to use for clickhouse.
    dsn: tcp://localhost:9000
    # The query settings for clickhouse.
    settings:
      max_execution_time: 0
      max_execution_time_leaf: 0
      timeout_before_checking_execution_speed: 0
      max_bytes_to_read: 0
      max_result_rows_for_ch_query: 0

##################### Prometheus #####################
prometheus:
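The new `settings` block exposes standard ClickHouse query-level limits (a value of 0 means "no limit" for most of them). A minimal sketch of how such values can be attached to a connection with clickhouse-go; the address and setting names below are illustrative only, not the actual telemetrystore wiring:

```go
package main

import (
    "context"
    "log"

    "github.com/ClickHouse/clickhouse-go/v2"
)

func main() {
    // Query-level limits mirroring the keys added to the config above.
    // 0 means "no limit" for these ClickHouse settings.
    conn, err := clickhouse.Open(&clickhouse.Options{
        Addr: []string{"localhost:9000"},
        Settings: clickhouse.Settings{
            "max_execution_time": 0,
            "max_bytes_to_read":  0,
            "max_result_rows":    0,
        },
    })
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    if err := conn.Ping(context.Background()); err != nil {
        log.Fatal(err)
    }
}
```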
@@ -13,9 +13,6 @@ import (
    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/apis/fields"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    "github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
    "github.com/SigNoz/signoz/pkg/modules/preference"
    preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
    baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
    "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
    "github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -26,14 +23,12 @@ import (
    rules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/preferencetypes"
    "github.com/SigNoz/signoz/pkg/version"
    "github.com/gorilla/mux"
)

type APIHandlerOptions struct {
    DataConnector interfaces.DataConnector
    SkipConfig *basemodel.SkipConfig
    PreferSpanMetrics bool
    AppDao dao.ModelDao
    RulesManager *rules.Manager
@@ -60,13 +55,8 @@ type APIHandler struct {

// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
    preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
    organizationAPI := implorganization.NewAPI(implorganization.NewModule(implorganization.NewStore(signoz.SQLStore)))
    organizationModule := implorganization.NewModule(implorganization.NewStore(signoz.SQLStore))

    baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
        Reader: opts.DataConnector,
        SkipConfig: opts.SkipConfig,
        PreferSpanMetrics: opts.PreferSpanMetrics,
        AppDao: opts.AppDao,
        RuleManager: opts.RulesManager,
@@ -76,14 +66,9 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
        LogsParsingPipelineController: opts.LogsParsingPipelineController,
        Cache: opts.Cache,
        FluxInterval: opts.FluxInterval,
        UseLogsNewSchema: opts.UseLogsNewSchema,
        UseTraceNewSchema: opts.UseTraceNewSchema,
        AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
        FieldsAPI: fields.NewAPI(signoz.TelemetryStore),
        Signoz: signoz,
        Preference: preference,
        OrganizationAPI: organizationAPI,
        OrganizationModule: organizationModule,
    })

    if err != nil {
@@ -134,7 +134,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
        return
    }

    _, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager, ah.OrganizationModule)
    _, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager, ah.Signoz.Modules.Organization)
    if !registerError.IsNil() {
        RespondError(w, apierr, nil)
        return
@@ -152,7 +152,7 @@ func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
    token := mux.Vars(r)["token"]
    sourceUrl := r.URL.Query().Get("ref")

    inviteObject, err := baseauth.GetInvite(r.Context(), token, ah.OrganizationModule)
    inviteObject, err := baseauth.GetInvite(r.Context(), token, ah.Signoz.Modules.Organization)
    if err != nil {
        RespondError(w, model.BadRequest(err), nil)
        return
@@ -23,12 +23,10 @@ func NewDataConnector(
    telemetryStore telemetrystore.TelemetryStore,
    prometheus prometheus.Prometheus,
    cluster string,
    useLogsNewSchema bool,
    useTraceNewSchema bool,
    fluxIntervalForTraceDetail time.Duration,
    cache cache.Cache,
) *ClickhouseReader {
    chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
    chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, fluxIntervalForTraceDetail, cache)
    return &ClickhouseReader{
        conn: telemetryStore.ClickhouseDB(),
        appdb: sqlDB,
@@ -45,33 +45,23 @@ import (
|
||||
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
|
||||
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
const AppDbEngine = "sqlite"
|
||||
|
||||
type ServerOptions struct {
|
||||
Config signoz.Config
|
||||
SigNoz *signoz.SigNoz
|
||||
PromConfigPath string
|
||||
SkipTopLvlOpsPath string
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
// alert specific params
|
||||
DisableRules bool
|
||||
RuleRepoURL string
|
||||
Config signoz.Config
|
||||
SigNoz *signoz.SigNoz
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
PreferSpanMetrics bool
|
||||
CacheConfigPath string
|
||||
FluxInterval string
|
||||
FluxIntervalForTraceDetail string
|
||||
Cluster string
|
||||
GatewayUrl string
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
Jwt *authtypes.JWT
|
||||
}
|
||||
|
||||
@@ -140,20 +130,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
serverOptions.SigNoz.Prometheus,
|
||||
serverOptions.Cluster,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
serverOptions.UseTraceNewSchema,
|
||||
fluxIntervalForTraceDetail,
|
||||
serverOptions.SigNoz.Cache,
|
||||
)
|
||||
|
||||
skipConfig := &basemodel.SkipConfig{}
|
||||
if serverOptions.SkipTopLvlOpsPath != "" {
|
||||
// read skip config
|
||||
skipConfig, err = basemodel.ReadSkipConfig(serverOptions.SkipTopLvlOpsPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
var c cache.Cache
|
||||
if serverOptions.CacheConfigPath != "" {
|
||||
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
|
||||
@@ -164,13 +144,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
rm, err := makeRulesManager(
|
||||
serverOptions.RuleRepoURL,
|
||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
reader,
|
||||
c,
|
||||
serverOptions.DisableRules,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
serverOptions.UseTraceNewSchema,
|
||||
serverOptions.SigNoz.Alertmanager,
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
@@ -238,7 +214,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
|
||||
apiOpts := api.APIHandlerOptions{
|
||||
DataConnector: reader,
|
||||
SkipConfig: skipConfig,
|
||||
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
|
||||
AppDao: modelDao,
|
||||
RulesManager: rm,
|
||||
@@ -252,8 +227,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
FluxInterval: fluxInterval,
|
||||
Gateway: gatewayProxy,
|
||||
GatewayUrl: serverOptions.GatewayUrl,
|
||||
UseLogsNewSchema: serverOptions.UseLogsNewSchema,
|
||||
UseTraceNewSchema: serverOptions.UseTraceNewSchema,
|
||||
JWT: serverOptions.Jwt,
|
||||
}
|
||||
|
||||
@@ -263,8 +236,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
s := &Server{
|
||||
// logger: logger,
|
||||
// tracer: tracer,
|
||||
ruleManager: rm,
|
||||
serverOptions: serverOptions,
|
||||
unavailableChannel: make(chan healthcheck.Status),
|
||||
@@ -411,13 +382,7 @@ func (s *Server) initListeners() error {
|
||||
|
||||
// Start listening on http and private http port concurrently
|
||||
func (s *Server) Start(ctx context.Context) error {
|
||||
|
||||
// initiate rule manager first
|
||||
if !s.serverOptions.DisableRules {
|
||||
s.ruleManager.Start(ctx)
|
||||
} else {
|
||||
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
|
||||
}
|
||||
s.ruleManager.Start(ctx)
|
||||
|
||||
err := s.initListeners()
|
||||
if err != nil {
|
||||
@@ -508,13 +473,9 @@ func (s *Server) Stop() error {
|
||||
}
|
||||
|
||||
func makeRulesManager(
|
||||
ruleRepoURL string,
|
||||
db *sqlx.DB,
|
||||
ch baseint.Reader,
|
||||
cache cache.Cache,
|
||||
disableRules bool,
|
||||
useLogsNewSchema bool,
|
||||
useTraceNewSchema bool,
|
||||
alertmanager alertmanager.Alertmanager,
|
||||
sqlstore sqlstore.SQLStore,
|
||||
telemetryStore telemetrystore.TelemetryStore,
|
||||
@@ -524,17 +485,13 @@ func makeRulesManager(
|
||||
managerOpts := &baserules.ManagerOptions{
|
||||
TelemetryStore: telemetryStore,
|
||||
Prometheus: prometheus,
|
||||
RepoURL: ruleRepoURL,
|
||||
DBConn: db,
|
||||
Context: context.Background(),
|
||||
Logger: zap.L(),
|
||||
DisableRules: disableRules,
|
||||
Reader: ch,
|
||||
Cache: cache,
|
||||
EvalDelay: baseconst.GetEvalDelay(),
|
||||
PrepareTaskFunc: rules.PrepareTaskFunc,
|
||||
UseLogsNewSchema: useLogsNewSchema,
|
||||
UseTraceNewSchema: useTraceNewSchema,
|
||||
PrepareTestRuleFunc: rules.TestNotification,
|
||||
Alertmanager: alertmanager,
|
||||
SQLStore: sqlstore,
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"go.uber.org/zap/zapcore"
|
||||
)
|
||||
|
||||
// Deprecated: Please use the logger from pkg/instrumentation.
|
||||
func initZapLog() *zap.Logger {
|
||||
config := zap.NewProductionConfig()
|
||||
config.EncoderConfig.TimeKey = "timestamp"
|
||||
@@ -50,21 +51,31 @@ func main() {
|
||||
var gatewayUrl string
|
||||
var useLicensesV3 bool
|
||||
|
||||
// Deprecated
|
||||
flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
|
||||
// Deprecated
|
||||
flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces")
|
||||
// Deprecated
|
||||
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
|
||||
// Deprecated
|
||||
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
|
||||
// Deprecated
|
||||
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
|
||||
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
|
||||
// Deprecated
|
||||
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
|
||||
// Deprecated
|
||||
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
|
||||
// Deprecated
|
||||
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
|
||||
// Deprecated
|
||||
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
|
||||
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
|
||||
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
|
||||
flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)")
|
||||
flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
|
||||
flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
|
||||
// Deprecated
|
||||
flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
|
||||
flag.Parse()
|
||||
|
||||
@@ -121,19 +132,13 @@ func main() {
|
||||
Config: config,
|
||||
SigNoz: signoz,
|
||||
HTTPHostPort: baseconst.HTTPHostPort,
|
||||
PromConfigPath: promConfigPath,
|
||||
SkipTopLvlOpsPath: skipTopLvlOpsPath,
|
||||
PreferSpanMetrics: preferSpanMetrics,
|
||||
PrivateHostPort: baseconst.PrivateHostPort,
|
||||
DisableRules: disableRules,
|
||||
RuleRepoURL: ruleRepoURL,
|
||||
CacheConfigPath: cacheConfigPath,
|
||||
FluxInterval: fluxInterval,
|
||||
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
|
||||
Cluster: cluster,
|
||||
GatewayUrl: gatewayUrl,
|
||||
UseLogsNewSchema: useLogsNewSchema,
|
||||
UseTraceNewSchema: useTraceNewSchema,
|
||||
Jwt: jwt,
|
||||
}
|
||||
|
||||
|
||||
@@ -25,8 +25,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
        ruleId,
        opts.Rule,
        opts.Reader,
        opts.UseLogsNewSchema,
        opts.UseTraceNewSchema,
        baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
        baserules.WithSQLStore(opts.SQLStore),
    )
@@ -123,8 +121,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
        alertname,
        parsedRule,
        opts.Reader,
        opts.UseLogsNewSchema,
        opts.UseTraceNewSchema,
        baserules.WithSendAlways(),
        baserules.WithSendUnmatched(),
        baserules.WithSQLStore(opts.SQLStore),
@@ -194,7 +194,7 @@ func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table str
    }

    if !oldColumnExists {
        return false, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("old column: %s doesn't exist", oldColumnName))
        return false, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "old column: %s doesn't exist", oldColumnName)
    }

    _, err = bun.
@@ -4,8 +4,10 @@ import (
    "context"
    "database/sql"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/jackc/pgx/v5/pgconn"
    "github.com/jackc/pgx/v5/pgxpool"
    "github.com/jackc/pgx/v5/stdlib"
    "github.com/jmoiron/sqlx"
@@ -87,3 +89,20 @@ func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
func (provider *provider) RunInTxCtx(ctx context.Context, opts *sql.TxOptions, cb func(ctx context.Context) error) error {
    return provider.bundb.RunInTxCtx(ctx, opts, cb)
}

func (provider *provider) WrapNotFoundErrf(err error, code errors.Code, format string, args ...any) error {
    if err == sql.ErrNoRows {
        return errors.Wrapf(err, errors.TypeNotFound, code, format, args...)
    }

    return err
}

func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error {
    var pgErr *pgconn.PgError
    if errors.As(err, &pgErr) && pgErr.Code == "23505" {
        return errors.Wrapf(err, errors.TypeAlreadyExists, code, format, args...)
    }

    return err
}
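The two wrappers above centralize driver-specific error translation: `sql.ErrNoRows` becomes a typed not-found error, the Postgres unique-violation code `23505` becomes an already-exists error, and anything else passes through unchanged. A minimal sketch of how a store could use the not-found wrapper, assuming the wrappers are exposed via the `sqlstore.SQLStore` interface as the provider methods suggest; `userStore`, `User`, and `codeUserNotFound` are hypothetical names, not part of this change:

```go
package example

import (
    "context"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun"
)

// Hypothetical bun model used only for this sketch.
type User struct {
    bun.BaseModel `bun:"table:users"`

    ID   string `bun:"id,pk"`
    Name string `bun:"name"`
}

// Assume an error code value defined elsewhere in the module.
var codeUserNotFound errors.Code

type userStore struct {
    sqlstore sqlstore.SQLStore
}

func (s *userStore) GetByID(ctx context.Context, id string) (*User, error) {
    user := new(User)
    err := s.sqlstore.BunDBCtx(ctx).NewSelect().Model(user).Where("id = ?", id).Scan(ctx)
    if err != nil {
        // sql.ErrNoRows is translated into a typed "not found" error;
        // every other error is returned unchanged by the wrapper.
        return nil, s.sqlstore.WrapNotFoundErrf(err, codeUserNotFound, "user with id %s not found", id)
    }

    return user, nil
}
```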
@@ -55,7 +55,7 @@
    "ansi-to-html": "0.7.2",
    "antd": "5.11.0",
    "antd-table-saveas-excel": "2.2.1",
    "axios": "1.7.7",
    "axios": "1.8.2",
    "babel-eslint": "^10.1.0",
    "babel-jest": "^29.6.4",
    "babel-loader": "9.1.3",
@@ -64,10 +64,6 @@ export const TraceDetail = Loadable(
    ),
);

export const UsageExplorerPage = Loadable(
    () => import(/* webpackChunkName: "UsageExplorerPage" */ 'modules/Usage'),
);

export const SignupPage = Loadable(
    () => import(/* webpackChunkName: "SignupPage" */ 'pages/SignUp'),
);
@@ -57,7 +57,6 @@ import {
    TracesFunnels,
    TracesSaveViews,
    UnAuthorized,
    UsageExplorerPage,
    WorkspaceAccessRestricted,
    WorkspaceBlocked,
    WorkspaceSuspended,
@@ -155,13 +154,6 @@ const routes: AppRoutes[] = [
        isPrivate: true,
        key: 'SETTINGS',
    },
    {
        path: ROUTES.USAGE_EXPLORER,
        exact: true,
        component: UsageExplorerPage,
        isPrivate: true,
        key: 'USAGE_EXPLORER',
    },
    {
        path: ROUTES.ALL_DASHBOARD,
        exact: true,
@@ -1,26 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/logs/getLogs';

const GetLogs = async (
    props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
    try {
        const data = await axios.get(`/logs`, {
            params: props,
        });

        return {
            statusCode: 200,
            error: null,
            message: '',
            payload: data.data.results,
        };
    } catch (error) {
        return ErrorResponseHandler(error as AxiosError);
    }
};

export default GetLogs;
@@ -1,19 +0,0 @@
import apiV1 from 'api/apiV1';
import getLocalStorageKey from 'api/browser/localstorage/get';
import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import { EventSourcePolyfill } from 'event-source-polyfill';

// 10 min in ms
const TIMEOUT_IN_MS = 10 * 60 * 1000;

export const LiveTail = (queryParams: string): EventSourcePolyfill =>
    new EventSourcePolyfill(
        `${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`,
        {
            headers: {
                Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
            },
            heartbeatTimeout: TIMEOUT_IN_MS,
        },
    );
13  frontend/src/components/NewSelect/CustomMultiSelect.scss  Normal file

@@ -0,0 +1,13 @@
.custom-multiselect-dropdown {
    .divider {
        height: 1px;
        background-color: #e8e8e8;
        margin: 4px 0;
    }

    .all-option {
        font-weight: 500;
        border-bottom: 1px solid #f0f0f0;
        margin-bottom: 8px;
    }
}
1765  frontend/src/components/NewSelect/CustomMultiSelect.tsx  Normal file

File diff suppressed because it is too large
606  frontend/src/components/NewSelect/CustomSelect.tsx  Normal file

@@ -0,0 +1,606 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable react/function-component-definition */
|
||||
import './styles.scss';
|
||||
|
||||
import {
|
||||
CloseOutlined,
|
||||
DownOutlined,
|
||||
LoadingOutlined,
|
||||
ReloadOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Select } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import { capitalize, isEmpty } from 'lodash-es';
|
||||
import { ArrowDown, ArrowUp } from 'lucide-react';
|
||||
import type { BaseSelectRef } from 'rc-select';
|
||||
import React, {
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { CustomSelectProps, OptionData } from './types';
|
||||
import {
|
||||
filterOptionsBySearch,
|
||||
prioritizeOrAddOptionForSingleSelect,
|
||||
SPACEKEY,
|
||||
} from './utils';
|
||||
|
||||
/**
|
||||
* CustomSelect Component
|
||||
*
|
||||
*/
|
||||
const CustomSelect: React.FC<CustomSelectProps> = ({
|
||||
placeholder = 'Search...',
|
||||
className,
|
||||
loading = false,
|
||||
onSearch,
|
||||
options = [],
|
||||
value,
|
||||
onChange,
|
||||
defaultActiveFirstOption = true,
|
||||
noDataMessage,
|
||||
onClear,
|
||||
getPopupContainer,
|
||||
dropdownRender,
|
||||
highlightSearch = true,
|
||||
placement = 'bottomLeft',
|
||||
popupMatchSelectWidth = true,
|
||||
popupClassName,
|
||||
errorMessage,
|
||||
allowClear = false,
|
||||
onRetry,
|
||||
...rest
|
||||
}) => {
|
||||
// ===== State & Refs =====
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [searchText, setSearchText] = useState('');
|
||||
const [activeOptionIndex, setActiveOptionIndex] = useState<number>(-1);
|
||||
|
||||
// Refs for element access and scroll behavior
|
||||
const selectRef = useRef<BaseSelectRef>(null);
|
||||
const dropdownRef = useRef<HTMLDivElement>(null);
|
||||
const optionRefs = useRef<Record<number, HTMLDivElement | null>>({});
|
||||
|
||||
// ===== Option Filtering & Processing Utilities =====
|
||||
|
||||
/**
|
||||
* Checks if a label exists in the provided options
|
||||
*/
|
||||
const isLabelPresent = useCallback(
|
||||
(options: OptionData[], label: string): boolean =>
|
||||
options.some((option) => {
|
||||
const lowerLabel = label.toLowerCase();
|
||||
|
||||
// Check in nested options if they exist
|
||||
if ('options' in option && Array.isArray(option.options)) {
|
||||
return option.options.some(
|
||||
(subOption) => subOption.label.toLowerCase() === lowerLabel,
|
||||
);
|
||||
}
|
||||
|
||||
// Check top-level option
|
||||
return option.label.toLowerCase() === lowerLabel;
|
||||
}),
|
||||
[],
|
||||
);
|
||||
|
||||
/**
|
||||
* Separates section and non-section options
|
||||
*/
|
||||
const splitOptions = useCallback((options: OptionData[]): {
|
||||
sectionOptions: OptionData[];
|
||||
nonSectionOptions: OptionData[];
|
||||
} => {
|
||||
const sectionOptions: OptionData[] = [];
|
||||
const nonSectionOptions: OptionData[] = [];
|
||||
|
||||
options.forEach((option) => {
|
||||
if ('options' in option && Array.isArray(option.options)) {
|
||||
sectionOptions.push(option);
|
||||
} else {
|
||||
nonSectionOptions.push(option);
|
||||
}
|
||||
});
|
||||
|
||||
return { sectionOptions, nonSectionOptions };
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Apply search filtering to options
|
||||
*/
|
||||
const filteredOptions = useMemo(
|
||||
(): OptionData[] => filterOptionsBySearch(options, searchText),
|
||||
[options, searchText],
|
||||
);
|
||||
|
||||
// ===== UI & Rendering Functions =====
|
||||
|
||||
/**
|
||||
* Highlights matched text in search results
|
||||
*/
|
||||
const highlightMatchedText = useCallback(
|
||||
(text: string, searchQuery: string): React.ReactNode => {
|
||||
if (!searchQuery || !highlightSearch) return text;
|
||||
|
||||
const parts = text.split(new RegExp(`(${searchQuery})`, 'gi'));
|
||||
return (
|
||||
<>
|
||||
{parts.map((part, i) => {
|
||||
// Create a deterministic but unique key
|
||||
const uniqueKey = `${text.substring(0, 3)}-${part.substring(0, 3)}-${i}`;
|
||||
|
||||
return part.toLowerCase() === searchQuery.toLowerCase() ? (
|
||||
<span key={uniqueKey} className="highlight-text">
|
||||
{part}
|
||||
</span>
|
||||
) : (
|
||||
part
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
},
|
||||
[highlightSearch],
|
||||
);
|
||||
|
||||
/**
|
||||
* Renders an individual option with proper keyboard navigation support
|
||||
*/
|
||||
const renderOptionItem = useCallback(
|
||||
(
|
||||
option: OptionData,
|
||||
isSelected: boolean,
|
||||
index?: number,
|
||||
): React.ReactElement => {
|
||||
const handleSelection = (): void => {
|
||||
if (onChange) {
|
||||
onChange(option.value, option);
|
||||
setIsOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
const isActive = index === activeOptionIndex;
|
||||
const optionId = `option-${index}`;
|
||||
|
||||
return (
|
||||
<div
|
||||
key={option.value}
|
||||
id={optionId}
|
||||
ref={(el): void => {
|
||||
if (index !== undefined) {
|
||||
optionRefs.current[index] = el;
|
||||
}
|
||||
}}
|
||||
className={cx('option-item', {
|
||||
selected: isSelected,
|
||||
active: isActive,
|
||||
})}
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
handleSelection();
|
||||
}}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter' || e.key === SPACEKEY) {
|
||||
e.preventDefault();
|
||||
handleSelection();
|
||||
}
|
||||
}}
|
||||
onMouseEnter={(): void => setActiveOptionIndex(index || -1)}
|
||||
role="option"
|
||||
aria-selected={isSelected}
|
||||
aria-disabled={option.disabled}
|
||||
tabIndex={isActive ? 0 : -1}
|
||||
>
|
||||
<div className="option-content">
|
||||
<div>{highlightMatchedText(String(option.label || ''), searchText)}</div>
|
||||
{option.type === 'custom' && (
|
||||
<div className="option-badge">{capitalize(option.type)}</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
[highlightMatchedText, searchText, onChange, activeOptionIndex],
|
||||
);
|
||||
|
||||
/**
|
||||
* Helper function to render option with index tracking
|
||||
*/
|
||||
const renderOptionWithIndex = useCallback(
|
||||
(option: OptionData, isSelected: boolean, idx: number) =>
|
||||
renderOptionItem(option, isSelected, idx),
|
||||
[renderOptionItem],
|
||||
);
|
||||
|
||||
/**
|
||||
* Custom clear button renderer
|
||||
*/
|
||||
const clearIcon = useCallback(
|
||||
() => (
|
||||
<CloseOutlined
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
if (onChange) onChange(undefined, []);
|
||||
if (onClear) onClear();
|
||||
}}
|
||||
/>
|
||||
),
|
||||
[onChange, onClear],
|
||||
);
|
||||
|
||||
// ===== Event Handlers =====
|
||||
|
||||
/**
|
||||
* Handles search input changes
|
||||
*/
|
||||
const handleSearch = useCallback(
|
||||
(value: string): void => {
|
||||
const trimmedValue = value.trim();
|
||||
setSearchText(trimmedValue);
|
||||
|
||||
if (onSearch) onSearch(trimmedValue);
|
||||
},
|
||||
[onSearch],
|
||||
);
|
||||
|
||||
/**
|
||||
* Prevents event propagation for dropdown clicks
|
||||
*/
|
||||
const handleDropdownClick = useCallback((e: React.MouseEvent): void => {
|
||||
e.stopPropagation();
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Comprehensive keyboard navigation handler
|
||||
*/
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent): void => {
|
||||
// Handle keyboard navigation when dropdown is open
|
||||
if (isOpen) {
|
||||
// Get flattened list of all selectable options
|
||||
const getFlatOptions = (): OptionData[] => {
|
||||
if (!filteredOptions) return [];
|
||||
|
||||
const flatList: OptionData[] = [];
|
||||
|
||||
// Process options
|
||||
const { sectionOptions, nonSectionOptions } = splitOptions(
|
||||
isEmpty(value)
|
||||
? filteredOptions
|
||||
: prioritizeOrAddOptionForSingleSelect(filteredOptions, value),
|
||||
);
|
||||
|
||||
// Add custom option if needed
|
||||
if (!isEmpty(searchText) && !isLabelPresent(filteredOptions, searchText)) {
|
||||
flatList.push({
|
||||
label: searchText,
|
||||
value: searchText,
|
||||
type: 'custom',
|
||||
});
|
||||
}
|
||||
|
||||
// Add all options to flat list
|
||||
flatList.push(...nonSectionOptions);
|
||||
sectionOptions.forEach((section) => {
|
||||
if (section.options) {
|
||||
flatList.push(...section.options);
|
||||
}
|
||||
});
|
||||
|
||||
return flatList;
|
||||
};
|
||||
|
||||
const options = getFlatOptions();
|
||||
|
||||
switch (e.key) {
|
||||
case 'ArrowDown':
|
||||
e.preventDefault();
|
||||
setActiveOptionIndex((prev) =>
|
||||
prev < options.length - 1 ? prev + 1 : 0,
|
||||
);
|
||||
break;
|
||||
|
||||
case 'ArrowUp':
|
||||
e.preventDefault();
|
||||
setActiveOptionIndex((prev) =>
|
||||
prev > 0 ? prev - 1 : options.length - 1,
|
||||
);
|
||||
break;
|
||||
|
||||
case 'Tab':
|
||||
// Tab navigation with Shift key support
|
||||
if (e.shiftKey) {
|
||||
e.preventDefault();
|
||||
setActiveOptionIndex((prev) =>
|
||||
prev > 0 ? prev - 1 : options.length - 1,
|
||||
);
|
||||
} else {
|
||||
e.preventDefault();
|
||||
setActiveOptionIndex((prev) =>
|
||||
prev < options.length - 1 ? prev + 1 : 0,
|
||||
);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'Enter':
|
||||
e.preventDefault();
|
||||
if (activeOptionIndex >= 0 && activeOptionIndex < options.length) {
|
||||
// Select the focused option
|
||||
const selectedOption = options[activeOptionIndex];
|
||||
if (onChange) {
|
||||
onChange(selectedOption.value, selectedOption);
|
||||
setIsOpen(false);
|
||||
setActiveOptionIndex(-1);
|
||||
}
|
||||
} else if (!isEmpty(searchText)) {
|
||||
// Add custom value when no option is focused
|
||||
const customOption = {
|
||||
label: searchText,
|
||||
value: searchText,
|
||||
type: 'custom',
|
||||
};
|
||||
if (onChange) {
|
||||
onChange(customOption.value, customOption);
|
||||
setIsOpen(false);
|
||||
setActiveOptionIndex(-1);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case 'Escape':
|
||||
e.preventDefault();
|
||||
setIsOpen(false);
|
||||
setActiveOptionIndex(-1);
|
||||
break;
|
||||
|
||||
case ' ': // Space key
|
||||
if (activeOptionIndex >= 0 && activeOptionIndex < options.length) {
|
||||
e.preventDefault();
|
||||
const selectedOption = options[activeOptionIndex];
|
||||
if (onChange) {
|
||||
onChange(selectedOption.value, selectedOption);
|
||||
setIsOpen(false);
|
||||
setActiveOptionIndex(-1);
|
||||
}
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
} else if (e.key === 'ArrowDown' || e.key === 'Tab') {
|
||||
// Open dropdown when Down or Tab is pressed while closed
|
||||
e.preventDefault();
|
||||
setIsOpen(true);
|
||||
setActiveOptionIndex(0);
|
||||
}
|
||||
},
|
||||
[
|
||||
isOpen,
|
||||
activeOptionIndex,
|
||||
filteredOptions,
|
||||
searchText,
|
||||
onChange,
|
||||
splitOptions,
|
||||
value,
|
||||
isLabelPresent,
|
||||
],
|
||||
);
|
||||
|
||||
// ===== Dropdown Rendering =====
|
||||
|
||||
/**
|
||||
* Renders the custom dropdown with sections and keyboard navigation
|
||||
*/
|
||||
const customDropdownRender = useCallback((): React.ReactElement => {
|
||||
// Process options based on current value
|
||||
let processedOptions = isEmpty(value)
|
||||
? filteredOptions
|
||||
: prioritizeOrAddOptionForSingleSelect(filteredOptions, value);
|
||||
|
||||
if (!isEmpty(searchText)) {
|
||||
processedOptions = filterOptionsBySearch(processedOptions, searchText);
|
||||
}
|
||||
|
||||
const { sectionOptions, nonSectionOptions } = splitOptions(processedOptions);
|
||||
|
||||
// Check if we need to add a custom option based on search text
|
||||
const isSearchTextNotPresent =
|
||||
!isEmpty(searchText) && !isLabelPresent(processedOptions, searchText);
|
||||
|
||||
let optionIndex = 0;
|
||||
|
||||
// Add custom option if needed
|
||||
if (isSearchTextNotPresent) {
|
||||
nonSectionOptions.unshift({
|
||||
label: searchText,
|
||||
value: searchText,
|
||||
type: 'custom',
|
||||
});
|
||||
}
|
||||
|
||||
// Helper function to map options with index tracking
|
||||
const mapOptions = (options: OptionData[]): React.ReactNode =>
|
||||
options.map((option) => {
|
||||
const result = renderOptionWithIndex(
|
||||
option,
|
||||
option.value === value,
|
||||
optionIndex,
|
||||
);
|
||||
optionIndex += 1;
|
||||
return result;
|
||||
});
|
||||
|
||||
const customMenu = (
|
||||
<div
|
||||
ref={dropdownRef}
|
||||
className="custom-select-dropdown"
|
||||
onClick={handleDropdownClick}
|
||||
onKeyDown={handleKeyDown}
|
||||
role="listbox"
|
||||
tabIndex={-1}
|
||||
aria-activedescendant={
|
||||
activeOptionIndex >= 0 ? `option-${activeOptionIndex}` : undefined
|
||||
}
|
||||
>
|
||||
{/* Non-section options */}
|
||||
<div className="no-section-options">
|
||||
{nonSectionOptions.length > 0 && mapOptions(nonSectionOptions)}
|
||||
</div>
|
||||
|
||||
{/* Section options */}
|
||||
{sectionOptions.length > 0 &&
|
||||
sectionOptions.map((section) =>
|
||||
!isEmpty(section.options) ? (
|
||||
<div className="select-group" key={section.label}>
|
||||
<div className="group-label" role="heading" aria-level={2}>
|
||||
{section.label}
|
||||
</div>
|
||||
<div role="group" aria-label={`${section.label} options`}>
|
||||
{section.options && mapOptions(section.options)}
|
||||
</div>
|
||||
</div>
|
||||
) : null,
|
||||
)}
|
||||
|
||||
{/* Navigation help footer */}
|
||||
<div className="navigation-footer" role="note">
|
||||
{!loading && !errorMessage && !noDataMessage && (
|
||||
<section className="navigate">
|
||||
<ArrowDown size={8} className="icons" />
|
||||
<ArrowUp size={8} className="icons" />
|
||||
<span className="keyboard-text">to navigate</span>
|
||||
</section>
|
||||
)}
|
||||
{loading && (
|
||||
<div className="navigation-loading">
|
||||
<div className="navigation-icons">
|
||||
<LoadingOutlined />
|
||||
</div>
|
||||
<div className="navigation-text">We are updating the values...</div>
|
||||
</div>
|
||||
)}
|
||||
{errorMessage && !loading && (
|
||||
<div className="navigation-error">
|
||||
<div className="navigation-text">
|
||||
{errorMessage || SOMETHING_WENT_WRONG}
|
||||
</div>
|
||||
<div className="navigation-icons">
|
||||
<ReloadOutlined
|
||||
twoToneColor={Color.BG_CHERRY_400}
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
if (onRetry) onRetry();
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{noDataMessage && !loading && (
|
||||
<div className="navigation-text">{noDataMessage}</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
return dropdownRender ? dropdownRender(customMenu) : customMenu;
|
||||
}, [
|
||||
value,
|
||||
filteredOptions,
|
||||
searchText,
|
||||
splitOptions,
|
||||
isLabelPresent,
|
||||
handleDropdownClick,
|
||||
handleKeyDown,
|
||||
activeOptionIndex,
|
||||
loading,
|
||||
errorMessage,
|
||||
noDataMessage,
|
||||
dropdownRender,
|
||||
renderOptionWithIndex,
|
||||
onRetry,
|
||||
]);
|
||||
|
||||
// ===== Side Effects =====
|
||||
|
||||
// Clear search text when dropdown closes
|
||||
useEffect(() => {
|
||||
if (!isOpen) {
|
||||
setSearchText('');
|
||||
setActiveOptionIndex(-1);
|
||||
}
|
||||
}, [isOpen]);
|
||||
|
||||
// Auto-scroll to active option for keyboard navigation
|
||||
useEffect(() => {
|
||||
if (
|
||||
isOpen &&
|
||||
activeOptionIndex >= 0 &&
|
||||
optionRefs.current[activeOptionIndex]
|
||||
) {
|
||||
optionRefs.current[activeOptionIndex]?.scrollIntoView({
|
||||
behavior: 'smooth',
|
||||
block: 'nearest',
|
||||
});
|
||||
}
|
||||
}, [isOpen, activeOptionIndex]);
|
||||
|
||||
// ===== Final Processing =====
|
||||
|
||||
// Apply highlight to matched text in options
|
||||
const optionsWithHighlight = useMemo(
|
||||
() =>
|
||||
options
|
||||
?.filter((option) =>
|
||||
String(option.label || '')
|
||||
.toLowerCase()
|
||||
.includes(searchText.toLowerCase()),
|
||||
)
|
||||
?.map((option) => ({
|
||||
...option,
|
||||
label: highlightMatchedText(String(option.label || ''), searchText),
|
||||
})),
|
||||
[options, searchText, highlightMatchedText],
|
||||
);
|
||||
|
||||
// ===== Component Rendering =====
|
||||
return (
|
||||
<Select
|
||||
ref={selectRef}
|
||||
className={cx('custom-select', className)}
|
||||
placeholder={placeholder}
|
||||
showSearch
|
||||
filterOption={false}
|
||||
onSearch={handleSearch}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onDropdownVisibleChange={setIsOpen}
|
||||
open={isOpen}
|
||||
options={optionsWithHighlight}
|
||||
defaultActiveFirstOption={defaultActiveFirstOption}
|
||||
popupMatchSelectWidth={popupMatchSelectWidth}
|
||||
allowClear={allowClear ? { clearIcon } : false}
|
||||
getPopupContainer={getPopupContainer ?? popupContainer}
|
||||
suffixIcon={<DownOutlined style={{ cursor: 'default' }} />}
|
||||
dropdownRender={customDropdownRender}
|
||||
menuItemSelectedIcon={null}
|
||||
popupClassName={cx('custom-select-dropdown-container', popupClassName)}
|
||||
listHeight={300}
|
||||
placement={placement}
|
||||
optionFilterProp="label"
|
||||
notFoundContent={<div className="empty-message">{noDataMessage}</div>}
|
||||
onKeyDown={handleKeyDown}
|
||||
{...rest}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export default CustomSelect;
|
||||
@@ -0,0 +1,263 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
|
||||
import CustomMultiSelect from '../CustomMultiSelect';
|
||||
|
||||
// Mock scrollIntoView which isn't available in JSDOM
|
||||
window.HTMLElement.prototype.scrollIntoView = jest.fn();
|
||||
|
||||
// Mock options data
|
||||
const mockOptions = [
|
||||
{ label: 'Option 1', value: 'option1' },
|
||||
{ label: 'Option 2', value: 'option2' },
|
||||
{ label: 'Option 3', value: 'option3' },
|
||||
];
|
||||
|
||||
const mockGroupedOptions = [
|
||||
{
|
||||
label: 'Group 1',
|
||||
options: [
|
||||
{ label: 'Group 1 - Option 1', value: 'g1-option1' },
|
||||
{ label: 'Group 1 - Option 2', value: 'g1-option2' },
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'Group 2',
|
||||
options: [
|
||||
{ label: 'Group 2 - Option 1', value: 'g2-option1' },
|
||||
{ label: 'Group 2 - Option 2', value: 'g2-option2' },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
describe('CustomMultiSelect Component', () => {
|
||||
it('renders with placeholder', () => {
|
||||
const handleChange = jest.fn();
|
||||
render(
|
||||
<CustomMultiSelect
|
||||
placeholder="Select multiple options"
|
||||
options={mockOptions}
|
||||
onChange={handleChange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Check placeholder exists
|
||||
const placeholderElement = screen.getByText('Select multiple options');
|
||||
expect(placeholderElement).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('opens dropdown when clicked', async () => {
|
||||
const handleChange = jest.fn();
|
||||
render(<CustomMultiSelect options={mockOptions} onChange={handleChange} />);
|
||||
|
||||
// Click to open the dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Wait for dropdown to appear
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('ALL')).toBeInTheDocument(); // The ALL option
|
||||
expect(screen.getByText('Option 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Option 2')).toBeInTheDocument();
|
||||
expect(screen.getByText('Option 3')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('selects multiple options', async () => {
|
||||
const handleChange = jest.fn();
|
||||
|
||||
// Start with option1 already selected
|
||||
render(
|
||||
<CustomMultiSelect
|
||||
options={mockOptions}
|
||||
onChange={handleChange}
|
||||
value={['option1']}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Wait for dropdown to appear
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Option 3')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click on Option 3
|
||||
const option3 = screen.getByText('Option 3');
|
||||
fireEvent.click(option3);
|
||||
|
||||
// Verify onChange was called with the right values
|
||||
expect(handleChange).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('selects ALL options when ALL is clicked', async () => {
|
||||
const handleChange = jest.fn();
|
||||
render(
|
||||
<CustomMultiSelect
|
||||
options={mockOptions}
|
||||
onChange={handleChange}
|
||||
enableAllSelection
|
||||
/>,
|
||||
);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Wait for dropdown to appear
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('ALL')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click on ALL option
|
||||
const allOption = screen.getByText('ALL');
|
||||
fireEvent.click(allOption);
|
||||
|
||||
// Verify onChange was called with all option values
|
||||
expect(handleChange).toHaveBeenCalledWith(
|
||||
['option1', 'option2', 'option3'],
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ value: 'option1' }),
|
||||
expect.objectContaining({ value: 'option2' }),
|
||||
expect.objectContaining({ value: 'option3' }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('displays selected options as tags', async () => {
|
||||
render(
|
||||
<CustomMultiSelect options={mockOptions} value={['option1', 'option2']} />,
|
||||
);
|
||||
|
||||
// Check that option values are shown as tags (not labels)
|
||||
expect(screen.getByText('option1')).toBeInTheDocument();
|
||||
expect(screen.getByText('option2')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('removes a tag when clicked', async () => {
|
||||
const handleChange = jest.fn();
|
||||
render(
|
||||
<CustomMultiSelect
|
||||
options={mockOptions}
|
||||
value={['option1', 'option2']}
|
||||
onChange={handleChange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Find close button on Option 1 tag and click it
|
||||
const closeButtons = document.querySelectorAll(
|
||||
'.ant-select-selection-item-remove',
|
||||
);
|
||||
fireEvent.click(closeButtons[0]);
|
||||
|
||||
// Verify onChange was called with remaining option
|
||||
expect(handleChange).toHaveBeenCalledWith(
|
||||
['option2'],
|
||||
expect.arrayContaining([expect.objectContaining({ value: 'option2' })]),
|
||||
);
|
||||
});
|
||||
|
||||
it('filters options when searching', async () => {
|
||||
render(<CustomMultiSelect options={mockOptions} />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Type into search box - get input directly
|
||||
const inputElement = selectElement.querySelector('input');
|
||||
if (inputElement) {
|
||||
fireEvent.change(inputElement, { target: { value: '2' } });
|
||||
}
|
||||
|
||||
// Wait for the dropdown filtering to happen
|
||||
await waitFor(() => {
|
||||
// Check that the dropdown is present
|
||||
const dropdownElement = document.querySelector(
|
||||
'.custom-multiselect-dropdown',
|
||||
);
|
||||
expect(dropdownElement).toBeInTheDocument();
|
||||
|
||||
// Verify Option 2 is visible in the dropdown
|
||||
const options = document.querySelectorAll('.option-label-text');
|
||||
let foundOption2 = false;
|
||||
|
||||
options.forEach((option) => {
|
||||
const text = option.textContent || '';
|
||||
if (text.includes('Option 2')) foundOption2 = true;
|
||||
});
|
||||
|
||||
expect(foundOption2).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
it('renders grouped options correctly', async () => {
|
||||
render(<CustomMultiSelect options={mockGroupedOptions} />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Check group headers and options
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Group 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 2')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 1 - Option 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 1 - Option 2')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 2 - Option 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 2 - Option 2')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('shows loading state', () => {
|
||||
render(<CustomMultiSelect options={mockOptions} loading />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Check loading text is displayed
|
||||
expect(screen.getByText('We are updating the values...')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows error message', () => {
|
||||
render(
|
||||
<CustomMultiSelect
|
||||
options={mockOptions}
|
||||
errorMessage="Test error message"
|
||||
/>,
|
||||
);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Check error message is displayed
|
||||
expect(screen.getByText('Test error message')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows no data message', () => {
|
||||
render(<CustomMultiSelect options={[]} noDataMessage="No data available" />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Check no data message is displayed
|
||||
expect(screen.getByText('No data available')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows "ALL" tag when all options are selected', () => {
|
||||
render(
|
||||
<CustomMultiSelect
|
||||
options={mockOptions}
|
||||
value={['option1', 'option2', 'option3']}
|
||||
maxTagCount={2}
|
||||
/>,
|
||||
);
|
||||
|
||||
// When all options are selected, component shows ALL tag instead
|
||||
expect(screen.getByText('ALL')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
206  frontend/src/components/NewSelect/__test__/CustomSelect.test.tsx  Normal file

@@ -0,0 +1,206 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
|
||||
import CustomSelect from '../CustomSelect';
|
||||
|
||||
// Mock scrollIntoView which isn't available in JSDOM
|
||||
window.HTMLElement.prototype.scrollIntoView = jest.fn();
|
||||
|
||||
// Mock options data
|
||||
const mockOptions = [
|
||||
{ label: 'Option 1', value: 'option1' },
|
||||
{ label: 'Option 2', value: 'option2' },
|
||||
{ label: 'Option 3', value: 'option3' },
|
||||
];
|
||||
|
||||
const mockGroupedOptions = [
|
||||
{
|
||||
label: 'Group 1',
|
||||
options: [
|
||||
{ label: 'Group 1 - Option 1', value: 'g1-option1' },
|
||||
{ label: 'Group 1 - Option 2', value: 'g1-option2' },
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'Group 2',
|
||||
options: [
|
||||
{ label: 'Group 2 - Option 1', value: 'g2-option1' },
|
||||
{ label: 'Group 2 - Option 2', value: 'g2-option2' },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
describe('CustomSelect Component', () => {
|
||||
it('renders with placeholder and options', () => {
|
||||
const handleChange = jest.fn();
|
||||
render(
|
||||
<CustomSelect
|
||||
placeholder="Test placeholder"
|
||||
options={mockOptions}
|
||||
onChange={handleChange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Check placeholder exists in the DOM (not using getByPlaceholderText)
|
||||
const placeholderElement = screen.getByText('Test placeholder');
|
||||
expect(placeholderElement).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('opens dropdown when clicked', async () => {
|
||||
const handleChange = jest.fn();
|
||||
render(<CustomSelect options={mockOptions} onChange={handleChange} />);
|
||||
|
||||
// Click to open the dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Wait for dropdown to appear
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Option 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Option 2')).toBeInTheDocument();
|
||||
expect(screen.getByText('Option 3')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('calls onChange when option is selected', async () => {
|
||||
const handleChange = jest.fn();
|
||||
render(<CustomSelect options={mockOptions} onChange={handleChange} />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Click on an option
|
||||
await waitFor(() => {
|
||||
const option = screen.getByText('Option 2');
|
||||
fireEvent.click(option);
|
||||
});
|
||||
|
||||
// Check onChange was called with correct value
|
||||
expect(handleChange).toHaveBeenCalledWith('option2', expect.anything());
|
||||
});
|
||||
|
||||
it('filters options when searching', async () => {
|
||||
render(<CustomSelect options={mockOptions} />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Type into search box
|
||||
fireEvent.change(selectElement, { target: { value: '2' } });
|
||||
|
||||
// Dropdown should only show Option 2
|
||||
await waitFor(() => {
|
||||
// Check that the dropdown is present
|
||||
const dropdownElement = document.querySelector('.custom-select-dropdown');
|
||||
expect(dropdownElement).toBeInTheDocument();
|
||||
|
||||
// Use a simple approach to verify filtering
|
||||
const allOptionsInDropdown = document.querySelectorAll('.option-item');
|
||||
let foundOption2 = false;
|
||||
|
||||
allOptionsInDropdown.forEach((option) => {
|
||||
if (option.textContent?.includes('Option 2')) {
|
||||
foundOption2 = true;
|
||||
}
|
||||
|
||||
// Should not show Options 1 or 3
|
||||
expect(option.textContent).not.toContain('Option 1');
|
||||
expect(option.textContent).not.toContain('Option 3');
|
||||
});
|
||||
|
||||
expect(foundOption2).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
it('renders grouped options correctly', async () => {
|
||||
const handleChange = jest.fn();
|
||||
render(<CustomSelect options={mockGroupedOptions} onChange={handleChange} />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Check group headers and options
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Group 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 2')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 1 - Option 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 1 - Option 2')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 2 - Option 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Group 2 - Option 2')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('shows loading state', () => {
|
||||
render(<CustomSelect options={mockOptions} loading />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Check loading text is displayed
|
||||
expect(screen.getByText('We are updating the values...')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows error message', () => {
|
||||
render(
|
||||
<CustomSelect options={mockOptions} errorMessage="Test error message" />,
|
||||
);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Check error message is displayed
|
||||
expect(screen.getByText('Test error message')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows no data message', () => {
|
||||
render(<CustomSelect options={[]} noDataMessage="No data available" />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Check no data message is displayed
|
||||
expect(screen.getByText('No data available')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('supports keyboard navigation', async () => {
|
||||
const handleChange = jest.fn();
|
||||
render(<CustomSelect options={mockOptions} onChange={handleChange} />);
|
||||
|
||||
// Open dropdown using keyboard
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.focus(selectElement);
|
||||
|
||||
// Press down arrow to open dropdown
|
||||
fireEvent.keyDown(selectElement, { key: 'ArrowDown' });
|
||||
|
||||
// Wait for dropdown to appear
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Option 1')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('handles selection via keyboard', async () => {
|
||||
const handleChange = jest.fn();
|
||||
render(<CustomSelect options={mockOptions} onChange={handleChange} />);
|
||||
|
||||
// Open dropdown
|
||||
const selectElement = screen.getByRole('combobox');
|
||||
fireEvent.mouseDown(selectElement);
|
||||
|
||||
// Wait for dropdown to appear then press Enter
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Option 1')).toBeInTheDocument();
|
||||
|
||||
// Press Enter to select first option
|
||||
fireEvent.keyDown(screen.getByText('Option 1'), { key: 'Enter' });
|
||||
});
|
||||
|
||||
// Check onChange was called
|
||||
expect(handleChange).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
4  frontend/src/components/NewSelect/index.ts  Normal file

@@ -0,0 +1,4 @@
import CustomMultiSelect from './CustomMultiSelect';
import CustomSelect from './CustomSelect';

export { CustomMultiSelect, CustomSelect };
838  frontend/src/components/NewSelect/styles.scss  Normal file

@@ -0,0 +1,838 @@
|
||||
// Main container styles
|
||||
|
||||
// make const of #2c3044
|
||||
$custom-border-color: #2c3044;
|
||||
|
||||
.custom-select {
|
||||
width: 100%;
|
||||
position: relative;
|
||||
|
||||
&.ant-select-focused {
|
||||
.ant-select-selector {
|
||||
border-color: var(--bg-robin-500);
|
||||
box-shadow: 0 0 0 2px rgba(78, 116, 248, 0.2);
|
||||
}
|
||||
}
|
||||
|
||||
.ant-select-selection-placeholder {
|
||||
color: rgba(192, 193, 195, 0.45);
|
||||
}
|
||||
|
||||
// Base styles are for dark mode
|
||||
.ant-select-selector {
|
||||
background-color: var(--bg-ink-400);
|
||||
border-color: var(--bg-slate-400);
|
||||
}
|
||||
|
||||
.ant-select-clear {
|
||||
background-color: var(--bg-ink-400);
|
||||
color: rgba(192, 193, 195, 0.7);
|
||||
}
|
||||
}
|
||||
|
||||
// Keep chip styles ONLY in the multi-select
|
||||
.custom-multiselect {
|
||||
width: 100%;
|
||||
position: relative;
|
||||
|
||||
.ant-select-selector {
|
||||
max-height: 200px;
|
||||
overflow: auto;
|
||||
scrollbar-width: thin;
|
||||
background-color: var(--bg-ink-400);
|
||||
border-color: var(--bg-slate-400);
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 6px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background-color: $custom-border-color;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background-color: var(--bg-slate-400);
|
||||
}
|
||||
}
|
||||
|
||||
&.ant-select-focused {
|
||||
.ant-select-selector {
|
||||
border-color: var(--bg-robin-500);
|
||||
box-shadow: 0 0 0 2px rgba(78, 116, 248, 0.2);
|
||||
}
|
||||
}
|
||||
|
||||
.ant-select-selection-placeholder {
|
||||
color: rgba(192, 193, 195, 0.45);
|
||||
}
|
||||
|
||||
// Customize tags in multiselect (dark mode by default)
|
||||
.ant-select-selection-item {
|
||||
background-color: var(--bg-slate-400);
|
||||
border-radius: 4px;
|
||||
border: 1px solid $custom-border-color;
|
||||
margin-right: 4px;
|
||||
transition: all 0.2s;
|
||||
color: var(--bg-vanilla-400);
|
||||
|
||||
// Style for active tag (keyboard navigation)
|
||||
&-active {
|
||||
border-color: var(--bg-robin-500) !important;
|
||||
background-color: rgba(78, 116, 248, 0.15) !important;
|
||||
outline: 2px solid rgba(78, 116, 248, 0.2);
|
||||
}
|
||||
|
||||
// Style for selected tags (via keyboard or mouse selection)
|
||||
&-selected {
|
||||
border-color: var(--bg-robin-500) !important;
|
||||
background-color: rgba(78, 116, 248, 0.15) !important;
|
||||
box-shadow: 0 0 0 2px rgba(78, 116, 248, 0.2);
|
||||
}
|
||||
|
||||
.ant-select-selection-item-content {
|
||||
color: var(--bg-vanilla-400);
|
||||
}
|
||||
|
||||
.ant-select-selection-item-remove {
|
||||
color: rgba(192, 193, 195, 0.7);
|
||||
&:hover {
|
||||
color: rgba(192, 193, 195, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Class applied when in selection mode
|
||||
&.has-selection {
|
||||
.ant-select-selection-item-selected {
|
||||
cursor: move; // Indicate draggable
|
||||
}
|
||||
|
||||
// Change cursor for selection
|
||||
.ant-select-selector {
|
||||
cursor: text;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Dropdown styles
|
||||
.custom-select-dropdown-container,
|
||||
.custom-multiselect-dropdown-container {
|
||||
z-index: 1050 !important;
|
||||
padding: 0;
|
||||
box-shadow: 0 3px 6px -4px rgba(0, 0, 0, 0.5), 0 6px 16px 0 rgba(0, 0, 0, 0.4),
|
||||
0 9px 28px 8px rgba(0, 0, 0, 0.3);
|
||||
background-color: var(--bg-ink-400);
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
|
||||
.ant-select-item {
|
||||
padding: 8px 12px;
|
||||
color: var(--bg-vanilla-400);
|
||||
|
||||
// Make keyboard navigation visible
|
||||
&-option-active {
|
||||
background-color: var(--bg-slate-400) !important;
|
||||
}
|
||||
|
||||
&-option-selected {
|
||||
background-color: rgba(78, 116, 248, 0.15) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-select-dropdown-container,
|
||||
.custom-multiselect-dropdown-container {
|
||||
width: 100%;
|
||||
overflow-x: auto;
|
||||
overflow-y: hidden;
|
||||
resize: horizontal;
|
||||
min-width: 300px !important;
|
||||
|
||||
.empty-message {
|
||||
padding: 12px;
|
||||
text-align: center;
|
||||
color: rgba(192, 193, 195, 0.45);
|
||||
}
|
||||
}
|
||||
|
||||
// Custom dropdown styles for single select
|
||||
.custom-select-dropdown {
|
||||
padding: 8px 0 0 0;
|
||||
max-height: 500px;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
scrollbar-width: thin;
|
||||
border-radius: 4px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
width: 100%;
|
||||
background-color: var(--bg-ink-400);
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 6px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background-color: $custom-border-color;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background-color: var(--bg-slate-400);
|
||||
}
|
||||
|
||||
.no-section-options {
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.select-group {
|
||||
margin-bottom: 16px;
|
||||
border-radius: 4px;
|
||||
overflow: hidden;
|
||||
|
||||
.group-label {
|
||||
font-weight: 500;
|
||||
padding: 4px 12px;
|
||||
font-size: 13px;
|
||||
color: var(--bg-vanilla-400);
|
||||
background-color: var(--bg-slate-400);
|
||||
border-bottom: 1px solid $custom-border-color;
|
||||
border-top: 1px solid $custom-border-color;
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
}
|
||||
|
||||
.option-item {
|
||||
padding: 8px 12px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
color: var(--bg-vanilla-400);
|
||||
|
||||
&:hover {
|
||||
background-color: var(--bg-slate-400);
|
||||
}
|
||||
|
||||
&.selected {
|
||||
background-color: rgba(78, 116, 248, 0.15);
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
&.active {
|
||||
background-color: rgba(78, 116, 248, 0.15);
|
||||
border-color: var(--bg-robin-500);
|
||||
}
|
||||
|
||||
.option-content {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
|
||||
.option-label-text {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.option-badge {
|
||||
font-size: 12px;
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
background-color: $custom-border-color;
|
||||
color: var(--bg-vanilla-400);
|
||||
margin-left: 8px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.loading-container {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
padding: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
.navigation-footer {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 8px 12px;
|
||||
border-top: 1px solid var(--bg-slate-400);
|
||||
position: sticky;
|
||||
bottom: 0;
|
||||
background-color: var(--bg-ink-400);
|
||||
z-index: 1;
|
||||
|
||||
.navigation-icons {
|
||||
display: flex;
|
||||
margin-right: 8px;
|
||||
color: var(--bg-vanilla-400);
|
||||
}
|
||||
|
||||
.navigation-text {
|
||||
color: var(--bg-vanilla-400);
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.navigation-error {
|
||||
.navigation-text,
|
||||
.navigation-icons {
|
||||
color: var(--bg-cherry-500) !important;
|
||||
}
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
width: 100%;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.navigation-loading {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
|
||||
.navigation-text,
|
||||
.navigation-icons {
|
||||
color: var(--bg-robin-600) !important;
|
||||
}
|
||||
}
|
||||
|
||||
.navigate {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding-right: 12px;
|
||||
gap: 6px;
|
||||
|
||||
.icons {
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
flex-shrink: 0;
|
||||
border-radius: 2.286px;
|
||||
border-top: 1.143px solid var(--bg-ink-200);
|
||||
border-right: 1.143px solid var(--bg-ink-200);
|
||||
border-bottom: 2.286px solid var(--bg-ink-200);
|
||||
border-left: 1.143px solid var(--bg-ink-200);
|
||||
background: var(--Ink-400, var(--bg-ink-400));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Custom dropdown styles for multi-select
|
||||
.custom-multiselect-dropdown {
|
||||
padding: 8px 0 0 0;
|
||||
max-height: 500px;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
scrollbar-width: thin;
|
||||
border-radius: 4px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
width: 100%;
|
||||
background-color: var(--bg-ink-400);
|
||||
|
||||
.select-all-option,
|
||||
.custom-value-option {
|
||||
padding: 8px 12px;
|
||||
border-bottom: 1px solid $custom-border-color;
|
||||
margin-bottom: 8px;
|
||||
background-color: var(--bg-slate-400);
|
||||
position: sticky;
|
||||
top: 0;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
.selected-values-section {
|
||||
padding: 0 0 8px 0;
|
||||
border-bottom: 1px solid $custom-border-color;
|
||||
margin-bottom: 8px;
|
||||
|
||||
.selected-option {
|
||||
padding: 4px 12px;
|
||||
}
|
||||
}
|
||||
|
||||
.select-group {
|
||||
margin-bottom: 12px;
|
||||
overflow: hidden;
|
||||
|
||||
.group-label {
|
||||
font-weight: 500;
|
||||
padding: 4px 12px;
|
||||
font-size: 13px;
|
||||
color: var(--bg-vanilla-400);
|
||||
background-color: var(--bg-slate-400);
|
||||
border-bottom: 1px solid $custom-border-color;
|
||||
border-top: 1px solid $custom-border-color;
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
}
|
||||
}
|
||||
|
||||
.option-item {
|
||||
padding: 8px 12px;
|
||||
cursor: pointer;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
color: var(--bg-vanilla-400);
|
||||
|
||||
&.active {
|
||||
background-color: rgba(78, 116, 248, 0.15);
|
||||
border-color: var(--bg-robin-500);
|
||||
}
|
||||
|
||||
&:hover {
|
||||
background-color: var(--bg-slate-400);
|
||||
}
|
||||
|
||||
&.selected {
|
||||
background-color: rgba(78, 116, 248, 0.15);
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
&.all-option {
|
||||
font-weight: 500;
|
||||
border-bottom: 1px solid $custom-border-color;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.option-checkbox {
|
||||
width: 100%;
|
||||
|
||||
> span:not(.ant-checkbox) {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.option-content {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
|
||||
.option-label-text {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.option-badge {
|
||||
font-size: 12px;
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
background-color: $custom-border-color;
|
||||
color: var(--bg-vanilla-400);
|
||||
margin-left: 8px;
|
||||
}
|
||||
}
|
||||
|
||||
.only-btn {
|
||||
display: none;
|
||||
}
|
||||
.toggle-btn {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.only-btn:hover {
|
||||
background-color: unset;
|
||||
}
|
||||
.toggle-btn:hover {
|
||||
background-color: unset;
|
||||
}
|
||||
|
||||
.option-content:hover {
|
||||
.only-btn {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 21px;
|
||||
}
|
||||
.toggle-btn {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.option-badge {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.option-checkbox:hover {
|
||||
.toggle-btn {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 21px;
|
||||
}
|
||||
.option-badge {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.loading-container {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
padding: 12px;
|
||||
}
|
||||
|
||||
.empty-message {
|
||||
padding: 12px;
|
||||
text-align: center;
|
||||
color: rgba(192, 193, 195, 0.45);
|
||||
}
|
||||
|
||||
.status-message {
|
||||
padding: 8px 12px;
|
||||
text-align: center;
|
||||
font-style: italic;
|
||||
color: rgba(192, 193, 195, 0.65);
|
||||
border-top: 1px dashed $custom-border-color;
|
||||
}
|
||||
}
|
||||
|
||||
// Custom styles for highlight text
|
||||
.highlight-text {
|
||||
background-color: rgba(78, 116, 248, 0.2);
|
||||
padding: 0 1px;
|
||||
border-radius: 2px;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
// Custom option styles for keyboard navigation
|
||||
.custom-option {
|
||||
&.focused,
|
||||
&.ant-select-item-option-active {
|
||||
background-color: var(--bg-slate-400) !important;
|
||||
}
|
||||
}
|
||||
|
||||
// Improve the sticky headers appearance
|
||||
.custom-select-dropdown-container {
|
||||
.group-label,
|
||||
.ant-select-item-group {
|
||||
position: sticky;
|
||||
top: 0;
|
||||
z-index: 2;
|
||||
background-color: var(--bg-slate-400);
|
||||
border-bottom: 1px solid $custom-border-color;
|
||||
padding: 4px 12px;
|
||||
margin: 0;
|
||||
width: 100%; // Ensure the header spans full width
|
||||
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.2); // Add subtle shadow for separation
|
||||
}
|
||||
|
||||
// Ensure proper spacing between sections
|
||||
.select-group {
|
||||
margin-bottom: 8px;
|
||||
position: relative; // Create a positioning context
|
||||
}
|
||||
}
|
||||
|
||||
// Custom scrollbar styling (shared between components)
|
||||
@mixin custom-scrollbar {
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: rgba(192, 193, 195, 0.3) rgba(29, 33, 45, 0.6);
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background-color: rgba(29, 33, 45, 0.6);
|
||||
border-radius: 10px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background-color: rgba(192, 193, 195, 0.3);
|
||||
border-radius: 10px;
|
||||
transition: background-color 0.2s ease;
|
||||
|
||||
&:hover {
|
||||
background-color: rgba(192, 193, 195, 0.5);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Subtle nested scrollbar styling
|
||||
@mixin nested-scrollbar {
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: rgba(192, 193, 195, 0.2) rgba(29, 33, 45, 0.6);
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 4px;
|
||||
height: 4px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background-color: rgba(29, 33, 45, 0.6);
|
||||
border-radius: 10px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background-color: rgba(192, 193, 195, 0.2);
|
||||
border-radius: 10px;
|
||||
|
||||
&:hover {
|
||||
background-color: rgba(192, 193, 195, 0.3);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Apply to main dropdown containers
|
||||
.custom-select-dropdown,
|
||||
.custom-multiselect-dropdown {
|
||||
@include custom-scrollbar;
|
||||
|
||||
// Main content area
|
||||
.options-container {
|
||||
@include custom-scrollbar;
|
||||
padding-right: 2px; // Add slight padding to prevent content touching scrollbar
|
||||
}
|
||||
|
||||
// Non-sectioned options
|
||||
.no-section-options {
|
||||
@include nested-scrollbar;
|
||||
margin-right: 2px;
|
||||
padding-right: 2px;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply to dropdown container wrappers
|
||||
.custom-select-dropdown-container,
|
||||
.custom-multiselect-dropdown-container {
|
||||
@include custom-scrollbar;
|
||||
|
||||
// Add subtle shadow inside to indicate scrollable area
|
||||
&.has-overflow {
|
||||
box-shadow: inset 0 -10px 10px -10px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
}
|
||||
|
||||
// Light Mode Overrides
|
||||
.lightMode {
|
||||
.custom-select {
|
||||
.ant-select-selector {
|
||||
background-color: var(--bg-vanilla-100);
|
||||
border-color: #e9e9e9;
|
||||
}
|
||||
|
||||
.ant-select-selection-placeholder {
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.ant-select-clear {
|
||||
background-color: var(--bg-vanilla-100);
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
&.ant-select-focused {
|
||||
.ant-select-selector {
|
||||
border-color: #1890ff;
|
||||
box-shadow: 0 0 0 2px rgba(24, 144, 255, 0.2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-multiselect {
|
||||
.ant-select-selector {
|
||||
background-color: var(--bg-vanilla-100);
|
||||
border-color: #e9e9e9;
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background-color: #ccc;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background-color: #f0f0f0;
|
||||
}
|
||||
}
|
||||
|
||||
.ant-select-selection-placeholder {
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.ant-select-selection-item {
|
||||
background-color: #f5f5f5;
|
||||
border: 1px solid #e8e8e8;
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
|
||||
.ant-select-selection-item-content {
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
}
|
||||
|
||||
.ant-select-selection-item-remove {
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
&:hover {
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
}
|
||||
}
|
||||
|
||||
&-active {
|
||||
border-color: var(--bg-robin-500) !important;
|
||||
background-color: var(--bg-vanilla-300) !important;
|
||||
}
|
||||
|
||||
&-selected {
|
||||
border-color: #1890ff !important;
|
||||
background-color: var(--bg-vanilla-300) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-select-dropdown-container,
|
||||
.custom-multiselect-dropdown-container {
|
||||
background-color: var(--bg-vanilla-100);
|
||||
border: 1px solid #f0f0f0;
|
||||
box-shadow: 0 3px 6px -4px rgba(0, 0, 0, 0.12),
|
||||
0 6px 16px 0 rgba(0, 0, 0, 0.08), 0 9px 28px 8px rgba(0, 0, 0, 0.05);
|
||||
|
||||
.empty-message {
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.ant-select-item {
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
|
||||
&-option-active {
|
||||
background-color: #f5f5f5 !important;
|
||||
}
|
||||
|
||||
&-option-selected {
|
||||
background-color: var(--bg-vanilla-300) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-select-dropdown,
|
||||
.custom-multiselect-dropdown {
|
||||
border: 1px solid #f0f0f0;
|
||||
background-color: var(--bg-vanilla-100);
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background-color: #ccc;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background-color: #f0f0f0;
|
||||
}
|
||||
|
||||
.select-group {
|
||||
.group-label {
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
background-color: #fafafa;
|
||||
border-bottom: 1px solid #f0f0f0;
|
||||
border-top: 1px solid #f0f0f0;
|
||||
}
|
||||
}
|
||||
|
||||
.option-item {
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
|
||||
&:hover {
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
&.selected {
|
||||
background-color: var(--bg-vanilla-300);
|
||||
}
|
||||
|
||||
&.active {
|
||||
background-color: var(--bg-vanilla-300);
|
||||
border-color: #91d5ff;
|
||||
}
|
||||
|
||||
.option-content {
|
||||
.option-badge {
|
||||
background-color: #f0f0f0;
|
||||
color: #666;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.navigation-footer {
|
||||
border-top: 1px solid #f0f0f0;
|
||||
background-color: var(--bg-vanilla-100);
|
||||
|
||||
.navigation-icons {
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.navigation-text {
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.navigate {
|
||||
.icons {
|
||||
border-top: 1.143px solid var(--bg-ink-200);
|
||||
border-right: 1.143px solid var(--bg-ink-200);
|
||||
border-bottom: 2.286px solid var(--bg-ink-200);
|
||||
border-left: 1.143px solid var(--bg-ink-200);
|
||||
background: var(--bg-vanilla-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-multiselect-dropdown {
|
||||
.select-all-option,
|
||||
.custom-value-option {
|
||||
border-bottom: 1px solid #f0f0f0;
|
||||
background-color: #fafafa;
|
||||
}
|
||||
|
||||
.selected-values-section {
|
||||
border-bottom: 1px solid #f0f0f0;
|
||||
}
|
||||
|
||||
.status-message {
|
||||
color: rgba(0, 0, 0, 0.65);
|
||||
border-top: 1px dashed #f0f0f0;
|
||||
}
|
||||
|
||||
.option-item {
|
||||
&.all-option {
|
||||
border-bottom: 1px solid #f0f0f0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.highlight-text {
|
||||
background-color: rgba(24, 144, 255, 0.2);
|
||||
}
|
||||
|
||||
.custom-option {
|
||||
&.focused,
|
||||
&.ant-select-item-option-active {
|
||||
background-color: #f5f5f5 !important;
|
||||
}
|
||||
}
|
||||
|
||||
.custom-select-dropdown-container {
|
||||
.group-label,
|
||||
.ant-select-item-group {
|
||||
background-color: #f5f0f0;
|
||||
border-bottom: 1px solid #e8e8e8;
|
||||
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05);
|
||||
}
|
||||
}
|
||||
|
||||
// Light mode scrollbar overrides
|
||||
.custom-select-dropdown,
|
||||
.custom-multiselect-dropdown,
|
||||
.custom-select-dropdown-container,
|
||||
.custom-multiselect-dropdown-container {
|
||||
scrollbar-color: rgba(0, 0, 0, 0.2) rgba(0, 0, 0, 0.05);
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background-color: rgba(0, 0, 0, 0.05);
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background-color: rgba(0, 0, 0, 0.2);
|
||||
|
||||
&:hover {
|
||||
background-color: rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
60
frontend/src/components/NewSelect/types.ts
Normal file
@@ -0,0 +1,60 @@
import { SelectProps } from 'antd';

export interface OptionData {
	label: string;
	value?: string;
	disabled?: boolean;
	className?: string;
	style?: React.CSSProperties;
	options?: OptionData[];
	type?: 'defined' | 'custom' | 'regex';
}

export interface CustomSelectProps extends Omit<SelectProps, 'options'> {
	placeholder?: string;
	className?: string;
	loading?: boolean;
	onSearch?: (value: string) => void;
	options?: OptionData[];
	defaultActiveFirstOption?: boolean;
	noDataMessage?: string;
	onClear?: () => void;
	getPopupContainer?: (triggerNode: HTMLElement) => HTMLElement;
	dropdownRender?: (menu: React.ReactElement) => React.ReactElement;
	highlightSearch?: boolean;
	placement?: 'topLeft' | 'topRight' | 'bottomLeft' | 'bottomRight';
	popupMatchSelectWidth?: boolean;
	errorMessage?: string;
	allowClear?: SelectProps['allowClear'];
	onRetry?: () => void;
}

export interface CustomTagProps {
	label: React.ReactNode;
	value: string;
	closable: boolean;
	onClose: () => void;
}

export interface CustomMultiSelectProps
	extends Omit<SelectProps<string[] | string>, 'options'> {
	placeholder?: string;
	className?: string;
	loading?: boolean;
	onSearch?: (value: string) => void;
	options?: OptionData[];
	defaultActiveFirstOption?: boolean;
	dropdownMatchSelectWidth?: boolean | number;
	noDataMessage?: string;
	onClear?: () => void;
	enableAllSelection?: boolean;
	getPopupContainer?: (triggerNode: HTMLElement) => HTMLElement;
	dropdownRender?: (menu: React.ReactElement) => React.ReactElement;
	highlightSearch?: boolean;
	errorMessage?: string;
	popupClassName?: string;
	placement?: 'topLeft' | 'topRight' | 'bottomLeft' | 'bottomRight';
	maxTagCount?: number;
	allowClear?: SelectProps['allowClear'];
	onRetry?: () => void;
}
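For orientation, a minimal usage sketch of the two components against the prop interfaces above; the import path alias, option values, and handlers are illustrative assumptions and not part of the diff.

import { CustomMultiSelect, CustomSelect } from 'components/NewSelect'; // assumed path alias
import { OptionData } from 'components/NewSelect/types';

// Grouped options: a section label with nested, selectable entries (per OptionData).
const environmentOptions: OptionData[] = [
	{
		label: 'Environments',
		options: [
			{ label: 'production', value: 'production' },
			{ label: 'staging', value: 'staging' },
		],
	},
];

function EnvironmentFilter(): JSX.Element {
	return (
		<>
			{/* Single select: noDataMessage/errorMessage feed the dropdown footer states */}
			<CustomSelect
				placeholder="Select environment"
				options={environmentOptions}
				noDataMessage="No environments found"
				onChange={(value): void => console.log('selected', value)}
			/>
			{/* Multi select: enableAllSelection exposes the select-all behaviour, maxTagCount caps visible chips */}
			<CustomMultiSelect
				placeholder="Select environments"
				options={environmentOptions}
				enableAllSelection
				maxTagCount={3}
				onChange={(values): void => console.log('selected', values)}
			/>
		</>
	);
}

export default EnvironmentFilter;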
135
frontend/src/components/NewSelect/utils.ts
Normal file
@@ -0,0 +1,135 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { OptionData } from './types';

export const SPACEKEY = ' ';

export const prioritizeOrAddOptionForSingleSelect = (
	options: OptionData[],
	value: string,
	label?: string,
): OptionData[] => {
	let foundOption: OptionData | null = null;

	// Separate the found option and the rest
	const filteredOptions = options
		.map((option) => {
			if ('options' in option && Array.isArray(option.options)) {
				// Filter out the value from nested options
				const remainingSubOptions = option.options.filter(
					(subOption) => subOption.value !== value,
				);
				const extractedOption = option.options.find(
					(subOption) => subOption.value === value,
				);

				if (extractedOption) foundOption = extractedOption;

				// Keep the group if it still has remaining options
				return remainingSubOptions.length > 0
					? { ...option, options: remainingSubOptions }
					: null;
			}

			// Check top-level options
			if (option.value === value) {
				foundOption = option;
				return null; // Remove it from the list
			}

			return option;
		})
		.filter(Boolean) as OptionData[]; // Remove null values

	// If not found, create a new option
	if (!foundOption) {
		foundOption = { value, label: label ?? value };
	}

	// Add the found/new option at the top
	return [foundOption, ...filteredOptions];
};

export const prioritizeOrAddOptionForMultiSelect = (
	options: OptionData[],
	values: string[], // Only supports multiple values (string[])
	labels?: Record<string, string>,
): OptionData[] => {
	const foundOptions: OptionData[] = [];

	// Separate the found options and the rest
	const filteredOptions = options
		.map((option) => {
			if ('options' in option && Array.isArray(option.options)) {
				// Filter out selected values from nested options
				const remainingSubOptions = option.options.filter(
					(subOption) => subOption.value && !values.includes(subOption.value),
				);
				const extractedOptions = option.options.filter(
					(subOption) => subOption.value && values.includes(subOption.value),
				);

				if (extractedOptions.length > 0) {
					foundOptions.push(...extractedOptions);
				}

				// Keep the group if it still has remaining options
				return remainingSubOptions.length > 0
					? { ...option, options: remainingSubOptions }
					: null;
			}

			// Check top-level options
			if (option.value && values.includes(option.value)) {
				foundOptions.push(option);
				return null; // Remove it from the list
			}

			return option;
		})
		.filter(Boolean) as OptionData[]; // Remove null values

	// Find missing values that were not present in the original options and create new ones
	const missingValues = values.filter(
		(value) => !foundOptions.some((opt) => opt.value === value),
	);

	const newOptions = missingValues.map((value) => ({
		value,
		label: labels?.[value] ?? value, // Use provided label or default to value
	}));

	// Add found & new options to the top
	return [...newOptions, ...foundOptions, ...filteredOptions];
};

/**
 * Filters options based on search text
 */
export const filterOptionsBySearch = (
	options: OptionData[],
	searchText: string,
): OptionData[] => {
	if (!searchText.trim()) return options;

	const lowerSearchText = searchText.toLowerCase();

	return options
		.map((option) => {
			if ('options' in option && Array.isArray(option.options)) {
				// Filter nested options
				const filteredSubOptions = option.options.filter((subOption) =>
					subOption.label.toLowerCase().includes(lowerSearchText),
				);

				return filteredSubOptions.length > 0
					? { ...option, options: filteredSubOptions }
					: undefined;
			}

			// Filter top-level options
			return option.label.toLowerCase().includes(lowerSearchText)
				? option
				: undefined;
		})
		.filter(Boolean) as OptionData[];
};
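As a rough usage sketch of the helpers above (the option data and import paths are assumed for illustration): prioritizeOrAddOptionForMultiSelect lifts the selected values out of their groups to the front of the list and creates entries for values that do not exist yet, while filterOptionsBySearch narrows both flat and grouped options by label.

import { OptionData } from 'components/NewSelect/types'; // assumed path alias
import {
	filterOptionsBySearch,
	prioritizeOrAddOptionForMultiSelect,
} from 'components/NewSelect/utils';

const options: OptionData[] = [
	{
		label: 'Services',
		options: [
			{ label: 'frontend', value: 'frontend' },
			{ label: 'checkout', value: 'checkout' },
		],
	},
	{ label: 'redis', value: 'redis' },
];

// 'frontend' is pulled out of its group and 'custom-svc' is created on the fly;
// both are returned ahead of the remaining, untouched options.
const prioritized = prioritizeOrAddOptionForMultiSelect(options, [
	'frontend',
	'custom-svc',
]);

// Keeps the 'Services' group with only 'checkout' and drops 'redis',
// since only 'checkout' contains the search text.
const filtered = filterOptionsBySearch(options, 'che');

console.log(prioritized, filtered);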
@@ -17,7 +17,6 @@ const ROUTES = {
|
||||
'/get-started/infrastructure-monitoring',
|
||||
GET_STARTED_AWS_MONITORING: '/get-started/aws-monitoring',
|
||||
GET_STARTED_AZURE_MONITORING: '/get-started/azure-monitoring',
|
||||
USAGE_EXPLORER: '/usage-explorer',
|
||||
APPLICATION: '/services',
|
||||
ALL_DASHBOARD: '/dashboard',
|
||||
DASHBOARD: '/dashboard/:dashboardId',
|
||||
|
||||
@@ -133,231 +133,3 @@ const ServicesListTable = memo(
|
||||
),
|
||||
);
|
||||
ServicesListTable.displayName = 'ServicesListTable';
|
||||
|
||||
function ServiceMetrics({
|
||||
onUpdateChecklistDoneItem,
|
||||
loadingUserPreferences,
|
||||
}: {
|
||||
onUpdateChecklistDoneItem: (itemKey: string) => void;
|
||||
loadingUserPreferences: boolean;
|
||||
}): JSX.Element {
|
||||
const { selectedTime: globalSelectedInterval } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
|
||||
const { user, activeLicenseV3 } = useAppContext();
|
||||
|
||||
const [timeRange, setTimeRange] = useState(() => {
|
||||
const now = new Date().getTime();
|
||||
return {
|
||||
startTime: now - homeInterval,
|
||||
endTime: now,
|
||||
selectedInterval: homeInterval,
|
||||
};
|
||||
});
|
||||
|
||||
const { queries } = useResourceAttribute();
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
|
||||
const selectedTags = useMemo(
|
||||
() => (convertRawQueriesToTraceSelectedTags(queries) as Tags[]) || [],
|
||||
[queries],
|
||||
);
|
||||
|
||||
const [isError, setIsError] = useState(false);
|
||||
|
||||
const queryKey: QueryKey = useMemo(
|
||||
() => [
|
||||
timeRange.startTime,
|
||||
timeRange.endTime,
|
||||
selectedTags,
|
||||
globalSelectedInterval,
|
||||
],
|
||||
[
|
||||
timeRange.startTime,
|
||||
timeRange.endTime,
|
||||
selectedTags,
|
||||
globalSelectedInterval,
|
||||
],
|
||||
);
|
||||
|
||||
const {
|
||||
data,
|
||||
isLoading: isLoadingTopLevelOperations,
|
||||
isError: isErrorTopLevelOperations,
|
||||
} = useGetTopLevelOperations(queryKey, {
|
||||
start: timeRange.startTime * 1e6,
|
||||
end: timeRange.endTime * 1e6,
|
||||
});
|
||||
|
||||
const handleTimeIntervalChange = useCallback((value: number): void => {
|
||||
const timeInterval = TIME_PICKER_OPTIONS.find(
|
||||
(option) => option.value === value,
|
||||
);
|
||||
|
||||
logEvent('Homepage: Services time interval updated', {
|
||||
updatedTimeInterval: timeInterval?.label,
|
||||
});
|
||||
|
||||
const now = new Date();
|
||||
setTimeRange({
|
||||
startTime: now.getTime() - value,
|
||||
endTime: now.getTime(),
|
||||
selectedInterval: value,
|
||||
});
|
||||
}, []);
|
||||
|
||||
const topLevelOperations = useMemo(() => Object.entries(data || {}), [data]);
|
||||
|
||||
const queryRangeRequestData = useMemo(
|
||||
() =>
|
||||
getQueryRangeRequestData({
|
||||
topLevelOperations,
|
||||
minTime: timeRange.startTime * 1e6,
|
||||
maxTime: timeRange.endTime * 1e6,
|
||||
globalSelectedInterval,
|
||||
}),
|
||||
[
|
||||
globalSelectedInterval,
|
||||
timeRange.endTime,
|
||||
timeRange.startTime,
|
||||
topLevelOperations,
|
||||
],
|
||||
);
|
||||
|
||||
const dataQueries = useGetQueriesRange(
|
||||
queryRangeRequestData,
|
||||
ENTITY_VERSION_V4,
|
||||
{
|
||||
queryKey: useMemo(
|
||||
() => [
|
||||
`GetMetricsQueryRange-home-${globalSelectedInterval}`,
|
||||
timeRange.endTime,
|
||||
timeRange.startTime,
|
||||
globalSelectedInterval,
|
||||
],
|
||||
[globalSelectedInterval, timeRange.endTime, timeRange.startTime],
|
||||
),
|
||||
keepPreviousData: true,
|
||||
enabled: true,
|
||||
refetchOnMount: false,
|
||||
onError: () => {
|
||||
setIsError(true);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const isLoading = useMemo(() => dataQueries.some((query) => query.isLoading), [
|
||||
dataQueries,
|
||||
]);
|
||||
|
||||
const services: ServicesList[] = useMemo(
|
||||
() =>
|
||||
getServiceListFromQuery({
|
||||
queries: dataQueries,
|
||||
topLevelOperations,
|
||||
isLoading,
|
||||
}),
|
||||
[dataQueries, topLevelOperations, isLoading],
|
||||
);
|
||||
|
||||
const sortedServices = useMemo(
|
||||
() =>
|
||||
services?.sort((a, b) => {
|
||||
const aUpdateAt = new Date(a.p99).getTime();
|
||||
const bUpdateAt = new Date(b.p99).getTime();
|
||||
return bUpdateAt - aUpdateAt;
|
||||
}) || [],
|
||||
[services],
|
||||
);
|
||||
|
||||
const servicesExist = sortedServices.length > 0;
|
||||
const top5Services = useMemo(() => sortedServices.slice(0, 5), [
|
||||
sortedServices,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!loadingUserPreferences && servicesExist) {
|
||||
onUpdateChecklistDoneItem('SETUP_SERVICES');
|
||||
}
|
||||
}, [onUpdateChecklistDoneItem, loadingUserPreferences, servicesExist]);
|
||||
|
||||
const handleRowClick = useCallback(
|
||||
(record: ServicesList) => {
|
||||
logEvent('Homepage: Service clicked', {
|
||||
serviceName: record.serviceName,
|
||||
});
|
||||
safeNavigate(`${ROUTES.APPLICATION}/${record.serviceName}`);
|
||||
},
|
||||
[safeNavigate],
|
||||
);
|
||||
|
||||
if (isLoadingTopLevelOperations || isLoading) {
|
||||
return (
|
||||
<Card className="services-list-card home-data-card loading-card">
|
||||
<Card.Content>
|
||||
<Skeleton active />
|
||||
</Card.Content>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
if (isErrorTopLevelOperations || isError) {
|
||||
return (
|
||||
<Card className="services-list-card home-data-card error-card">
|
||||
<Card.Content>
|
||||
<Skeleton active />
|
||||
</Card.Content>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Card className="services-list-card home-data-card">
|
||||
{servicesExist && (
|
||||
<Card.Header>
|
||||
<div className="services-header home-data-card-header">
|
||||
{' '}
|
||||
Services
|
||||
<div className="services-header-actions">
|
||||
<Select
|
||||
value={timeRange.selectedInterval}
|
||||
onChange={handleTimeIntervalChange}
|
||||
options={TIME_PICKER_OPTIONS}
|
||||
className="services-header-select"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</Card.Header>
|
||||
)}
|
||||
<Card.Content>
|
||||
{servicesExist ? (
|
||||
<ServicesListTable services={top5Services} onRowClick={handleRowClick} />
|
||||
) : (
|
||||
<EmptyState user={user} activeLicenseV3={activeLicenseV3} />
|
||||
)}
|
||||
</Card.Content>
|
||||
|
||||
{servicesExist && (
|
||||
<Card.Footer>
|
||||
<div className="services-footer home-data-card-footer">
|
||||
<Link to="/services">
|
||||
<Button
|
||||
type="link"
|
||||
className="periscope-btn link learn-more-link"
|
||||
onClick={(): void => {
|
||||
logEvent('Homepage: All Services clicked', {});
|
||||
}}
|
||||
>
|
||||
All Services <ArrowRight size={12} />
|
||||
</Button>
|
||||
</Link>
|
||||
</div>
|
||||
</Card.Footer>
|
||||
)}
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
export default memo(ServiceMetrics);
|
||||
|
||||
@@ -21,17 +21,10 @@ function Services({
|
||||
return (
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<div className="home-services-container">
|
||||
{isSpanMetricEnabled ? (
|
||||
<ServiceMetrics
|
||||
<ServiceTraces
|
||||
onUpdateChecklistDoneItem={onUpdateChecklistDoneItem}
|
||||
loadingUserPreferences={loadingUserPreferences}
|
||||
/>
|
||||
) : (
|
||||
<ServiceTraces
|
||||
onUpdateChecklistDoneItem={onUpdateChecklistDoneItem}
|
||||
loadingUserPreferences={loadingUserPreferences}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</Sentry.ErrorBoundary>
|
||||
);
|
||||
|
||||
@@ -481,7 +481,6 @@ export const apDexMetricsQueryBuilderQueries = ({
|
||||
export const operationPerSec = ({
|
||||
servicename,
|
||||
tagFilterItems,
|
||||
topLevelOperations,
|
||||
}: OperationPerSecProps): QueryBuilderData => {
|
||||
const autocompleteData: BaseAutocompleteData[] = [
|
||||
{
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import getTopLevelOperations, {
|
||||
ServiceDataProps,
|
||||
} from 'api/metrics/getTopLevelOperations';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
@@ -110,21 +107,6 @@ function Application(): JSX.Element {
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
const {
|
||||
data: topLevelOperations,
|
||||
error: topLevelOperationsError,
|
||||
isLoading: topLevelOperationsIsLoading,
|
||||
isError: topLevelOperationsIsError,
|
||||
} = useQuery<ServiceDataProps>({
|
||||
queryKey: [servicename, minTime, maxTime],
|
||||
queryFn: (): Promise<ServiceDataProps> =>
|
||||
getTopLevelOperations({
|
||||
service: servicename || '',
|
||||
start: minTime,
|
||||
end: maxTime,
|
||||
}),
|
||||
});
|
||||
|
||||
const selectedTraceTags: string = JSON.stringify(
|
||||
convertRawQueriesToTraceSelectedTags(queries) || [],
|
||||
);
|
||||
@@ -137,14 +119,6 @@ function Application(): JSX.Element {
|
||||
[queries],
|
||||
);
|
||||
|
||||
const topLevelOperationsRoute = useMemo(
|
||||
() =>
|
||||
topLevelOperations
|
||||
? defaultTo(topLevelOperations[servicename || ''], [])
|
||||
: [],
|
||||
[servicename, topLevelOperations],
|
||||
);
|
||||
|
||||
const operationPerSecWidget = useMemo(
|
||||
() =>
|
||||
getWidgetQueryBuilder({
|
||||
|
||||
@@ -3,7 +3,7 @@ import 'dayjs/locale/en';
|
||||
|
||||
import { PlusOutlined } from '@ant-design/icons';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Button, Flex, Form, Input, Typography } from 'antd';
|
||||
import { Button, Flex, Form, Input, Tooltip, Typography } from 'antd';
|
||||
import getAll from 'api/alerts/getAll';
|
||||
import { useDeleteDowntimeSchedule } from 'api/plannedDowntime/deleteDowntimeSchedule';
|
||||
import {
|
||||
@@ -13,8 +13,10 @@ import {
|
||||
import dayjs from 'dayjs';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { Search } from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import React, { ChangeEvent, useEffect, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
|
||||
import { PlannedDowntimeDeleteModal } from './PlannedDowntimeDeleteModal';
|
||||
import { PlannedDowntimeForm } from './PlannedDowntimeForm';
|
||||
@@ -33,6 +35,7 @@ export function PlannedDowntime(): JSX.Element {
|
||||
});
|
||||
const [isOpen, setIsOpen] = React.useState(false);
|
||||
const [form] = Form.useForm();
|
||||
const { user } = useAppContext();
|
||||
|
||||
const [initialValues, setInitialValues] = useState<
|
||||
Partial<DowntimeSchedules & { editMode: boolean }>
|
||||
@@ -108,18 +111,27 @@ export function PlannedDowntime(): JSX.Element {
|
||||
value={searchValue}
|
||||
onChange={handleSearch}
|
||||
/>
|
||||
<Button
|
||||
icon={<PlusOutlined />}
|
||||
type="primary"
|
||||
onClick={(): void => {
|
||||
setInitialValues({ ...defautlInitialValues, editMode: false });
|
||||
setIsOpen(true);
|
||||
setEditMode(false);
|
||||
form.resetFields();
|
||||
}}
|
||||
<Tooltip
|
||||
title={
|
||||
user?.role === USER_ROLES.VIEWER
|
||||
? 'You need edit permissions to create a planned downtime'
|
||||
: ''
|
||||
}
|
||||
>
|
||||
New downtime
|
||||
</Button>
|
||||
<Button
|
||||
icon={<PlusOutlined />}
|
||||
type="primary"
|
||||
onClick={(): void => {
|
||||
setInitialValues({ ...defautlInitialValues, editMode: false });
|
||||
setIsOpen(true);
|
||||
setEditMode(false);
|
||||
form.resetFields();
|
||||
}}
|
||||
disabled={user?.role === USER_ROLES.VIEWER}
|
||||
>
|
||||
New downtime
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</Flex>
|
||||
<br />
|
||||
<PlannedDowntimeList
|
||||
|
||||
@@ -0,0 +1,44 @@
import { screen } from '@testing-library/react';
import { render } from 'tests/test-utils';
import { USER_ROLES } from 'types/roles';

import { PlannedDowntime } from '../PlannedDowntime';

describe('PlannedDowntime Component', () => {
	it('renders the PlannedDowntime component properly', () => {
		render(<PlannedDowntime />, {}, 'ADMIN');

		// Check if title is rendered
		expect(screen.getByText('Planned Downtime')).toBeInTheDocument();

		// Check if subtitle is rendered
		expect(
			screen.getByText('Create and manage planned downtimes.'),
		).toBeInTheDocument();

		// Check if search input is rendered
		expect(
			screen.getByPlaceholderText('Search for a planned downtime...'),
		).toBeInTheDocument();

		// Check if "New downtime" button is enabled for ADMIN
		const newDowntimeButton = screen.getByRole('button', {
			name: /new downtime/i,
		});
		expect(newDowntimeButton).toBeInTheDocument();
		expect(newDowntimeButton).not.toBeDisabled();
	});

	it('disables the "New downtime" button for users with VIEWER role', () => {
		render(<PlannedDowntime />, {}, USER_ROLES.VIEWER);

		// Check if "New downtime" button is disabled for VIEWER
		const newDowntimeButton = screen.getByRole('button', {
			name: /new downtime/i,
		});
		expect(newDowntimeButton).toBeInTheDocument();
		expect(newDowntimeButton).toBeDisabled();

		expect(newDowntimeButton).toHaveAttribute('disabled');
	});
});
@@ -1,224 +0,0 @@
|
||||
/* eslint-disable */
|
||||
//@ts-nocheck
|
||||
|
||||
import { Select, Space, Typography } from 'antd';
|
||||
import Graph from 'components/Graph';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { connect, useSelector } from 'react-redux';
|
||||
import { withRouter } from 'react-router-dom';
|
||||
import { GetService, getUsageData, UsageDataItem } from 'store/actions';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalTime } from 'types/actions/globalTime';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import MetricReducer from 'types/reducer/metrics';
|
||||
import { isOnboardingSkipped } from 'utils/app';
|
||||
|
||||
import { Card } from './styles';
|
||||
|
||||
const { Option } = Select;
|
||||
|
||||
interface UsageExplorerProps {
|
||||
usageData: UsageDataItem[];
|
||||
getUsageData: (
|
||||
minTime: number,
|
||||
maxTime: number,
|
||||
selectedInterval: number,
|
||||
selectedService: string,
|
||||
) => void;
|
||||
getServicesList: ({
|
||||
selectedTimeInterval,
|
||||
}: {
|
||||
selectedTimeInterval: GlobalReducer['selectedTime'];
|
||||
}) => void;
|
||||
globalTime: GlobalTime;
|
||||
servicesList: servicesListItem[];
|
||||
totalCount: number;
|
||||
}
|
||||
const timeDaysOptions = [
|
||||
{ value: 30, label: 'Last 30 Days' },
|
||||
{ value: 7, label: 'Last week' },
|
||||
{ value: 1, label: 'Last day' },
|
||||
];
|
||||
|
||||
const interval = [
|
||||
{
|
||||
value: 604800,
|
||||
chartDivideMultiplier: 1,
|
||||
label: 'Weekly',
|
||||
applicableOn: [timeDaysOptions[0]],
|
||||
},
|
||||
{
|
||||
value: 86400,
|
||||
chartDivideMultiplier: 30,
|
||||
label: 'Daily',
|
||||
applicableOn: [timeDaysOptions[0], timeDaysOptions[1]],
|
||||
},
|
||||
{
|
||||
value: 3600,
|
||||
chartDivideMultiplier: 10,
|
||||
label: 'Hours',
|
||||
applicableOn: [timeDaysOptions[2], timeDaysOptions[1]],
|
||||
},
|
||||
];
|
||||
|
||||
function _UsageExplorer(props: UsageExplorerProps): JSX.Element {
|
||||
const [selectedTime, setSelectedTime] = useState(timeDaysOptions[1]);
|
||||
const [selectedInterval, setSelectedInterval] = useState(interval[2]);
|
||||
const [selectedService, setSelectedService] = useState<string>('');
|
||||
const { selectedTime: globalSelectedTime } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
const {
|
||||
getServicesList,
|
||||
getUsageData,
|
||||
globalTime,
|
||||
totalCount,
|
||||
usageData,
|
||||
} = props;
|
||||
const { services } = useSelector<AppState, MetricReducer>(
|
||||
(state) => state.metrics,
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (selectedTime && selectedInterval) {
|
||||
const maxTime = new Date().getTime() * 1000000;
|
||||
const minTime = maxTime - selectedTime.value * 24 * 3600000 * 1000000;
|
||||
|
||||
getUsageData(minTime, maxTime, selectedInterval.value, selectedService);
|
||||
}
|
||||
}, [selectedTime, selectedInterval, selectedService, getUsageData]);
|
||||
|
||||
useEffect(() => {
|
||||
getServicesList({
|
||||
selectedTimeInterval: globalSelectedTime,
|
||||
});
|
||||
}, [globalTime, getServicesList, globalSelectedTime]);
|
||||
|
||||
const data = {
|
||||
labels: usageData.map((s) => new Date(s.timestamp / 1000000)),
|
||||
datasets: [
|
||||
{
|
||||
label: 'Span Count',
|
||||
data: usageData.map((s) => s.count),
|
||||
backgroundColor: 'rgba(255, 99, 132, 0.2)',
|
||||
borderColor: 'rgba(255, 99, 132, 1)',
|
||||
borderWidth: 2,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<Space style={{ marginTop: 40, marginLeft: 20 }}>
|
||||
<Space>
|
||||
<Select
|
||||
onSelect={(value): void => {
|
||||
setSelectedTime(
|
||||
timeDaysOptions.filter((item) => item.value == parseInt(value))[0],
|
||||
);
|
||||
}}
|
||||
value={selectedTime.label}
|
||||
>
|
||||
{timeDaysOptions.map(({ value, label }) => (
|
||||
<Option key={value} value={value}>
|
||||
{label}
|
||||
</Option>
|
||||
))}
|
||||
</Select>
|
||||
</Space>
|
||||
<Space>
|
||||
<Select
|
||||
onSelect={(value): void => {
|
||||
setSelectedInterval(
|
||||
interval.filter((item) => item.value === parseInt(value))[0],
|
||||
);
|
||||
}}
|
||||
value={selectedInterval.label}
|
||||
>
|
||||
{interval
|
||||
.filter((interval) => interval.applicableOn.includes(selectedTime))
|
||||
.map((item) => (
|
||||
<Option key={item.label} value={item.value}>
|
||||
{item.label}
|
||||
</Option>
|
||||
))}
|
||||
</Select>
|
||||
</Space>
|
||||
|
||||
<Space>
|
||||
<Select
|
||||
onSelect={(value): void => {
|
||||
setSelectedService(value);
|
||||
}}
|
||||
value={selectedService || 'All Services'}
|
||||
>
|
||||
<Option value="">All Services</Option>
|
||||
{services?.map((service) => (
|
||||
<Option key={service.serviceName} value={service.serviceName}>
|
||||
{service.serviceName}
|
||||
</Option>
|
||||
))}
|
||||
</Select>
|
||||
</Space>
|
||||
|
||||
{isOnboardingSkipped() && totalCount === 0 ? (
|
||||
<Space
|
||||
style={{
|
||||
width: '100%',
|
||||
margin: '40px 0',
|
||||
marginLeft: 20,
|
||||
justifyContent: 'center',
|
||||
}}
|
||||
>
|
||||
<Typography>
|
||||
No spans found. Please add instrumentation (follow this
|
||||
<a
|
||||
href="https://signoz.io/docs/instrumentation/overview"
|
||||
target="_blank"
|
||||
style={{ marginLeft: 3 }}
|
||||
rel="noreferrer"
|
||||
>
|
||||
guide
|
||||
</a>
|
||||
)
|
||||
</Typography>
|
||||
</Space>
|
||||
) : (
|
||||
<Space style={{ display: 'block', marginLeft: 20, width: 200 }}>
|
||||
<Typography>{`Total count is ${totalCount}`}</Typography>
|
||||
</Space>
|
||||
)}
|
||||
</Space>
|
||||
|
||||
<Card>
|
||||
<Graph name="usage" data={data} type="bar" />
|
||||
</Card>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
const mapStateToProps = (
|
||||
state: AppState,
|
||||
): {
|
||||
totalCount: number;
|
||||
globalTime: GlobalTime;
|
||||
usageData: UsageDataItem[];
|
||||
} => {
|
||||
let totalCount = 0;
|
||||
for (const item of state.usageDate) {
|
||||
totalCount += item.count;
|
||||
}
|
||||
return {
|
||||
totalCount,
|
||||
usageData: state.usageDate,
|
||||
globalTime: state.globalTime,
|
||||
};
|
||||
};
|
||||
|
||||
export const UsageExplorer = withRouter(
|
||||
connect(mapStateToProps, {
|
||||
getUsageData,
|
||||
getServicesList: GetService,
|
||||
})(_UsageExplorer),
|
||||
);
|
||||
@@ -1,7 +0,0 @@
|
||||
import { UsageExplorer } from './UsageExplorer';
|
||||
|
||||
function UsageExplorerContainer(): JSX.Element {
|
||||
return <UsageExplorer />;
|
||||
}
|
||||
|
||||
export default UsageExplorerContainer;
|
||||
@@ -1,13 +0,0 @@
|
||||
import { Card as CardComponent } from 'antd';
|
||||
import styled from 'styled-components';
|
||||
|
||||
export const Card = styled(CardComponent)`
|
||||
&&& {
|
||||
width: 90%;
|
||||
margin-top: 2rem;
|
||||
}
|
||||
|
||||
.ant-card-body {
|
||||
height: 70vh;
|
||||
}
|
||||
`;
|
||||
@@ -2,4 +2,3 @@ export * from './global';
|
||||
export * from './metrics';
|
||||
export * from './serviceMap';
|
||||
export * from './types';
|
||||
export * from './usage';
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
import GetLogs from 'api/logs/GetLogs';
|
||||
import { Dispatch } from 'redux';
|
||||
import AppActions from 'types/actions';
|
||||
import { SET_LOADING, SET_LOGS } from 'types/actions/logs';
|
||||
import { Props } from 'types/api/logs/getLogs';
|
||||
|
||||
export const getLogs = (
|
||||
props: Props,
|
||||
): ((dispatch: Dispatch<AppActions>) => void) => async (
|
||||
dispatch,
|
||||
): Promise<void> => {
|
||||
dispatch({
|
||||
type: SET_LOADING,
|
||||
payload: true,
|
||||
});
|
||||
|
||||
const response = await GetLogs(props);
|
||||
|
||||
if (response.payload)
|
||||
dispatch({
|
||||
type: SET_LOGS,
|
||||
payload: response.payload,
|
||||
});
|
||||
else
|
||||
dispatch({
|
||||
type: SET_LOGS,
|
||||
payload: [],
|
||||
});
|
||||
|
||||
dispatch({
|
||||
type: SET_LOADING,
|
||||
payload: false,
|
||||
});
|
||||
};
|
||||
@@ -1,17 +1,14 @@
|
||||
import { ServiceMapItemAction, ServiceMapLoading } from './serviceMap';
|
||||
import { GetUsageDataAction } from './usage';
|
||||
|
||||
export enum ActionTypes {
|
||||
updateTimeInterval = 'UPDATE_TIME_INTERVAL',
|
||||
getServiceMapItems = 'GET_SERVICE_MAP_ITEMS',
|
||||
getServices = 'GET_SERVICES',
|
||||
getUsageData = 'GET_USAGE_DATE',
|
||||
fetchTraces = 'FETCH_TRACES',
|
||||
fetchTraceItem = 'FETCH_TRACE_ITEM',
|
||||
serviceMapLoading = 'UPDATE_SERVICE_MAP_LOADING',
|
||||
}
|
||||
|
||||
export type Action =
|
||||
| GetUsageDataAction
|
||||
| ServiceMapItemAction
|
||||
| ServiceMapLoading;
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
import api from 'api';
|
||||
import { Dispatch } from 'redux';
|
||||
import { toUTCEpoch } from 'utils/timeUtils';
|
||||
|
||||
import { ActionTypes } from './types';
|
||||
|
||||
export interface UsageDataItem {
|
||||
timestamp: number;
|
||||
count: number;
|
||||
}
|
||||
|
||||
export interface GetUsageDataAction {
|
||||
type: ActionTypes.getUsageData;
|
||||
payload: UsageDataItem[];
|
||||
}
|
||||
|
||||
export const getUsageData = (
|
||||
minTime: number,
|
||||
maxTime: number,
|
||||
step: number,
|
||||
service: string,
|
||||
) => async (dispatch: Dispatch): Promise<void> => {
|
||||
const requesString = `/usage?start=${toUTCEpoch(minTime)}&end=${toUTCEpoch(
|
||||
maxTime,
|
||||
)}&step=${step}&service=${service || ''}`;
|
||||
// Step can only be multiple of 3600
|
||||
const response = await api.get<UsageDataItem[]>(requesString);
|
||||
|
||||
dispatch<GetUsageDataAction>({
|
||||
type: ActionTypes.getUsageData,
|
||||
payload: response.data,
|
||||
// PNOTE - response.data in the axios response has the actual API response
|
||||
});
|
||||
};
|
||||
@@ -6,11 +6,9 @@ import { LogsReducer } from './logs';
|
||||
import metricsReducers from './metric';
|
||||
import { ServiceMapReducer } from './serviceMap';
|
||||
import traceReducer from './trace';
|
||||
import { usageDataReducer } from './usage';
|
||||
|
||||
const reducers = combineReducers({
|
||||
traces: traceReducer,
|
||||
usageDate: usageDataReducer,
|
||||
globalTime: globalTimeReducer,
|
||||
serviceMap: ServiceMapReducer,
|
||||
app: appReducer,
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-small-switch */
|
||||
import { Action, ActionTypes, UsageDataItem } from 'store/actions';
|
||||
|
||||
export const usageDataReducer = (
|
||||
state: UsageDataItem[] = [{ timestamp: 0, count: 0 }],
|
||||
action: Action,
|
||||
): UsageDataItem[] => {
|
||||
switch (action.type) {
|
||||
case ActionTypes.getUsageData:
|
||||
return action.payload;
|
||||
default:
|
||||
return state;
|
||||
}
|
||||
};
|
||||
@@ -5522,10 +5522,10 @@ axe-core@^4.6.2:
|
||||
resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.7.0.tgz"
|
||||
integrity sha512-M0JtH+hlOL5pLQwHOLNYZaXuhqmvS8oExsqB1SBYgA4Dk7u/xx+YdGHXaK5pyUfed5mYXdlYiphWq3G8cRi5JQ==
|
||||
|
||||
axios@1.7.7:
|
||||
version "1.7.7"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.7.tgz#2f554296f9892a72ac8d8e4c5b79c14a91d0a47f"
|
||||
integrity sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==
|
||||
axios@1.8.2:
|
||||
version "1.8.2"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-1.8.2.tgz#fabe06e241dfe83071d4edfbcaa7b1c3a40f7979"
|
||||
integrity sha512-ls4GYBm5aig9vWx8AWDSGLpnpDQRtWAfrjU+EuytuODrFBkqesN2RkOQCBzrA1RQNHw1SmRMSDDDSwzNAYQ6Rg==
|
||||
dependencies:
|
||||
follow-redirects "^1.15.6"
|
||||
form-data "^4.0.0"
|
||||
|
||||
@@ -6,31 +6,31 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
CodeInvalidInput code = code{"invalid_input"}
|
||||
CodeInternal = code{"internal"}
|
||||
CodeUnsupported = code{"unsupported"}
|
||||
CodeNotFound = code{"not_found"}
|
||||
CodeMethodNotAllowed = code{"method_not_allowed"}
|
||||
CodeAlreadyExists = code{"already_exists"}
|
||||
CodeUnauthenticated = code{"unauthenticated"}
|
||||
CodeForbidden = code{"forbidden"}
|
||||
CodeInvalidInput Code = Code{"invalid_input"}
|
||||
CodeInternal = Code{"internal"}
|
||||
CodeUnsupported = Code{"unsupported"}
|
||||
CodeNotFound = Code{"not_found"}
|
||||
CodeMethodNotAllowed = Code{"method_not_allowed"}
|
||||
CodeAlreadyExists = Code{"already_exists"}
|
||||
CodeUnauthenticated = Code{"unauthenticated"}
|
||||
CodeForbidden = Code{"forbidden"}
|
||||
)
|
||||
|
||||
var (
|
||||
codeRegex = regexp.MustCompile(`^[a-z_]+$`)
|
||||
)
|
||||
|
||||
type code struct{ s string }
|
||||
type Code struct{ s string }
|
||||
|
||||
func NewCode(s string) (code, error) {
|
||||
func NewCode(s string) (Code, error) {
|
||||
if !codeRegex.MatchString(s) {
|
||||
return code{}, fmt.Errorf("invalid code: %v", s)
|
||||
return Code{}, fmt.Errorf("invalid code: %v", s)
|
||||
}
|
||||
|
||||
return code{s: s}, nil
|
||||
return Code{s: s}, nil
|
||||
}
|
||||
|
||||
func MustNewCode(s string) code {
|
||||
func MustNewCode(s string) Code {
|
||||
code, err := NewCode(s)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
@@ -39,6 +39,6 @@ func MustNewCode(s string) code {
|
||||
return code
|
||||
}
|
||||
|
||||
func (c code) String() string {
|
||||
func (c Code) String() string {
|
||||
return c.s
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
codeUnknown code = MustNewCode("unknown")
|
||||
codeUnknown Code = MustNewCode("unknown")
|
||||
)
|
||||
|
||||
// base is the fundamental struct that implements the error interface.
|
||||
@@ -16,7 +16,7 @@ type base struct {
|
||||
// t denotes the custom type of the error.
|
||||
t typ
|
||||
// c denotes the short code for the error message.
|
||||
c code
|
||||
c Code
|
||||
// m contains error message passed through errors.New.
|
||||
m string
|
||||
// e is the actual error being wrapped.
|
||||
@@ -47,7 +47,7 @@ func (b *base) Error() string {
|
||||
}
|
||||
|
||||
// New returns a base error. It requires type, code and message as input.
|
||||
func New(t typ, code code, message string) *base {
|
||||
func New(t typ, code Code, message string) *base {
|
||||
return &base{
|
||||
t: t,
|
||||
c: code,
|
||||
@@ -59,7 +59,7 @@ func New(t typ, code code, message string) *base {
|
||||
}
|
||||
|
||||
// Newf returns a new base by formatting the error message with the supplied format specifier.
|
||||
func Newf(t typ, code code, format string, args ...interface{}) *base {
|
||||
func Newf(t typ, code Code, format string, args ...interface{}) *base {
|
||||
return &base{
|
||||
t: t,
|
||||
c: code,
|
||||
@@ -70,7 +70,7 @@ func Newf(t typ, code code, format string, args ...interface{}) *base {
|
||||
|
||||
// Wrapf returns a new error by formatting the error message with the supplied format specifier
|
||||
// and wrapping another error with base.
|
||||
func Wrapf(cause error, t typ, code code, format string, args ...interface{}) *base {
|
||||
func Wrapf(cause error, t typ, code Code, format string, args ...interface{}) *base {
|
||||
return &base{
|
||||
t: t,
|
||||
c: code,
|
||||
@@ -110,7 +110,7 @@ func (b *base) WithAdditional(a ...string) *base {
|
||||
// and the error itself.
|
||||
//
|
||||
//lint:ignore ST1008 we want to return arguments in the 'TCMEUA' order of the struct
|
||||
func Unwrapb(cause error) (typ, code, string, error, string, []string) {
|
||||
func Unwrapb(cause error) (typ, Code, string, error, string, []string) {
|
||||
base, ok := cause.(*base)
|
||||
if ok {
|
||||
return base.t, base.c, base.m, base.e, base.u, base.a
|
||||
@@ -127,7 +127,7 @@ func Ast(cause error, typ typ) bool {
|
||||
}
|
||||
|
||||
// Ast checks if the provided error matches the specified custom error code.
|
||||
func Asc(cause error, code code) bool {
|
||||
func Asc(cause error, code Code) bool {
|
||||
_, c, _, _, _, _ := Unwrapb(cause)
|
||||
|
||||
return c.s == code.s
|
||||
@@ -137,3 +137,7 @@ func Asc(cause error, code code) bool {
|
||||
func Join(errs ...error) error {
|
||||
return errors.Join(errs...)
|
||||
}
|
||||
|
||||
func As(err error, target any) bool {
|
||||
return errors.As(err, target)
|
||||
}
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
package implorganization
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
@@ -12,16 +14,19 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type organizationAPI struct {
|
||||
type handler struct {
|
||||
module organization.Module
|
||||
}
|
||||
|
||||
func NewAPI(module organization.Module) organization.API {
|
||||
return &organizationAPI{module: module}
|
||||
func NewHandler(module organization.Module) organization.Handler {
|
||||
return &handler{module: module}
|
||||
}
|
||||
|
||||
func (api *organizationAPI) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -29,11 +34,11 @@ func (api *organizationAPI) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid org id"))
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
|
||||
return
|
||||
}
|
||||
|
||||
organization, err := api.module.Get(r.Context(), orgID)
|
||||
organization, err := handler.module.Get(ctx, orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -42,18 +47,11 @@ func (api *organizationAPI) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
render.Success(rw, http.StatusOK, organization)
|
||||
}
|
||||
|
||||
func (api *organizationAPI) GetAll(rw http.ResponseWriter, r *http.Request) {
|
||||
organizations, err := api.module.GetAll(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
func (handler *handler) Update(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
render.Success(rw, http.StatusOK, organizations)
|
||||
}
|
||||
|
||||
func (api *organizationAPI) Update(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -72,7 +70,7 @@ func (api *organizationAPI) Update(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
req.ID = orgID
|
||||
err = api.module.Update(r.Context(), req)
|
||||
err = handler.module.Update(ctx, req)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -8,26 +8,26 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type organizationModule struct {
|
||||
type module struct {
|
||||
store types.OrganizationStore
|
||||
}
|
||||
|
||||
func NewModule(organizationStore types.OrganizationStore) organization.Module {
|
||||
return &organizationModule{store: organizationStore}
|
||||
return &module{store: organizationStore}
|
||||
}
|
||||
|
||||
func (o *organizationModule) Create(ctx context.Context, organization *types.Organization) error {
|
||||
return o.store.Create(ctx, organization)
|
||||
func (module *module) Create(ctx context.Context, organization *types.Organization) error {
|
||||
return module.store.Create(ctx, organization)
|
||||
}
|
||||
|
||||
func (o *organizationModule) Get(ctx context.Context, id valuer.UUID) (*types.Organization, error) {
|
||||
return o.store.Get(ctx, id)
|
||||
func (module *module) Get(ctx context.Context, id valuer.UUID) (*types.Organization, error) {
|
||||
return module.store.Get(ctx, id)
|
||||
}
|
||||
|
||||
func (o *organizationModule) GetAll(ctx context.Context) ([]*types.Organization, error) {
|
||||
return o.store.GetAll(ctx)
|
||||
func (module *module) GetAll(ctx context.Context) ([]*types.Organization, error) {
|
||||
return module.store.GetAll(ctx)
|
||||
}
|
||||
|
||||
func (o *organizationModule) Update(ctx context.Context, updatedOrganization *types.Organization) error {
|
||||
return o.store.Update(ctx, updatedOrganization)
|
||||
func (module *module) Update(ctx context.Context, updatedOrganization *types.Organization) error {
|
||||
return module.store.Update(ctx, updatedOrganization)
|
||||
}
|
||||
|
||||
@@ -2,77 +2,69 @@ package implorganization
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type store struct {
|
||||
store sqlstore.SQLStore
|
||||
sqlstore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func NewStore(db sqlstore.SQLStore) types.OrganizationStore {
|
||||
return &store{store: db}
|
||||
func NewStore(sqlstore sqlstore.SQLStore) types.OrganizationStore {
|
||||
return &store{sqlstore: sqlstore}
|
||||
}
|
||||
|
||||
func (s *store) Create(ctx context.Context, organization *types.Organization) error {
|
||||
_, err := s.
|
||||
store.
|
||||
func (store *store) Create(ctx context.Context, organization *types.Organization) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewInsert().
|
||||
Model(organization).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create organization")
|
||||
return store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrOrganizationAlreadyExists, "organization with name: %s already exists", organization.Name)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *store) Get(ctx context.Context, id valuer.UUID) (*types.Organization, error) {
|
||||
func (store *store) Get(ctx context.Context, id valuer.UUID) (*types.Organization, error) {
|
||||
organization := new(types.Organization)
|
||||
err := s.
|
||||
store.
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(organization).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "no organization found with id: %s", id.StringValue())
|
||||
}
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get organization with id: %s", id.StringValue())
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with id: %s does not exist", id.StringValue())
|
||||
}
|
||||
|
||||
return organization, nil
|
||||
}
|
||||
|
||||
func (s *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
|
||||
func (store *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
|
||||
organizations := make([]*types.Organization, 0)
|
||||
err := s.
|
||||
store.
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&organizations).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "no organizations found")
|
||||
}
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get all organizations")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return organizations, nil
|
||||
}
|
||||
|
||||
func (s *store) Update(ctx context.Context, organization *types.Organization) error {
|
||||
_, err := s.
|
||||
store.
|
||||
func (store *store) Update(ctx context.Context, organization *types.Organization) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewUpdate().
|
||||
Model(organization).
|
||||
@@ -81,21 +73,21 @@ func (s *store) Update(ctx context.Context, organization *types.Organization) er
|
||||
Where("id = ?", organization.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to update organization with id: %s", organization.ID.StringValue())
|
||||
return store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrOrganizationAlreadyExists, "organization already exists")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *store) Delete(ctx context.Context, id valuer.UUID) error {
|
||||
_, err := s.
|
||||
store.
|
||||
func (store *store) Delete(ctx context.Context, id valuer.UUID) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewDelete().
|
||||
Model(new(types.Organization)).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete organization with id: %s", id.StringValue())
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
@@ -22,13 +22,10 @@ type Module interface {
|
||||
Update(context.Context, *types.Organization) error
|
||||
}
|
||||
|
||||
type API interface {
|
||||
type Handler interface {
|
||||
// Get gets the organization based on the id in claims
|
||||
Get(http.ResponseWriter, *http.Request)
|
||||
|
||||
// GetAll gets all the organizations
|
||||
GetAll(http.ResponseWriter, *http.Request)
|
||||
|
||||
// Update updates the organization based on the id in claims
|
||||
Update(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
|
||||
@@ -1,147 +0,0 @@
|
||||
package preference
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
type API interface {
|
||||
GetOrgPreference(http.ResponseWriter, *http.Request)
|
||||
UpdateOrgPreference(http.ResponseWriter, *http.Request)
|
||||
GetAllOrgPreferences(http.ResponseWriter, *http.Request)
|
||||
GetUserPreference(http.ResponseWriter, *http.Request)
|
||||
UpdateUserPreference(http.ResponseWriter, *http.Request)
|
||||
GetAllUserPreferences(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
|
||||
type preferenceAPI struct {
|
||||
usecase Usecase
|
||||
}
|
||||
|
||||
func NewAPI(usecase Usecase) API {
|
||||
return &preferenceAPI{usecase: usecase}
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) GetOrgPreference(rw http.ResponseWriter, r *http.Request) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
preference, err := p.usecase.GetOrgPreference(
|
||||
r.Context(), preferenceId, claims.OrgID,
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preference)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) UpdateOrgPreference(rw http.ResponseWriter, r *http.Request) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
req := preferencetypes.UpdatablePreference{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = p.usecase.UpdateOrgPreference(r.Context(), preferenceId, req.PreferenceValue, claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) GetAllOrgPreferences(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
preferences, err := p.usecase.GetAllOrgPreferences(
|
||||
r.Context(), claims.OrgID,
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preferences)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) GetUserPreference(rw http.ResponseWriter, r *http.Request) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
preference, err := p.usecase.GetUserPreference(
|
||||
r.Context(), preferenceId, claims.OrgID, claims.UserID,
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preference)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) UpdateUserPreference(rw http.ResponseWriter, r *http.Request) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
req := preferencetypes.UpdatablePreference{}
|
||||
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
err = p.usecase.UpdateUserPreference(r.Context(), preferenceId, req.PreferenceValue, claims.UserID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) GetAllUserPreferences(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
preferences, err := p.usecase.GetAllUserPreferences(
|
||||
r.Context(), claims.OrgID, claims.UserID,
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preferences)
|
||||
}
|
||||
176
pkg/modules/preference/implpreference/handler.go
Normal file
176
pkg/modules/preference/implpreference/handler.go
Normal file
@@ -0,0 +1,176 @@
|
||||
package implpreference
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
module preference.Module
|
||||
}
|
||||
|
||||
func NewHandler(module preference.Module) preference.Handler {
|
||||
return &handler{module: module}
|
||||
}
|
||||
|
||||
func (handler *handler) GetOrg(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, ok := mux.Vars(r)["preferenceId"]
|
||||
if !ok {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is required"))
|
||||
return
|
||||
}
|
||||
|
||||
preference, err := handler.module.GetOrg(ctx, id, claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preference)
|
||||
}
|
||||
|
||||
func (handler *handler) UpdateOrg(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, ok := mux.Vars(r)["preferenceId"]
|
||||
if !ok {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is required"))
|
||||
return
|
||||
}
|
||||
|
||||
req := new(preferencetypes.UpdatablePreference)
|
||||
|
||||
err = json.NewDecoder(r.Body).Decode(req)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.module.UpdateOrg(ctx, id, req.PreferenceValue, claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) GetAllOrg(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
preferences, err := handler.module.GetAllOrg(ctx, claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preferences)
|
||||
}
|
||||
|
||||
func (handler *handler) GetUser(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, ok := mux.Vars(r)["preferenceId"]
|
||||
if !ok {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is required"))
|
||||
return
|
||||
}
|
||||
|
||||
preference, err := handler.module.GetUser(ctx, id, claims.OrgID, claims.UserID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preference)
|
||||
}
|
||||
|
||||
func (handler *handler) UpdateUser(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, ok := mux.Vars(r)["preferenceId"]
|
||||
if !ok {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is required"))
|
||||
return
|
||||
}
|
||||
|
||||
req := new(preferencetypes.UpdatablePreference)
|
||||
err = json.NewDecoder(r.Body).Decode(req)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.module.UpdateUser(ctx, id, req.PreferenceValue, claims.UserID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) GetAllUser(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
preferences, err := handler.module.GetAllUser(ctx, claims.OrgID, claims.UserID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preferences)
|
||||
}
|
||||
@@ -1,10 +1,9 @@
|
||||
package core
|
||||
package implpreference
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
@@ -12,27 +11,28 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type usecase struct {
|
||||
store preferencetypes.PreferenceStore
|
||||
// Do not take inspiration from this code, it is a work in progress. See Organization module for a better implementation.
|
||||
type module struct {
|
||||
store preferencetypes.Store
|
||||
defaultMap map[string]preferencetypes.Preference
|
||||
}
|
||||
|
||||
func NewPreference(store preferencetypes.PreferenceStore, defaultMap map[string]preferencetypes.Preference) preference.Usecase {
|
||||
return &usecase{store: store, defaultMap: defaultMap}
|
||||
func NewModule(store preferencetypes.Store, defaultMap map[string]preferencetypes.Preference) preference.Module {
|
||||
return &module{store: store, defaultMap: defaultMap}
|
||||
}
|
||||
|
||||
func (usecase *usecase) GetOrgPreference(ctx context.Context, preferenceID string, orgID string) (*preferencetypes.GettablePreference, error) {
|
||||
preference, seen := usecase.defaultMap[preferenceID]
|
||||
func (module *module) GetOrg(ctx context.Context, preferenceID string, orgID string) (*preferencetypes.GettablePreference, error) {
|
||||
preference, seen := module.defaultMap[preferenceID]
|
||||
if !seen {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot find preference with id: %s", preferenceID)
|
||||
}
|
||||
|
||||
isPreferenceEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if !isPreferenceEnabled {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at org scope: %s", preferenceID))
|
||||
isEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if !isEnabled {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "preference is not enabled at org scope: %s", preferenceID)
|
||||
}
|
||||
|
||||
orgPreference, err := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
|
||||
org, err := module.store.GetOrg(ctx, orgID, preferenceID)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return &preferencetypes.GettablePreference{
|
||||
@@ -40,24 +40,24 @@ func (usecase *usecase) GetOrgPreference(ctx context.Context, preferenceID strin
|
||||
PreferenceValue: preference.DefaultValue,
|
||||
}, nil
|
||||
}
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the org preference: %s", preferenceID))
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in fetching the org preference: %s", preferenceID)
|
||||
}
|
||||
|
||||
return &preferencetypes.GettablePreference{
|
||||
PreferenceID: preferenceID,
|
||||
PreferenceValue: preference.SanitizeValue(orgPreference.PreferenceValue),
|
||||
PreferenceValue: preference.SanitizeValue(org.PreferenceValue),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) UpdateOrgPreference(ctx context.Context, preferenceID string, preferenceValue interface{}, orgID string) error {
|
||||
preference, seen := usecase.defaultMap[preferenceID]
|
||||
func (module *module) UpdateOrg(ctx context.Context, preferenceID string, preferenceValue interface{}, orgID string) error {
|
||||
preference, seen := module.defaultMap[preferenceID]
|
||||
if !seen {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot find preference with id: %s", preferenceID)
|
||||
}
|
||||
|
||||
isPreferenceEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if !isPreferenceEnabled {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at org scope: %s", preferenceID))
|
||||
isEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if !isEnabled {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "preference is not enabled at org scope: %s", preferenceID)
|
||||
}
|
||||
|
||||
err := preference.IsValidValue(preferenceValue)
|
||||
@@ -65,26 +65,26 @@ func (usecase *usecase) UpdateOrgPreference(ctx context.Context, preferenceID st
|
||||
return err
|
||||
}
|
||||
|
||||
storablePreferenceValue, encodeErr := json.Marshal(preferenceValue)
|
||||
storableValue, encodeErr := json.Marshal(preferenceValue)
|
||||
if encodeErr != nil {
|
||||
return errors.Wrapf(encodeErr, errors.TypeInvalidInput, errors.CodeInvalidInput, "error in encoding the preference value")
|
||||
}
|
||||
|
||||
orgPreference, dberr := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
|
||||
org, dberr := module.store.GetOrg(ctx, orgID, preferenceID)
|
||||
if dberr != nil && dberr != sql.ErrNoRows {
|
||||
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in getting the preference value")
|
||||
}
|
||||
|
||||
if dberr != nil {
|
||||
orgPreference.ID = valuer.GenerateUUID()
|
||||
orgPreference.PreferenceID = preferenceID
|
||||
orgPreference.PreferenceValue = string(storablePreferenceValue)
|
||||
orgPreference.OrgID = orgID
|
||||
org.ID = valuer.GenerateUUID()
|
||||
org.PreferenceID = preferenceID
|
||||
org.PreferenceValue = string(storableValue)
|
||||
org.OrgID = orgID
|
||||
} else {
|
||||
orgPreference.PreferenceValue = string(storablePreferenceValue)
|
||||
org.PreferenceValue = string(storableValue)
|
||||
}
|
||||
|
||||
dberr = usecase.store.UpsertOrgPreference(ctx, orgPreference)
|
||||
dberr = module.store.UpsertOrg(ctx, org)
|
||||
if dberr != nil {
|
||||
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in setting the preference value")
|
||||
}
|
||||
@@ -92,19 +92,19 @@ func (usecase *usecase) UpdateOrgPreference(ctx context.Context, preferenceID st
|
||||
return nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*preferencetypes.PreferenceWithValue, error) {
|
||||
allOrgPreferences := []*preferencetypes.PreferenceWithValue{}
|
||||
orgPreferences, err := usecase.store.GetAllOrgPreferences(ctx, orgID)
|
||||
func (module *module) GetAllOrg(ctx context.Context, orgID string) ([]*preferencetypes.PreferenceWithValue, error) {
|
||||
allOrgs := []*preferencetypes.PreferenceWithValue{}
|
||||
orgs, err := module.store.GetAllOrg(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all org preference values")
|
||||
}
|
||||
|
||||
preferenceValueMap := map[string]interface{}{}
|
||||
for _, preferenceValue := range orgPreferences {
|
||||
for _, preferenceValue := range orgs {
|
||||
preferenceValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
for _, preference := range usecase.defaultMap {
|
||||
for _, preference := range module.defaultMap {
|
||||
isEnabledForOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if isEnabledForOrgScope {
|
||||
preferenceWithValue := &preferencetypes.PreferenceWithValue{}
|
||||
@@ -126,16 +126,16 @@ func (usecase *usecase) GetAllOrgPreferences(ctx context.Context, orgID string)
|
||||
}
|
||||
|
||||
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
||||
allOrgPreferences = append(allOrgPreferences, preferenceWithValue)
|
||||
allOrgs = append(allOrgs, preferenceWithValue)
|
||||
}
|
||||
}
|
||||
return allOrgPreferences, nil
|
||||
return allOrgs, nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) GetUserPreference(ctx context.Context, preferenceID string, orgID string, userID string) (*preferencetypes.GettablePreference, error) {
|
||||
preference, seen := usecase.defaultMap[preferenceID]
|
||||
func (module *module) GetUser(ctx context.Context, preferenceID string, orgID string, userID string) (*preferencetypes.GettablePreference, error) {
|
||||
preference, seen := module.defaultMap[preferenceID]
|
||||
if !seen {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot find preference with id: %s", preferenceID)
|
||||
}
|
||||
|
||||
preferenceValue := preferencetypes.GettablePreference{
|
||||
@@ -143,29 +143,29 @@ func (usecase *usecase) GetUserPreference(ctx context.Context, preferenceID stri
|
||||
PreferenceValue: preference.DefaultValue,
|
||||
}
|
||||
|
||||
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||
if !isPreferenceEnabledAtUserScope {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at user scope: %s", preferenceID))
|
||||
isEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||
if !isEnabledAtUserScope {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "preference is not enabled at user scope: %s", preferenceID)
|
||||
}
|
||||
|
||||
isPreferenceEnabledAtOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if isPreferenceEnabledAtOrgScope {
|
||||
orgPreference, err := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
|
||||
isEnabledAtOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if isEnabledAtOrgScope {
|
||||
org, err := module.store.GetOrg(ctx, orgID, preferenceID)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the org preference: %s", preferenceID))
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in fetching the org preference: %s", preferenceID)
|
||||
}
|
||||
if err == nil {
|
||||
preferenceValue.PreferenceValue = orgPreference.PreferenceValue
|
||||
preferenceValue.PreferenceValue = org.PreferenceValue
|
||||
}
|
||||
}
|
||||
|
||||
userPreference, err := usecase.store.GetUserPreference(ctx, userID, preferenceID)
|
||||
user, err := module.store.GetUser(ctx, userID, preferenceID)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the user preference: %s", preferenceID))
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in fetching the user preference: %s", preferenceID)
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
preferenceValue.PreferenceValue = userPreference.PreferenceValue
|
||||
preferenceValue.PreferenceValue = user.PreferenceValue
|
||||
}
|
||||
|
||||
return &preferencetypes.GettablePreference{
|
||||
@@ -174,15 +174,15 @@ func (usecase *usecase) GetUserPreference(ctx context.Context, preferenceID stri
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) UpdateUserPreference(ctx context.Context, preferenceID string, preferenceValue interface{}, userID string) error {
|
||||
preference, seen := usecase.defaultMap[preferenceID]
|
||||
func (module *module) UpdateUser(ctx context.Context, preferenceID string, preferenceValue interface{}, userID string) error {
|
||||
preference, seen := module.defaultMap[preferenceID]
|
||||
if !seen {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot find preference with id: %s", preferenceID)
|
||||
}
|
||||
|
||||
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||
if !isPreferenceEnabledAtUserScope {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at user scope: %s", preferenceID))
|
||||
isEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||
if !isEnabledAtUserScope {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "preference is not enabled at user scope: %s", preferenceID)
|
||||
}
|
||||
|
||||
err := preference.IsValidValue(preferenceValue)
|
||||
@@ -190,26 +190,26 @@ func (usecase *usecase) UpdateUserPreference(ctx context.Context, preferenceID s
|
||||
return err
|
||||
}
|
||||
|
||||
storablePreferenceValue, encodeErr := json.Marshal(preferenceValue)
|
||||
storableValue, encodeErr := json.Marshal(preferenceValue)
|
||||
if encodeErr != nil {
|
||||
return errors.Wrapf(encodeErr, errors.TypeInvalidInput, errors.CodeInvalidInput, "error in encoding the preference value")
|
||||
}
|
||||
|
||||
userPreference, dberr := usecase.store.GetUserPreference(ctx, userID, preferenceID)
|
||||
user, dberr := module.store.GetUser(ctx, userID, preferenceID)
|
||||
if dberr != nil && dberr != sql.ErrNoRows {
|
||||
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in getting the preference value")
|
||||
}
|
||||
|
||||
if dberr != nil {
|
||||
userPreference.ID = valuer.GenerateUUID()
|
||||
userPreference.PreferenceID = preferenceID
|
||||
userPreference.PreferenceValue = string(storablePreferenceValue)
|
||||
userPreference.UserID = userID
|
||||
user.ID = valuer.GenerateUUID()
|
||||
user.PreferenceID = preferenceID
|
||||
user.PreferenceValue = string(storableValue)
|
||||
user.UserID = userID
|
||||
} else {
|
||||
userPreference.PreferenceValue = string(storablePreferenceValue)
|
||||
user.PreferenceValue = string(storableValue)
|
||||
}
|
||||
|
||||
dberr = usecase.store.UpsertUserPreference(ctx, userPreference)
|
||||
dberr = module.store.UpsertUser(ctx, user)
|
||||
if dberr != nil {
|
||||
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in setting the preference value")
|
||||
}
|
||||
@@ -217,30 +217,30 @@ func (usecase *usecase) UpdateUserPreference(ctx context.Context, preferenceID s
|
||||
return nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) GetAllUserPreferences(ctx context.Context, orgID string, userID string) ([]*preferencetypes.PreferenceWithValue, error) {
|
||||
allUserPreferences := []*preferencetypes.PreferenceWithValue{}
|
||||
func (module *module) GetAllUser(ctx context.Context, orgID string, userID string) ([]*preferencetypes.PreferenceWithValue, error) {
|
||||
allUsers := []*preferencetypes.PreferenceWithValue{}
|
||||
|
||||
orgPreferences, err := usecase.store.GetAllOrgPreferences(ctx, orgID)
|
||||
orgs, err := module.store.GetAllOrg(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all org preference values")
|
||||
}
|
||||
|
||||
preferenceOrgValueMap := map[string]interface{}{}
|
||||
for _, preferenceValue := range orgPreferences {
|
||||
for _, preferenceValue := range orgs {
|
||||
preferenceOrgValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
userPreferences, err := usecase.store.GetAllUserPreferences(ctx, userID)
|
||||
users, err := module.store.GetAllUser(ctx, userID)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all user preference values")
|
||||
}
|
||||
|
||||
preferenceUserValueMap := map[string]interface{}{}
|
||||
for _, preferenceValue := range userPreferences {
|
||||
for _, preferenceValue := range users {
|
||||
preferenceUserValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
for _, preference := range usecase.defaultMap {
|
||||
for _, preference := range module.defaultMap {
|
||||
isEnabledForUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||
|
||||
if isEnabledForUserScope {
|
||||
@@ -271,8 +271,8 @@ func (usecase *usecase) GetAllUserPreferences(ctx context.Context, orgID string,
|
||||
}
|
||||
|
||||
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
||||
allUserPreferences = append(allUserPreferences, preferenceWithValue)
|
||||
allUsers = append(allUsers, preferenceWithValue)
|
||||
}
|
||||
}
|
||||
return allUserPreferences, nil
|
||||
return allUsers, nil
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package core
|
||||
package implpreference
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -11,11 +11,11 @@ type store struct {
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func NewStore(db sqlstore.SQLStore) preferencetypes.PreferenceStore {
|
||||
func NewStore(db sqlstore.SQLStore) preferencetypes.Store {
|
||||
return &store{store: db}
|
||||
}
|
||||
|
||||
func (store *store) GetOrgPreference(ctx context.Context, orgID string, preferenceID string) (*preferencetypes.StorableOrgPreference, error) {
|
||||
func (store *store) GetOrg(ctx context.Context, orgID string, preferenceID string) (*preferencetypes.StorableOrgPreference, error) {
|
||||
orgPreference := new(preferencetypes.StorableOrgPreference)
|
||||
err := store.
|
||||
store.
|
||||
@@ -33,7 +33,7 @@ func (store *store) GetOrgPreference(ctx context.Context, orgID string, preferen
|
||||
return orgPreference, nil
|
||||
}
|
||||
|
||||
func (store *store) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*preferencetypes.StorableOrgPreference, error) {
|
||||
func (store *store) GetAllOrg(ctx context.Context, orgID string) ([]*preferencetypes.StorableOrgPreference, error) {
|
||||
orgPreferences := make([]*preferencetypes.StorableOrgPreference, 0)
|
||||
err := store.
|
||||
store.
|
||||
@@ -50,7 +50,7 @@ func (store *store) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*
|
||||
return orgPreferences, nil
|
||||
}
|
||||
|
||||
func (store *store) UpsertOrgPreference(ctx context.Context, orgPreference *preferencetypes.StorableOrgPreference) error {
|
||||
func (store *store) UpsertOrg(ctx context.Context, orgPreference *preferencetypes.StorableOrgPreference) error {
|
||||
_, err := store.
|
||||
store.
|
||||
BunDB().
|
||||
@@ -65,7 +65,7 @@ func (store *store) UpsertOrgPreference(ctx context.Context, orgPreference *pref
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) GetUserPreference(ctx context.Context, userID string, preferenceID string) (*preferencetypes.StorableUserPreference, error) {
|
||||
func (store *store) GetUser(ctx context.Context, userID string, preferenceID string) (*preferencetypes.StorableUserPreference, error) {
|
||||
userPreference := new(preferencetypes.StorableUserPreference)
|
||||
err := store.
|
||||
store.
|
||||
@@ -83,7 +83,7 @@ func (store *store) GetUserPreference(ctx context.Context, userID string, prefer
|
||||
return userPreference, nil
|
||||
}
|
||||
|
||||
func (store *store) GetAllUserPreferences(ctx context.Context, userID string) ([]*preferencetypes.StorableUserPreference, error) {
|
||||
func (store *store) GetAllUser(ctx context.Context, userID string) ([]*preferencetypes.StorableUserPreference, error) {
|
||||
userPreferences := make([]*preferencetypes.StorableUserPreference, 0)
|
||||
err := store.
|
||||
store.
|
||||
@@ -100,7 +100,7 @@ func (store *store) GetAllUserPreferences(ctx context.Context, userID string) ([
|
||||
return userPreferences, nil
|
||||
}
|
||||
|
||||
func (store *store) UpsertUserPreference(ctx context.Context, userPreference *preferencetypes.StorableUserPreference) error {
|
||||
func (store *store) UpsertUser(ctx context.Context, userPreference *preferencetypes.StorableUserPreference) error {
|
||||
_, err := store.
|
||||
store.
|
||||
BunDB().
|
||||
48
pkg/modules/preference/preference.go
Normal file
48
pkg/modules/preference/preference.go
Normal file
@@ -0,0 +1,48 @@
|
||||
package preference
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
)
|
||||
|
||||
type Module interface {
|
||||
// Returns the preference for the given organization
|
||||
GetOrg(ctx context.Context, preferenceId string, orgId string) (*preferencetypes.GettablePreference, error)
|
||||
|
||||
// Returns the preference for the given user
|
||||
GetUser(ctx context.Context, preferenceId string, orgId string, userId string) (*preferencetypes.GettablePreference, error)
|
||||
|
||||
// Returns all preferences for the given organization
|
||||
GetAllOrg(ctx context.Context, orgId string) ([]*preferencetypes.PreferenceWithValue, error)
|
||||
|
||||
// Returns all preferences for the given user
|
||||
GetAllUser(ctx context.Context, orgId string, userId string) ([]*preferencetypes.PreferenceWithValue, error)
|
||||
|
||||
// Updates the preference for the given organization
|
||||
UpdateOrg(ctx context.Context, preferenceId string, preferenceValue interface{}, orgId string) error
|
||||
|
||||
// Updates the preference for the given user
|
||||
UpdateUser(ctx context.Context, preferenceId string, preferenceValue interface{}, userId string) error
|
||||
}
|
||||
|
||||
type Handler interface {
|
||||
// Returns the preference for the given organization
|
||||
GetOrg(http.ResponseWriter, *http.Request)
|
||||
|
||||
// Updates the preference for the given organization
|
||||
UpdateOrg(http.ResponseWriter, *http.Request)
|
||||
|
||||
// Returns all preferences for the given organization
|
||||
GetAllOrg(http.ResponseWriter, *http.Request)
|
||||
|
||||
// Returns the preference for the given user
|
||||
GetUser(http.ResponseWriter, *http.Request)
|
||||
|
||||
// Updates the preference for the given user
|
||||
UpdateUser(http.ResponseWriter, *http.Request)
|
||||
|
||||
// Returns all preferences for the given user
|
||||
GetAllUser(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
package preference
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
)
|
||||
|
||||
type Usecase interface {
|
||||
GetOrgPreference(ctx context.Context, preferenceId string, orgId string) (*preferencetypes.GettablePreference, error)
|
||||
UpdateOrgPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, orgId string) error
|
||||
GetAllOrgPreferences(ctx context.Context, orgId string) ([]*preferencetypes.PreferenceWithValue, error)
|
||||
GetUserPreference(ctx context.Context, preferenceId string, orgId string, userId string) (*preferencetypes.GettablePreference, error)
|
||||
UpdateUserPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, userId string) error
|
||||
GetAllUserPreferences(ctx context.Context, orgId string, userId string) ([]*preferencetypes.PreferenceWithValue, error)
|
||||
}
|
||||
@@ -17,8 +17,6 @@ const (
|
||||
|
||||
const (
|
||||
defaultTraceDB string = "signoz_traces"
|
||||
defaultOperationsTable string = "distributed_signoz_operations"
|
||||
defaultIndexTable string = "distributed_signoz_index_v2"
|
||||
defaultLocalIndexTable string = "signoz_index_v2"
|
||||
defaultErrorTable string = "distributed_signoz_error_index_v2"
|
||||
defaultDurationTable string = "distributed_durationSort"
|
||||
@@ -59,19 +57,10 @@ type namespaceConfig struct {
|
||||
Enabled bool
|
||||
Datasource string
|
||||
TraceDB string
|
||||
OperationsTable string
|
||||
IndexTable string
|
||||
LocalIndexTable string
|
||||
DurationTable string
|
||||
UsageExplorerTable string
|
||||
SpansTable string
|
||||
ErrorTable string
|
||||
LocalIndexTable string
|
||||
SpanAttributeTableV2 string
|
||||
SpanAttributeKeysTable string
|
||||
DependencyGraphTable string
|
||||
TopLevelOperationsTable string
|
||||
LogsDB string
|
||||
LogsTable string
|
||||
LogsLocalTable string
|
||||
LogsAttributeKeysTable string
|
||||
LogsResourceKeysTable string
|
||||
@@ -82,6 +71,7 @@ type namespaceConfig struct {
|
||||
Encoding Encoding
|
||||
Connector Connector
|
||||
|
||||
LogsDB string
|
||||
LogsLocalTableV2 string
|
||||
LogsTableV2 string
|
||||
LogsResourceLocalTableV2 string
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -23,8 +23,6 @@ import (
|
||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
@@ -51,7 +49,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
@@ -91,7 +88,6 @@ func NewRouter() *mux.Router {
|
||||
// APIHandler implements the query service public API
|
||||
type APIHandler struct {
|
||||
reader interfaces.Reader
|
||||
skipConfig *model.SkipConfig
|
||||
appDao dao.ModelDao
|
||||
ruleManager *rules.Manager
|
||||
featureFlags interfaces.FeatureLookup
|
||||
@@ -121,9 +117,6 @@ type APIHandler struct {
|
||||
// Websocket connection upgrader
|
||||
Upgrader *websocket.Upgrader
|
||||
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
|
||||
hostsRepo *inframetrics.HostsRepo
|
||||
processesRepo *inframetrics.ProcessesRepo
|
||||
podsRepo *inframetrics.PodsRepo
|
||||
@@ -147,11 +140,6 @@ type APIHandler struct {
|
||||
FieldsAPI *fields.API
|
||||
|
||||
Signoz *signoz.SigNoz
|
||||
|
||||
Preference preference.API
|
||||
|
||||
OrganizationAPI organization.API
|
||||
OrganizationModule organization.Module
|
||||
}
|
||||
|
||||
type APIHandlerOpts struct {
|
||||
@@ -159,8 +147,6 @@ type APIHandlerOpts struct {
|
||||
// business data reader e.g. clickhouse
|
||||
Reader interfaces.Reader
|
||||
|
||||
SkipConfig *model.SkipConfig
|
||||
|
||||
PreferSpanMetrics bool
|
||||
|
||||
// dao layer to perform crud on app objects like dashboard, alerts etc
|
||||
@@ -187,11 +173,6 @@ type APIHandlerOpts struct {
|
||||
// Querier Influx Interval
|
||||
FluxInterval time.Duration
|
||||
|
||||
// Use Logs New schema
|
||||
UseLogsNewSchema bool
|
||||
|
||||
UseTraceNewSchema bool
|
||||
|
||||
JWT *authtypes.JWT
|
||||
|
||||
AlertmanagerAPI *alertmanager.API
|
||||
@@ -199,30 +180,22 @@ type APIHandlerOpts struct {
|
||||
FieldsAPI *fields.API
|
||||
|
||||
Signoz *signoz.SigNoz
|
||||
|
||||
Preference preference.API
|
||||
OrganizationAPI organization.API
|
||||
OrganizationModule organization.Module
|
||||
}
|
||||
|
||||
// NewAPIHandler returns an APIHandler
|
||||
func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
querierOpts := querier.QuerierOptions{
|
||||
Reader: opts.Reader,
|
||||
Cache: opts.Cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FluxInterval: opts.FluxInterval,
|
||||
UseLogsNewSchema: opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||
Reader: opts.Reader,
|
||||
Cache: opts.Cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FluxInterval: opts.FluxInterval,
|
||||
}
|
||||
|
||||
querierOptsV2 := querierV2.QuerierOptions{
|
||||
Reader: opts.Reader,
|
||||
Cache: opts.Cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FluxInterval: opts.FluxInterval,
|
||||
UseLogsNewSchema: opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||
Reader: opts.Reader,
|
||||
Cache: opts.Cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FluxInterval: opts.FluxInterval,
|
||||
}
|
||||
|
||||
querier := querier.NewQuerier(querierOpts)
|
||||
@@ -244,7 +217,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
aH := &APIHandler{
|
||||
reader: opts.Reader,
|
||||
appDao: opts.AppDao,
|
||||
skipConfig: opts.SkipConfig,
|
||||
preferSpanMetrics: opts.PreferSpanMetrics,
|
||||
temporalityMap: make(map[string]map[v3.Temporality]bool),
|
||||
ruleManager: opts.RuleManager,
|
||||
@@ -254,8 +226,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
LogsParsingPipelineController: opts.LogsParsingPipelineController,
|
||||
querier: querier,
|
||||
querierV2: querierv2,
|
||||
UseLogsNewSchema: opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||
hostsRepo: hostsRepo,
|
||||
processesRepo: processesRepo,
|
||||
podsRepo: podsRepo,
|
||||
@@ -271,21 +241,11 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
SummaryService: summaryService,
|
||||
AlertmanagerAPI: opts.AlertmanagerAPI,
|
||||
Signoz: opts.Signoz,
|
||||
Preference: opts.Preference,
|
||||
FieldsAPI: opts.FieldsAPI,
|
||||
OrganizationAPI: opts.OrganizationAPI,
|
||||
OrganizationModule: opts.OrganizationModule,
|
||||
}
|
||||
|
||||
logsQueryBuilder := logsv3.PrepareLogsQuery
|
||||
if opts.UseLogsNewSchema {
|
||||
logsQueryBuilder = logsv4.PrepareLogsQuery
|
||||
}
|
||||
|
||||
tracesQueryBuilder := tracesV3.PrepareTracesQuery
|
||||
if opts.UseTraceNewSchema {
|
||||
tracesQueryBuilder = tracesV4.PrepareTracesQuery
|
||||
}
|
||||
logsQueryBuilder := logsv4.PrepareLogsQuery
|
||||
tracesQueryBuilder := tracesV4.PrepareTracesQuery
|
||||
|
||||
builderOpts := queryBuilder.QueryBuilderOptions{
|
||||
BuildMetricQuery: metricsv3.PrepareMetricQuery,
|
||||
@@ -568,12 +528,8 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
|
||||
// router.HandleFunc("/api/v1/get_percentiles", aH.getApplicationPercentiles).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/services", am.ViewAccess(aH.getServices)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/services/list", am.ViewAccess(aH.getServicesList)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/service/top_operations", am.ViewAccess(aH.getTopOperations)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/service/top_level_operations", am.ViewAccess(aH.getServicesTopLevelOps)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(aH.SearchTraces)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/usage", am.ViewAccess(aH.getUsage)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/dependency_graph", am.ViewAccess(aH.dependencyGraph)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/settings/ttl", am.AdminAccess(aH.setTTL)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/settings/ttl", am.ViewAccess(aH.getTTL)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/settings/apdex", am.AdminAccess(aH.setApdexSettings)).Methods(http.MethodPost)
|
||||
@@ -596,23 +552,13 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
|
||||
router.HandleFunc("/api/v1/disks", am.ViewAccess(aH.getDisks)).Methods(http.MethodGet)
|
||||
|
||||
// === Preference APIs ===
|
||||
router.HandleFunc("/api/v1/user/preferences", am.ViewAccess(aH.Signoz.Handlers.Preference.GetAllUser)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/user/preferences/{preferenceId}", am.ViewAccess(aH.Signoz.Handlers.Preference.GetUser)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/user/preferences/{preferenceId}", am.ViewAccess(aH.Signoz.Handlers.Preference.UpdateUser)).Methods(http.MethodPut)
|
||||
router.HandleFunc("/api/v1/org/preferences", am.AdminAccess(aH.Signoz.Handlers.Preference.GetAllOrg)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/org/preferences/{preferenceId}", am.AdminAccess(aH.Signoz.Handlers.Preference.GetOrg)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/org/preferences/{preferenceId}", am.AdminAccess(aH.Signoz.Handlers.Preference.UpdateOrg)).Methods(http.MethodPut)
|
||||
|
||||
// user actions
|
||||
router.HandleFunc("/api/v1/user/preferences", am.ViewAccess(aH.getAllUserPreferences)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/user/preferences/{preferenceId}", am.ViewAccess(aH.getUserPreference)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/user/preferences/{preferenceId}", am.ViewAccess(aH.updateUserPreference)).Methods(http.MethodPut)
|
||||
|
||||
// org actions
|
||||
router.HandleFunc("/api/v1/org/preferences", am.AdminAccess(aH.getAllOrgPreferences)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/org/preferences/{preferenceId}", am.AdminAccess(aH.getOrgPreference)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/org/preferences/{preferenceId}", am.AdminAccess(aH.updateOrgPreference)).Methods(http.MethodPut)
|
||||
|
||||
// === Authentication APIs ===
|
||||
router.HandleFunc("/api/v1/invite", am.AdminAccess(aH.inviteUser)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/invite/bulk", am.AdminAccess(aH.inviteUsers)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(aH.getInvite)).Methods(http.MethodGet)
|
||||
@@ -633,9 +579,8 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
|
||||
router.HandleFunc("/api/v1/orgUsers/{id}", am.AdminAccess(aH.getOrgUsers)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v2/orgs", am.AdminAccess(aH.getOrgs)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/orgs/me", am.AdminAccess(aH.getOrg)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/orgs/me", am.AdminAccess(aH.updateOrg)).Methods(http.MethodPut)
|
||||
router.HandleFunc("/api/v2/orgs/me", am.AdminAccess(aH.Signoz.Handlers.Organization.Get)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/orgs/me", am.AdminAccess(aH.Signoz.Handlers.Organization.Update)).Methods(http.MethodPut)
|
||||
|
||||
router.HandleFunc("/api/v1/getResetPasswordToken/{id}", am.AdminAccess(aH.getResetPasswordToken)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/resetPassword", am.OpenAccess(aH.resetPassword)).Methods(http.MethodPost)
|
||||
@@ -1653,122 +1598,13 @@ func (aH *APIHandler) getTopOperations(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getUsage(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
query, err := parseGetUsageRequest(r)
|
||||
if aH.HandleError(w, err, http.StatusBadRequest) {
|
||||
return
|
||||
}
|
||||
|
||||
result, err := aH.reader.GetUsage(r.Context(), query)
|
||||
if aH.HandleError(w, err, http.StatusBadRequest) {
|
||||
return
|
||||
}
|
||||
|
||||
aH.WriteJSON(w, r, result)
|
||||
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
var start, end time.Time
|
||||
var services []string
|
||||
|
||||
type topLevelOpsParams struct {
|
||||
Service string `json:"service"`
|
||||
Start string `json:"start"`
|
||||
End string `json:"end"`
|
||||
}
|
||||
|
||||
var params topLevelOpsParams
|
||||
err := json.NewDecoder(r.Body).Decode(¶ms)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in getting req body for get top operations API", zap.Error(err))
|
||||
}
|
||||
|
||||
if params.Service != "" {
|
||||
services = []string{params.Service}
|
||||
}
|
||||
|
||||
startEpoch := params.Start
|
||||
if startEpoch != "" {
|
||||
startEpochInt, err := strconv.ParseInt(startEpoch, 10, 64)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading start time")
|
||||
return
|
||||
}
|
||||
start = time.Unix(0, startEpochInt)
|
||||
}
|
||||
endEpoch := params.End
|
||||
if endEpoch != "" {
|
||||
endEpochInt, err := strconv.ParseInt(endEpoch, 10, 64)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading end time")
|
||||
return
|
||||
}
|
||||
end = time.Unix(0, endEpochInt)
|
||||
}
|
||||
|
||||
result, apiErr := aH.reader.GetTopLevelOperations(r.Context(), aH.skipConfig, start, end, services)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
aH.WriteJSON(w, r, result)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
query, err := parseGetServicesRequest(r)
|
||||
if aH.HandleError(w, err, http.StatusBadRequest) {
|
||||
return
|
||||
}
|
||||
|
||||
result, apiErr := aH.reader.GetServices(r.Context(), query, aH.skipConfig)
|
||||
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
|
||||
return
|
||||
}
|
||||
|
||||
data := map[string]interface{}{
|
||||
"number": len(*result),
|
||||
}
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data, claims.Email, true, false)
|
||||
}
|
||||
|
||||
if (data["number"] != 0) && (data["number"] != telemetry.DEFAULT_NUMBER_OF_SERVICES) {
|
||||
telemetry.GetInstance().AddActiveTracesUser()
|
||||
}
|
||||
|
||||
aH.WriteJSON(w, r, result)
|
||||
}

func (aH *APIHandler) dependencyGraph(w http.ResponseWriter, r *http.Request) {
query, err := parseGetServicesRequest(r)
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}

result, err := aH.reader.GetDependencyGraph(r.Context(), query)
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}

aH.WriteJSON(w, r, result)
}

func (aH *APIHandler) getServicesList(w http.ResponseWriter, r *http.Request) {
result, err := aH.reader.GetServicesList(r.Context())
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}

aH.WriteJSON(w, r, result)
}
|
||||
|
||||
func (aH *APIHandler) SearchTraces(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -2064,7 +1900,7 @@ func (aH *APIHandler) inviteUsers(w http.ResponseWriter, r *http.Request) {
|
||||
func (aH *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
|
||||
token := mux.Vars(r)["token"]
|
||||
|
||||
resp, err := auth.GetInvite(context.Background(), token, aH.OrganizationModule)
|
||||
resp, err := auth.GetInvite(context.Background(), token, aH.Signoz.Modules.Organization)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorNotFound}, nil)
|
||||
return
|
||||
@@ -2105,7 +1941,7 @@ func (aH *APIHandler) listPendingInvites(w http.ResponseWriter, r *http.Request)
|
||||
if err != nil {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "invalid org_id in the invite"))
|
||||
}
|
||||
org, err := aH.OrganizationModule.Get(ctx, orgID)
|
||||
org, err := aH.Signoz.Modules.Organization.Get(ctx, orgID)
|
||||
if err != nil {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, err.Error()))
|
||||
}
|
||||
@@ -2132,7 +1968,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
_, apiErr := auth.Register(context.Background(), req, aH.Signoz.Alertmanager, aH.OrganizationModule)
|
||||
_, apiErr := auth.Register(context.Background(), req, aH.Signoz.Alertmanager, aH.Signoz.Modules.Organization)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
@@ -2391,18 +2227,6 @@ func (aH *APIHandler) editRole(w http.ResponseWriter, r *http.Request) {
|
||||
aH.WriteJSON(w, r, map[string]string{"data": "user group updated successfully"})
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getOrgs(w http.ResponseWriter, r *http.Request) {
|
||||
aH.OrganizationAPI.GetAll(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getOrg(w http.ResponseWriter, r *http.Request) {
|
||||
aH.OrganizationAPI.Get(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) updateOrg(w http.ResponseWriter, r *http.Request) {
|
||||
aH.OrganizationAPI.Update(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getOrgUsers(w http.ResponseWriter, r *http.Request) {
|
||||
id := mux.Vars(r)["id"]
|
||||
users, apiErr := dao.DB().GetUsersByOrg(context.Background(), id)
|
||||
@@ -3437,44 +3261,6 @@ func (aH *APIHandler) getProducerConsumerEval(
|
||||
aH.Respond(w, resp)
|
||||
}
|
||||
|
||||
// Preferences
|
||||
|
||||
func (aH *APIHandler) getUserPreference(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
aH.Preference.GetUserPreference(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) updateUserPreference(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
aH.Preference.UpdateUserPreference(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getAllUserPreferences(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
aH.Preference.GetAllUserPreferences(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getOrgPreference(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
aH.Preference.GetOrgPreference(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) updateOrgPreference(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
aH.Preference.UpdateOrgPreference(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getAllOrgPreferences(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
aH.Preference.GetAllOrgPreferences(w, r)
|
||||
}
|
||||
|
||||
// RegisterIntegrationRoutes Registers all Integrations
|
||||
func (aH *APIHandler) RegisterIntegrationRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
subRouter := router.PathPrefix("/api/v1/integrations").Subrouter()
|
||||
@@ -4318,11 +4104,8 @@ func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
|
||||
// logs
|
||||
func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
subRouter := router.PathPrefix("/api/v1/logs").Subrouter()
|
||||
subRouter.HandleFunc("", am.ViewAccess(aH.getLogs)).Methods(http.MethodGet)
|
||||
subRouter.HandleFunc("/tail", am.ViewAccess(aH.tailLogs)).Methods(http.MethodGet)
|
||||
subRouter.HandleFunc("/fields", am.ViewAccess(aH.logFields)).Methods(http.MethodGet)
|
||||
subRouter.HandleFunc("/fields", am.EditAccess(aH.logFieldUpdate)).Methods(http.MethodPost)
|
||||
subRouter.HandleFunc("/aggregate", am.ViewAccess(aH.logAggregate)).Methods(http.MethodGet)
|
||||
|
||||
// log pipelines
|
||||
subRouter.HandleFunc("/pipelines/preview", am.ViewAccess(aH.PreviewLogsPipelinesHandler)).Methods(http.MethodPost)
|
||||
@@ -4362,81 +4145,6 @@ func (aH *APIHandler) logFieldUpdate(w http.ResponseWriter, r *http.Request) {
|
||||
aH.WriteJSON(w, r, field)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getLogs(w http.ResponseWriter, r *http.Request) {
|
||||
params, err := logs.ParseLogFilterParams(r)
|
||||
if err != nil {
|
||||
apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErr, "Incorrect params")
|
||||
return
|
||||
}
|
||||
res, apiErr := aH.reader.GetLogs(r.Context(), params)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, "Failed to fetch logs from the DB")
|
||||
return
|
||||
}
|
||||
aH.WriteJSON(w, r, map[string]interface{}{"results": res})
|
||||
}
|
||||
|
||||
func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) {
|
||||
params, err := logs.ParseLogFilterParams(r)
|
||||
if err != nil {
|
||||
apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErr, "Incorrect params")
|
||||
return
|
||||
}
|
||||
|
||||
// create the client
|
||||
client := &model.LogsTailClient{Name: r.RemoteAddr, Logs: make(chan *model.SignozLog, 1000), Done: make(chan *bool), Error: make(chan error), Filter: *params}
|
||||
go aH.reader.TailLogs(r.Context(), client)
|
||||
|
||||
w.Header().Set("Connection", "keep-alive")
|
||||
w.Header().Set("Content-Type", "text/event-stream")
|
||||
w.Header().Set("Cache-Control", "no-cache")
|
||||
w.Header().Set("Access-Control-Allow-Origin", "*")
|
||||
w.WriteHeader(200)
|
||||
|
||||
flusher, ok := w.(http.Flusher)
|
||||
if !ok {
|
||||
err := model.ApiError{Typ: model.ErrorStreamingNotSupported, Err: nil}
|
||||
RespondError(w, &err, "streaming is not supported")
|
||||
return
|
||||
}
|
||||
// flush the headers
|
||||
flusher.Flush()
|
||||
|
||||
for {
select {
case log := <-client.Logs:
var buf bytes.Buffer
enc := json.NewEncoder(&buf)
enc.Encode(log)
fmt.Fprintf(w, "data: %v\n\n", buf.String())
flusher.Flush()
case <-client.Done:
zap.L().Debug("done!")
return
case err := <-client.Error:
zap.L().Error("error occurred", zap.Error(err))
return
}
}
}
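
(Not part of the diff.) The tail handler above streams each matching log as a server-sent event in `data: <json>\n\n` frames, flushing after every record. A minimal client sketch for that framing, assuming a local query-service and a bearer token accepted by the ViewAccess middleware (both assumptions), against the `/api/v1/logs/tail` route registered earlier in this change:

```go
package main

import (
	"bufio"
	"fmt"
	"log"
	"net/http"
	"os"
	"strings"
)

func main() {
	// localhost:8080 and SIGNOZ_TOKEN are placeholders, not part of the diff.
	req, err := http.NewRequest(http.MethodGet, "http://localhost:8080/api/v1/logs/tail", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Authorization", "Bearer "+os.Getenv("SIGNOZ_TOKEN"))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// The handler emits one "data: <json>" record per log line; print each payload as it arrives.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		if line := scanner.Text(); strings.HasPrefix(line, "data: ") {
			fmt.Println(strings.TrimPrefix(line, "data: "))
		}
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}
```
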
func (aH *APIHandler) logAggregate(w http.ResponseWriter, r *http.Request) {
|
||||
params, err := logs.ParseLogAggregateParams(r)
|
||||
if err != nil {
|
||||
apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErr, "Incorrect params")
|
||||
return
|
||||
}
|
||||
res, apiErr := aH.reader.AggregateLogs(r.Context(), params)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, "Failed to fetch logs aggregate from the DB")
|
||||
return
|
||||
}
|
||||
aH.WriteJSON(w, r, res)
|
||||
}
|
||||
|
||||
const logPipelines = "log_pipelines"
|
||||
|
||||
func parseAgentConfigVersion(r *http.Request) (int, *model.ApiError) {
|
||||
@@ -4700,7 +4408,6 @@ func (aH *APIHandler) updateSavedView(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
func (aH *APIHandler) deleteSavedView(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
viewID := mux.Vars(r)["viewId"]
|
||||
viewUUID, err := valuer.NewUUID(viewID)
|
||||
if err != nil {
|
||||
@@ -4919,30 +4626,10 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
if aH.UseTraceNewSchema {
|
||||
tracesV4.Enrich(queryRangeParams, spanKeys)
|
||||
} else {
|
||||
tracesV3.Enrich(queryRangeParams, spanKeys)
|
||||
}
|
||||
tracesV4.Enrich(queryRangeParams, spanKeys)
|
||||
|
||||
}
|
||||
|
||||
// WARN: Only works for AND operator in traces query
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
// check if traceID is used as a filter (with equal/similar operator) in the traces query; if yes, add a timestamp filter to the queryRange params
|
||||
isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
|
||||
if isUsed && len(traceIDs) > 0 {
|
||||
zap.L().Debug("traceID used as filter in traces query")
|
||||
// query signoz_spans table with traceID to get min and max timestamp
|
||||
min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
|
||||
if err == nil {
|
||||
// add timestamp filter to queryRange params
|
||||
tracesV3.AddTimestampFilters(min, max, queryRangeParams)
|
||||
zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Hook up query progress tracking if requested
|
||||
queryIdHeader := r.Header.Get("X-SIGNOZ-QUERY-ID")
|
||||
if len(queryIdHeader) > 0 {
|
||||
@@ -5282,88 +4969,7 @@ func (aH *APIHandler) liveTailLogsV2(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) {
|
||||
if aH.UseLogsNewSchema {
|
||||
aH.liveTailLogsV2(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
// get the param from url and add it to body
|
||||
stringReader := strings.NewReader(r.URL.Query().Get("q"))
|
||||
r.Body = io.NopCloser(stringReader)
|
||||
|
||||
queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)
|
||||
if apiErrorObj != nil {
|
||||
zap.L().Error(apiErrorObj.Err.Error())
|
||||
RespondError(w, apiErrorObj, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var err error
|
||||
var queryString string
|
||||
switch queryRangeParams.CompositeQuery.QueryType {
|
||||
case v3.QueryTypeBuilder:
|
||||
// check if any enrichment is required for logs; if yes, enrich them
|
||||
if logsv3.EnrichmentRequired(queryRangeParams) {
|
||||
logsFields, err := aH.reader.GetLogFields(r.Context())
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
RespondError(w, apiErrObj, nil)
|
||||
return
|
||||
}
|
||||
// get the fields if any logs query is present
|
||||
fields := model.GetLogFieldsV3(r.Context(), queryRangeParams, logsFields)
|
||||
logsv3.Enrich(queryRangeParams, fields)
|
||||
}
|
||||
|
||||
queryString, err = aH.queryBuilder.PrepareLiveTailQuery(queryRangeParams)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
default:
|
||||
err = fmt.Errorf("invalid query type")
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
// create the client
|
||||
client := &model.LogsLiveTailClient{Name: r.RemoteAddr, Logs: make(chan *model.SignozLog, 1000), Done: make(chan *bool), Error: make(chan error)}
|
||||
go aH.reader.LiveTailLogsV3(r.Context(), queryString, uint64(queryRangeParams.Start), "", client)
|
||||
|
||||
w.Header().Set("Connection", "keep-alive")
|
||||
w.Header().Set("Content-Type", "text/event-stream")
|
||||
w.Header().Set("Cache-Control", "no-cache")
|
||||
w.Header().Set("Access-Control-Allow-Origin", "*")
|
||||
w.WriteHeader(200)
|
||||
|
||||
flusher, ok := w.(http.Flusher)
|
||||
if !ok {
|
||||
err := model.ApiError{Typ: model.ErrorStreamingNotSupported, Err: nil}
|
||||
RespondError(w, &err, "streaming is not supported")
|
||||
return
|
||||
}
|
||||
// flush the headers
|
||||
flusher.Flush()
|
||||
for {
|
||||
select {
|
||||
case log := <-client.Logs:
|
||||
var buf bytes.Buffer
|
||||
enc := json.NewEncoder(&buf)
|
||||
enc.Encode(log)
|
||||
fmt.Fprintf(w, "data: %v\n\n", buf.String())
|
||||
flusher.Flush()
|
||||
case <-client.Done:
|
||||
zap.L().Debug("done!")
|
||||
return
|
||||
case err := <-client.Error:
|
||||
zap.L().Error("error occurred", zap.Error(err))
|
||||
fmt.Fprintf(w, "event: error\ndata: %v\n\n", err.Error())
|
||||
flusher.Flush()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
aH.liveTailLogsV2(w, r)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getMetricMetadata(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -5404,27 +5010,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
if aH.UseTraceNewSchema {
|
||||
tracesV4.Enrich(queryRangeParams, spanKeys)
|
||||
} else {
|
||||
tracesV3.Enrich(queryRangeParams, spanKeys)
|
||||
}
|
||||
}
|
||||
|
||||
// WARN: Only works for AND operator in traces query
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
// check if traceID is used as a filter (with equal/similar operator) in the traces query; if yes, add a timestamp filter to the queryRange params
|
||||
isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
|
||||
if isUsed && len(traceIDs) > 0 {
|
||||
zap.L().Debug("traceID used as filter in traces query")
|
||||
// query signoz_spans table with traceID to get min and max timestamp
|
||||
min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
|
||||
if err == nil {
|
||||
// add timestamp filter to queryRange params
|
||||
tracesV3.AddTimestampFilters(min, max, queryRangeParams)
|
||||
zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
|
||||
}
|
||||
}
|
||||
tracesV4.Enrich(queryRangeParams, spanKeys)
|
||||
}
|
||||
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, queryRangeParams)
|
||||
|
||||
@@ -171,42 +171,6 @@ func parseQueryRangeRequest(r *http.Request) (*model.QueryRangeParams, *model.Ap
|
||||
return &queryRangeParams, nil
|
||||
}
|
||||
|
||||
func parseGetUsageRequest(r *http.Request) (*model.GetUsageParams, error) {
|
||||
startTime, err := parseTime("start", r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
endTime, err := parseTime("end", r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
stepStr := r.URL.Query().Get("step")
|
||||
if len(stepStr) == 0 {
|
||||
return nil, errors.New("step param missing in query")
|
||||
}
|
||||
stepInt, err := strconv.Atoi(stepStr)
|
||||
if err != nil {
|
||||
return nil, errors.New("step param is not in correct format")
|
||||
}
|
||||
|
||||
serviceName := r.URL.Query().Get("service")
|
||||
stepHour := stepInt / 3600
|
||||
|
||||
getUsageParams := model.GetUsageParams{
|
||||
StartTime: startTime.Format(time.RFC3339Nano),
|
||||
EndTime: endTime.Format(time.RFC3339Nano),
|
||||
Start: startTime,
|
||||
End: endTime,
|
||||
ServiceName: serviceName,
|
||||
Period: fmt.Sprintf("PT%dH", stepHour),
|
||||
StepHour: stepHour,
|
||||
}
|
||||
|
||||
return &getUsageParams, nil
|
||||
|
||||
}
|
||||
|
||||
func parseGetServicesRequest(r *http.Request) (*model.GetServicesParams, error) {
|
||||
|
||||
var postData *model.GetServicesParams
|
||||
|
||||
@@ -6,10 +6,8 @@ import (
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
logsV3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
|
||||
logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
||||
metricsV3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
@@ -19,19 +17,15 @@ import (
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func prepareLogsQuery(_ context.Context,
|
||||
useLogsNewSchema bool,
|
||||
func prepareLogsQuery(
|
||||
_ context.Context,
|
||||
start,
|
||||
end int64,
|
||||
builderQuery *v3.BuilderQuery,
|
||||
params *v3.QueryRangeParamsV3,
|
||||
) (string, error) {
|
||||
query := ""
|
||||
|
||||
logsQueryBuilder := logsV3.PrepareLogsQuery
|
||||
if useLogsNewSchema {
|
||||
logsQueryBuilder = logsV4.PrepareLogsQuery
|
||||
}
|
||||
logsQueryBuilder := logsV4.PrepareLogsQuery
|
||||
|
||||
if params == nil || builderQuery == nil {
|
||||
return query, fmt.Errorf("params and builderQuery cannot be nil")
|
||||
@@ -102,7 +96,7 @@ func (q *querier) runBuilderQuery(
|
||||
var err error
|
||||
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
||||
zap.L().Info("skipping cache for logs query", zap.String("queryName", queryName), zap.Int64("start", start), zap.Int64("end", end), zap.Int64("step", builderQuery.StepInterval), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
||||
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, start, end, builderQuery, params)
|
||||
query, err = prepareLogsQuery(ctx, start, end, builderQuery, params)
|
||||
if err != nil {
|
||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||
return
|
||||
@@ -117,7 +111,7 @@ func (q *querier) runBuilderQuery(
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, miss.Start, miss.End, builderQuery, params)
|
||||
query, err = prepareLogsQuery(ctx, miss.Start, miss.End, builderQuery, params)
|
||||
if err != nil {
|
||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||
return
|
||||
@@ -169,11 +163,7 @@ func (q *querier) runBuilderQuery(
|
||||
}
|
||||
|
||||
if builderQuery.DataSource == v3.DataSourceTraces {
|
||||
|
||||
tracesQueryBuilder := tracesV3.PrepareTracesQuery
|
||||
if q.UseTraceNewSchema {
|
||||
tracesQueryBuilder = tracesV4.PrepareTracesQuery
|
||||
}
|
||||
tracesQueryBuilder := tracesV4.PrepareTracesQuery
|
||||
|
||||
var query string
|
||||
var err error
|
||||
|
||||
@@ -6,11 +6,9 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
logsV3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
|
||||
logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
||||
metricsV3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
@@ -52,9 +50,6 @@ type querier struct {
|
||||
timeRanges [][]int
|
||||
returnedSeries []*v3.Series
|
||||
returnedErr error
|
||||
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
}
|
||||
|
||||
type QuerierOptions struct {
|
||||
@@ -64,22 +59,14 @@ type QuerierOptions struct {
|
||||
FluxInterval time.Duration
|
||||
|
||||
// used for testing
|
||||
TestingMode bool
|
||||
ReturnedSeries []*v3.Series
|
||||
ReturnedErr error
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
TestingMode bool
|
||||
ReturnedSeries []*v3.Series
|
||||
ReturnedErr error
|
||||
}
|
||||
|
||||
func NewQuerier(opts QuerierOptions) interfaces.Querier {
|
||||
logsQueryBuilder := logsV3.PrepareLogsQuery
|
||||
if opts.UseLogsNewSchema {
|
||||
logsQueryBuilder = logsV4.PrepareLogsQuery
|
||||
}
|
||||
tracesQueryBuilder := tracesV3.PrepareTracesQuery
|
||||
if opts.UseTraceNewSchema {
|
||||
tracesQueryBuilder = tracesV4.PrepareTracesQuery
|
||||
}
|
||||
logsQueryBuilder := logsV4.PrepareLogsQuery
|
||||
tracesQueryBuilder := tracesV4.PrepareTracesQuery
|
||||
|
||||
qc := querycache.NewQueryCache(querycache.WithCache(opts.Cache), querycache.WithFluxInterval(opts.FluxInterval))
|
||||
|
||||
@@ -96,11 +83,9 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
|
||||
BuildMetricQuery: metricsV3.PrepareMetricQuery,
|
||||
}),
|
||||
|
||||
testingMode: opts.TestingMode,
|
||||
returnedSeries: opts.ReturnedSeries,
|
||||
returnedErr: opts.ReturnedErr,
|
||||
UseLogsNewSchema: opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||
testingMode: opts.TestingMode,
|
||||
returnedSeries: opts.ReturnedSeries,
|
||||
returnedErr: opts.ReturnedErr,
|
||||
}
|
||||
}
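
(Not part of the diff.) With `UseLogsNewSchema`/`UseTraceNewSchema` removed from `QuerierOptions`, callers construct the querier from the remaining options only, and `NewQuerier` always picks the v4 (new schema) logs and traces builders. A sketch of the simplified wiring; the import paths and the five-minute flux interval are assumptions, not taken from this change:

```go
package example

import (
	"time"

	"github.com/SigNoz/signoz/pkg/query-service/app/querier"
	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
)

// newQuerier wires the trimmed-down QuerierOptions; schema selection no longer
// happens here because NewQuerier always uses the v4 query builders.
func newQuerier(reader interfaces.Reader) interfaces.Querier {
	return querier.NewQuerier(querier.QuerierOptions{
		Reader:       reader,
		Cache:        nil,
		KeyGenerator: queryBuilder.NewKeyGenerator(),
		FluxInterval: 5 * time.Minute,
	})
}
```
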
@@ -445,11 +430,6 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
|
||||
len(params.CompositeQuery.BuilderQueries) == 1 &&
|
||||
params.CompositeQuery.PanelType != v3.PanelTypeTrace {
|
||||
for _, v := range params.CompositeQuery.BuilderQueries {
|
||||
if (v.DataSource == v3.DataSourceLogs && !q.UseLogsNewSchema) ||
|
||||
(v.DataSource == v3.DataSourceTraces && !q.UseTraceNewSchema) {
|
||||
break
|
||||
}
|
||||
|
||||
// only allow of logs queries with timestamp ordering desc
|
||||
// TODO(nitya): allow for timestamp asc
|
||||
if (v.DataSource == v3.DataSourceLogs || v.DataSource == v3.DataSourceTraces) &&
|
||||
|
||||
@@ -1370,8 +1370,6 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
|
||||
telemetryStore,
|
||||
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
||||
"",
|
||||
true,
|
||||
true,
|
||||
time.Duration(time.Second),
|
||||
nil,
|
||||
)
|
||||
|
||||
@@ -6,11 +6,9 @@ import (
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
logsV3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
|
||||
logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
||||
metricsV3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
||||
metricsV4 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v4"
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
@@ -19,17 +17,14 @@ import (
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func prepareLogsQuery(_ context.Context,
|
||||
useLogsNewSchema bool,
|
||||
func prepareLogsQuery(
|
||||
_ context.Context,
|
||||
start,
|
||||
end int64,
|
||||
builderQuery *v3.BuilderQuery,
|
||||
params *v3.QueryRangeParamsV3,
|
||||
) (string, error) {
|
||||
logsQueryBuilder := logsV3.PrepareLogsQuery
|
||||
if useLogsNewSchema {
|
||||
logsQueryBuilder = logsV4.PrepareLogsQuery
|
||||
}
|
||||
logsQueryBuilder := logsV4.PrepareLogsQuery
|
||||
query := ""
|
||||
|
||||
if params == nil || builderQuery == nil {
|
||||
@@ -102,7 +97,7 @@ func (q *querier) runBuilderQuery(
|
||||
var err error
|
||||
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
|
||||
zap.L().Info("skipping cache for logs query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
|
||||
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, start, end, builderQuery, params)
|
||||
query, err = prepareLogsQuery(ctx, start, end, builderQuery, params)
|
||||
if err != nil {
|
||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||
return
|
||||
@@ -116,7 +111,7 @@ func (q *querier) runBuilderQuery(
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, miss.Start, miss.End, builderQuery, params)
|
||||
query, err = prepareLogsQuery(ctx, miss.Start, miss.End, builderQuery, params)
|
||||
if err != nil {
|
||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||
return
|
||||
@@ -169,11 +164,7 @@ func (q *querier) runBuilderQuery(
|
||||
}
|
||||
|
||||
if builderQuery.DataSource == v3.DataSourceTraces {
|
||||
|
||||
tracesQueryBuilder := tracesV3.PrepareTracesQuery
|
||||
if q.UseTraceNewSchema {
|
||||
tracesQueryBuilder = tracesV4.PrepareTracesQuery
|
||||
}
|
||||
tracesQueryBuilder := tracesV4.PrepareTracesQuery
|
||||
|
||||
var query string
|
||||
var err error
|
||||
|
||||
@@ -6,11 +6,9 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
logsV3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
|
||||
logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
||||
metricsV4 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
@@ -49,11 +47,9 @@ type querier struct {
|
||||
testingMode bool
|
||||
queriesExecuted []string
|
||||
// tuple of start and end time in milliseconds
|
||||
timeRanges [][]int
|
||||
returnedSeries []*v3.Series
|
||||
returnedErr error
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
timeRanges [][]int
|
||||
returnedSeries []*v3.Series
|
||||
returnedErr error
|
||||
}
|
||||
|
||||
type QuerierOptions struct {
|
||||
@@ -63,23 +59,14 @@ type QuerierOptions struct {
|
||||
FluxInterval time.Duration
|
||||
|
||||
// used for testing
|
||||
TestingMode bool
|
||||
ReturnedSeries []*v3.Series
|
||||
ReturnedErr error
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
TestingMode bool
|
||||
ReturnedSeries []*v3.Series
|
||||
ReturnedErr error
|
||||
}
|
||||
|
||||
func NewQuerier(opts QuerierOptions) interfaces.Querier {
|
||||
logsQueryBuilder := logsV3.PrepareLogsQuery
|
||||
if opts.UseLogsNewSchema {
|
||||
logsQueryBuilder = logsV4.PrepareLogsQuery
|
||||
}
|
||||
|
||||
tracesQueryBuilder := tracesV3.PrepareTracesQuery
|
||||
if opts.UseTraceNewSchema {
|
||||
tracesQueryBuilder = tracesV4.PrepareTracesQuery
|
||||
}
|
||||
logsQueryBuilder := logsV4.PrepareLogsQuery
|
||||
tracesQueryBuilder := tracesV4.PrepareTracesQuery
|
||||
|
||||
qc := querycache.NewQueryCache(querycache.WithCache(opts.Cache), querycache.WithFluxInterval(opts.FluxInterval))
|
||||
|
||||
@@ -96,11 +83,9 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
|
||||
BuildMetricQuery: metricsV4.PrepareMetricQuery,
|
||||
}),
|
||||
|
||||
testingMode: opts.TestingMode,
|
||||
returnedSeries: opts.ReturnedSeries,
|
||||
returnedErr: opts.ReturnedErr,
|
||||
UseLogsNewSchema: opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||
testingMode: opts.TestingMode,
|
||||
returnedSeries: opts.ReturnedSeries,
|
||||
returnedErr: opts.ReturnedErr,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -446,11 +431,6 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
|
||||
len(params.CompositeQuery.BuilderQueries) == 1 &&
|
||||
params.CompositeQuery.PanelType != v3.PanelTypeTrace {
|
||||
for _, v := range params.CompositeQuery.BuilderQueries {
|
||||
if (v.DataSource == v3.DataSourceLogs && !q.UseLogsNewSchema) ||
|
||||
(v.DataSource == v3.DataSourceTraces && !q.UseTraceNewSchema) {
|
||||
break
|
||||
}
|
||||
|
||||
// only allow of logs queries with timestamp ordering desc
|
||||
// TODO(nitya): allow for timestamp asc
|
||||
if (v.DataSource == v3.DataSourceLogs || v.DataSource == v3.DataSourceTraces) &&
|
||||
|
||||
@@ -1424,8 +1424,6 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
|
||||
telemetryStore,
|
||||
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
||||
"",
|
||||
true,
|
||||
true,
|
||||
time.Duration(time.Second),
|
||||
nil,
|
||||
)
|
||||
|
||||
@@ -14,9 +14,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
@@ -30,7 +27,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
"github.com/SigNoz/signoz/pkg/web"
|
||||
"github.com/rs/cors"
|
||||
"github.com/soheilhy/cmux"
|
||||
@@ -42,7 +38,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/featureManager"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
@@ -50,21 +45,14 @@ import (
|
||||
)
|
||||
|
||||
type ServerOptions struct {
|
||||
Config signoz.Config
|
||||
PromConfigPath string
|
||||
SkipTopLvlOpsPath string
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
// alert specific params
|
||||
DisableRules bool
|
||||
RuleRepoURL string
|
||||
Config signoz.Config
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
PreferSpanMetrics bool
|
||||
CacheConfigPath string
|
||||
FluxInterval string
|
||||
FluxIntervalForTraceDetail string
|
||||
Cluster string
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
SigNoz *signoz.SigNoz
|
||||
Jwt *authtypes.JWT
|
||||
}
|
||||
@@ -120,21 +108,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
serverOptions.SigNoz.Prometheus,
|
||||
serverOptions.Cluster,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
serverOptions.UseTraceNewSchema,
|
||||
fluxIntervalForTraceDetail,
|
||||
serverOptions.SigNoz.Cache,
|
||||
)
|
||||
|
||||
skipConfig := &model.SkipConfig{}
|
||||
if serverOptions.SkipTopLvlOpsPath != "" {
|
||||
// read skip config
|
||||
skipConfig, err = model.ReadSkipConfig(serverOptions.SkipTopLvlOpsPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
var c cache.Cache
|
||||
if serverOptions.CacheConfigPath != "" {
|
||||
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
|
||||
@@ -145,13 +122,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
rm, err := makeRulesManager(
|
||||
serverOptions.RuleRepoURL,
|
||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
reader,
|
||||
c,
|
||||
serverOptions.DisableRules,
|
||||
serverOptions.UseLogsNewSchema,
|
||||
serverOptions.UseTraceNewSchema,
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
serverOptions.SigNoz.Prometheus,
|
||||
@@ -183,12 +156,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
telemetry.GetInstance().SetReader(reader)
|
||||
preferenceAPI := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(serverOptions.SigNoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
|
||||
organizationAPI := implorganization.NewAPI(implorganization.NewModule(implorganization.NewStore(serverOptions.SigNoz.SQLStore)))
|
||||
organizationModule := implorganization.NewModule(implorganization.NewStore(serverOptions.SigNoz.SQLStore))
|
||||
apiHandler, err := NewAPIHandler(APIHandlerOpts{
|
||||
Reader: reader,
|
||||
SkipConfig: skipConfig,
|
||||
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
|
||||
AppDao: dao.DB(),
|
||||
RuleManager: rm,
|
||||
@@ -198,23 +167,16 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
Cache: c,
|
||||
FluxInterval: fluxInterval,
|
||||
UseLogsNewSchema: serverOptions.UseLogsNewSchema,
|
||||
UseTraceNewSchema: serverOptions.UseTraceNewSchema,
|
||||
JWT: serverOptions.Jwt,
|
||||
AlertmanagerAPI: alertmanager.NewAPI(serverOptions.SigNoz.Alertmanager),
|
||||
FieldsAPI: fields.NewAPI(serverOptions.SigNoz.TelemetryStore),
|
||||
Signoz: serverOptions.SigNoz,
|
||||
Preference: preferenceAPI,
|
||||
OrganizationAPI: organizationAPI,
|
||||
OrganizationModule: organizationModule,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s := &Server{
|
||||
// logger: logger,
|
||||
// tracer: tracer,
|
||||
ruleManager: rm,
|
||||
serverOptions: serverOptions,
|
||||
unavailableChannel: make(chan healthcheck.Status),
|
||||
@@ -374,13 +336,7 @@ func (s *Server) initListeners() error {
|
||||
|
||||
// Start listening on http and private http port concurrently
|
||||
func (s *Server) Start(ctx context.Context) error {
|
||||
|
||||
// initiate rule manager first
|
||||
if !s.serverOptions.DisableRules {
|
||||
s.ruleManager.Start(ctx)
|
||||
} else {
|
||||
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
|
||||
}
|
||||
s.ruleManager.Start(ctx)
|
||||
|
||||
err := s.initListeners()
|
||||
if err != nil {
|
||||
@@ -468,32 +424,24 @@ func (s *Server) Stop(ctx context.Context) error {
|
||||
}
|
||||
|
||||
func makeRulesManager(
|
||||
ruleRepoURL string,
|
||||
db *sqlx.DB,
|
||||
ch interfaces.Reader,
|
||||
cache cache.Cache,
|
||||
disableRules bool,
|
||||
useLogsNewSchema bool,
|
||||
useTraceNewSchema bool,
|
||||
sqlstore sqlstore.SQLStore,
|
||||
telemetryStore telemetrystore.TelemetryStore,
|
||||
prometheus prometheus.Prometheus,
|
||||
) (*rules.Manager, error) {
|
||||
// create manager opts
|
||||
managerOpts := &rules.ManagerOptions{
|
||||
TelemetryStore: telemetryStore,
|
||||
Prometheus: prometheus,
|
||||
RepoURL: ruleRepoURL,
|
||||
DBConn: db,
|
||||
Context: context.Background(),
|
||||
Logger: zap.L(),
|
||||
DisableRules: disableRules,
|
||||
Reader: ch,
|
||||
Cache: cache,
|
||||
EvalDelay: constants.GetEvalDelay(),
|
||||
UseLogsNewSchema: useLogsNewSchema,
|
||||
UseTraceNewSchema: useTraceNewSchema,
|
||||
SQLStore: sqlstore,
|
||||
TelemetryStore: telemetryStore,
|
||||
Prometheus: prometheus,
|
||||
DBConn: db,
|
||||
Context: context.Background(),
|
||||
Logger: zap.L(),
|
||||
Reader: ch,
|
||||
Cache: cache,
|
||||
EvalDelay: constants.GetEvalDelay(),
|
||||
SQLStore: sqlstore,
|
||||
}
|
||||
|
||||
// create Manager
|
||||
|
||||
@@ -18,10 +18,6 @@ const (
|
||||
OpAmpWsEndpoint = "0.0.0.0:4320" // address for opamp websocket
|
||||
)
|
||||
|
||||
type ContextKey string
|
||||
|
||||
const ContextUserKey ContextKey = "user"
|
||||
|
||||
var DEFAULT_TELEMETRY_ANONYMOUS = false
|
||||
|
||||
func IsOSSTelemetryEnabled() bool {
|
||||
@@ -57,9 +53,6 @@ var TELEMETRY_ACTIVE_USER_DURATION_MINUTES = GetOrDefaultEnvInt("TELEMETRY_ACTIV
|
||||
|
||||
var InviteEmailTemplate = GetOrDefaultEnv("INVITE_EMAIL_TEMPLATE", "/root/templates/invitation_email_template.html")
|
||||
|
||||
// [Deprecated] SIGNOZ_LOCAL_DB_PATH is deprecated and scheduled for removal. Please use SIGNOZ_SQLSTORE_SQLITE_PATH instead.
|
||||
var RELATIONAL_DATASOURCE_PATH = GetOrDefaultEnv("SIGNOZ_LOCAL_DB_PATH", "/var/lib/signoz/signoz.db")
|
||||
|
||||
var MetricsExplorerClickhouseThreads = GetOrDefaultEnvInt("METRICS_EXPLORER_CLICKHOUSE_THREADS", 8)
|
||||
var UpdatedMetricsMetadataCachePrefix = GetOrDefaultEnv("METRICS_UPDATED_METADATA_CACHE_KEY", "UPDATED_METRICS_METADATA")
|
||||
|
||||
|
||||
@@ -208,7 +208,7 @@ func (mds *ModelDaoSqlite) GetUser(ctx context.Context,
|
||||
query := mds.bundb.NewSelect().
|
||||
Table("users").
|
||||
Column("users.id", "users.name", "users.email", "users.password", "users.created_at", "users.profile_picture_url", "users.org_id", "users.role").
|
||||
ColumnExpr("o.name as organization").
|
||||
ColumnExpr("o.display_name as organization").
|
||||
Join("JOIN organizations o ON o.id = users.org_id").
|
||||
Where("users.id = ?", id)
|
||||
|
||||
@@ -243,7 +243,7 @@ func (mds *ModelDaoSqlite) GetUserByEmail(ctx context.Context,
|
||||
query := mds.bundb.NewSelect().
|
||||
Table("users").
|
||||
Column("users.id", "users.name", "users.email", "users.password", "users.created_at", "users.profile_picture_url", "users.org_id", "users.role").
|
||||
ColumnExpr("o.name as organization").
|
||||
ColumnExpr("o.display_name as organization").
|
||||
Join("JOIN organizations o ON o.id = users.org_id").
|
||||
Where("users.email = ?", email)
|
||||
|
||||
@@ -277,7 +277,7 @@ func (mds *ModelDaoSqlite) GetUsersWithOpts(ctx context.Context, limit int) ([]t
|
||||
Table("users").
|
||||
Column("users.id", "users.name", "users.email", "users.password", "users.created_at", "users.profile_picture_url", "users.org_id", "users.role").
|
||||
ColumnExpr("users.role as role").
|
||||
ColumnExpr("o.name as organization").
|
||||
ColumnExpr("o.display_name as organization").
|
||||
Join("JOIN organizations o ON o.id = users.org_id")
|
||||
|
||||
if limit > 0 {
|
||||
@@ -300,7 +300,7 @@ func (mds *ModelDaoSqlite) GetUsersByOrg(ctx context.Context,
|
||||
Table("users").
|
||||
Column("users.id", "users.name", "users.email", "users.password", "users.created_at", "users.profile_picture_url", "users.org_id", "users.role").
|
||||
ColumnExpr("users.role as role").
|
||||
ColumnExpr("o.name as organization").
|
||||
ColumnExpr("o.display_name as organization").
|
||||
Join("JOIN organizations o ON o.id = users.org_id").
|
||||
Where("users.org_id = ?", orgId)
|
||||
|
||||
@@ -318,7 +318,7 @@ func (mds *ModelDaoSqlite) GetUsersByRole(ctx context.Context, role authtypes.Ro
|
||||
Table("users").
|
||||
Column("users.id", "users.name", "users.email", "users.password", "users.created_at", "users.profile_picture_url", "users.org_id", "users.role").
|
||||
ColumnExpr("users.role as role").
|
||||
ColumnExpr("o.name as organization").
|
||||
ColumnExpr("o.display_name as organization").
|
||||
Join("JOIN organizations o ON o.id = users.org_id").
|
||||
Where("users.role = ?", role)
|
||||
|
||||
|
||||
@@ -15,12 +15,8 @@ import (
|
||||
type Reader interface {
|
||||
GetInstantQueryMetricsResult(ctx context.Context, query *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
|
||||
GetQueryRangeResult(ctx context.Context, query *model.QueryRangeParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
|
||||
GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig, start, end time.Time, services []string) (*map[string][]string, *model.ApiError)
|
||||
GetServices(ctx context.Context, query *model.GetServicesParams, skipConfig *model.SkipConfig) (*[]model.ServiceItem, *model.ApiError)
|
||||
GetTopOperations(ctx context.Context, query *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError)
|
||||
GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error)
|
||||
GetServicesList(ctx context.Context) (*[]string, error)
|
||||
GetDependencyGraph(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error)
|
||||
|
||||
GetTTL(ctx context.Context, orgID string, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError)
|
||||
|
||||
@@ -74,9 +70,6 @@ type Reader interface {
|
||||
// Logs
|
||||
GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError)
|
||||
UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError
|
||||
GetLogs(ctx context.Context, params *model.LogsFilterParams) (*[]model.SignozLog, *model.ApiError)
|
||||
TailLogs(ctx context.Context, client *model.LogsTailClient)
|
||||
AggregateLogs(ctx context.Context, params *model.LogsAggregateParams) (*model.GetLogsAggregatesResponse, *model.ApiError)
|
||||
GetLogAttributeKeys(ctx context.Context, req *v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error)
|
||||
GetLogAttributeValues(ctx context.Context, req *v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error)
|
||||
GetLogAggregateAttributes(ctx context.Context, req *v3.AggregateAttributeRequest) (*v3.AggregateAttributeResponse, error)
|
||||
@@ -100,8 +93,6 @@ type Reader interface {
|
||||
ReadRuleStateHistoryTopContributorsByRuleID(ctx context.Context, ruleID string, params *model.QueryRuleStateHistory) ([]model.RuleStateHistoryContributor, error)
|
||||
GetLastSavedRuleStateHistory(ctx context.Context, ruleID string) ([]model.RuleStateHistory, error)
|
||||
|
||||
GetMinAndMaxTimestampForTraceID(ctx context.Context, traceID []string) (int64, int64, error)
|
||||
|
||||
// Query Progress tracking helpers.
|
||||
ReportQueryStartForProgressTracking(queryId string) (reportQueryFinished func(), err *model.ApiError)
|
||||
SubscribeToQueryProgress(queryId string) (<-chan model.QueryProgress, func(), *model.ApiError)
|
||||
|
||||
@@ -45,12 +45,18 @@ func main() {
|
||||
var maxOpenConns int
|
||||
var dialTimeout time.Duration
|
||||
|
||||
// Deprecated
|
||||
flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
|
||||
// Deprecated
|
||||
flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces")
|
||||
// Deprecated
|
||||
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
|
||||
// Deprecated
|
||||
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
|
||||
// Deprecated
|
||||
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
|
||||
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
|
||||
// Deprecated
|
||||
flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)")
|
||||
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
|
||||
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
|
||||
@@ -58,8 +64,11 @@ func main() {
|
||||
flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
|
||||
// Allow using the consistent naming with the signoz collector
|
||||
flag.StringVar(&cluster, "cluster-name", "cluster", "(cluster name - defaults to 'cluster')")
|
||||
// Deprecated
|
||||
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool, only used with clickhouse if not set in ClickHouseUrl env var DSN.)")
|
||||
// Deprecated
|
||||
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time, only used with clickhouse if not set in ClickHouseUrl env var DSN.)")
|
||||
// Deprecated
|
||||
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection, only used with clickhouse if not set in ClickHouseUrl env var DSN.)")
|
||||
flag.Parse()
|
||||
|
||||
@@ -113,18 +122,12 @@ func main() {
|
||||
serverOptions := &app.ServerOptions{
|
||||
Config: config,
|
||||
HTTPHostPort: constants.HTTPHostPort,
|
||||
PromConfigPath: promConfigPath,
|
||||
SkipTopLvlOpsPath: skipTopLvlOpsPath,
|
||||
PreferSpanMetrics: preferSpanMetrics,
|
||||
PrivateHostPort: constants.PrivateHostPort,
|
||||
DisableRules: disableRules,
|
||||
RuleRepoURL: ruleRepoURL,
|
||||
CacheConfigPath: cacheConfigPath,
|
||||
FluxInterval: fluxInterval,
|
||||
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
|
||||
Cluster: cluster,
|
||||
UseLogsNewSchema: useLogsNewSchema,
|
||||
UseTraceNewSchema: useTraceNewSchema,
|
||||
SigNoz: signoz,
|
||||
Jwt: jwt,
|
||||
}
|
||||
|
||||
@@ -88,6 +88,11 @@ type ChangePasswordRequest struct {
|
||||
NewPassword string `json:"newPassword"`
|
||||
}
|
||||
|
||||
type ResetPasswordRequest struct {
|
||||
Password string `json:"password"`
|
||||
Token string `json:"token"`
|
||||
}
|
||||
|
||||
type UserRole struct {
|
||||
UserId string `json:"user_id"`
|
||||
GroupName string `json:"group_name"`
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
type SkipConfig struct {
|
||||
Services []ServiceSkipConfig `yaml:"services"`
|
||||
}
|
||||
|
||||
type ServiceSkipConfig struct {
|
||||
Name string `yaml:"name"`
|
||||
Operations []string `yaml:"operations"`
|
||||
}
|
||||
|
||||
func (s *SkipConfig) ShouldSkip(serviceName, name string) bool {
|
||||
for _, service := range s.Services {
|
||||
if service.Name == serviceName {
|
||||
for _, operation := range service.Operations {
|
||||
if name == operation {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func ReadYaml(path string, v interface{}) error {
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
decoder := yaml.NewDecoder(f)
|
||||
err = decoder.Decode(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func ReadSkipConfig(path string) (*SkipConfig, error) {
|
||||
if path == "" {
|
||||
return &SkipConfig{}, nil
|
||||
}
|
||||
|
||||
skipConfig := &SkipConfig{}
|
||||
err := ReadYaml(path, skipConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return skipConfig, nil
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
package model
|
||||
|
||||
import "time"
|
||||
|
||||
type ResetPasswordRequest struct {
|
||||
Password string `json:"password"`
|
||||
Token string `json:"token"`
|
||||
}
|
||||
|
||||
type IngestionKey struct {
|
||||
KeyId string `json:"keyId" db:"key_id"`
|
||||
Name string `json:"name" db:"name"`
|
||||
CreatedAt time.Time `json:"createdAt" db:"created_at"`
|
||||
IngestionKey string `json:"ingestionKey" db:"ingestion_key"`
|
||||
IngestionURL string `json:"ingestionURL" db:"ingestion_url"`
|
||||
DataRegion string `json:"dataRegion" db:"data_region"`
|
||||
}
|
||||
@@ -70,16 +70,6 @@ type RegisterEventParams struct {
|
||||
RateLimited bool `json:"rateLimited"`
|
||||
}
|
||||
|
||||
type GetUsageParams struct {
|
||||
StartTime string
|
||||
EndTime string
|
||||
ServiceName string
|
||||
Period string
|
||||
StepHour int
|
||||
Start *time.Time
|
||||
End *time.Time
|
||||
}
|
||||
|
||||
type GetServicesParams struct {
|
||||
StartTime string `json:"start"`
|
||||
EndTime string `json:"end"`
|
||||
|
||||
@@ -34,33 +34,29 @@ import (
|
||||
)
|
||||
|
||||
type PrepareTaskOptions struct {
|
||||
Rule *ruletypes.PostableRule
|
||||
TaskName string
|
||||
RuleStore ruletypes.RuleStore
|
||||
MaintenanceStore ruletypes.MaintenanceStore
|
||||
Logger *zap.Logger
|
||||
Reader interfaces.Reader
|
||||
Cache cache.Cache
|
||||
ManagerOpts *ManagerOptions
|
||||
NotifyFunc NotifyFunc
|
||||
SQLStore sqlstore.SQLStore
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
OrgID string
|
||||
Rule *ruletypes.PostableRule
|
||||
TaskName string
|
||||
RuleStore ruletypes.RuleStore
|
||||
MaintenanceStore ruletypes.MaintenanceStore
|
||||
Logger *zap.Logger
|
||||
Reader interfaces.Reader
|
||||
Cache cache.Cache
|
||||
ManagerOpts *ManagerOptions
|
||||
NotifyFunc NotifyFunc
|
||||
SQLStore sqlstore.SQLStore
|
||||
OrgID string
|
||||
}
|
||||
|
||||
type PrepareTestRuleOptions struct {
|
||||
Rule *ruletypes.PostableRule
|
||||
RuleStore ruletypes.RuleStore
|
||||
MaintenanceStore ruletypes.MaintenanceStore
|
||||
Logger *zap.Logger
|
||||
Reader interfaces.Reader
|
||||
Cache cache.Cache
|
||||
ManagerOpts *ManagerOptions
|
||||
NotifyFunc NotifyFunc
|
||||
SQLStore sqlstore.SQLStore
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
Rule *ruletypes.PostableRule
|
||||
RuleStore ruletypes.RuleStore
|
||||
MaintenanceStore ruletypes.MaintenanceStore
|
||||
Logger *zap.Logger
|
||||
Reader interfaces.Reader
|
||||
Cache cache.Cache
|
||||
ManagerOpts *ManagerOptions
|
||||
NotifyFunc NotifyFunc
|
||||
SQLStore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
const taskNamesuffix = "webAppEditor"
|
||||
@@ -84,25 +80,18 @@ func prepareTaskName(ruleId interface{}) string {
|
||||
type ManagerOptions struct {
|
||||
TelemetryStore telemetrystore.TelemetryStore
|
||||
Prometheus prometheus.Prometheus
|
||||
// RepoURL is used to generate a backlink in sent alert messages
|
||||
RepoURL string
|
||||
|
||||
// rule db conn
|
||||
DBConn *sqlx.DB
|
||||
|
||||
Context context.Context
|
||||
Logger *zap.Logger
|
||||
ResendDelay time.Duration
|
||||
DisableRules bool
|
||||
Reader interfaces.Reader
|
||||
Cache cache.Cache
|
||||
Context context.Context
|
||||
Logger *zap.Logger
|
||||
ResendDelay time.Duration
|
||||
Reader interfaces.Reader
|
||||
Cache cache.Cache
|
||||
|
||||
EvalDelay time.Duration
|
||||
|
||||
PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
|
||||
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
|
||||
PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
|
||||
Alertmanager alertmanager.Alertmanager
|
||||
SQLStore sqlstore.SQLStore
|
||||
@@ -125,9 +114,6 @@ type Manager struct {
|
||||
prepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
|
||||
prepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
|
||||
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
|
||||
alertmanager alertmanager.Alertmanager
|
||||
sqlstore sqlstore.SQLStore
|
||||
}
|
||||
@@ -160,8 +146,6 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
|
||||
ruleId,
|
||||
opts.Rule,
|
||||
opts.Reader,
|
||||
opts.UseLogsNewSchema,
|
||||
opts.UseTraceNewSchema,
|
||||
WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||
WithSQLStore(opts.SQLStore),
|
||||
)
|
||||
@@ -395,11 +379,9 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, idStr string) er
|
||||
return err
|
||||
}
|
||||
|
||||
if !m.opts.DisableRules {
|
||||
err = m.syncRuleStateWithTask(ctx, claims.OrgID, prepareTaskName(existingRule.ID.StringValue()), parsedRule)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = m.syncRuleStateWithTask(ctx, claims.OrgID, prepareTaskName(existingRule.ID.StringValue()), parsedRule)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -413,19 +395,17 @@ func (m *Manager) editTask(_ context.Context, orgID string, rule *ruletypes.Post
|
||||
zap.L().Debug("editing a rule task", zap.String("name", taskName))
|
||||
|
||||
newTask, err := m.prepareTaskFunc(PrepareTaskOptions{
|
||||
Rule: rule,
|
||||
TaskName: taskName,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
UseLogsNewSchema: m.opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: m.opts.UseTraceNewSchema,
|
||||
OrgID: orgID,
|
||||
Rule: rule,
|
||||
TaskName: taskName,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
OrgID: orgID,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
@@ -496,9 +476,7 @@ func (m *Manager) DeleteRule(ctx context.Context, idStr string) error {
|
||||
}
|
||||
|
||||
taskName := prepareTaskName(id.StringValue())
|
||||
if !m.opts.DisableRules {
|
||||
m.deleteTask(taskName)
|
||||
}
|
||||
m.deleteTask(taskName)
|
||||
|
||||
return nil
|
||||
})
|
||||
@@ -581,10 +559,8 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*ruletypes.Ge
|
||||
}
|
||||
|
||||
taskName := prepareTaskName(id.StringValue())
|
||||
if !m.opts.DisableRules {
|
||||
if err := m.addTask(ctx, claims.OrgID, parsedRule, taskName); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := m.addTask(ctx, claims.OrgID, parsedRule, taskName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -605,19 +581,17 @@ func (m *Manager) addTask(_ context.Context, orgID string, rule *ruletypes.Posta
|
||||
|
||||
zap.L().Debug("adding a new rule task", zap.String("name", taskName))
|
||||
newTask, err := m.prepareTaskFunc(PrepareTaskOptions{
|
||||
Rule: rule,
|
||||
TaskName: taskName,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
UseLogsNewSchema: m.opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: m.opts.UseTraceNewSchema,
|
||||
OrgID: orgID,
|
||||
Rule: rule,
|
||||
TaskName: taskName,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
OrgID: orgID,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
@@ -724,9 +698,6 @@ func (m *Manager) prepareNotifyFunc() NotifyFunc {
|
||||
|
||||
for _, alert := range alerts {
|
||||
generatorURL := alert.GeneratorURL
|
||||
if generatorURL == "" {
|
||||
generatorURL = m.opts.RepoURL
|
||||
}
|
||||
|
||||
a := &alertmanagertypes.PostableAlert{
|
||||
Annotations: alert.Annotations.Map(),
|
||||
@@ -759,9 +730,6 @@ func (m *Manager) prepareTestNotifyFunc() NotifyFunc {
|
||||
|
||||
alert := alerts[0]
|
||||
generatorURL := alert.GeneratorURL
|
||||
if generatorURL == "" {
|
||||
generatorURL = m.opts.RepoURL
|
||||
}
|
||||
|
||||
a := &alertmanagertypes.PostableAlert{
|
||||
Annotations: alert.Annotations.Map(),
|
||||
@@ -1003,17 +971,15 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m
|
||||
}
|
||||
|
||||
alertCount, apiErr := m.prepareTestRuleFunc(PrepareTestRuleOptions{
|
||||
Rule: parsedRule,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareTestNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
UseLogsNewSchema: m.opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: m.opts.UseTraceNewSchema,
|
||||
Rule: parsedRule,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareTestNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
})
|
||||
|
||||
return alertCount, apiErr
|
||||
|
||||
@@ -15,7 +15,6 @@ import (
|
||||
// TestNotification prepares a dummy rule for given rule parameters and
|
||||
// sends a test notification. Returns the alert count and an error (if any).
|
||||
func defaultTestNotification(opts PrepareTestRuleOptions) (int, *model.ApiError) {
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
if opts.Rule == nil {
|
||||
@@ -48,8 +47,6 @@ func defaultTestNotification(opts PrepareTestRuleOptions) (int, *model.ApiError)
|
||||
alertname,
|
||||
parsedRule,
|
||||
opts.Reader,
|
||||
opts.UseLogsNewSchema,
|
||||
opts.UseTraceNewSchema,
|
||||
WithSendAlways(),
|
||||
WithSendUnmatched(),
|
||||
WithSQLStore(opts.SQLStore),
|
||||
|
||||
@@ -29,7 +29,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
"github.com/SigNoz/signoz/pkg/query-service/formatter"
@@ -52,16 +51,12 @@ type ThresholdRule struct {
// used for attribute metadata enrichment for logs and traces
logsKeys map[string]v3.AttributeKey
spansKeys map[string]v3.AttributeKey
useTraceNewSchema bool
}
func NewThresholdRule(
id string,
p *ruletypes.PostableRule,
reader interfaces.Reader,
useLogsNewSchema bool,
useTraceNewSchema bool,
opts ...RuleOption,
) (*ThresholdRule, error) {
@@ -73,25 +68,20 @@ func NewThresholdRule(
}
t := ThresholdRule{
BaseRule: baseRule,
version: p.Version,
useTraceNewSchema: useTraceNewSchema,
BaseRule: baseRule,
version: p.Version,
}
querierOption := querier.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
UseLogsNewSchema: useLogsNewSchema,
UseTraceNewSchema: useTraceNewSchema,
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
UseLogsNewSchema: useLogsNewSchema,
UseTraceNewSchema: useTraceNewSchema,
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querier = querier.NewQuerier(querierOption)
@@ -301,11 +291,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time) (rul
return nil, err
}
r.spansKeys = spanKeys
if r.useTraceNewSchema {
tracesV4.Enrich(params, spanKeys)
} else {
tracesV3.Enrich(params, spanKeys)
}
tracesV4.Enrich(params, spanKeys)
}
}
@@ -801,7 +801,7 @@ func TestThresholdRuleShouldAlert(t *testing.T) {
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
postableRule.RuleCondition.Target = &c.target
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", &postableRule, nil, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -889,7 +889,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", &postableRule, nil, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -930,7 +930,7 @@ func TestPrepareLinksToTraces(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", &postableRule, nil, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -1005,7 +1005,7 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
postableRule.RuleCondition.Target = &c.target
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", &postableRule, nil, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -1057,7 +1057,7 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
}
for idx, c := range cases {
rule, err := NewThresholdRule("69", &postableRule, nil, true, true) // no eval delay
rule, err := NewThresholdRule("69", &postableRule, nil) // no eval delay
if err != nil {
assert.NoError(t, err)
}
@@ -1105,7 +1105,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
}
for idx, c := range cases {
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", &postableRule, nil, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -1244,8 +1244,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
readerCache, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{TTL: DefaultFrequency}})
require.NoError(t, err)
reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", true, true, time.Duration(time.Second), readerCache)
rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), readerCache)
rule, err := NewThresholdRule("69", &postableRule, reader)
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
"signoz_calls_total": {
v3.Delta: true,
@@ -1340,9 +1340,9 @@ func TestThresholdRuleNoData(t *testing.T) {
}
readerCache, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{TTL: DefaultFrequency}})
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", true, true, time.Duration(time.Second), readerCache)
reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), readerCache)
rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
rule, err := NewThresholdRule("69", &postableRule, reader)
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
"signoz_calls_total": {
v3.Delta: true,
@@ -1444,9 +1444,9 @@ func TestThresholdRuleTracesLink(t *testing.T) {
}
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", true, true, time.Duration(time.Second), nil)
reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), nil)
rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
rule, err := NewThresholdRule("69", &postableRule, reader)
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
"signoz_calls_total": {
v3.Delta: true,
@@ -1565,9 +1565,9 @@ func TestThresholdRuleLogsLink(t *testing.T) {
}
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", true, true, time.Duration(time.Second), nil)
reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), nil)
rule, err := NewThresholdRule("69", &postableRule, reader, true, true)
rule, err := NewThresholdRule("69", &postableRule, reader)
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
"signoz_calls_total": {
v3.Delta: true,
@@ -1643,7 +1643,7 @@ func TestThresholdRuleShiftBy(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", &postableRule, nil, true, true)
rule, err := NewThresholdRule("69", &postableRule, nil)
if err != nil {
assert.NoError(t, err)
}
@@ -18,6 +18,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/featureManager"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types"
mockhouse "github.com/srikanthccv/ClickHouse-go-mock"
"github.com/stretchr/testify/require"
@@ -297,11 +298,17 @@ func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed {
reader, mockClickhouse := NewMockClickhouseReader(t, testDB)
mockClickhouse.MatchExpectationsInOrder(false)
modules := signoz.NewModules(testDB)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,
AppDao: dao.DB(),
FeatureFlags: fm,
JWT: jwt,
Signoz: &signoz.SigNoz{
Modules: modules,
Handlers: signoz.NewHandlers(modules),
},
})
if err != nil {
t.Fatalf("could not create a new ApiHandler: %v", err)
@@ -10,6 +10,7 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/query-service/app"
@@ -360,12 +361,19 @@ func NewCloudIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *CloudI
reader, mockClickhouse := NewMockClickhouseReader(t, testDB)
mockClickhouse.MatchExpectationsInOrder(false)
modules := signoz.NewModules(testDB)
handlers := signoz.NewHandlers(modules)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,
AppDao: dao.DB(),
CloudIntegrationsController: controller,
FeatureFlags: fm,
JWT: jwt,
Signoz: &signoz.SigNoz{
Modules: modules,
Handlers: handlers,
},
})
if err != nil {
t.Fatalf("could not create a new ApiHandler: %v", err)
@@ -19,6 +19,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
@@ -566,6 +567,9 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration
t.Fatalf("could not create cloud integrations controller: %v", err)
}
modules := signoz.NewModules(testDB)
handlers := signoz.NewHandlers(modules)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,
AppDao: dao.DB(),
@@ -573,6 +577,10 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration
FeatureFlags: fm,
JWT: jwt,
CloudIntegrationsController: cloudIntegrationsController,
Signoz: &signoz.SigNoz{
Modules: modules,
Handlers: handlers,
},
})
if err != nil {
t.Fatalf("could not create a new ApiHandler: %v", err)
@@ -46,8 +46,6 @@ func NewMockClickhouseReader(t *testing.T, testDB sqlstore.SQLStore) (*clickhous
telemetryStore,
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
"",
true,
true,
time.Duration(time.Second),
nil,
)
20
pkg/signoz/handler.go
Normal file
@@ -0,0 +1,20 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
)
type Handlers struct {
Organization organization.Handler
Preference preference.Handler
}
func NewHandlers(modules Modules) Handlers {
return Handlers{
Organization: implorganization.NewHandler(modules.Organization),
Preference: implpreference.NewHandler(modules.Preference),
}
}
22
pkg/signoz/module.go
Normal file
@@ -0,0 +1,22 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
)
type Modules struct {
Organization organization.Module
Preference preference.Module
}
func NewModules(sqlstore sqlstore.SQLStore) Modules {
return Modules{
Organization: implorganization.NewModule(implorganization.NewStore(sqlstore)),
Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewDefaultPreferenceMap()),
}
}
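Taken together, the two new files give callers one place to construct the domain modules and the HTTP handlers that wrap them. A minimal sketch of how a caller might wire them from an already-built SQL store; the package name `example` and the `Wire` function are illustrative and not part of this change:

```go
package example

import (
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/sqlstore"
)

// Wire builds the Modules and Handlers registries introduced by this change
// from an existing sqlstore.SQLStore.
func Wire(store sqlstore.SQLStore) (signoz.Modules, signoz.Handlers) {
	// One module per domain (organization, preference), all backed by the same store.
	modules := signoz.NewModules(store)
	// Handlers wrap each module with its HTTP handler implementation.
	handlers := signoz.NewHandlers(modules)
	return modules, handlers
}
```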
@@ -26,6 +26,8 @@ type SigNoz struct {
TelemetryStore telemetrystore.TelemetryStore
Prometheus prometheus.Prometheus
Alertmanager alertmanager.Alertmanager
Modules Modules
Handlers Handlers
}
func New(
@@ -124,6 +126,7 @@ func New(
return nil, err
}
// Initialize alertmanager from the available alertmanager provider factories
alertmanager, err := factory.NewProviderFromNamedMap(
ctx,
providerSettings,
@@ -135,6 +138,12 @@ func New(
return nil, err
}
// Initialize all modules
modules := NewModules(sqlstore)
// Initialize all handlers for the modules
handlers := NewHandlers(modules)
registry, err := factory.NewRegistry(
instrumentation.Logger(),
factory.NewNamedService(factory.MustNewName("instrumentation"), instrumentation),
@@ -153,5 +162,7 @@ func New(
TelemetryStore: telemetrystore,
Prometheus: prometheus,
Alertmanager: alertmanager,
Modules: modules,
Handlers: handlers,
}, nil
}
@@ -38,14 +38,26 @@ func (migration *dropLicensesSites) Up(ctx context.Context, db *bun.DB) error {
}
defer tx.Rollback()
if _, err := tx.NewDropTable().IfExists().Table("sites").Exec(ctx); err != nil {
return err
}
if _, err := tx.NewDropTable().IfExists().Table("licenses").Exec(ctx); err != nil {
if _, err := tx.
NewDropTable().
IfExists().
Table("sites").
Exec(ctx); err != nil {
return err
}
_, err = migration.store.Dialect().RenameColumn(ctx, tx, "saved_views", "uuid", "id")
if _, err := tx.
NewDropTable().
IfExists().
Table("licenses").
Exec(ctx); err != nil {
return err
}
_, err = migration.
store.
Dialect().
RenameColumn(ctx, tx, "saved_views", "uuid", "id")
if err != nil {
return err
}
@@ -42,12 +42,9 @@ type newInvite struct {
}
func NewUpdateInvitesFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.
NewProviderFactory(
factory.MustNewName("update_invites"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateInvites(ctx, ps, c, sqlstore)
})
return factory.NewProviderFactory(factory.MustNewName("update_invites"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateInvites(ctx, ps, c, sqlstore)
})
}
func newUpdateInvites(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
@@ -55,8 +52,7 @@ func newUpdateInvites(_ context.Context, _ factory.ProviderSettings, _ Config, s
}
func (migration *updateInvites) Register(migrations *migrate.Migrations) error {
if err := migrations.
Register(migration.Up, migration.Down); err != nil {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
@@ -64,8 +60,7 @@ func (migration *updateInvites) Register(migrations *migrate.Migrations) error {
}
func (migration *updateInvites) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.
BeginTx(ctx, nil)
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
@@ -88,8 +83,7 @@ func (migration *updateInvites) Up(ctx context.Context, db *bun.DB) error {
}
if err == nil && len(existingInvites) > 0 {
newInvites := migration.
CopyOldInvitesToNewInvites(existingInvites)
newInvites := migration.CopyOldInvitesToNewInvites(existingInvites)
_, err = tx.
NewInsert().
Model(&newInvites).
@@ -20,9 +20,7 @@ func NewUpdatePatFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQL
}
func newUpdatePat(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
return &updatePat{
store: store,
}, nil
return &updatePat{store: store}, nil
}
func (migration *updatePat) Register(migrations *migrate.Migrations) error {
@@ -34,25 +32,33 @@ func (migration *updatePat) Register(migrations *migrate.Migrations) error {
}
func (migration *updatePat) Up(ctx context.Context, db *bun.DB) error {
// begin transaction
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer tx.Rollback()
for _, column := range []string{"last_used", "expires_at"} {
if err := migration.store.Dialect().AddNotNullDefaultToColumn(ctx, tx, "personal_access_tokens", column, "INTEGER", "0"); err != nil {
if err := migration.
store.
Dialect().
AddNotNullDefaultToColumn(ctx, tx, "personal_access_tokens", column, "INTEGER", "0"); err != nil {
return err
}
}
if err := migration.store.Dialect().AddNotNullDefaultToColumn(ctx, tx, "personal_access_tokens", "revoked", "BOOLEAN", "false"); err != nil {
if err := migration.
store.
Dialect().
AddNotNullDefaultToColumn(ctx, tx, "personal_access_tokens", "revoked", "BOOLEAN", "false"); err != nil {
return err
}
if err := migration.store.Dialect().AddNotNullDefaultToColumn(ctx, tx, "personal_access_tokens", "updated_by_user_id", "TEXT", "''"); err != nil {
if err := migration.
store.
Dialect().
AddNotNullDefaultToColumn(ctx, tx, "personal_access_tokens", "updated_by_user_id", "TEXT", "''"); err != nil {
return err
}
@@ -77,12 +77,9 @@ type newAlertmanagerState struct {
}
func NewUpdateAlertmanagerFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.
NewProviderFactory(
factory.MustNewName("update_alertmanager"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateAlertmanager(ctx, ps, c, sqlstore)
})
return factory.NewProviderFactory(factory.MustNewName("update_alertmanager"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateAlertmanager(ctx, ps, c, sqlstore)
})
}
func newUpdateAlertmanager(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
@@ -90,8 +87,7 @@ func newUpdateAlertmanager(_ context.Context, _ factory.ProviderSettings, _ Conf
}
func (migration *updateAlertmanager) Register(migrations *migrate.Migrations) error {
if err := migrations.
Register(migration.Up, migration.Down); err != nil {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
@@ -99,8 +95,7 @@ func (migration *updateAlertmanager) Register(migrations *migrate.Migrations) er
}
func (migration *updateAlertmanager) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.
BeginTx(ctx, nil)
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
@@ -49,12 +49,9 @@ type newUserPreference struct {
}
func NewUpdatePreferencesFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.
NewProviderFactory(
factory.MustNewName("update_preferences"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdatePreferences(ctx, ps, c, sqlstore)
})
return factory.NewProviderFactory(factory.MustNewName("update_preferences"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdatePreferences(ctx, ps, c, sqlstore)
})
}
func newUpdatePreferences(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
@@ -62,8 +59,7 @@ func newUpdatePreferences(_ context.Context, _ factory.ProviderSettings, _ Confi
}
func (migration *updatePreferences) Register(migrations *migrate.Migrations) error {
if err := migrations.
Register(migration.Up, migration.Down); err != nil {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
@@ -71,8 +67,7 @@ func (migration *updatePreferences) Register(migrations *migrate.Migrations) err
}
func (migration *updatePreferences) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.
BeginTx(ctx, nil)
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
@@ -135,8 +130,7 @@ func (migration *updatePreferences) Up(ctx context.Context, db *bun.DB) error {
}
if err == nil && len(existingUserPreferences) > 0 {
newUserPreferences := migration.
CopyOldUserPreferencesToNewUserPreferences(existingUserPreferences)
newUserPreferences := migration.CopyOldUserPreferencesToNewUserPreferences(existingUserPreferences)
_, err = tx.
NewInsert().
Model(&newUserPreferences).
@@ -61,12 +61,9 @@ type newTTLStatus struct {
}
func NewUpdateApdexTtlFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.
NewProviderFactory(
factory.MustNewName("update_apdex_ttl"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateApdexTtl(ctx, ps, c, sqlstore)
})
return factory.NewProviderFactory(factory.MustNewName("update_apdex_ttl"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateApdexTtl(ctx, ps, c, sqlstore)
})
}
func newUpdateApdexTtl(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
@@ -74,8 +71,7 @@ func newUpdateApdexTtl(_ context.Context, _ factory.ProviderSettings, _ Config,
}
func (migration *updateApdexTtl) Register(migrations *migrate.Migrations) error {
if err := migrations.
Register(migration.Up, migration.Down); err != nil {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
@@ -83,8 +79,7 @@ func (migration *updateApdexTtl) Register(migrations *migrate.Migrations) error
}
func (migration *updateApdexTtl) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.
BeginTx(ctx, nil)
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
@@ -161,8 +156,7 @@ func (migration *updateApdexTtl) Up(ctx context.Context, db *bun.DB) error {
}
if err == nil {
newTTLStatus := migration.
CopyExistingTTLStatusToNewTTLStatus(existingTTLStatus, orgID)
newTTLStatus := migration.CopyExistingTTLStatusToNewTTLStatus(existingTTLStatus, orgID)
_, err = tx.
NewInsert().
Model(&newTTLStatus).
@@ -61,12 +61,9 @@ type newPersonalAccessToken struct {
}
func NewUpdateResetPasswordFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.
NewProviderFactory(
factory.MustNewName("update_reset_password"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateResetPassword(ctx, ps, c, sqlstore)
})
return factory.NewProviderFactory(factory.MustNewName("update_reset_password"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateResetPassword(ctx, ps, c, sqlstore)
})
}
func newUpdateResetPassword(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
@@ -74,8 +71,7 @@ func newUpdateResetPassword(_ context.Context, _ factory.ProviderSettings, _ Con
}
func (migration *updateResetPassword) Register(migrations *migrate.Migrations) error {
if err := migrations.
Register(migration.Up, migration.Down); err != nil {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
@@ -83,8 +79,7 @@ func (migration *updateResetPassword) Register(migrations *migrate.Migrations) e
}
func (migration *updateResetPassword) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.
BeginTx(ctx, nil)
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
@@ -104,8 +99,7 @@ func (migration *updateResetPassword) Up(ctx context.Context, db *bun.DB) error
}
if err == nil && len(existingResetPasswordRequests) > 0 {
newResetPasswordRequests := migration.
CopyExistingResetPasswordRequestsToNewResetPasswordRequests(existingResetPasswordRequests)
newResetPasswordRequests := migration.CopyExistingResetPasswordRequestsToNewResetPasswordRequests(existingResetPasswordRequests)
_, err = tx.
NewInsert().
Model(&newResetPasswordRequests).
@@ -134,8 +128,7 @@ func (migration *updateResetPassword) Up(ctx context.Context, db *bun.DB) error
}
if err == nil && len(existingPersonalAccessTokens) > 0 {
newPersonalAccessTokens := migration.
CopyExistingPATsToNewPATs(existingPersonalAccessTokens)
newPersonalAccessTokens := migration.CopyExistingPATsToNewPATs(existingPersonalAccessTokens)
_, err = tx.NewInsert().Model(&newPersonalAccessTokens).Exec(ctx)
if err != nil {
return err
@@ -26,9 +26,7 @@ func NewUpdateIntegrationsFactory(sqlstore sqlstore.SQLStore) factory.ProviderFa
}
func newUpdateIntegrations(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
return &updateIntegrations{
store: store,
}, nil
return &updateIntegrations{store: store}, nil
}
func (migration *updateIntegrations) Register(migrations *migrate.Migrations) error {
@@ -136,9 +134,7 @@ func (migration *updateIntegrations) Up(ctx context.Context, db *bun.DB) error {
return nil
}
// ---
// installed integrations
// ---
err = migration.
store.
Dialect().
@@ -171,9 +167,7 @@ func (migration *updateIntegrations) Up(ctx context.Context, db *bun.DB) error {
return err
}
// ---
// cloud integrations
// ---
err = migration.
store.
Dialect().
@@ -213,9 +207,7 @@ func (migration *updateIntegrations) Up(ctx context.Context, db *bun.DB) error {
return err
}
// ---
// cloud integration service
// ---
err = migration.
store.
Dialect().
@@ -93,12 +93,9 @@ type ruleHistory struct {
}
func NewUpdateRulesFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.
NewProviderFactory(
factory.MustNewName("update_rules"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateRules(ctx, ps, c, sqlstore)
})
return factory.NewProviderFactory(factory.MustNewName("update_rules"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateRules(ctx, ps, c, sqlstore)
})
}
func newUpdateRules(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
@@ -106,8 +103,7 @@ func newUpdateRules(_ context.Context, _ factory.ProviderSettings, _ Config, sto
}
func (migration *updateRules) Register(migrations *migrate.Migrations) error {
if err := migrations.
Register(migration.Up, migration.Down); err != nil {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
@@ -115,8 +111,7 @@ func (migration *updateRules) Register(migrations *migrate.Migrations) error {
}
func (migration *updateRules) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.
BeginTx(ctx, nil)
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
@@ -14,11 +14,9 @@ type updateOrganizations struct {
}
func NewUpdateOrganizationsFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("update_organizations"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateOrganizations(ctx, ps, c, sqlstore)
})
return factory.NewProviderFactory(factory.MustNewName("update_organizations"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateOrganizations(ctx, ps, c, sqlstore)
})
}
func newUpdateOrganizations(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
@@ -26,8 +24,7 @@ func newUpdateOrganizations(_ context.Context, _ factory.ProviderSettings, _ Con
}
func (migration *updateOrganizations) Register(migrations *migrate.Migrations) error {
if err := migrations.
Register(migration.Up, migration.Down); err != nil {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
@@ -35,8 +32,7 @@ func (migration *updateOrganizations) Register(migrations *migrate.Migrations) e
}
func (migration *updateOrganizations) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.
BeginTx(ctx, nil)
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
@@ -4,10 +4,11 @@ import (
"context"
"database/sql"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/jmoiron/sqlx"
_ "github.com/mattn/go-sqlite3"
sqlite3 "github.com/mattn/go-sqlite3"
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect/sqlitedialect"
)
@@ -77,3 +78,21 @@ func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
func (provider *provider) RunInTxCtx(ctx context.Context, opts *sql.TxOptions, cb func(ctx context.Context) error) error {
return provider.bundb.RunInTxCtx(ctx, opts, cb)
}
func (provider *provider) WrapNotFoundErrf(err error, code errors.Code, format string, args ...any) error {
if err == sql.ErrNoRows {
return errors.Wrapf(err, errors.TypeNotFound, code, format, args...)
}
return err
}
func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error {
if sqlite3Err, ok := err.(sqlite3.Error); ok {
if sqlite3Err.ExtendedCode == sqlite3.ErrConstraintUnique {
return errors.Wrapf(err, errors.TypeAlreadyExists, code, format, args...)
}
}
return err
}
@@ -4,6 +4,7 @@ import (
"context"
"database/sql"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/jmoiron/sqlx"
"github.com/uptrace/bun"
)
@@ -30,6 +31,12 @@ type SQLStore interface {
// BunDBCtx returns an instance of bun.IDB for the given context.
// If a transaction is present in the context, it will be used. Otherwise, the default will be used.
BunDBCtx(ctx context.Context) bun.IDB
// WrapNotFoundErrf wraps the given error with the given message and returns it.
WrapNotFoundErrf(err error, code errors.Code, format string, args ...any) error
// WrapAlreadyExistsErrf wraps the given error with the given message and returns it.
WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error
}
type SQLStoreHook interface {
@@ -6,6 +6,7 @@ import (
"fmt"
"github.com/DATA-DOG/go-sqlmock"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/jmoiron/sqlx"
"github.com/uptrace/bun"
@@ -74,3 +75,11 @@ func (provider *Provider) BunDBCtx(ctx context.Context) bun.IDB {
func (provider *Provider) RunInTxCtx(ctx context.Context, opts *sql.TxOptions, cb func(ctx context.Context) error) error {
return cb(ctx)
}
func (provider *Provider) WrapNotFoundErrf(err error, code errors.Code, format string, args ...any) error {
return fmt.Errorf(format, args...)
}
func (provider *Provider) WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error {
return fmt.Errorf(format, args...)
}
@@ -4,10 +4,16 @@ import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
var (
ErrOrganizationAlreadyExists = errors.MustNewCode("organization_already_exists")
ErrOrganizationNotFound = errors.MustNewCode("organization_not_found")
)
type Organization struct {
bun.BaseModel `bun:"table:organizations"`
TimeAuditable
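The new WrapNotFoundErrf/WrapAlreadyExistsErrf helpers on SQLStore and the organization error codes above are meant to be used together. A hedged sketch of how a store method might combine them; `GetOrganization` is illustrative, and the assumption that the Organization model and ErrOrganizationNotFound live in the pkg/types package is inferred from the imports elsewhere in this diff, not stated by it:

```go
package example

import (
	"context"

	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types"
)

// GetOrganization sketches the intended use of the new helper: a sql.ErrNoRows
// coming back from bun is wrapped into a typed not-found error that carries
// the organization_not_found code, while any other error is returned as-is.
func GetOrganization(ctx context.Context, store sqlstore.SQLStore, id string) (*types.Organization, error) {
	org := new(types.Organization)
	err := store.BunDBCtx(ctx).NewSelect().Model(org).Where("id = ?", id).Scan(ctx)
	if err != nil {
		return nil, store.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with id %s does not exist", id)
	}
	return org, nil
}
```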
@@ -2,7 +2,6 @@ package preferencetypes
import (
"context"
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
@@ -133,7 +132,7 @@ func NewDefaultPreferenceMap() map[string]Preference {
}
func (p *Preference) ErrorValueTypeMismatch() error {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("the preference value is not of expected type: %s", p.ValueType))
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "the preference value is not of expected type: %s", p.ValueType)
}
func (p *Preference) checkIfInAllowedValues(preferenceValue interface{}) (bool, error) {
@@ -219,7 +218,7 @@ func (p *Preference) IsValidValue(preferenceValue interface{}) error {
}
if !p.IsDiscreteValues {
if val < p.Range.Min || val > p.Range.Max {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("the preference value is not in the range specified, min: %v , max:%v", p.Range.Min, p.Range.Max))
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "the preference value is not in the range specified, min: %v , max: %v", p.Range.Min, p.Range.Max)
}
}
case PreferenceValueTypeString:
@@ -248,7 +247,7 @@ func (p *Preference) IsValidValue(preferenceValue interface{}) error {
return valueMisMatchErr
}
if !isInAllowedValues {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("the preference value is not in the list of allowedValues: %v", p.AllowedValues))
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "the preference value is not in the list of allowedValues: %v", p.AllowedValues)
}
}
}
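The hunks above repeatedly replace errors.Newf(type, code, fmt.Sprintf(format, args...)) with errors.Newf(type, code, format, args...), which is what lets the fmt import be dropped at the top of the file. A minimal sketch of the adopted pattern; the function name `invalidValueErr` is illustrative only:

```go
package example

import (
	"github.com/SigNoz/signoz/pkg/errors"
)

// invalidValueErr shows the pattern used above: errors.Newf already accepts a
// printf-style format string plus arguments, so pre-formatting the message
// with fmt.Sprintf is redundant.
func invalidValueErr(value interface{}) error {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "the preference value %v is not allowed", value)
}
```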
@@ -280,11 +279,11 @@ func (p *Preference) SanitizeValue(preferenceValue interface{}) interface{} {
}
}
type PreferenceStore interface {
GetOrgPreference(context.Context, string, string) (*StorableOrgPreference, error)
GetAllOrgPreferences(context.Context, string) ([]*StorableOrgPreference, error)
UpsertOrgPreference(context.Context, *StorableOrgPreference) error
GetUserPreference(context.Context, string, string) (*StorableUserPreference, error)
GetAllUserPreferences(context.Context, string) ([]*StorableUserPreference, error)
UpsertUserPreference(context.Context, *StorableUserPreference) error
type Store interface {
GetOrg(context.Context, string, string) (*StorableOrgPreference, error)
GetAllOrg(context.Context, string) ([]*StorableOrgPreference, error)
UpsertOrg(context.Context, *StorableOrgPreference) error
GetUser(context.Context, string, string) (*StorableUserPreference, error)
GetAllUser(context.Context, string) ([]*StorableUserPreference, error)
UpsertUser(context.Context, *StorableUserPreference) error
}