Compare commits
11 Commits
v0.87.0-af ... feat/issue
| Author | SHA1 | Date |
|---|---|---|
|  | e8605488e0 |  |
|  | 67f02c6609 |  |
|  | cadc599643 |  |
|  | 2f74cef1cb |  |
|  | bfffe424c2 |  |
|  | 3b46c72374 |  |
|  | 9e531ac52d |  |
|  | 78ef1e7896 |  |
|  | 2b647ddcb4 |  |
|  | 5a548d14e0 |  |
|  | 43ca30e23e |  |
.github/CODEOWNERS (vendored): 7 changes
@@ -12,9 +12,4 @@
 /pkg/factory/ @grandwizard28
 /pkg/types/ @grandwizard28
 .golangci.yml @grandwizard28
-/pkg/zeus/ @vikrantgupta25
-/pkg/licensing/ @vikrantgupta25
-/pkg/sqlmigration/ @vikrantgupta25
-/ee/zeus/ @vikrantgupta25
-/ee/licensing/ @vikrantgupta25
-/ee/sqlmigration/ @vikrantgupta25
+**/(zeus|licensing|sqlmigration)/ @vikrantgupta25
@@ -224,6 +224,3 @@ statsreporter:
   enabled: true
   # The interval at which the stats are collected.
   interval: 6h
-  collect:
-    # Whether to collect identities and traits (emails).
-    identities: true
@@ -100,18 +100,12 @@ services:
       # - "9000:9000"
       # - "8123:8123"
       # - "9181:9181"
-
-    configs:
-      - source: clickhouse-config
-        target: /etc/clickhouse-server/config.xml
-      - source: clickhouse-users
-        target: /etc/clickhouse-server/users.xml
-      - source: clickhouse-custom-function
-        target: /etc/clickhouse-server/custom-function.xml
-      - source: clickhouse-cluster
-        target: /etc/clickhouse-server/config.d/cluster.xml
-
     volumes:
+      - ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
+      - ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
+      - ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
+      - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
+      - ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
       - clickhouse:/var/lib/clickhouse/
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
@@ -123,10 +117,9 @@ services:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port
     volumes:
+      - ../common/signoz/prometheus.yml:/root/config/prometheus.yml
+      - ../common/dashboards:/root/config/dashboards
       - sqlite:/var/lib/signoz/
-    configs:
-      - source: signoz-prometheus-config
-        target: /root/config/prometheus.yml
     environment:
       - SIGNOZ_ALERTMANAGER_PROVIDER=signoz
       - SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
@@ -154,11 +147,9 @@ services:
       - --manager-config=/etc/manager-config.yaml
      - --copy-path=/var/tmp/collector-config.yaml
      - --feature-gates=-pkg.translator.prometheus.NormalizeName
-    configs:
-      - source: otel-collector-config
-        target: /etc/otel-collector-config.yaml
-      - source: otel-manager-config
-        target: /etc/manager-config.yaml
+    volumes:
+      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
+      - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
       - LOW_CARDINAL_EXCEPTION_GROUPING=false
@@ -195,26 +186,3 @@ volumes:
     name: signoz-sqlite
   zookeeper-1:
     name: signoz-zookeeper-1
-
-configs:
-  clickhouse-config:
-    file: ../common/clickhouse/config.xml
-  clickhouse-users:
-    file: ../common/clickhouse/users.xml
-  clickhouse-custom-function:
-    file: ../common/clickhouse/custom-function.xml
-  clickhouse-cluster:
-    file: ../common/clickhouse/cluster.xml
-
-  signoz-prometheus-config:
-    file: ../common/signoz/prometheus.yml
-  # If you have multiple dashboard files, you can list them individually:
-  # dashboard-foo:
-  #   file: ../common/dashboards/foo.json
-  # dashboard-bar:
-  #   file: ../common/dashboards/bar.json
-
-  otel-collector-config:
-    file: ./otel-collector-config.yaml
-  otel-manager-config:
-    file: ../common/signoz/otel-collector-opamp-config.yaml
@@ -9,6 +9,7 @@ import (
 	"time"

 	"github.com/gorilla/handlers"
+	"github.com/jmoiron/sqlx"

 	"github.com/SigNoz/signoz/ee/query-service/app/api"
 	"github.com/SigNoz/signoz/ee/query-service/app/db"
@@ -106,6 +107,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	)

 	rm, err := makeRulesManager(
+		serverOptions.SigNoz.SQLStore.SQLxDB(),
 		reader,
 		serverOptions.SigNoz.Cache,
 		serverOptions.SigNoz.Alertmanager,
@@ -120,7 +122,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	}

 	// initiate opamp
-	opAmpModel.InitDB(serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Instrumentation.Logger(), serverOptions.SigNoz.Modules.OrgGetter)
+	_, err = opAmpModel.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB())
+	if err != nil {
+		return nil, err
+	}

 	integrationsController, err := integrations.NewController(serverOptions.SigNoz.SQLStore)
 	if err != nil {
@@ -138,8 +143,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {

 	// ingestion pipelines manager
 	logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
-		serverOptions.SigNoz.SQLStore,
-		integrationsController.GetPipelinesForInstalledIntegrations,
+		serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
 	)
 	if err != nil {
 		return nil, err
@@ -147,7 +151,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {

 	// initiate agent config handler
 	agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
-		Store:         serverOptions.SigNoz.SQLStore,
+		DB:            serverOptions.SigNoz.SQLStore.SQLxDB(),
 		AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController},
 	})
 	if err != nil {
@@ -442,6 +446,7 @@ func (s *Server) Stop(ctx context.Context) error {
 }

 func makeRulesManager(
+	db *sqlx.DB,
 	ch baseint.Reader,
 	cache cache.Cache,
 	alertmanager alertmanager.Alertmanager,
@@ -454,6 +459,7 @@ func makeRulesManager(
 	managerOpts := &baserules.ManagerOptions{
 		TelemetryStore: telemetryStore,
 		Prometheus:     prometheus,
+		DBConn:         db,
 		Context:        context.Background(),
 		Logger:         zap.L(),
 		Reader:         ch,
@@ -17,21 +17,19 @@ var (
 )

 var (
-	Org                = "org"
-	User               = "user"
-	UserNoCascade      = "user_no_cascade"
-	FactorPassword     = "factor_password"
-	CloudIntegration   = "cloud_integration"
-	AgentConfigVersion = "agent_config_version"
+	Org              = "org"
+	User             = "user"
+	UserNoCascade    = "user_no_cascade"
+	FactorPassword   = "factor_password"
+	CloudIntegration = "cloud_integration"
 )

 var (
-	OrgReference                = `("org_id") REFERENCES "organizations" ("id")`
-	UserReference               = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
-	UserReferenceNoCascade      = `("user_id") REFERENCES "users" ("id")`
-	FactorPasswordReference     = `("password_id") REFERENCES "factor_password" ("id")`
-	CloudIntegrationReference   = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
-	AgentConfigVersionReference = `("version_id") REFERENCES "agent_config_version" ("id")`
+	OrgReference              = `("org_id") REFERENCES "organizations" ("id")`
+	UserReference             = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
+	UserReferenceNoCascade    = `("user_id") REFERENCES "users" ("id")`
+	FactorPasswordReference   = `("password_id") REFERENCES "factor_password" ("id")`
+	CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
 )

 type dialect struct{}
@@ -276,8 +274,6 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
 			fkReferences = append(fkReferences, FactorPasswordReference)
 		} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
 			fkReferences = append(fkReferences, CloudIntegrationReference)
-		} else if reference == AgentConfigVersion && !slices.Contains(fkReferences, AgentConfigVersionReference) {
-			fkReferences = append(fkReferences, AgentConfigVersionReference)
 		}
 	}
@@ -10,6 +10,7 @@ import (
 	"github.com/jackc/pgx/v5/pgconn"
 	"github.com/jackc/pgx/v5/pgxpool"
 	"github.com/jackc/pgx/v5/stdlib"
+	"github.com/jmoiron/sqlx"
 	"github.com/uptrace/bun"
 	"github.com/uptrace/bun/dialect/pgdialect"
 )
@@ -18,6 +19,7 @@ type provider struct {
 	settings factory.ScopedProviderSettings
 	sqldb    *sql.DB
 	bundb    *sqlstore.BunDB
+	sqlxdb   *sqlx.DB
 	dialect  *dialect
 }
@@ -59,6 +61,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
 		settings: settings,
 		sqldb:    sqldb,
 		bundb:    sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
+		sqlxdb:   sqlx.NewDb(sqldb, "postgres"),
 		dialect:  new(dialect),
 	}, nil
 }
@@ -71,6 +74,10 @@ func (provider *provider) SQLDB() *sql.DB {
 	return provider.sqldb
 }

+func (provider *provider) SQLxDB() *sqlx.DB {
+	return provider.sqlxdb
+}
+
 func (provider *provider) Dialect() sqlstore.SQLDialect {
 	return provider.dialect
 }
@@ -129,6 +129,5 @@
   "text_num_points": "data points in each result group",
   "text_alert_frequency": "Run alert every",
   "text_for": "minutes",
-  "selected_query_placeholder": "Select query",
-  "alert_rule_not_found": "Alert Rule not found"
+  "selected_query_placeholder": "Select query"
 }
@@ -126,8 +126,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {

 	const isRouteEnabledForWorkspaceBlockedState =
 		isAdmin &&
-		(path === ROUTES.SETTINGS ||
-			path === ROUTES.ORG_SETTINGS ||
+		(path === ROUTES.ORG_SETTINGS ||
 			path === ROUTES.BILLING ||
 			path === ROUTES.MY_SETTINGS);
@@ -131,6 +131,10 @@ export const CreateAlertChannelAlerts = Loadable(
 	() => import(/* webpackChunkName: "Create Channels" */ 'pages/Settings'),
 );

+export const EditAlertChannelsAlerts = Loadable(
+	() => import(/* webpackChunkName: "Edit Channels" */ 'pages/Settings'),
+);
+
 export const AllAlertChannels = Loadable(
 	() => import(/* webpackChunkName: "All Channels" */ 'pages/Settings'),
 );
@@ -12,6 +12,7 @@ import {
 	CreateNewAlerts,
 	DashboardPage,
 	DashboardWidget,
+	EditAlertChannelsAlerts,
 	EditRulesPage,
 	ErrorDetails,
 	Home,
@@ -252,6 +253,13 @@ const routes: AppRoutes[] = [
 		isPrivate: true,
 		key: 'CHANNELS_NEW',
 	},
+	{
+		path: ROUTES.CHANNELS_EDIT,
+		exact: true,
+		component: EditAlertChannelsAlerts,
+		isPrivate: true,
+		key: 'CHANNELS_EDIT',
+	},
 	{
 		path: ROUTES.ALL_CHANNELS,
 		exact: true,
@@ -1,29 +0,0 @@
-import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
-import axios, { AxiosError } from 'axios';
-import { ErrorResponse, SuccessResponse } from 'types/api';
-import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
-
-const getChangelogByVersion = async (
-	versionId: string,
-): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> => {
-	try {
-		const response = await axios.get(`
-			https://cms.signoz.cloud/api/release-changelogs?filters[version][$eq]=${versionId}&populate[features][sort]=sort_order:asc&populate[features][populate][media][fields]=id,ext,url,mime,alternativeText
-		`);
-
-		if (!Array.isArray(response.data.data) || response.data.data.length === 0) {
-			throw new Error('No changelog found!');
-		}
-
-		return {
-			statusCode: 200,
-			error: null,
-			message: response.statusText,
-			payload: response.data.data[0],
-		};
-	} catch (error) {
-		return ErrorResponseHandler(error as AxiosError);
-	}
-};
-
-export default getChangelogByVersion;
@@ -119,7 +119,6 @@ export const updateFunnelSteps = async (
 export interface ValidateFunnelPayload {
 	start_time: number;
 	end_time: number;
 	steps: FunnelStepData[];
 }

 export interface ValidateFunnelResponse {
@@ -133,11 +132,12 @@ export interface ValidateFunnelResponse {
 }

 export const validateFunnelSteps = async (
+	funnelId: string,
 	payload: ValidateFunnelPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<ValidateFunnelResponse> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/validate`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/validate`,
 		payload,
 		{ signal },
 	);
@@ -185,7 +185,6 @@ export interface FunnelOverviewPayload {
 	end_time: number;
 	step_start?: number;
 	step_end?: number;
 	steps: FunnelStepData[];
 }

 export interface FunnelOverviewResponse {
@@ -203,11 +202,12 @@ export interface FunnelOverviewResponse {
 }

 export const getFunnelOverview = async (
+	funnelId: string,
 	payload: FunnelOverviewPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<FunnelOverviewResponse> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/overview`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/overview`,
 		payload,
 		{
 			signal,
@@ -235,11 +235,12 @@ export interface SlowTraceData {
 }

 export const getFunnelSlowTraces = async (
+	funnelId: string,
 	payload: FunnelOverviewPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<SlowTraceData> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/slow-traces`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/slow-traces`,
 		payload,
 		{
 			signal,
@@ -272,7 +273,7 @@ export const getFunnelErrorTraces = async (
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<ErrorTraceData> | ErrorResponse> => {
 	const response: AxiosResponse = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/error-traces`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/error-traces`,
 		payload,
 		{
 			signal,
@@ -290,7 +291,6 @@ export const getFunnelErrorTraces = async (
 export interface FunnelStepsPayload {
 	start_time: number;
 	end_time: number;
 	steps: FunnelStepData[];
 }

 export interface FunnelStepGraphMetrics {
@@ -307,11 +307,12 @@ export interface FunnelStepsResponse {
 }

 export const getFunnelSteps = async (
+	funnelId: string,
 	payload: FunnelStepsPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<FunnelStepsResponse> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/steps`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps`,
 		payload,
 		{ signal },
 	);
@@ -329,7 +330,6 @@ export interface FunnelStepsOverviewPayload {
 	end_time: number;
 	step_start?: number;
 	step_end?: number;
 	steps: FunnelStepData[];
 }

 export interface FunnelStepsOverviewResponse {
@@ -341,11 +341,12 @@ export interface FunnelStepsOverviewResponse {
 }

 export const getFunnelStepsOverview = async (
+	funnelId: string,
 	payload: FunnelStepsOverviewPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<FunnelStepsOverviewResponse> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/steps/overview`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps/overview`,
 		payload,
 		{ signal },
 	);
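The funnel-analytics hunks above all make the same change: each endpoint moves from the flat `${FUNNELS_BASE_PATH}/analytics/...` path to a funnel-scoped `${FUNNELS_BASE_PATH}/${funnelId}/analytics/...` path, and each helper gains `funnelId` as its first argument. A minimal usage sketch of the new call shape follows; only `validateFunnelSteps` and its payload/response types come from the diff, while the id, time range, and steps values are made-up examples:

```typescript
// Sketch only: calling the funnel-scoped helper introduced above.
// 'example-funnel-id' and the time range are illustrative values.
const exampleFunnelId = 'example-funnel-id';

const result = await validateFunnelSteps(exampleFunnelId, {
	start_time: Date.now() - 60 * 60 * 1000, // one hour ago, in ms
	end_time: Date.now(),
	steps: [], // FunnelStepData[] describing the funnel being validated
});

// Assuming the SuccessResponse/ErrorResponse shape used elsewhere in this
// diff (statusCode, error, payload), branch before reading the payload.
if (result.error === null) {
	console.log(result.payload);
}
```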
@@ -1,161 +0,0 @@
-.changelog-modal {
-  .ant-modal-content {
-    padding: unset;
-    background-color: var(--bg-ink-400, #121317);
-
-    .ant-modal-header {
-      margin-bottom: unset;
-    }
-
-    .ant-modal-footer {
-      margin-top: unset;
-    }
-  }
-
-  &-title {
-    display: flex;
-    align-items: center;
-    gap: 8px;
-    background-color: var(--bg-ink-400, #121317);
-    padding: 16px;
-    font-size: 14px;
-    line-height: 20px;
-    color: var(--text-vanilla-100, #fff);
-    border-bottom: 1px solid var(--bg-slate-500, #161922);
-  }
-
-  &-footer.scroll-available {
-    .scroll-btn-container {
-      display: block;
-    }
-  }
-
-  &-footer {
-    position: relative;
-    border: 1px solid var(--bg-slate-500, #161922);
-    padding: 12px;
-    display: flex;
-    align-items: center;
-    justify-content: space-between;
-
-    &-label {
-      color: var(--text-robin-400, #7190f9);
-      font-size: 14px;
-      line-height: 24px;
-      position: relative;
-      padding-left: 14px;
-
-      &::before {
-        content: '';
-        position: absolute;
-        top: 50%;
-        left: 0;
-        transform: translateY(-50%);
-        width: 6px;
-        height: 6px;
-        border-radius: 100%;
-        background-color: var(--bg-robin-500, #7190f9);
-      }
-    }
-
-    &-ctas {
-      display: flex;
-
-      & svg {
-        font-size: 14px;
-      }
-    }
-
-    .scroll-btn-container {
-      display: none;
-      position: absolute;
-      top: -40px;
-      left: 50%;
-      transform: translateX(-50%);
-
-      .scroll-btn {
-        all: unset;
-        padding: 4px 12px 4px 10px;
-        background-color: var(--bg-slate-400, #1d212d);
-        border-radius: 20px;
-        cursor: pointer;
-        display: flex;
-        align-items: center;
-        justify-content: center;
-        gap: 4px;
-        transition: background-color 0.1s;
-
-        &:hover {
-          background-color: var(--bg-slate-200, #2c3140);
-        }
-
-        &:active {
-          background-color: var(--bg-slate-600, #1c1f2a);
-        }
-
-        span {
-          font-size: 12px;
-          line-height: 18px;
-          color: var(--text-vanilla-400, #c0c1c3);
-        }
-
-        // add animation to the chevrons down icon
-        svg {
-          animation: pulse 1s infinite;
-        }
-      }
-    }
-  }
-
-  &-content {
-    max-height: calc(100vh - 300px);
-    overflow-y: auto;
-    padding: 16px;
-    border: 1px solid var(--bg-slate-500, #161922);
-    border-top-width: 0;
-    border-bottom-width: 0;
-  }
-}
-
-// pulse for the scroll for more icon
-@keyframes pulse {
-  0% {
-    opacity: 1;
-  }
-  50% {
-    opacity: 0.5;
-  }
-}
-
-.lightMode {
-  .changelog-modal {
-    .ant-modal-content {
-      background-color: var(--bg-vanilla-100);
-      border-color: var(--bg-vanilla-300);
-    }
-
-    &-title {
-      background: var(--bg-vanilla-100);
-      color: var(--bg-ink-500);
-      border-color: var(--bg-vanilla-300);
-    }
-
-    &-content {
-      border-color: var(--bg-vanilla-300);
-    }
-
-    &-footer {
-      border-color: var(--bg-vanilla-300);
-
-      .scroll-btn-container {
-        .scroll-btn {
-          background-color: var(--bg-vanilla-300);
-
-          span {
-            color: var(--text-ink-500);
-          }
-        }
-      }
-    }
-  }
-}
@@ -1,131 +0,0 @@
-import './ChangelogModal.styles.scss';
-
-import { CheckOutlined, CloseOutlined } from '@ant-design/icons';
-import { Button, Modal } from 'antd';
-import cx from 'classnames';
-import dayjs from 'dayjs';
-import { ChevronsDown, ScrollText } from 'lucide-react';
-import { useAppContext } from 'providers/App/App';
-import { useCallback, useEffect, useRef, useState } from 'react';
-
-import ChangelogRenderer from './components/ChangelogRenderer';
-
-interface Props {
-	onClose: () => void;
-}
-
-function ChangelogModal({ onClose }: Props): JSX.Element {
-	const [hasScroll, setHasScroll] = useState(false);
-	const changelogContentSectionRef = useRef<HTMLDivElement>(null);
-	const { changelog } = useAppContext();
-
-	const formattedReleaseDate = dayjs(changelog?.release_date).format(
-		'MMMM D, YYYY',
-	);
-
-	const checkScroll = useCallback((): void => {
-		if (changelogContentSectionRef.current) {
-			const {
-				scrollHeight,
-				clientHeight,
-				scrollTop,
-			} = changelogContentSectionRef.current;
-			const isAtBottom = scrollHeight - clientHeight - scrollTop <= 8;
-			setHasScroll(scrollHeight > clientHeight + 24 && !isAtBottom); // 24px - buffer height to show show more
-		}
-	}, []);
-
-	useEffect(() => {
-		checkScroll();
-		const changelogContentSection = changelogContentSectionRef.current;
-
-		if (changelogContentSection) {
-			changelogContentSection.addEventListener('scroll', checkScroll);
-		}
-
-		return (): void => {
-			if (changelogContentSection) {
-				changelogContentSection.removeEventListener('scroll', checkScroll);
-			}
-		};
-	}, [checkScroll]);
-
-	const onClickUpdateWorkspace = (): void => {
-		window.open(
-			'https://github.com/SigNoz/signoz/releases',
-			'_blank',
-			'noopener,noreferrer',
-		);
-	};
-
-	const onClickScrollForMore = (): void => {
-		if (changelogContentSectionRef.current) {
-			changelogContentSectionRef.current.scrollTo({
-				top: changelogContentSectionRef.current.scrollTop + 600, // Scroll 600px from the current position
-				behavior: 'smooth',
-			});
-		}
-	};
-
-	return (
-		<Modal
-			className={cx('changelog-modal')}
-			title={
-				<div className="changelog-modal-title">
-					<ScrollText size={16} />
-					What’s New ⎯ Changelog : {formattedReleaseDate}
-				</div>
-			}
-			width={820}
-			open
-			onCancel={onClose}
-			footer={
-				<div
-					className={cx('changelog-modal-footer', hasScroll && 'scroll-available')}
-				>
-					{changelog?.features && changelog.features.length > 0 && (
-						<span className="changelog-modal-footer-label">
-							{changelog.features.length} new
-							{changelog.features.length > 1 ? 'features' : 'feature'}
-						</span>
-					)}
-					<div className="changelog-modal-footer-ctas">
-						<Button type="default" icon={<CloseOutlined />} onClick={onClose}>
-							Skip for now
-						</Button>
-						<Button
-							type="primary"
-							icon={<CheckOutlined />}
-							onClick={onClickUpdateWorkspace}
-						>
-							Update my workspace
-						</Button>
-					</div>
-					{changelog && (
-						<div className="scroll-btn-container">
-							<button
-								data-testid="scroll-more-btn"
-								type="button"
-								className="scroll-btn"
-								onClick={onClickScrollForMore}
-							>
-								<ChevronsDown size={14} />
-								<span>Scroll for more</span>
-							</button>
-						</div>
-					)}
-				</div>
-			}
-		>
-			<div
-				className="changelog-modal-content"
-				data-testid="changelog-content"
-				ref={changelogContentSectionRef}
-			>
-				{changelog && <ChangelogRenderer changelog={changelog} />}
-			</div>
-		</Modal>
-	);
-}
-
-export default ChangelogModal;
@@ -1,79 +0,0 @@
-/* eslint-disable sonarjs/no-duplicate-string */
-/* eslint-disable sonarjs/no-identical-functions */
-/* eslint-disable @typescript-eslint/explicit-function-return-type */
-
-import { fireEvent, render, screen } from '@testing-library/react';
-
-import ChangelogModal from '../ChangelogModal';
-
-const mockChangelog = {
-	release_date: '2025-06-10',
-	features: [
-		{
-			id: 1,
-			title: 'Feature 1',
-			description: 'Description for feature 1',
-			media: null,
-		},
-	],
-	bug_fixes: 'Bug fix details',
-	maintenance: 'Maintenance details',
-};
-
-// Mock react-markdown to just render children as plain text
-jest.mock(
-	'react-markdown',
-	() =>
-		function ReactMarkdown({ children }: any) {
-			return <div>{children}</div>;
-		},
-);
-
-// mock useAppContext
-jest.mock('providers/App/App', () => ({
-	useAppContext: jest.fn(() => ({ changelog: mockChangelog })),
-}));
-
-describe('ChangelogModal', () => {
-	it('renders modal with changelog data', () => {
-		render(<ChangelogModal onClose={jest.fn()} />);
-		expect(
-			screen.getByText('What’s New ⎯ Changelog : June 10, 2025'),
-		).toBeInTheDocument();
-		expect(screen.getByText('Feature 1')).toBeInTheDocument();
-		expect(screen.getByText('Description for feature 1')).toBeInTheDocument();
-		expect(screen.getByText('Bug fix details')).toBeInTheDocument();
-		expect(screen.getByText('Maintenance details')).toBeInTheDocument();
-	});
-
-	it('calls onClose when Skip for now is clicked', () => {
-		const onClose = jest.fn();
-		render(<ChangelogModal onClose={onClose} />);
-		fireEvent.click(screen.getByText('Skip for now'));
-		expect(onClose).toHaveBeenCalled();
-	});
-
-	it('opens migration docs when Update my workspace is clicked', () => {
-		window.open = jest.fn();
-		render(<ChangelogModal onClose={jest.fn()} />);
-		fireEvent.click(screen.getByText('Update my workspace'));
-		expect(window.open).toHaveBeenCalledWith(
-			'https://github.com/SigNoz/signoz/releases',
-			'_blank',
-			'noopener,noreferrer',
-		);
-	});
-
-	it('scrolls for more when Scroll for more is clicked', () => {
-		render(<ChangelogModal onClose={jest.fn()} />);
-		const scrollBtn = screen.getByTestId('scroll-more-btn');
-		const contentDiv = screen.getByTestId('changelog-content');
-		if (contentDiv) {
-			contentDiv.scrollTo = jest.fn();
-		}
-		fireEvent.click(scrollBtn);
-		if (contentDiv) {
-			expect(contentDiv.scrollTo).toHaveBeenCalled();
-		}
-	});
-});
@@ -1,63 +0,0 @@
-/* eslint-disable sonarjs/no-duplicate-string */
-/* eslint-disable sonarjs/no-identical-functions */
-/* eslint-disable @typescript-eslint/explicit-function-return-type */
-
-import { render, screen } from '@testing-library/react';
-
-import ChangelogRenderer from '../components/ChangelogRenderer';
-
-// Mock react-markdown to just render children as plain text
-jest.mock(
-	'react-markdown',
-	() =>
-		function ReactMarkdown({ children }: any) {
-			return <div>{children}</div>;
-		},
-);
-
-const mockChangelog = {
-	id: 1,
-	documentId: 'changelog-doc-1',
-	version: '1.0.0',
-	createdAt: '2025-06-09T12:00:00Z',
-	release_date: '2025-06-10',
-	features: [
-		{
-			id: 1,
-			documentId: '1',
-			title: 'Feature 1',
-			description: 'Description for feature 1',
-			sort_order: 1,
-			createdAt: '',
-			updatedAt: '',
-			publishedAt: '',
-			deployment_type: 'All',
-			media: {
-				id: 1,
-				documentId: 'doc1',
-				ext: '.webp',
-				url: '/uploads/feature1.webp',
-				mime: 'image/webp',
-				alternativeText: null,
-			},
-		},
-	],
-	bug_fixes: 'Bug fix details',
-	updatedAt: '2025-06-09T12:00:00Z',
-	publishedAt: '2025-06-09T12:00:00Z',
-	maintenance: 'Maintenance details',
-};
-
-describe('ChangelogRenderer', () => {
-	it('renders release date', () => {
-		render(<ChangelogRenderer changelog={mockChangelog} />);
-		expect(screen.getByText('June 10, 2025')).toBeInTheDocument();
-	});
-
-	it('renders features, media, and description', () => {
-		render(<ChangelogRenderer changelog={mockChangelog} />);
-		expect(screen.getByText('Feature 1')).toBeInTheDocument();
-		expect(screen.getByAltText('Media')).toBeInTheDocument();
-		expect(screen.getByText('Description for feature 1')).toBeInTheDocument();
-	});
-});
@@ -1,136 +0,0 @@
-.changelog-renderer {
-  position: relative;
-  padding-left: 20px;
-
-  .changelog-release-date {
-    font-size: 14px;
-    line-height: 20px;
-    color: var(--text-vanilla-400, #c0c1c3);
-  }
-
-  &-list {
-    display: flex;
-    flex-direction: column;
-    gap: 28px;
-  }
-
-  &-line {
-    position: absolute;
-    left: 0;
-    top: 6px;
-    bottom: -30px;
-    width: 1px;
-    background-color: var(--bg-slate-400, #1d212d);
-
-    .inner-ball {
-      position: absolute;
-      left: 50%;
-      width: 6px;
-      height: 6px;
-      border-radius: 100%;
-      transform: translateX(-50%);
-      background-color: var(--bg-robin-500, #7190f9);
-    }
-  }
-
-  ul,
-  ol {
-    list-style: none;
-    display: flex;
-    flex-direction: column;
-    gap: 16px;
-    padding-left: 30px;
-
-    li {
-      position: relative;
-      &::before {
-        content: '';
-        position: absolute;
-        left: -10px;
-        top: 10px;
-        width: 20px;
-        height: 2px;
-        background-color: var(--bg-robin-500, #7190f9);
-        transform: translate(-100%, -50%);
-      }
-    }
-  }
-
-  li,
-  p {
-    font-size: 14px;
-    line-height: 20px;
-    color: var(--text-vanilla-400, #c0c1c3);
-  }
-
-  code {
-    padding: 2px 4px;
-    background-color: var(--bg-slate-500, #161922);
-    border-radius: 6px;
-    font-size: 95%;
-    vertical-align: middle;
-    border: 1px solid var(--bg-slate-600, #1c1f2a);
-  }
-  a {
-    color: var(--text-robin-500, #7190f9);
-    font-weight: 600;
-    text-decoration: underline;
-
-    &:hover {
-      text-decoration: none;
-    }
-  }
-
-  h1,
-  h2,
-  h3,
-  h4,
-  h5,
-  h6 {
-    font-weight: 600;
-    color: var(--text-vanilla-100, #fff);
-  }
-
-  h1 {
-    font-size: 24px;
-    line-height: 32px;
-  }
-
-  h2 {
-    font-size: 20px;
-    line-height: 28px;
-  }
-
-  .changelog-media-image {
-    height: auto;
-    width: 100%;
-    overflow: hidden;
-    border-radius: 4px;
-    border: 1px solid var(--bg-slate-400, #1d212d);
-  }
-}
-
-.lightMode {
-  .changelog-renderer {
-    .changelog-release-date {
-      color: var(--text-ink-500);
-    }
-
-    &-line {
-      background-color: var(--bg-vanilla-300);
-    }
-    li,
-    p {
-      color: var(--text-ink-500);
-    }
-
-    h1,
-    h2,
-    h3,
-    h4,
-    h5,
-    h6 {
-      color: var(--text-ink-500);
-    }
-  }
-}
@@ -1,89 +0,0 @@
-import './ChangelogRenderer.styles.scss';
-
-import dayjs from 'dayjs';
-import ReactMarkdown from 'react-markdown';
-import {
-	ChangelogSchema,
-	Media,
-	SupportedImageTypes,
-	SupportedVideoTypes,
-} from 'types/api/changelog/getChangelogByVersion';
-
-interface Props {
-	changelog: ChangelogSchema;
-}
-
-function renderMedia(media: Media): JSX.Element | null {
-	if (SupportedImageTypes.includes(media.ext)) {
-		return (
-			<img
-				src={media.url}
-				alt={media.alternativeText || 'Media'}
-				width={800}
-				height={450}
-				className="changelog-media-image"
-			/>
-		);
-	}
-	if (SupportedVideoTypes.includes(media.ext)) {
-		return (
-			<video
-				autoPlay
-				controls
-				controlsList="nodownload noplaybackrate"
-				loop
-				className="my-3 h-auto w-full rounded"
-			>
-				<source src={media.url} type={media.mime} />
-				<track kind="captions" src="" label="No captions available" default />
-				Your browser does not support the video tag.
-			</video>
-		);
-	}
-
-	return null;
-}
-
-function ChangelogRenderer({ changelog }: Props): JSX.Element {
-	const formattedReleaseDate = dayjs(changelog.release_date).format(
-		'MMMM D, YYYY',
-	);
-
-	return (
-		<div className="changelog-renderer">
-			<div className="changelog-renderer-line">
-				<div className="inner-ball" />
-			</div>
-			<span className="changelog-release-date">{formattedReleaseDate}</span>
-			{changelog.features && changelog.features.length > 0 && (
-				<div className="changelog-renderer-list flex flex-col gap-7">
-					{changelog.features.map((feature) => (
-						<div key={feature.id}>
-							<h2>{feature.title}</h2>
-							{feature.media && renderMedia(feature.media)}
-							<ReactMarkdown>{feature.description}</ReactMarkdown>
-						</div>
-					))}
-				</div>
-			)}
-			{changelog.bug_fixes && changelog.bug_fixes.length > 0 && (
-				<div>
-					<h2>Bug Fixes</h2>
-					{changelog.bug_fixes && (
-						<ReactMarkdown>{changelog.bug_fixes}</ReactMarkdown>
-					)}
-				</div>
-			)}
-			{changelog.maintenance && changelog.maintenance.length > 0 && (
-				<div>
-					<h2>Maintenance</h2>
-					{changelog.maintenance && (
-						<ReactMarkdown>{changelog.maintenance}</ReactMarkdown>
-					)}
-				</div>
-			)}
-		</div>
-	);
-}
-
-export default ChangelogRenderer;
@@ -30,5 +30,5 @@ export enum LOCALSTORAGE {
 	SHOW_EXCEPTIONS_QUICK_FILTERS = 'SHOW_EXCEPTIONS_QUICK_FILTERS',
 	BANNER_DISMISSED = 'BANNER_DISMISSED',
 	QUICK_FILTERS_SETTINGS_ANNOUNCEMENT = 'QUICK_FILTERS_SETTINGS_ANNOUNCEMENT',
 	FUNNEL_STEPS = 'FUNNEL_STEPS',
 	UNEXECUTED_FUNNELS = 'UNEXECUTED_FUNNELS',
 }
@@ -29,7 +29,7 @@ const ROUTES = {
 	ALERT_OVERVIEW: '/alerts/overview',
 	ALL_CHANNELS: '/settings/channels',
 	CHANNELS_NEW: '/settings/channels/new',
-	CHANNELS_EDIT: '/settings/channels/edit/:channelId',
+	CHANNELS_EDIT: '/settings/channels/edit/:id',
 	ALL_ERROR: '/exceptions',
 	ERROR_DETAIL: '/error-detail',
 	VERSION: '/status',
@@ -62,10 +62,8 @@ const ROUTES = {
 	WORKSPACE_SUSPENDED: '/workspace-suspended',
 	SHORTCUTS: '/settings/shortcuts',
 	INTEGRATIONS: '/integrations',
 	MESSAGING_QUEUES_BASE: '/messaging-queues',
 	MESSAGING_QUEUES_KAFKA: '/messaging-queues/kafka',
 	MESSAGING_QUEUES_KAFKA_DETAIL: '/messaging-queues/kafka/detail',
 	INFRASTRUCTURE_MONITORING_BASE: '/infrastructure-monitoring',
 	INFRASTRUCTURE_MONITORING_HOSTS: '/infrastructure-monitoring/hosts',
 	INFRASTRUCTURE_MONITORING_KUBERNETES: '/infrastructure-monitoring/kubernetes',
 	MESSAGING_QUEUES_CELERY_TASK: '/messaging-queues/celery-task',
@@ -73,7 +71,6 @@ const ROUTES = {
 	METRICS_EXPLORER: '/metrics-explorer/summary',
 	METRICS_EXPLORER_EXPLORER: '/metrics-explorer/explorer',
 	METRICS_EXPLORER_VIEWS: '/metrics-explorer/views',
 	API_MONITORING_BASE: '/api-monitoring',
 	API_MONITORING: '/api-monitoring/explorer',
 	METRICS_EXPLORER_BASE: '/metrics-explorer',
 	WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',
@@ -23,7 +23,7 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
 	const onClickEditHandler = useCallback((id: string) => {
 		history.push(
 			generatePath(ROUTES.CHANNELS_EDIT, {
-				channelId: id,
+				id,
 			}),
 		);
 	}, []);
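Because the route pattern changed from `:channelId` to `:id`, the `generatePath` call above must supply the new key, which is what the one-line change in this hunk does. A small illustrative check, assuming react-router-dom v5's `generatePath` (the channel id value is made up):

```typescript
import { generatePath } from 'react-router-dom';

// With CHANNELS_EDIT = '/settings/channels/edit/:id', the params object
// must use the 'id' key; a missing param makes generatePath throw.
const path = generatePath('/settings/channels/edit/:id', { id: 'abc-123' });
// path === '/settings/channels/edit/abc-123'
```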
@@ -1,34 +1,8 @@
-.app-banner-container {
-  position: relative;
-  width: 100%;
-}
-
 .app-layout {
   position: relative;
   height: 100%;
   width: 100%;

-  &.isWorkspaceRestricted {
-    height: calc(100% - 32px);
-
-    // same styles as its either trial expired or payment failed
-    &.isTrialExpired {
-      height: calc(100% - 64px);
-    }
-
-    &.isPaymentFailed {
-      height: calc(100% - 64px);
-    }
-  }
-
-  &.isTrialExpired {
-    height: calc(100% - 32px);
-  }
-
-  &.isPaymentFailed {
-    height: calc(100% - 32px);
-  }
-
   .app-content {
     width: calc(100% - 64px); // width of the sidebar
     z-index: 0;
@@ -189,9 +163,3 @@
     text-underline-offset: 2px;
   }
 }
-
-.workspace-restricted-banner,
-.trial-expiry-banner,
-.payment-failed-banner {
-  height: 32px;
-}
@@ -7,7 +7,6 @@ import * as Sentry from '@sentry/react';
 import { Flex } from 'antd';
 import getLocalStorageApi from 'api/browser/localstorage/get';
 import setLocalStorageApi from 'api/browser/localstorage/set';
-import getChangelogByVersion from 'api/changelog/getChangelogByVersion';
 import logEvent from 'api/common/logEvent';
 import manageCreditCardApi from 'api/v1/portal/create';
 import getUserLatestVersion from 'api/v1/version/getLatestVersion';
@@ -41,10 +40,9 @@ import {
 import { Helmet } from 'react-helmet-async';
 import { useTranslation } from 'react-i18next';
 import { useMutation, useQueries } from 'react-query';
-import { useDispatch, useSelector } from 'react-redux';
+import { useDispatch } from 'react-redux';
 import { useLocation } from 'react-router-dom';
 import { Dispatch } from 'redux';
-import { AppState } from 'store/reducers';
 import AppActions from 'types/actions';
 import {
 	UPDATE_CURRENT_ERROR,
@@ -52,18 +50,15 @@ import {
 	UPDATE_LATEST_VERSION,
 	UPDATE_LATEST_VERSION_ERROR,
 } from 'types/actions/app';
-import { ErrorResponse, SuccessResponse, SuccessResponseV2 } from 'types/api';
+import { SuccessResponseV2 } from 'types/api';
 import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';
-import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
 import APIError from 'types/api/error';
 import {
 	LicenseEvent,
 	LicensePlatform,
 	LicenseState,
 } from 'types/api/licensesV3/getActive';
-import AppReducer from 'types/reducer/app';
 import { USER_ROLES } from 'types/roles';
-import { checkVersionState } from 'utils/app';
 import { eventEmitter } from 'utils/getEventEmitter';
 import {
 	getFormattedDate,
@@ -86,7 +81,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
 		isFetchingFeatureFlags,
 		featureFlagsFetchError,
 		userPreferences,
-		updateChangelog,
 	} = useAppContext();

 	const { notifications } = useNotifications();
@@ -98,15 +92,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {

 	const [showSlowApiWarning, setShowSlowApiWarning] = useState(false);
 	const [slowApiWarningShown, setSlowApiWarningShown] = useState(false);
-	const [shouldFetchChangelog, setShouldFetchChangelog] = useState<boolean>(
-		false,
-	);
-
-	const { currentVersion, latestVersion } = useSelector<AppState, AppReducer>(
-		(state) => state.app,
-	);
-
-	const isLatestVersion = checkVersionState(currentVersion, latestVersion);

 	const handleBillingOnSuccess = (
 		data: SuccessResponseV2<CheckoutSuccessPayloadProps>,
@@ -144,11 +129,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {

 	const { isCloudUser: isCloudUserVal } = useGetTenantLicense();

-	const [
-		getUserVersionResponse,
-		getUserLatestVersionResponse,
-		getChangelogByVersionResponse,
-	] = useQueries([
+	const [getUserVersionResponse, getUserLatestVersionResponse] = useQueries([
 		{
 			queryFn: getUserVersion,
 			queryKey: ['getUserVersion', user?.accessJwt],
@@ -159,12 +140,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
 			queryKey: ['getUserLatestVersion', user?.accessJwt],
 			enabled: isLoggedIn,
 		},
-		{
-			queryFn: (): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> =>
-				getChangelogByVersion(latestVersion),
-			queryKey: ['getChangelogByVersion', latestVersion],
-			enabled: isLoggedIn && !isCloudUserVal && shouldFetchChangelog,
-		},
 	]);

 	useEffect(() => {
@@ -267,30 +242,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
 		notifications,
 	]);

-	useEffect(() => {
-		if (!isLatestVersion) {
-			setShouldFetchChangelog(true);
-		}
-	}, [isLatestVersion]);
-
-	useEffect(() => {
-		if (
-			getChangelogByVersionResponse.isFetched &&
-			getChangelogByVersionResponse.isSuccess &&
-			getChangelogByVersionResponse.data &&
-			getChangelogByVersionResponse.data.payload
-		) {
-			updateChangelog(getChangelogByVersionResponse.data.payload);
-		}
-	}, [
-		updateChangelog,
-		getChangelogByVersionResponse.isFetched,
-		getChangelogByVersionResponse.isLoading,
-		getChangelogByVersionResponse.isError,
-		getChangelogByVersionResponse.data,
-		getChangelogByVersionResponse.isSuccess,
-	]);
-
 	const isToDisplayLayout = isLoggedIn;

 	const routeKey = useMemo(() => getRouteKey(pathname), [pathname]);
@@ -600,63 +551,53 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
 		(preference) => preference.name === USER_PREFERENCES.SIDENAV_PINNED,
 	)?.value as boolean;

-	const SHOW_TRIAL_EXPIRY_BANNER =
-		showTrialExpiryBanner && !showPaymentFailedWarning;
-	const SHOW_WORKSPACE_RESTRICTED_BANNER = showWorkspaceRestricted;
-	const SHOW_PAYMENT_FAILED_BANNER =
-		!showTrialExpiryBanner && showPaymentFailedWarning;
-
 	return (
 		<Layout className={cx(isDarkMode ? 'darkMode dark' : 'lightMode')}>
 			<Helmet>
 				<title>{pageTitle}</title>
 			</Helmet>

 			{isLoggedIn && (
 				<div className={cx('app-banner-container')}>
-					{SHOW_TRIAL_EXPIRY_BANNER && (
-						<div className="trial-expiry-banner">
-							You are in free trial period. Your free trial will end on{' '}
-							<span>{getFormattedDate(trialInfo?.trialEnd || Date.now())}.</span>
-							{user.role === USER_ROLES.ADMIN ? (
-								<span>
-									{' '}
-									Please{' '}
-									<a className="upgrade-link" onClick={handleUpgrade}>
-										upgrade
-									</a>
-									to continue using SigNoz features.
-								</span>
-							) : (
-								'Please contact your administrator for upgrading to a paid plan.'
-							)}
-						</div>
+					{showTrialExpiryBanner && !showPaymentFailedWarning && (
+						<div className="trial-expiry-banner">
+							You are in free trial period. Your free trial will end on{' '}
+							<span>{getFormattedDate(trialInfo?.trialEnd || Date.now())}.</span>
+							{user.role === USER_ROLES.ADMIN ? (
+								<span>
+									{' '}
+									Please{' '}
+									<a className="upgrade-link" onClick={handleUpgrade}>
+										upgrade
+									</a>
+									to continue using SigNoz features.
+								</span>
+							) : (
+								'Please contact your administrator for upgrading to a paid plan.'
+							)}
+						</div>
 					)}

-					{SHOW_WORKSPACE_RESTRICTED_BANNER && renderWorkspaceRestrictedBanner()}
+					{showWorkspaceRestricted && renderWorkspaceRestrictedBanner()}

-					{SHOW_PAYMENT_FAILED_BANNER && (
-						<div className="payment-failed-banner">
-							Your bill payment has failed. Your workspace will get suspended on{' '}
-							<span>
-								{getFormattedDateWithMinutes(
-									dayjs(activeLicense?.event_queue?.scheduled_at).unix() || Date.now(),
-								)}
-								.
-							</span>
-							{user.role === USER_ROLES.ADMIN ? (
-								<span>
-									{' '}
-									Please{' '}
-									<a className="upgrade-link" onClick={handleFailedPayment}>
-										pay the bill
-									</a>
-									to continue using SigNoz features.
-								</span>
-							) : (
-								' Please contact your administrator to pay the bill.'
-							)}
-						</div>
+					{!showTrialExpiryBanner && showPaymentFailedWarning && (
+						<div className="payment-failed-banner">
+							Your bill payment has failed. Your workspace will get suspended on{' '}
+							<span>
+								{getFormattedDateWithMinutes(
+									dayjs(activeLicense?.event_queue?.scheduled_at).unix() || Date.now(),
+								)}
+								.
+							</span>
+							{user.role === USER_ROLES.ADMIN ? (
+								<span>
+									{' '}
+									Please{' '}
+									<a className="upgrade-link" onClick={handleFailedPayment}>
+										pay the bill
+									</a>
+									to continue using SigNoz features.
+								</span>
+							) : (
+								' Please contact your administrator to pay the bill.'
+							)}
+						</div>
 					)}
@@ -666,9 +607,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
 					'app-layout',
 					isDarkMode ? 'darkMode dark' : 'lightMode',
 					sideNavPinned ? 'side-nav-pinned' : '',
-					SHOW_WORKSPACE_RESTRICTED_BANNER ? 'isWorkspaceRestricted' : '',
-					SHOW_TRIAL_EXPIRY_BANNER ? 'isTrialExpired' : '',
-					SHOW_PAYMENT_FAILED_BANNER ? 'isPaymentFailed' : '',
 				)}
 			>
 				{isToDisplayLayout && !renderFullScreen && (
@@ -28,6 +28,7 @@ import { useNotifications } from 'hooks/useNotifications';
 import history from 'lib/history';
 import { useCallback, useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
+import { useParams } from 'react-router-dom';
 import APIError from 'types/api/error';

 function EditAlertChannels({
@@ -52,11 +53,7 @@ function EditAlertChannels({
 	const [savingState, setSavingState] = useState<boolean>(false);
 	const [testingState, setTestingState] = useState<boolean>(false);
 	const { notifications } = useNotifications();
-
-	// Extract channelId from URL pathname since useParams doesn't work in nested routing
-	const { pathname } = window.location;
-	const channelIdMatch = pathname.match(/\/settings\/channels\/edit\/([^/]+)/);
-	const id = channelIdMatch ? channelIdMatch[1] : '';
+	const { id } = useParams<{ id: string }>();

 	const [type, setType] = useState<ChannelType>(
 		initialValue?.type ? (initialValue.type as ChannelType) : ChannelType.Slack,
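With the `CHANNELS_EDIT` route registered under its own path, the edit screen can read the channel id straight from the route params instead of parsing `window.location`, which is what the hunk above switches to. A minimal sketch of the pattern (the component is hypothetical; only `useParams<{ id: string }>()` and the `:id` segment come from the diff):

```tsx
import { useParams } from 'react-router-dom';

// Hypothetical component: reads the ':id' segment of
// '/settings/channels/edit/:id' via route params.
function EditChannelExample(): JSX.Element {
	const { id } = useParams<{ id: string }>();
	return <div>Editing channel {id}</div>;
}
```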
@@ -149,7 +149,7 @@ function FormAlertChannels({
 			</Button>
 			<Button
 				onClick={(): void => {
-					history.replace(ROUTES.ALL_CHANNELS);
+					history.replace(ROUTES.SETTINGS);
 				}}
 			>
 				{t('button_return')}
@@ -1,43 +0,0 @@
-import { render, screen } from '@testing-library/react';
-
-import HostsEmptyOrIncorrectMetrics from '../HostsEmptyOrIncorrectMetrics';
-
-describe('HostsEmptyOrIncorrectMetrics', () => {
-	it('shows no data message when noData is true', () => {
-		render(<HostsEmptyOrIncorrectMetrics noData incorrectData={false} />);
-		expect(
-			screen.getByText('No host metrics data received yet.'),
-		).toBeInTheDocument();
-		expect(
-			screen.getByText(/Infrastructure monitoring requires the/),
-		).toBeInTheDocument();
-	});
-
-	it('shows incorrect data message when incorrectData is true', () => {
-		render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData />);
-		expect(
-			screen.getByText(
-				'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
-			),
-		).toBeInTheDocument();
-	});
-
-	it('does not show no data message when noData is false', () => {
-		render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
-		expect(
-			screen.queryByText('No host metrics data received yet.'),
-		).not.toBeInTheDocument();
-		expect(
-			screen.queryByText(/Infrastructure monitoring requires the/),
-		).not.toBeInTheDocument();
-	});
-
-	it('does not show incorrect data message when incorrectData is false', () => {
-		render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
-		expect(
-			screen.queryByText(
-				'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
-			),
-		).not.toBeInTheDocument();
-	});
-});
@@ -1,166 +0,0 @@
-/* eslint-disable react/button-has-type */
-import { render } from '@testing-library/react';
-import ROUTES from 'constants/routes';
-import * as useGetHostListHooks from 'hooks/infraMonitoring/useGetHostList';
-import * as appContextHooks from 'providers/App/App';
-import * as timezoneHooks from 'providers/Timezone';
-import { QueryClient, QueryClientProvider } from 'react-query';
-import { Provider } from 'react-redux';
-import { MemoryRouter } from 'react-router-dom';
-import store from 'store';
-import { LicenseEvent } from 'types/api/licensesV3/getActive';
-
-import HostsList from '../HostsList';
-
-jest.mock('lib/getMinMax', () => ({
-	__esModule: true,
-	default: jest.fn().mockImplementation(() => ({
-		minTime: 1713734400000,
-		maxTime: 1713738000000,
-		isValidTimeFormat: jest.fn().mockReturnValue(true),
-	})),
-}));
-jest.mock('components/CustomTimePicker/CustomTimePicker', () => ({
-	__esModule: true,
-	default: ({ onSelect, selectedTime, selectedValue }: any): JSX.Element => (
-		<div data-testid="custom-time-picker">
-			<button onClick={(): void => onSelect('custom')}>
-				{selectedTime} - {selectedValue}
-			</button>
-		</div>
-	),
-}));
-
-const queryClient = new QueryClient();
-
-jest.mock('uplot', () => {
-	const paths = {
-		spline: jest.fn(),
-		bars: jest.fn(),
-	};
-	const uplotMock = jest.fn(() => ({
-		paths,
-	}));
-	return {
-		paths,
-		default: uplotMock,
-	};
-});
-jest.mock('react-redux', () => ({
-	...jest.requireActual('react-redux'),
-	useSelector: (): any => ({
-		globalTime: {
-			selectedTime: {
-				startTime: 1713734400000,
-				endTime: 1713738000000,
-			},
-			maxTime: 1713738000000,
-			minTime: 1713734400000,
-		},
-	}),
-}));
-jest.mock('react-router-dom', () => ({
-	...jest.requireActual('react-router-dom'),
-	useLocation: jest.fn().mockReturnValue({
-		pathname: ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
-	}),
-}));
-jest.mock('react-router-dom-v5-compat', () => {
-	const actual = jest.requireActual('react-router-dom-v5-compat');
-	return {
-		...actual,
-		useSearchParams: jest
-			.fn()
-			.mockReturnValue([
-				{ get: jest.fn(), entries: jest.fn().mockReturnValue([]) },
-				jest.fn(),
-			]),
-		useNavigationType: (): any => 'PUSH',
-	};
-});
-jest.mock('hooks/useSafeNavigate', () => ({
-	useSafeNavigate: (): any => ({
-		safeNavigate: jest.fn(),
-	}),
-}));
-
-jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
-	timezone: {
-		offset: 0,
-	},
-	browserTimezone: {
-		offset: 0,
-	},
-} as any);
-jest.spyOn(useGetHostListHooks, 'useGetHostList').mockReturnValue({
-	data: {
-		payload: {
-			data: {
-				records: [
-					{
-						hostName: 'test-host',
-						active: true,
-						cpu: 0.75,
-						memory: 0.65,
-						wait: 0.03,
-					},
-				],
-				isSendingK8SAgentMetrics: false,
-				sentAnyHostMetricsData: true,
-			},
-		},
-	},
-	isLoading: false,
-	isError: false,
-} as any);
-jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
-	user: {
-		role: 'admin',
-	},
-	activeLicenseV3: {
-		event_queue: {
-			created_at: '0',
-			event: LicenseEvent.NO_EVENT,
-			scheduled_at: '0',
-			status: '',
-			updated_at: '0',
-		},
-		license: {
-			license_key: 'test-license-key',
-			license_type: 'trial',
-			org_id: 'test-org-id',
-			plan_id: 'test-plan-id',
-			plan_name: 'test-plan-name',
-			plan_type: 'trial',
-			plan_version: 'test-plan-version',
-		},
-	},
-} as any);
-
-describe('HostsList', () => {
-	it('renders hosts list table', () => {
-		const { container } = render(
-			<QueryClientProvider client={queryClient}>
-				<MemoryRouter>
-					<Provider store={store}>
-						<HostsList />
-					</Provider>
-				</MemoryRouter>
-			</QueryClientProvider>,
-		);
-		expect(container.querySelector('.hosts-list-table')).toBeInTheDocument();
-	});
-
-	it('renders filters', () => {
-		const { container } = render(
-			<QueryClientProvider client={queryClient}>
-				<MemoryRouter>
-					<Provider store={store}>
-						<HostsList />
-					</Provider>
-				</MemoryRouter>
-			</QueryClientProvider>,
-		);
-		expect(container.querySelector('.filters')).toBeInTheDocument();
-	});
-});
@@ -1,37 +0,0 @@
-import { render, screen } from '@testing-library/react';
-
-import HostsListControls from '../HostsListControls';
-
-jest.mock('container/QueryBuilder/filters/QueryBuilderSearch', () => ({
-	__esModule: true,
-	default: (): JSX.Element => (
-		<div data-testid="query-builder-search">Search</div>
-	),
-}));
-
-jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
-	__esModule: true,
-	default: (): JSX.Element => (
-		<div data-testid="date-time-selection">Date Time</div>
-	),
-}));
-
-describe('HostsListControls', () => {
-	const mockHandleFiltersChange = jest.fn();
-	const mockFilters = {
-		items: [],
-		op: 'AND',
-	};
-
-	it('renders search and date time filters', () => {
-		render(
-			<HostsListControls
-				handleFiltersChange={mockHandleFiltersChange}
-				filters={mockFilters}
-			/>,
-		);
-
-		expect(screen.getByTestId('query-builder-search')).toBeInTheDocument();
-		expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
-	});
-});
@@ -1,139 +0,0 @@
/* eslint-disable react/jsx-props-no-spreading */
import { render, screen } from '@testing-library/react';

import HostsListTable from '../HostsListTable';

jest.mock('uplot', () => {
  const paths = {
    spline: jest.fn(),
    bars: jest.fn(),
  };
  const uplotMock = jest.fn(() => ({
    paths,
  }));
  return {
    paths,
    default: uplotMock,
  };
});

const EMPTY_STATE_CONTAINER_CLASS = '.hosts-empty-state-container';

describe('HostsListTable', () => {
  const mockHost = {
    hostName: 'test-host-1',
    active: true,
    cpu: 0.75,
    memory: 0.65,
    wait: 0.03,
    load15: 1.5,
    os: 'linux',
  };

  const mockTableData = {
    payload: {
      data: {
        hosts: [mockHost],
      },
    },
  };
  const mockOnHostClick = jest.fn();
  const mockSetCurrentPage = jest.fn();
  const mockSetOrderBy = jest.fn();
  const mockSetPageSize = jest.fn();
  const mockProps = {
    isLoading: false,
    isError: false,
    isFetching: false,
    tableData: mockTableData,
    hostMetricsData: [mockHost],
    filters: {
      items: [],
      op: 'AND',
    },
    onHostClick: mockOnHostClick,
    currentPage: 1,
    setCurrentPage: mockSetCurrentPage,
    pageSize: 10,
    setOrderBy: mockSetOrderBy,
    setPageSize: mockSetPageSize,
  } as any;

  it('renders loading state if isLoading is true', () => {
    const { container } = render(<HostsListTable {...mockProps} isLoading />);
    expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
  });

  it('renders loading state if isFetching is true', () => {
    const { container } = render(<HostsListTable {...mockProps} isFetching />);
    expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
  });

  it('renders error state if isError is true', () => {
    render(<HostsListTable {...mockProps} isError />);
    expect(screen.getByText('Something went wrong')).toBeTruthy();
  });

  it('renders empty state if no hosts are found', () => {
    const { container } = render(<HostsListTable {...mockProps} />);
    expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
  });

  it('renders empty state if sentAnyHostMetricsData is false', () => {
    const { container } = render(
      <HostsListTable
        {...mockProps}
        tableData={{
          ...mockTableData,
          payload: {
            ...mockTableData.payload,
            data: {
              ...mockTableData.payload.data,
              sentAnyHostMetricsData: false,
            },
          },
        }}
      />,
    );
    expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
  });

  it('renders empty state if isSendingIncorrectK8SAgentMetrics is true', () => {
    const { container } = render(
      <HostsListTable
        {...mockProps}
        tableData={{
          ...mockTableData,
          payload: {
            ...mockTableData.payload,
            data: {
              ...mockTableData.payload.data,
              isSendingIncorrectK8SAgentMetrics: true,
            },
          },
        }}
      />,
    );
    expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
  });

  it('renders table data', () => {
    const { container } = render(
      <HostsListTable
        {...mockProps}
        tableData={{
          ...mockTableData,
          payload: {
            ...mockTableData.payload,
            data: {
              ...mockTableData.payload.data,
              isSendingIncorrectK8SAgentMetrics: false,
              sentAnyHostMetricsData: true,
            },
          },
        }}
      />,
    );
    expect(container.querySelector('.hosts-list-table')).toBeTruthy();
  });
});

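The `uplot` mock above appears verbatim in this deleted suite and again in the utils suite below. Were these tests kept, the mock could live once in a shared Jest setup file — a sketch under that assumption; the file path is illustrative, not something this change adds:

```typescript
// jest.setup.ts (hypothetical location, loaded via setupFilesAfterEach/setupFiles)
jest.mock('uplot', () => {
  const paths = { spline: jest.fn(), bars: jest.fn() };
  // Same shape as the inline mocks: default export is the uPlot constructor.
  return { paths, default: jest.fn(() => ({ paths })) };
});
```
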
@@ -1,104 +0,0 @@
import { render } from '@testing-library/react';

import { formatDataForTable, GetHostsQuickFiltersConfig } from '../utils';

const PROGRESS_BAR_CLASS = '.progress-bar';

jest.mock('uplot', () => {
  const paths = {
    spline: jest.fn(),
    bars: jest.fn(),
  };
  const uplotMock = jest.fn(() => ({
    paths,
  }));
  return {
    paths,
    default: uplotMock,
  };
});

describe('InfraMonitoringHosts utils', () => {
  describe('formatDataForTable', () => {
    it('should format host data correctly', () => {
      const mockData = [
        {
          hostName: 'test-host',
          active: true,
          cpu: 0.95,
          memory: 0.85,
          wait: 0.05,
          load15: 2.5,
          os: 'linux',
        },
      ] as any;

      const result = formatDataForTable(mockData);

      expect(result[0].hostName).toBe('test-host');
      expect(result[0].wait).toBe('5%');
      expect(result[0].load15).toBe(2.5);

      // Test active tag rendering
      const activeTag = render(result[0].active as JSX.Element);
      expect(activeTag.container.textContent).toBe('ACTIVE');
      expect(activeTag.container.querySelector('.active')).toBeTruthy();

      // Test CPU progress bar
      const cpuProgress = render(result[0].cpu as JSX.Element);
      const cpuProgressBar = cpuProgress.container.querySelector(
        PROGRESS_BAR_CLASS,
      );
      expect(cpuProgressBar).toBeTruthy();

      // Test memory progress bar
      const memoryProgress = render(result[0].memory as JSX.Element);
      const memoryProgressBar = memoryProgress.container.querySelector(
        PROGRESS_BAR_CLASS,
      );
      expect(memoryProgressBar).toBeTruthy();
    });

    it('should handle inactive hosts', () => {
      const mockData = [
        {
          hostName: 'test-host',
          active: false,
          cpu: 0.3,
          memory: 0.4,
          wait: 0.02,
          load15: 1.2,
          os: 'linux',
          cpuTimeSeries: [],
          memoryTimeSeries: [],
          waitTimeSeries: [],
          load15TimeSeries: [],
        },
      ] as any;

      const result = formatDataForTable(mockData);

      const inactiveTag = render(result[0].active as JSX.Element);
      expect(inactiveTag.container.textContent).toBe('INACTIVE');
      expect(inactiveTag.container.querySelector('.inactive')).toBeTruthy();
    });
  });

  describe('GetHostsQuickFiltersConfig', () => {
    it('should return correct config when dotMetricsEnabled is true', () => {
      const result = GetHostsQuickFiltersConfig(true);

      expect(result[0].attributeKey.key).toBe('host.name');
      expect(result[1].attributeKey.key).toBe('os.type');
      expect(result[0].aggregateAttribute).toBe('system.cpu.load_average.15m');
    });

    it('should return correct config when dotMetricsEnabled is false', () => {
      const result = GetHostsQuickFiltersConfig(false);

      expect(result[0].attributeKey.key).toBe('host_name');
      expect(result[1].attributeKey.key).toBe('os_type');
      expect(result[0].aggregateAttribute).toBe('system_cpu_load_average_15m');
    });
  });
});

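The assertions in the deleted suite above pin down exactly what the `dotMetricsEnabled` flag toggles: dot-delimited OTel names (`host.name`, `system.cpu.load_average.15m`) versus their underscore-normalized forms. The correspondence in miniature — the helper name here is hypothetical, for illustration only:

```typescript
// Hypothetical illustration of the naming convention the tests assert.
const toUnderscoreForm = (dotKey: string): string => dotKey.replace(/\./g, '_');

toUnderscoreForm('host.name'); // 'host_name'
toUnderscoreForm('system.cpu.load_average.15m'); // 'system_cpu_load_average_15m'
```
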
@@ -611,7 +611,9 @@ export const errorPercentage = ({
{
  id: '',
  key: {
    key: dotMetricsEnabled ? WidgetKeys.StatusCode : WidgetKeys.StatusCodeNorm,
    key: dotMetricsEnabled
      ? WidgetKeys.Service_name
      : WidgetKeys.StatusCodeNorm,
    dataType: DataTypes.Int64,
    isColumn: false,
    type: MetricsType.Tag,

@@ -9,7 +9,7 @@ export const pipelineData: Pipeline = {
  active: false,
  is_valid: false,
  disabled: false,
  deployStatus: 'deployed',
  deployStatus: 'DEPLOYED',
  deployResult: 'Deployment was successful',
  lastHash: 'log_pipelines:24',
  lastConf: 'oiwernveroi',
@@ -135,7 +135,7 @@ export const pipelineData: Pipeline = {
  active: false,
  isValid: false,
  disabled: false,
  deployStatus: 'deployed',
  deployStatus: 'DEPLOYED',
  deployResult: 'Deployment was successful',
  lastHash: 'log_pipelines:24',
  lastConf: 'eovineroiv',
@@ -150,7 +150,7 @@ export const pipelineData: Pipeline = {
  active: false,
  isValid: false,
  disabled: false,
  deployStatus: 'deployed',
  deployStatus: 'DEPLOYED',
  deployResult: 'Deployment was successful',
  lastHash: 'log_pipelines:23',
  lastConf: 'eivrounreovi',
@@ -169,7 +169,7 @@ export const pipelineDataHistory: Pipeline['history'] = [
  active: false,
  isValid: false,
  disabled: false,
  deployStatus: 'deployed',
  deployStatus: 'DEPLOYED',
  deployResult: 'Deployment was successful',
  lastHash: 'log_pipelines:24',
  lastConf: 'eovineroiv',
@@ -184,7 +184,7 @@ export const pipelineDataHistory: Pipeline['history'] = [
  active: false,
  isValid: false,
  disabled: false,
  deployStatus: 'in_progress',
  deployStatus: 'IN_PROGRESS',
  deployResult: 'Deployment is in progress',
  lastHash: 'log_pipelines:23',
  lastConf: 'eivrounreovi',
@@ -199,7 +199,7 @@ export const pipelineDataHistory: Pipeline['history'] = [
  active: false,
  isValid: false,
  disabled: false,
  deployStatus: 'dirty',
  deployStatus: 'DIRTY',
  deployResult: 'Deployment is dirty',
  lastHash: 'log_pipelines:23',
  lastConf: 'eivrounreovi',
@@ -214,7 +214,7 @@ export const pipelineDataHistory: Pipeline['history'] = [
  active: false,
  isValid: false,
  disabled: false,
  deployStatus: 'failed',
  deployStatus: 'FAILED',
  deployResult: 'Deployment failed',
  lastHash: 'log_pipelines:23',
  lastConf: 'eivrounreovi',
@@ -229,7 +229,7 @@ export const pipelineDataHistory: Pipeline['history'] = [
  active: false,
  isValid: false,
  disabled: false,
  deployStatus: 'unknown',
  deployStatus: 'UNKNOWN',
  deployResult: '',
  lastHash: 'log_pipelines:23',
  lastConf: 'eivrounreovi',

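The fixture edits above migrate every `deployStatus` literal from lowercase to uppercase. A string-union type would turn a future migration like this into a compile-time error rather than a fixture hunt — a sketch only; the type name is illustrative and the PR edits fixtures, not types:

```typescript
// Hypothetical union; not part of this change.
type DeployStatus =
  | 'DEPLOYED'
  | 'IN_PROGRESS'
  | 'DIRTY'
  | 'FAILED'
  | 'UNKNOWN';

const status: DeployStatus = 'DEPLOYED'; // 'deployed' would no longer compile
```
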
@@ -9,15 +9,15 @@ import { Spin } from 'antd';

export function getDeploymentStage(value: string): string {
  switch (value) {
    case 'in_progress':
    case 'IN_PROGRESS':
      return 'In Progress';
    case 'deployed':
    case 'DEPLOYED':
      return 'Deployed';
    case 'dirty':
    case 'DIRTY':
      return 'Dirty';
    case 'failed':
    case 'FAILED':
      return 'Failed';
    case 'unknown':
    case 'UNKNOWN':
      return 'Unknown';
    default:
      return '';
@@ -26,17 +26,17 @@ export function getDeploymentStage(value: string): string {

export function getDeploymentStageIcon(value: string): JSX.Element {
  switch (value) {
    case 'in_progress':
    case 'IN_PROGRESS':
      return (
        <Spin indicator={<LoadingOutlined style={{ fontSize: 15 }} spin />} />
      );
    case 'deployed':
    case 'DEPLOYED':
      return <CheckCircleFilled />;
    case 'dirty':
    case 'DIRTY':
      return <ExclamationCircleFilled />;
    case 'failed':
    case 'FAILED':
      return <CloseCircleFilled />;
    case 'unknown':
    case 'UNKNOWN':
      return <MinusCircleFilled />;
    default:
      return <span />;

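Both switches above enumerate the same five uppercase statuses. An equivalent lookup table would keep the label mapping in one place and make a missed case obvious — a sketch of the alternative shape only; the PR keeps the switch form:

```typescript
// Sketch of an alternative; not what this change ships.
const DEPLOYMENT_STAGE_LABELS: Record<string, string> = {
  IN_PROGRESS: 'In Progress',
  DEPLOYED: 'Deployed',
  DIRTY: 'Dirty',
  FAILED: 'Failed',
  UNKNOWN: 'Unknown',
};

export function getDeploymentStage(value: string): string {
  // Unknown inputs fall back to '' exactly as the switch's default does.
  return DEPLOYMENT_STAGE_LABELS[value] ?? '';
}
```
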
@@ -240,7 +240,6 @@
line-height: 18px; /* 163.636% */
letter-spacing: 0.88px;
text-transform: uppercase;
min-height: 18px;

display: flex;
align-items: center;
@@ -890,10 +889,6 @@
.ant-dropdown-menu-item-divider {
  background-color: var(--Slate-500, #161922) !important;
}

.ant-dropdown-menu-item-disabled {
  opacity: 0.7;
}
}

.settings-dropdown,

@@ -23,7 +23,6 @@ import logEvent from 'api/common/logEvent';
import { Logout } from 'api/utils';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import cx from 'classnames';
import ChangelogModal from 'components/ChangelogModal/ChangelogModal';
import { FeatureKeys } from 'constants/features';
import ROUTES from 'constants/routes';
import { GlobalShortcuts } from 'constants/shortcuts/globalShortcuts';
@@ -125,7 +124,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
  trialInfo,
  isLoggedIn,
  userPreferences,
  changelog,
  updateUserPreferenceInContext,
} = useAppContext();

@@ -157,7 +155,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {

const [hasScroll, setHasScroll] = useState(false);
const navTopSectionRef = useRef<HTMLDivElement>(null);
const [showChangelogModal, setShowChangelogModal] = useState<boolean>(false);

const checkScroll = useCallback((): void => {
  if (navTopSectionRef.current) {
@@ -450,7 +447,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
{
  key: 'workspace',
  label: 'Workspace Settings',
  disabled: isWorkspaceBlocked,
},
...(isEnterpriseSelfHostedUser || isCommunityEnterpriseUser
  ? [
@@ -468,12 +464,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
    ),
  },
].filter(Boolean),
[
  isEnterpriseSelfHostedUser,
  isCommunityEnterpriseUser,
  user.email,
  isWorkspaceBlocked,
],
[isEnterpriseSelfHostedUser, isCommunityEnterpriseUser, user.email],
);

useEffect(() => {
@@ -740,13 +731,20 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isCloudUser, isEnterpriseSelfHostedUser]);

const onClickVersionHandler = useCallback((): void => {
  if (isCloudUser) {
    return;
  }
const onClickVersionHandler = useCallback(
  (event: MouseEvent): void => {
    if (isCloudUser) {
      return;
    }

  setShowChangelogModal(true);
}, [isCloudUser]);
    if (isCtrlMetaKey(event)) {
      openInNewTab(ROUTES.VERSION);
    } else {
      history.push(ROUTES.VERSION);
    }
  },
  [isCloudUser],
);

useEffect(() => {
  if (!isLatestVersion && !isCloudUser) {
@@ -786,9 +784,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
'brand-title-section',
isCommunityEnterpriseUser && 'community-enterprise-user',
isCloudUser && 'cloud-user',
showVersionUpdateNotification &&
  changelog &&
  'version-update-notification',
showVersionUpdateNotification && 'version-update-notification',
)}
>
<span className="license-type"> {licenseTag} </span>
@@ -799,8 +795,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
overlayClassName="version-tooltip-overlay"
arrow={false}
overlay={
  showVersionUpdateNotification &&
  changelog && (
  showVersionUpdateNotification && (
    <div className="version-update-notification-tooltip">
      <div className="version-update-notification-tooltip-title">
        There's a new version available.
@@ -818,7 +813,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
{currentVersion}
</span>

{showVersionUpdateNotification && changelog && (
{showVersionUpdateNotification && (
  <span className="version-update-notification-dot-icon" />
)}
</div>
@@ -1049,9 +1044,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
      </div>
    </div>
  </Modal>
  {showChangelogModal && (
    <ChangelogModal onClose={(): void => setShowChangelogModal(false)} />
  )}
</div>
);
}

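The reworked `onClickVersionHandler` above restores the usual modifier-click convention: a plain click navigates in place, Ctrl/Cmd-click opens the version page in a new tab. The shapes of the two helpers it calls, as implied by the call sites — these signatures are inferred, not confirmed by the diff:

```typescript
// Inferred shapes; the real helpers live elsewhere in the codebase.
const isCtrlMetaKey = (e: MouseEvent): boolean => e.ctrlKey || e.metaKey;

const openInNewTab = (path: string): void => {
  window.open(path, '_blank', 'noopener,noreferrer');
};
```
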
@@ -391,7 +391,7 @@ export const helpSupportDropdownMenuItems: SidebarItem[] = [
},
{
  key: 'invite-collaborators',
  label: 'Invite a Team Member',
  label: 'Invite a Collaborator',
  icon: <Plus size={14} />,
  itemKey: 'invite-collaborators',
},
@@ -403,10 +403,6 @@ export const NEW_ROUTES_MENU_ITEM_KEY_MAP: Record<string, string> = {
  [ROUTES.TRACE_EXPLORER]: ROUTES.TRACES_EXPLORER,
  [ROUTES.LOGS_BASE]: ROUTES.LOGS_EXPLORER,
  [ROUTES.METRICS_EXPLORER_BASE]: ROUTES.METRICS_EXPLORER,
  [ROUTES.INFRASTRUCTURE_MONITORING_BASE]:
    ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
  [ROUTES.API_MONITORING_BASE]: ROUTES.API_MONITORING,
  [ROUTES.MESSAGING_QUEUES_BASE]: ROUTES.MESSAGING_QUEUES_OVERVIEW,
};

export default menuItems;

@@ -241,15 +241,6 @@
&-title {
  color: var(--bg-ink-500);
}

&-footer {
  border-top-color: var(--bg-vanilla-300);
  background: var(--bg-vanilla-100);
  .add-span-to-funnel-modal__discard-button {
    background: var(--bg-vanilla-200);
    color: var(--bg-ink-500);
  }
}
}
}

@@ -72,6 +72,7 @@ function FunnelDetailsView({
funnel={funnel}
isTraceDetailsPage
span={span}
disableAutoSave
triggerAutoSave={triggerAutoSave}
showNotifications={showNotifications}
/>
@@ -142,19 +143,13 @@ function AddSpanToFunnelModal({
const handleSaveFunnel = (): void => {
  setTriggerSave(true);
  // Reset trigger after a brief moment to allow the save to be processed
  setTimeout(() => {
    setTriggerSave(false);
    onClose();
  }, 100);
  setTimeout(() => setTriggerSave(false), 100);
};

const handleDiscard = (): void => {
  setTriggerDiscard(true);
  // Reset trigger after a brief moment
  setTimeout(() => {
    setTriggerDiscard(false);
    onClose();
  }, 100);
  setTimeout(() => setTriggerDiscard(false), 100);
};

const renderListView = (): JSX.Element => (
@@ -244,6 +239,9 @@ function AddSpanToFunnelModal({
footer={
  activeView === ModalView.DETAILS
    ? [
        <Button key="close" onClick={onClose}>
          Close
        </Button>,
        <Button
          type="default"
          key="discard"

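`handleSaveFunnel` and `handleDiscard` above both pulse a boolean prop: set it, let the child react to the rising edge, then reset it 100 ms later (the `onClose()` call is dropped from the timeout in this revision, since the modal now stays open with its own Close button). The pattern in isolation — a sketch only, assuming ordinary React state setters:

```typescript
// Generic "pulse a trigger prop" helper; illustrative, not part of the PR.
function pulse(setTrigger: (value: boolean) => void, ms = 100): void {
  setTrigger(true);
  // Reset after a brief moment so the next pulse is observable as a change.
  setTimeout(() => setTrigger(false), ms);
}
```
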
@@ -1,13 +1,10 @@
import { LOCALSTORAGE } from 'constants/localStorage';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useDebounce from 'hooks/useDebounce';
import { useLocalStorage } from 'hooks/useLocalStorage';
import { useNotifications } from 'hooks/useNotifications';
import { isEqual } from 'lodash-es';
import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
import { useCallback, useEffect, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQueryClient } from 'react-query';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { FunnelData, FunnelStepData } from 'types/api/traceFunnels';

import { useUpdateFunnelSteps } from './useFunnels';
@@ -16,30 +13,22 @@ interface UseFunnelConfiguration {
  isPopoverOpen: boolean;
  setIsPopoverOpen: (isPopoverOpen: boolean) => void;
  steps: FunnelStepData[];
  isSaving: boolean;
}

// Add this helper function
export const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
  if (steps.some((step) => !step.filters)) return steps;

  return steps.map((step) => ({
    ...step,
    filters: {
      ...step.filters,
      items: step.filters.items.map((item) => {
        const {
          id: unusedId,
          isIndexed,
          ...keyObj
        } = item.key as BaseAutocompleteData;
        return {
          id: '',
          key: keyObj,
          value: item.value,
          op: item.op,
        };
      }),
      items: step.filters.items.map((item) => ({
        id: '',
        key: item.key,
        value: item.value,
        op: item.op,
      })),
    },
  }));
};

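Before-and-after of the rewritten `normalizeSteps`: the old body stripped `id` and `isIndexed` off each filter key before comparing; the new body keeps `item.key` whole and only zeroes the volatile item `id`, so two step lists now compare equal only when their keys match field-for-field. A worked example under the new behavior — the step shape is abbreviated, not the full `FunnelStepData`:

```typescript
// Abbreviated input; FunnelStepData has more fields than shown here.
const step = {
  filters: {
    op: 'AND',
    items: [
      { id: 'abc-123', key: { key: 'service.name' }, op: '=', value: 'api' },
    ],
  },
};

// After the new normalizeSteps, only the volatile item id is cleared:
// items: [{ id: '', key: { key: 'service.name' }, op: '=', value: 'api' }]
```
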
@@ -47,22 +36,22 @@ export const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
// eslint-disable-next-line sonarjs/cognitive-complexity
export default function useFunnelConfiguration({
  funnel,
  disableAutoSave = false,
  triggerAutoSave = false,
  showNotifications = false,
}: {
  funnel: FunnelData;
  disableAutoSave?: boolean;
  triggerAutoSave?: boolean;
  showNotifications?: boolean;
}): UseFunnelConfiguration {
  const { notifications } = useNotifications();
  const queryClient = useQueryClient();
  const {
    steps,
    lastUpdatedSteps,
    setLastUpdatedSteps,
    initialSteps,
    hasIncompleteStepFields,
    handleRestoreSteps,
    selectedTime,
    setIsUpdatingFunnel,
    handleRunFunnel,
  } = useFunnelContext();

  // State management
@@ -70,6 +59,10 @@ export default function useFunnelConfiguration({

  const debouncedSteps = useDebounce(steps, 200);

  const [lastValidatedSteps, setLastValidatedSteps] = useState<FunnelStepData[]>(
    initialSteps,
  );

  // Mutation hooks
  const updateStepsMutation = useUpdateFunnelSteps(
    funnel.funnel_id,
@@ -78,15 +71,6 @@ export default function useFunnelConfiguration({

  // Derived state
  const lastSavedStepsStateRef = useRef<FunnelStepData[]>(steps);
  const hasRestoredFromLocalStorage = useRef(false);

  // localStorage hook for funnel steps
  const localStorageKey = `${LOCALSTORAGE.FUNNEL_STEPS}_${funnel.funnel_id}`;
  const [
    localStorageSavedSteps,
    setLocalStorageSavedSteps,
    clearLocalStorageSavedSteps,
  ] = useLocalStorage<FunnelStepData[] | null>(localStorageKey, null);

  const hasStepsChanged = useCallback(() => {
    const normalizedLastSavedSteps = normalizeSteps(
@@ -96,34 +80,6 @@ export default function useFunnelConfiguration({
    return !isEqual(normalizedDebouncedSteps, normalizedLastSavedSteps);
  }, [debouncedSteps]);

  // Handle localStorage for funnel steps
  useEffect(() => {
    // Restore from localStorage on first run if saved steps exist
    if (!hasRestoredFromLocalStorage.current) {
      const savedSteps = localStorageSavedSteps;
      if (savedSteps) {
        handleRestoreSteps(savedSteps);
        hasRestoredFromLocalStorage.current = true;
        return;
      }
    }

    // Save steps to localStorage
    if (hasStepsChanged()) {
      setLocalStorageSavedSteps(debouncedSteps);
    }
  }, [
    debouncedSteps,
    funnel.funnel_id,
    hasStepsChanged,
    handleRestoreSteps,
    localStorageSavedSteps,
    setLocalStorageSavedSteps,
    queryClient,
    selectedTime,
    lastUpdatedSteps,
  ]);

  const hasFunnelStepDefinitionsChanged = useCallback(
    (prevSteps: FunnelStepData[], nextSteps: FunnelStepData[]): boolean => {
      if (prevSteps.length !== nextSteps.length) return true;
@@ -141,6 +97,15 @@ export default function useFunnelConfiguration({
    [],
  );

  const hasFunnelLatencyTypeChanged = useCallback(
    (prevSteps: FunnelStepData[], nextSteps: FunnelStepData[]): boolean =>
      prevSteps.some((step, index) => {
        const nextStep = nextSteps[index];
        return step.latency_type !== nextStep.latency_type;
      }),
    [],
  );

  // Mutation payload preparation
  const getUpdatePayload = useCallback(
    () => ({
@@ -151,19 +116,33 @@ export default function useFunnelConfiguration({
    [funnel.funnel_id, debouncedSteps],
  );

  const queryClient = useQueryClient();
  const { selectedTime } = useFunnelContext();

  const validateStepsQueryKey = useMemo(
    () => [REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS, funnel.funnel_id, selectedTime],
    [funnel.funnel_id, selectedTime],
  );
  // eslint-disable-next-line sonarjs/cognitive-complexity
  useEffect(() => {
    if (triggerAutoSave && !isEqual(debouncedSteps, lastUpdatedSteps)) {
      setIsUpdatingFunnel(true);
    // Determine if we should save based on the mode
    let shouldSave = false;

    if (disableAutoSave) {
      // Manual save mode: only save when explicitly triggered
      shouldSave = triggerAutoSave;
    } else {
      // Auto-save mode: save when steps have changed and no incomplete fields
      shouldSave = hasStepsChanged() && !hasIncompleteStepFields;
    }

    if (shouldSave && !isEqual(debouncedSteps, lastValidatedSteps)) {
      updateStepsMutation.mutate(getUpdatePayload(), {
        onSuccess: (data) => {
          const updatedFunnelSteps = data?.payload?.steps;

          if (!updatedFunnelSteps) return;

          // Clear localStorage since steps are saved successfully
          clearLocalStorageSavedSteps();

          queryClient.setQueryData(
            [REACT_QUERY_KEY.GET_FUNNEL_DETAILS, funnel.funnel_id],
            (oldData: any) => {
@@ -184,9 +163,17 @@ export default function useFunnelConfiguration({
            (step) => step.service_name === '' || step.span_name === '',
          );

          if (hasFunnelLatencyTypeChanged(lastValidatedSteps, debouncedSteps)) {
            handleRunFunnel();
            setLastValidatedSteps(debouncedSteps);
          }
          // Only validate if funnel step definitions have changed
          if (!hasIncompleteStepFields) {
            setLastUpdatedSteps(debouncedSteps);
          else if (
            !hasIncompleteStepFields &&
            hasFunnelStepDefinitionsChanged(lastValidatedSteps, debouncedSteps)
          ) {
            queryClient.refetchQueries(validateStepsQueryKey);
            setLastValidatedSteps(debouncedSteps);
          }

          // Show success notification only when requested
@@ -229,18 +216,17 @@ export default function useFunnelConfiguration({
    getUpdatePayload,
    hasFunnelStepDefinitionsChanged,
    hasStepsChanged,
    lastUpdatedSteps,
    lastValidatedSteps,
    queryClient,
    validateStepsQueryKey,
    triggerAutoSave,
    showNotifications,
    localStorageSavedSteps,
    clearLocalStorageSavedSteps,
    disableAutoSave,
  ]);

  return {
    isPopoverOpen,
    setIsPopoverOpen,
    steps,
    isSaving: updateStepsMutation.isLoading,
  };
}

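The save gating introduced inside the effect above reduces to one boolean per mode. Pulled out as a pure function for readability — a sketch for illustration, not a refactor this PR makes:

```typescript
// Equivalent decision logic, extracted for illustration only.
function shouldSaveSteps(
  disableAutoSave: boolean,
  triggerAutoSave: boolean,
  stepsChanged: boolean,
  hasIncompleteStepFields: boolean,
): boolean {
  if (disableAutoSave) {
    // Manual mode: save only on an explicit trigger.
    return triggerAutoSave;
  }
  // Auto-save mode: save when steps changed and every step is fully specified.
  return stepsChanged && !hasIncompleteStepFields;
}
```
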
@@ -20,11 +20,10 @@ export function useFunnelMetrics({
  metricsData: MetricItem[];
  conversionRate: number;
} {
  const { startTime, endTime, steps } = useFunnelContext();
  const { startTime, endTime } = useFunnelContext();
  const payload = {
    start_time: startTime,
    end_time: endTime,
    steps,
  };

  const {
@@ -82,7 +81,6 @@ export function useFunnelStepsMetrics({
    end_time: endTime,
    step_start: stepStart,
    step_end: stepEnd,
    steps,
  };

  const {

@@ -7,7 +7,6 @@ import {
  FunnelOverviewResponse,
  FunnelStepsOverviewPayload,
  FunnelStepsOverviewResponse,
  FunnelStepsPayload,
  FunnelStepsResponse,
  getFunnelById,
  getFunnelErrorTraces,
@@ -38,7 +37,6 @@ import {
  CreateFunnelPayload,
  CreateFunnelResponse,
  FunnelData,
  FunnelStepData,
} from 'types/api/traceFunnels';

export const useFunnelsList = (): UseQueryResult<
@@ -119,14 +117,12 @@ export const useValidateFunnelSteps = ({
  startTime,
  endTime,
  enabled,
  steps,
}: {
  funnelId: string;
  selectedTime: string;
  startTime: number;
  endTime: number;
  enabled: boolean;
  steps: FunnelStepData[];
}): UseQueryResult<
  SuccessResponse<ValidateFunnelResponse> | ErrorResponse,
  Error
@@ -134,19 +130,11 @@ export const useValidateFunnelSteps = ({
  useQuery({
    queryFn: ({ signal }) =>
      validateFunnelSteps(
        { start_time: startTime, end_time: endTime, steps },
        funnelId,
        { start_time: startTime, end_time: endTime },
        signal,
      ),
    queryKey: [
      REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS,
      funnelId,
      selectedTime,
      steps.map((step) => {
        // eslint-disable-next-line @typescript-eslint/naming-convention
        const { latency_type, ...rest } = step;
        return rest;
      }),
    ],
    queryKey: [REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS, funnelId, selectedTime],
    enabled,
    staleTime: 0,
  });

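Dropping `steps` from the `VALIDATE_FUNNEL_STEPS` query key means React Query no longer refetches automatically when a step changes; the hook now relies on the explicit `queryClient.refetchQueries(validateStepsQueryKey)` call in `useFunnelConfiguration`. The trade-off in miniature — key shapes copied from the diff, with `normalizedSteps` standing in for the old key's mapped step array:

```typescript
// Old key: step edits changed the key, so React Query refetched on its own.
const oldKey = [
  REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS,
  funnelId,
  selectedTime,
  normalizedSteps, // steps with latency_type stripped, per the removed code
];

// New key: stable across step edits; refetching is now an explicit decision.
const newKey = [REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS, funnelId, selectedTime];
queryClient.refetchQueries(newKey);
```
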
@@ -180,17 +168,18 @@ export const useFunnelOverview = (
  const {
    selectedTime,
    validTracesCount,
    isUpdatingFunnel,
    hasFunnelBeenExecuted,
  } = useFunnelContext();
  return useQuery({
    queryFn: ({ signal }) => getFunnelOverview(payload, signal),
    queryFn: ({ signal }) => getFunnelOverview(funnelId, payload, signal),
    queryKey: [
      REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW,
      funnelId,
      selectedTime,
      payload.steps,
      payload.step_start ?? '',
      payload.step_end ?? '',
    ],
    enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
    enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
  });
};

@@ -201,19 +190,18 @@ export const useFunnelSlowTraces = (
  const {
    selectedTime,
    validTracesCount,
    isUpdatingFunnel,
    hasFunnelBeenExecuted,
  } = useFunnelContext();
  return useQuery<SuccessResponse<SlowTraceData> | ErrorResponse, Error>({
    queryFn: ({ signal }) => getFunnelSlowTraces(payload, signal),
    queryFn: ({ signal }) => getFunnelSlowTraces(funnelId, payload, signal),
    queryKey: [
      REACT_QUERY_KEY.GET_FUNNEL_SLOW_TRACES,
      funnelId,
      selectedTime,
      payload.step_start ?? '',
      payload.step_end ?? '',
      payload.steps,
    ],
    enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
    enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
  });
};

@@ -224,7 +212,7 @@ export const useFunnelErrorTraces = (
  const {
    selectedTime,
    validTracesCount,
    isUpdatingFunnel,
    hasFunnelBeenExecuted,
  } = useFunnelContext();
  return useQuery({
    queryFn: ({ signal }) => getFunnelErrorTraces(funnelId, payload, signal),
@@ -234,31 +222,35 @@ export const useFunnelErrorTraces = (
      selectedTime,
      payload.step_start ?? '',
      payload.step_end ?? '',
      payload.steps,
    ],
    enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
    enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
  });
};

export function useFunnelStepsGraphData(
  funnelId: string,
  payload: FunnelStepsPayload,
): UseQueryResult<SuccessResponse<FunnelStepsResponse> | ErrorResponse, Error> {
  const {
    startTime,
    endTime,
    selectedTime,
    validTracesCount,
    isUpdatingFunnel,
    hasFunnelBeenExecuted,
  } = useFunnelContext();

  return useQuery({
    queryFn: ({ signal }) => getFunnelSteps(payload, signal),
    queryFn: ({ signal }) =>
      getFunnelSteps(
        funnelId,
        { start_time: startTime, end_time: endTime },
        signal,
      ),
    queryKey: [
      REACT_QUERY_KEY.GET_FUNNEL_STEPS_GRAPH_DATA,
      funnelId,
      selectedTime,
      payload.steps,
    ],
    enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
    enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
  });
}

@@ -272,18 +264,17 @@ export const useFunnelStepsOverview = (
  const {
    selectedTime,
    validTracesCount,
    isUpdatingFunnel,
    hasFunnelBeenExecuted,
  } = useFunnelContext();
  return useQuery({
    queryFn: ({ signal }) => getFunnelStepsOverview(payload, signal),
    queryFn: ({ signal }) => getFunnelStepsOverview(funnelId, payload, signal),
    queryKey: [
      REACT_QUERY_KEY.GET_FUNNEL_STEPS_OVERVIEW,
      funnelId,
      selectedTime,
      payload.step_start ?? '',
      payload.step_end ?? '',
      payload.steps,
    ],
    enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
    enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
  });
};

@@ -38,21 +38,6 @@
  }
}

.alert-empty-card {
  margin-top: 50px;
  .ant-empty-description {
    color: var(--text-vanilla-400);
  }
}

.lightMode {
  .alert-empty-card {
    .ant-empty-description {
      color: var(--text-ink-400);
    }
  }
}

.alert-details {
  margin-top: 10px;
  .divider {

@@ -1,8 +1,9 @@
import './AlertDetails.styles.scss';

import { Breadcrumb, Button, Divider, Empty } from 'antd';
import { Breadcrumb, Button, Divider } from 'antd';
import logEvent from 'api/common/logEvent';
import { Filters } from 'components/AlertDetailsFilters/Filters';
import NotFound from 'components/NotFound';
import RouteTab from 'components/RouteTab';
import Spinner from 'components/Spinner';
import ROUTES from 'constants/routes';
@@ -69,7 +70,6 @@ BreadCrumbItem.defaultProps = {
function AlertDetails(): JSX.Element {
  const { pathname } = useLocation();
  const { routes } = useRouteTabUtils();
  const { t } = useTranslation(['alerts']);

  const {
    isLoading,
@@ -90,11 +90,7 @@ function AlertDetails(): JSX.Element {
    !isValidRuleId ||
    (alertDetailsResponse && alertDetailsResponse.statusCode !== 200)
  ) {
    return (
      <div className="alert-empty-card">
        <Empty description={t('alert_rule_not_found')} />
      </div>
    );
    return <NotFound />;
  }

  const handleTabChange = (route: string): void => {

@@ -4,6 +4,7 @@
}

.ant-tabs-content-holder {
  padding: 8px;
  padding-left: 16px;
  padding-right: 16px;
}
}

@@ -1,5 +1,6 @@
.edit-alert-channels-container {
  margin: 12px;
  width: 90%;
  margin: 12px auto;

  border: 1px solid var(--Slate-500, #161922);
  background: var(--Ink-400, #121317);

@@ -15,27 +15,23 @@ import {
import EditAlertChannels from 'container/EditAlertChannels';
import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
import { useParams } from 'react-router-dom';
import { SuccessResponseV2 } from 'types/api';
import { Channels } from 'types/api/channels/getAll';
import APIError from 'types/api/error';

function ChannelsEdit(): JSX.Element {
  const { id } = useParams<Params>();
  const { t } = useTranslation();

  // Extract channelId from URL pathname since useParams doesn't work in nested routing
  const { pathname } = window.location;
  const channelIdMatch = pathname.match(/\/settings\/channels\/edit\/([^/]+)/);
  const channelId = channelIdMatch ? channelIdMatch[1] : undefined;

  const { isFetching, isError, data, error } = useQuery<
    SuccessResponseV2<Channels>,
    APIError
  >(['getChannel', channelId], {
  >(['getChannel', id], {
    queryFn: () =>
      get({
        id: channelId || '',
        id,
      }),
    enabled: !!channelId,
  });

  if (isError) {
@@ -148,5 +144,8 @@ function ChannelsEdit(): JSX.Element {
    </div>
  );
}
interface Params {
  id: string;
}

export default ChannelsEdit;

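The edit above drops the pathname-regex workaround in favor of `useParams`, presumably because the nested-routing problem the removed comment mentions no longer applies under the reworked settings routes. The minimal pattern, as the file now uses it — the route path here is an assumption for illustration:

```typescript
// Assumes a route like /settings/channels/edit/:id mounting this component.
import { useParams } from 'react-router-dom';

interface Params {
  id: string;
}

function useChannelId(): string {
  const { id } = useParams<Params>();
  return id;
}
```
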
@@ -7,7 +7,6 @@ import NewWidget from 'container/NewWidget';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect, useState } from 'react';
import { generatePath, useLocation, useParams } from 'react-router-dom';
import { Widgets } from 'types/api/dashboard/getAll';
@@ -53,13 +52,11 @@ function DashboardWidget(): JSX.Element | null {
}

return (
  <PreferenceContextProvider>
    <NewWidget
      yAxisUnit={selectedWidget?.yAxisUnit}
      selectedGraph={selectedGraph}
      fillSpans={selectedWidget?.fillSpans}
    />
  </PreferenceContextProvider>
  <NewWidget
    yAxisUnit={selectedWidget?.yAxisUnit}
    selectedGraph={selectedGraph}
    fillSpans={selectedWidget?.fillSpans}
  />
);
}

@@ -3,14 +3,9 @@ import ROUTES from 'constants/routes';
import InfraMonitoringHosts from 'container/InfraMonitoringHosts';
import InfraMonitoringK8s from 'container/InfraMonitoringK8s';
import { Inbox } from 'lucide-react';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';

export const Hosts: TabRoutes = {
  Component: (): JSX.Element => (
    <PreferenceContextProvider>
      <InfraMonitoringHosts />
    </PreferenceContextProvider>
  ),
  Component: InfraMonitoringHosts,
  name: (
    <div className="tab-item">
      <Inbox size={16} /> Hosts
@@ -21,11 +16,7 @@ export const Hosts: TabRoutes = {
};

export const Kubernetes: TabRoutes = {
  Component: (): JSX.Element => (
    <PreferenceContextProvider>
      <InfraMonitoringK8s />
    </PreferenceContextProvider>
  ),
  Component: InfraMonitoringK8s,
  name: (
    <div className="tab-item">
      <Inbox size={16} /> Kubernetes

@@ -10,7 +10,6 @@ import LogsFilters from 'container/LogsFilters';
import LogsSearchFilter from 'container/LogsSearchFilter';
import LogsTable from 'container/LogsTable';
import history from 'lib/history';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useMemo } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
@@ -83,71 +82,69 @@ function OldLogsExplorer(): JSX.Element {
};

return (
  <PreferenceContextProvider>
    <div className="old-logs-explorer">
      <SpaceContainer
        split={<Divider type="vertical" />}
        align="center"
        direction="horizontal"
      >
        <LogsSearchFilter />
        <LogLiveTail />
      </SpaceContainer>
  <div className="old-logs-explorer">
    <SpaceContainer
      split={<Divider type="vertical" />}
      align="center"
      direction="horizontal"
    >
      <LogsSearchFilter />
      <LogLiveTail />
    </SpaceContainer>

      <LogsAggregate />
    <LogsAggregate />

      <Row gutter={20} wrap={false}>
        <LogsFilters />
        <Col flex={1} className="logs-col-container">
          <Row>
            <Col flex={1}>
              <Space align="baseline" direction="horizontal">
                <Select
    <Row gutter={20} wrap={false}>
      <LogsFilters />
      <Col flex={1} className="logs-col-container">
        <Row>
          <Col flex={1}>
            <Space align="baseline" direction="horizontal">
              <Select
                getPopupContainer={popupContainer}
                style={defaultSelectStyle}
                value={selectedViewModeOption}
                onChange={onChangeVeiwMode}
              >
                {viewModeOptionList.map((option) => (
                  <Select.Option key={option.value}>{option.label}</Select.Option>
                ))}
              </Select>

              {isFormatButtonVisible && (
                <Popover
                  getPopupContainer={popupContainer}
                  style={defaultSelectStyle}
                  value={selectedViewModeOption}
                  onChange={onChangeVeiwMode}
                  placement="right"
                  content={renderPopoverContent}
                >
                  {viewModeOptionList.map((option) => (
                    <Select.Option key={option.value}>{option.label}</Select.Option>
                  ))}
                </Select>
                  <Button>Format</Button>
                </Popover>
              )}

              {isFormatButtonVisible && (
                <Popover
                  getPopupContainer={popupContainer}
                  placement="right"
                  content={renderPopoverContent}
                >
                  <Button>Format</Button>
                </Popover>
              )}
              <Select
                getPopupContainer={popupContainer}
                style={defaultSelectStyle}
                defaultValue={order}
                onChange={handleChangeOrder}
              >
                {orderItems.map((item) => (
                  <Select.Option key={item.enum}>{item.name}</Select.Option>
                ))}
              </Select>
            </Space>
          </Col>

              <Select
                getPopupContainer={popupContainer}
                style={defaultSelectStyle}
                defaultValue={order}
                onChange={handleChangeOrder}
              >
                {orderItems.map((item) => (
                  <Select.Option key={item.enum}>{item.name}</Select.Option>
                ))}
              </Select>
            </Space>
          </Col>
          <Col>
            <LogControls />
          </Col>
        </Row>

          <Col>
            <LogControls />
          </Col>
        </Row>
        <LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
      </Col>
    </Row>

        <LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
      </Col>
    </Row>

      <LogDetailedView />
    </div>
  </PreferenceContextProvider>
    <LogDetailedView />
  </div>
);
}

@@ -4,7 +4,6 @@ import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import NewDashboard from 'container/NewDashboard';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { ErrorType } from 'types/common';

@@ -36,11 +35,7 @@ function DashboardPage(): JSX.Element {
  return <Spinner tip="Loading.." />;
}

return (
  <PreferenceContextProvider>
    <NewDashboard />
  </PreferenceContextProvider>
);
return <NewDashboard />;
}

export default DashboardPage;

@@ -24,12 +24,7 @@ import { getRoutes } from './utils';
function SettingsPage(): JSX.Element {
  const { pathname, search } = useLocation();

  const {
    user,
    featureFlags,
    trialInfo,
    isFetchingActiveLicense,
  } = useAppContext();
  const { user, featureFlags, trialInfo } = useAppContext();
  const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();

  const [settingsMenuItems, setSettingsMenuItems] = useState<SidebarItem[]>(
@@ -56,21 +51,6 @@ function SettingsPage(): JSX.Element {
  setSettingsMenuItems((prevItems) => {
    let updatedItems = [...prevItems];

    if (trialInfo?.workSpaceBlock && !isFetchingActiveLicense) {
      updatedItems = updatedItems.map((item) => ({
        ...item,
        isEnabled: !!(
          isAdmin &&
          (item.key === ROUTES.BILLING ||
            item.key === ROUTES.ORG_SETTINGS ||
            item.key === ROUTES.MY_SETTINGS ||
            item.key === ROUTES.SHORTCUTS)
        ),
      }));

      return updatedItems;
    }

    if (isCloudUser) {
      if (isAdmin) {
        updatedItems = updatedItems.map((item) => ({
@@ -81,8 +61,7 @@ function SettingsPage(): JSX.Element {
          item.key === ROUTES.CUSTOM_DOMAIN_SETTINGS ||
          item.key === ROUTES.API_KEYS ||
          item.key === ROUTES.INGESTION_SETTINGS ||
          item.key === ROUTES.ORG_SETTINGS ||
          item.key === ROUTES.SHORTCUTS
          item.key === ROUTES.ORG_SETTINGS
            ? true
            : item.isEnabled,
        }));
@@ -93,8 +72,7 @@ function SettingsPage(): JSX.Element {
          ...item,
          isEnabled:
            item.key === ROUTES.INGESTION_SETTINGS ||
            item.key === ROUTES.INTEGRATIONS ||
            item.key === ROUTES.SHORTCUTS
            item.key === ROUTES.INTEGRATIONS
              ? true
              : item.isEnabled,
        }));
@@ -109,8 +87,7 @@ function SettingsPage(): JSX.Element {
          item.key === ROUTES.BILLING ||
          item.key === ROUTES.INTEGRATIONS ||
          item.key === ROUTES.API_KEYS ||
          item.key === ROUTES.ORG_SETTINGS ||
          item.key === ROUTES.SHORTCUTS
          item.key === ROUTES.ORG_SETTINGS
            ? true
            : item.isEnabled,
        }));
@@ -130,9 +107,7 @@ function SettingsPage(): JSX.Element {
      updatedItems = updatedItems.map((item) => ({
        ...item,
        isEnabled:
          item.key === ROUTES.API_KEYS ||
          item.key === ROUTES.ORG_SETTINGS ||
          item.key === ROUTES.SHORTCUTS
          item.key === ROUTES.API_KEYS || item.key === ROUTES.ORG_SETTINGS
            ? true
            : item.isEnabled,
      }));
@@ -150,15 +125,7 @@ function SettingsPage(): JSX.Element {

    return updatedItems;
  });
}, [
  isAdmin,
  isEditor,
  isCloudUser,
  isEnterpriseSelfHostedUser,
  isFetchingActiveLicense,
  trialInfo?.workSpaceBlock,
  pathname,
]);
}, [isAdmin, isEditor, isCloudUser, isEnterpriseSelfHostedUser]);

const routes = useMemo(
  () =>
@@ -217,13 +184,6 @@ function SettingsPage(): JSX.Element {
    return true;
  }

  if (
    pathname.startsWith(ROUTES.CHANNELS_EDIT) &&
    key === ROUTES.ALL_CHANNELS
  ) {
    return true;
  }

  return pathname === key;
};

@@ -32,12 +32,7 @@ export const getRoutes = (
const isEditor = userRole === USER_ROLES.EDITOR;

if (isWorkspaceBlocked && isAdmin) {
  settings.push(
    ...organizationSettings(t),
    ...mySettings(t),
    ...billingSettings(t),
    ...keyboardShortcuts(t),
  );
  settings.push(...organizationSettings(t));

  return settings;
}

@@ -2,7 +2,6 @@ import './DeleteFunnelStep.styles.scss';

import SignozModal from 'components/SignozModal/SignozModal';
import { Trash2, X } from 'lucide-react';
import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';

interface DeleteFunnelStepProps {
  isOpen: boolean;
@@ -15,10 +14,8 @@ function DeleteFunnelStep({
  onClose,
  onStepRemove,
}: DeleteFunnelStepProps): JSX.Element {
  const { handleRunFunnel } = useFunnelContext();
  const handleStepRemoval = (): void => {
    onStepRemove();
    handleRunFunnel();
    onClose();
  };

@@ -6,7 +6,6 @@ import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import useFunnelConfiguration from 'hooks/TracesFunnels/useFunnelConfiguration';
import { PencilLine } from 'lucide-react';
import FunnelItemPopover from 'pages/TracesFunnels/components/FunnelsList/FunnelItemPopover';
import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
import CopyToClipboard from 'periscope/components/CopyToClipboard';
import { memo, useState } from 'react';
import { Span } from 'types/api/trace/getTraceV2';
@@ -22,6 +21,7 @@ interface FunnelConfigurationProps {
  funnel: FunnelData;
  isTraceDetailsPage?: boolean;
  span?: Span;
  disableAutoSave?: boolean;
  triggerAutoSave?: boolean;
  showNotifications?: boolean;
}
@@ -30,19 +30,15 @@ function FunnelConfiguration({
  funnel,
  isTraceDetailsPage,
  span,
  disableAutoSave,
  triggerAutoSave,
  showNotifications,
}: FunnelConfigurationProps): JSX.Element {
  const { triggerSave } = useFunnelContext();
  const {
    isPopoverOpen,
    setIsPopoverOpen,
    steps,
    isSaving,
  } = useFunnelConfiguration({
  const { isPopoverOpen, setIsPopoverOpen, steps } = useFunnelConfiguration({
    funnel,
    triggerAutoSave: triggerAutoSave || triggerSave,
    showNotifications: showNotifications || triggerSave,
    disableAutoSave,
    triggerAutoSave,
    showNotifications,
  });
  const [isDescriptionModalOpen, setIsDescriptionModalOpen] = useState<boolean>(
    false,
@@ -110,7 +106,7 @@ function FunnelConfiguration({

  {!isTraceDetailsPage && (
    <>
      <StepsFooter stepsCount={steps.length} isSaving={isSaving || false} />
      <StepsFooter stepsCount={steps.length} />
      <AddFunnelDescriptionModal
        isOpen={isDescriptionModalOpen}
        onClose={handleDescriptionModalClose}
@@ -126,6 +122,7 @@
FunnelConfiguration.defaultProps = {
  isTraceDetailsPage: false,
  span: undefined,
  disableAutoSave: false,
  triggerAutoSave: false,
  showNotifications: false,
};

@@ -9,7 +9,6 @@
color: var(--bg-vanilla-400);
border: 1px solid var(--bg-slate-500);
border-radius: 6px;
width: 100%;
.step-popover {
  opacity: 0;
  width: 22px;

@@ -40,6 +40,11 @@
letter-spacing: 0.12px;
border-radius: 2px;

&--sync {
  border: 1px solid var(--bg-slate-400);
  background: var(--bg-ink-300);
  color: var(--bg-vanilla-400);
}
&--run {
  background-color: var(--bg-robin-500);
}

@@ -1,14 +1,53 @@
import './StepsFooter.styles.scss';

import { Button, Skeleton } from 'antd';
import { LoadingOutlined } from '@ant-design/icons';
import { Button, Skeleton, Spin } from 'antd';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { Check, Cone } from 'lucide-react';
import { Cone, Play, RefreshCcw } from 'lucide-react';
import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
import { useIsMutating } from 'react-query';
import { useMemo } from 'react';
import { useIsFetching, useIsMutating } from 'react-query';

const useFunnelResultsLoading = (): boolean => {
  const { funnelId } = useFunnelContext();

  const isFetchingFunnelOverview = useIsFetching({
    queryKey: [REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW, funnelId],
  });

  const isFetchingStepsGraphData = useIsFetching({
    queryKey: [REACT_QUERY_KEY.GET_FUNNEL_STEPS_GRAPH_DATA, funnelId],
  });

  const isFetchingErrorTraces = useIsFetching({
    queryKey: [REACT_QUERY_KEY.GET_FUNNEL_ERROR_TRACES, funnelId],
  });

  const isFetchingSlowTraces = useIsFetching({
    queryKey: [REACT_QUERY_KEY.GET_FUNNEL_SLOW_TRACES, funnelId],
  });

  return useMemo(() => {
    if (!funnelId) {
      return false;
    }
    return (
      !!isFetchingFunnelOverview ||
      !!isFetchingStepsGraphData ||
      !!isFetchingErrorTraces ||
      !!isFetchingSlowTraces
    );
  }, [
    funnelId,
    isFetchingFunnelOverview,
    isFetchingStepsGraphData,
    isFetchingErrorTraces,
    isFetchingSlowTraces,
  ]);
};

interface StepsFooterProps {
  stepsCount: number;
  isSaving: boolean;
}

function ValidTracesCount(): JSX.Element {
@@ -54,13 +93,21 @@ function ValidTracesCount(): JSX.Element {
  return <span className="steps-footer__valid-traces">Valid traces found</span>;
}

function StepsFooter({ stepsCount, isSaving }: StepsFooterProps): JSX.Element {
function StepsFooter({ stepsCount }: StepsFooterProps): JSX.Element {
  const {
    hasIncompleteStepFields,
    handleSaveFunnel,
    hasUnsavedChanges,
    validTracesCount,
    handleRunFunnel,
    hasFunnelBeenExecuted,
    funnelId,
  } = useFunnelContext();

  const isFunnelResultsLoading = useFunnelResultsLoading();

  const isFunnelUpdateMutating = useIsMutating([
    REACT_QUERY_KEY.UPDATE_FUNNEL_STEPS,
    funnelId,
  ]);

  return (
    <div className="steps-footer">
      <div className="steps-footer__left">
@@ -70,16 +117,38 @@ function StepsFooter({ stepsCount, isSaving }: StepsFooterProps): JSX.Element {
        <ValidTracesCount />
      </div>
      <div className="steps-footer__right">
        <Button
          disabled={hasIncompleteStepFields || !hasUnsavedChanges}
          onClick={handleSaveFunnel}
          type="primary"
          className="steps-footer__button steps-footer__button--run"
          icon={<Check size={14} />}
          loading={isSaving}
        >
          Save funnel
        </Button>
        {!!isFunnelUpdateMutating && (
          <div className="steps-footer__button steps-footer__button--updating">
            <Spin
              indicator={<LoadingOutlined style={{ color: 'grey' }} />}
              size="small"
            />
            Updating
          </div>
        )}

        {!hasFunnelBeenExecuted ? (
          <Button
            disabled={validTracesCount === 0}
            onClick={handleRunFunnel}
            type="primary"
            className="steps-footer__button steps-footer__button--run"
            icon={<Play size={16} />}
          >
            Run funnel
          </Button>
        ) : (
          <Button
            type="text"
            className="steps-footer__button steps-footer__button--sync"
            icon={<RefreshCcw size={16} />}
            onClick={handleRunFunnel}
            loading={isFunnelResultsLoading}
            disabled={validTracesCount === 0}
          >
            Refresh
          </Button>
        )}
      </div>
    </div>
  );

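`useFunnelResultsLoading` above subscribes four times, once per result query key. React Query's query filters also accept a `predicate`, so the same signal could come from a single subscription — a sketch under the assumption that the filters overload of `useIsFetching` is available in the React Query version this codebase uses, with `REACT_QUERY_KEY` imported as above:

```typescript
import { useIsFetching } from 'react-query';

// Sketch only; the PR ships the four-key version above.
function useIsFetchingAnyFunnelResult(funnelId: string): boolean {
  const fetching = useIsFetching({
    predicate: (query) =>
      [
        REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW,
        REACT_QUERY_KEY.GET_FUNNEL_STEPS_GRAPH_DATA,
        REACT_QUERY_KEY.GET_FUNNEL_ERROR_TRACES,
        REACT_QUERY_KEY.GET_FUNNEL_SLOW_TRACES,
      ].includes(query.queryKey[0] as string) &&
      query.queryKey[1] === funnelId,
  });
  return fetching > 0;
}
```
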
@@ -29,20 +29,13 @@ Chart.register(
);

function FunnelGraph(): JSX.Element {
  const { funnelId, startTime, endTime, steps } = useFunnelContext();

  const payload = {
    start_time: startTime,
    end_time: endTime,
    steps,
  };

  const { funnelId } = useFunnelContext();
  const {
    data: stepsData,
    isLoading,
    isFetching,
    isError,
  } = useFunnelStepsGraphData(funnelId, payload);
  } = useFunnelStepsGraphData(funnelId);

  const data = useMemo(() => stepsData?.payload?.data?.[0]?.data, [
    stepsData?.payload?.data,

@@ -16,6 +16,7 @@ function FunnelResults(): JSX.Element {
  isValidateStepsLoading,
  hasIncompleteStepFields,
  hasAllEmptyStepFields,
  hasFunnelBeenExecuted,
  funnelId,
} = useFunnelContext();

@@ -46,6 +47,14 @@ function FunnelResults(): JSX.Element {
    />
  );
}
if (!hasFunnelBeenExecuted) {
  return (
    <EmptyFunnelResults
      title="Funnel has not been run yet."
      description="Run the funnel to see the results"
    />
  );
}

return (
  <div className="funnel-results">

@@ -7,7 +7,6 @@ import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { FunnelStepData } from 'types/api/traceFunnels';

import FunnelTable from './FunnelTable';
import { topTracesTableColumns } from './utils';
@@ -25,7 +24,6 @@ interface FunnelTopTracesTableProps {
    SuccessResponse<SlowTraceData | ErrorTraceData> | ErrorResponse,
    Error
  >;
  steps: FunnelStepData[];
}

function FunnelTopTracesTable({
@@ -34,7 +32,6 @@ function FunnelTopTracesTable({
  stepBOrder,
  title,
  tooltip,
  steps,
  useQueryHook,
}: FunnelTopTracesTableProps): JSX.Element {
  const { startTime, endTime } = useFunnelContext();
@@ -44,9 +41,8 @@ function FunnelTopTracesTable({
      end_time: endTime,
      step_start: stepAOrder,
      step_end: stepBOrder,
      steps,
    }),
    [startTime, endTime, stepAOrder, stepBOrder, steps],
    [startTime, endTime, stepAOrder, stepBOrder],
  );

  const { data: response, isLoading, isFetching } = useQueryHook(

@@ -6,7 +6,7 @@ import FunnelMetricsTable from './FunnelMetricsTable';
function OverallMetrics(): JSX.Element {
  const { funnelId } = useParams<{ funnelId: string }>();
  const { isLoading, metricsData, conversionRate, isError } = useFunnelMetrics({
    funnelId,
    funnelId: funnelId || '',
  });

  return (

@@ -52,13 +52,11 @@ function StepsTransitionResults(): JSX.Element {
  funnelId={funnelId}
  stepAOrder={stepAOrder}
  stepBOrder={stepBOrder}
- 	steps={steps}
/>
<TopTracesWithErrors
  funnelId={funnelId}
  stepAOrder={stepAOrder}
  stepBOrder={stepBOrder}
- 	steps={steps}
/>
</div>
</div>
@@ -1,5 +1,4 @@
import { useFunnelSlowTraces } from 'hooks/TracesFunnels/useFunnels';
- import { FunnelStepData } from 'types/api/traceFunnels';

import FunnelTopTracesTable from './FunnelTopTracesTable';

@@ -7,7 +6,6 @@ interface TopSlowestTracesProps {
  funnelId: string;
  stepAOrder: number;
  stepBOrder: number;
- 	steps: FunnelStepData[];
}

function TopSlowestTraces(props: TopSlowestTracesProps): JSX.Element {
@@ -1,5 +1,4 @@
import { useFunnelErrorTraces } from 'hooks/TracesFunnels/useFunnels';
- import { FunnelStepData } from 'types/api/traceFunnels';

import FunnelTopTracesTable from './FunnelTopTracesTable';

@@ -7,7 +6,6 @@ interface TopTracesWithErrorsProps {
  funnelId: string;
  stepAOrder: number;
  stepBOrder: number;
- 	steps: FunnelStepData[];
}

function TopTracesWithErrors(props: TopTracesWithErrorsProps): JSX.Element {
@@ -18,4 +18,10 @@ export const topTracesTableColumns = [
  key: 'duration_ms',
  render: (value: string): string => getYAxisFormattedValue(value, 'ms'),
},
+ {
+ 	title: 'SPAN COUNT',
+ 	dataIndex: 'span_count',
+ 	key: 'span_count',
+ 	render: (value: number): string => value.toString(),
+ },
];
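Since `topTracesTableColumns` is a plain antd column list, wiring it into a table is direct. A minimal usage sketch; the row shape is inferred from the column keys and the wrapper component is hypothetical:

```tsx
// Hypothetical preview component consuming the columns defined above.
import { Table } from 'antd';
import { topTracesTableColumns } from './utils';

interface TopTraceRow {
	trace_id: string;
	duration_ms: string;
	span_count: number;
}

function TopTracesPreview({ rows }: { rows: TopTraceRow[] }): JSX.Element {
	return (
		<Table
			columns={topTracesTableColumns}
			dataSource={rows}
			rowKey="trace_id"
			pagination={false}
		/>
	);
}

export default TopTracesPreview;
```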
@@ -14,6 +14,8 @@ export const initialStepsData: FunnelStepData[] = [
  latency_pointer: 'start',
  latency_type: undefined,
  has_errors: false,
+ 	name: '',
+ 	description: '',
},
{
  id: v4(),

@@ -27,6 +29,8 @@ export const initialStepsData: FunnelStepData[] = [
  latency_pointer: 'start',
  latency_type: LatencyOptions.P95,
  has_errors: false,
+ 	name: '',
+ 	description: '',
},
];
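Pulling together the step fields visible in this constants diff, a default step might be built with a small factory like the sketch below. Only the fields that actually appear above are used; the real `FunnelStepData` type likely has more required members, hence the cast:

```tsx
// Hedged sketch of a step factory based solely on fields visible in the diff.
import { v4 } from 'uuid';
import { FunnelStepData } from 'types/api/traceFunnels';

function makeEmptyStep(partial: Partial<FunnelStepData> = {}): FunnelStepData {
	return {
		id: v4(),
		latency_pointer: 'start',
		latency_type: undefined,
		has_errors: false,
		name: '',
		description: '',
		...partial,
	} as FunnelStepData; // cast: the full type is not shown in this diff
}
```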
@@ -1,15 +1,15 @@
import logEvent from 'api/common/logEvent';
import { ValidateFunnelResponse } from 'api/traceFunnels';
+ import { LOCALSTORAGE } from 'constants/localStorage';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { Time } from 'container/TopNav/DateTimeSelection/config';
import {
  CustomTimeType,
  Time as TimeV2,
} from 'container/TopNav/DateTimeSelectionV2/config';
- import { normalizeSteps } from 'hooks/TracesFunnels/useFunnelConfiguration';
import { useValidateFunnelSteps } from 'hooks/TracesFunnels/useFunnels';
+ import { useLocalStorage } from 'hooks/useLocalStorage';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
- import { isEqual } from 'lodash-es';
import { initialStepsData } from 'pages/TracesFunnelDetails/constants';
import {
  createContext,
@@ -41,9 +41,6 @@ interface FunnelContextType {
  handleStepChange: (index: number, newStep: Partial<FunnelStepData>) => void;
  handleStepRemoval: (index: number) => void;
  handleRunFunnel: () => void;
- 	handleSaveFunnel: () => void;
- 	triggerSave: boolean;
- 	hasUnsavedChanges: boolean;
  validationResponse:
    | SuccessResponse<ValidateFunnelResponse>
    | ErrorResponse

@@ -57,10 +54,8 @@ interface FunnelContextType {
    spanName: string,
  ) => void;
  handleRestoreSteps: (oldSteps: FunnelStepData[]) => void;
- 	isUpdatingFunnel: boolean;
- 	setIsUpdatingFunnel: Dispatch<SetStateAction<boolean>>;
- 	lastUpdatedSteps: FunnelStepData[];
- 	setLastUpdatedSteps: Dispatch<SetStateAction<FunnelStepData[]>>;
+ 	hasFunnelBeenExecuted: boolean;
+ 	setHasFunnelBeenExecuted: Dispatch<SetStateAction<boolean>>;
}

const FunnelContext = createContext<FunnelContextType | undefined>(undefined);
@@ -91,19 +86,6 @@ export function FunnelProvider({
  const funnel = data?.payload;
  const initialSteps = funnel?.steps?.length ? funnel.steps : initialStepsData;
  const [steps, setSteps] = useState<FunnelStepData[]>(initialSteps);
- 	const [triggerSave, setTriggerSave] = useState<boolean>(false);
- 	const [isUpdatingFunnel, setIsUpdatingFunnel] = useState<boolean>(false);
- 	const [lastUpdatedSteps, setLastUpdatedSteps] = useState<FunnelStepData[]>(
- 		initialSteps,
- 	);
-
- 	// Check if there are unsaved changes by comparing with initial steps from API
- 	const hasUnsavedChanges = useMemo(() => {
- 		const normalizedCurrentSteps = normalizeSteps(steps);
- 		const normalizedInitialSteps = normalizeSteps(lastUpdatedSteps);
- 		return !isEqual(normalizedCurrentSteps, normalizedInitialSteps);
- 	}, [steps, lastUpdatedSteps]);

  const { hasIncompleteStepFields, hasAllEmptyStepFields } = useMemo(
    () => ({
      hasAllEmptyStepFields: steps.every(

@@ -116,6 +98,15 @@ export function FunnelProvider({
    [steps],
  );

+ 	const [unexecutedFunnels, setUnexecutedFunnels] = useLocalStorage<string[]>(
+ 		LOCALSTORAGE.UNEXECUTED_FUNNELS,
+ 		[],
+ 	);
+
+ 	const [hasFunnelBeenExecuted, setHasFunnelBeenExecuted] = useState(
+ 		!unexecutedFunnels.includes(funnelId),
+ 	);
+
  const {
    data: validationResponse,
    isLoading: isValidationLoading,
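The `UNEXECUTED_FUNNELS` entry added above keeps a list of funnel ids in localStorage, so a funnel can remember across page reloads whether it has ever been run. A self-contained sketch of the same pattern; the hook name is hypothetical and the storage format (a JSON array of ids) is an assumption modeled on the diff's usage:

```tsx
// Hedged sketch: a localStorage-backed "has this id been executed" flag.
import { useCallback, useState } from 'react';

const readIds = (key: string): string[] =>
	JSON.parse(localStorage.getItem(key) ?? '[]');

export function useUnexecutedFlag(
	key: string,
	id: string,
): [boolean, () => void] {
	// An id is "executed" if it is absent from the stored unexecuted list.
	const [hasBeenExecuted, setHasBeenExecuted] = useState<boolean>(
		() => !readIds(key).includes(id),
	);

	// Remove the id from storage and flip the in-memory flag.
	const markExecuted = useCallback(() => {
		const remaining = readIds(key).filter((storedId) => storedId !== id);
		localStorage.setItem(key, JSON.stringify(remaining));
		setHasBeenExecuted(true);
	}, [key, id]);

	return [hasBeenExecuted, markExecuted];
}
```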
@@ -125,13 +116,7 @@ export function FunnelProvider({
  selectedTime,
  startTime,
  endTime,
- 	enabled:
- 		!!funnelId &&
- 		!!selectedTime &&
- 		!!startTime &&
- 		!!endTime &&
- 		!hasIncompleteStepFields,
- 	steps,
+ 	enabled: !!funnelId && !!selectedTime && !!startTime && !!endTime,
});

const validTracesCount = useMemo(

@@ -200,7 +185,11 @@ export function FunnelProvider({

const handleRunFunnel = useCallback(async (): Promise<void> => {
  if (validTracesCount === 0) return;
+ 	if (!hasFunnelBeenExecuted) {
+ 		setUnexecutedFunnels(unexecutedFunnels.filter((id) => id !== funnelId));
+ 		setHasFunnelBeenExecuted(true);
+ 	}
  queryClient.refetchQueries([
    REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW,
    funnelId,

@@ -226,13 +215,15 @@ export function FunnelProvider({
    funnelId,
    selectedTime,
  ]);
- }, [funnelId, queryClient, selectedTime, validTracesCount]);
-
- const handleSaveFunnel = useCallback(() => {
- 	setTriggerSave(true);
- 	// Reset the trigger after a brief moment to allow useFunnelConfiguration to pick it up
- 	setTimeout(() => setTriggerSave(false), 100);
- }, []);
+ }, [
+ 	funnelId,
+ 	hasFunnelBeenExecuted,
+ 	unexecutedFunnels,
+ 	queryClient,
+ 	selectedTime,
+ 	setUnexecutedFunnels,
+ 	validTracesCount,
+ ]);

const value = useMemo<FunnelContextType>(
  () => ({
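`handleRunFunnel` above does not hold results in state; it forces fresh data by refetching the react-query keys that the result widgets already subscribe to. A condensed sketch of that pattern; only `GET_FUNNEL_OVERVIEW` is confirmed by the diff, and the flat key layout here is simplified:

```tsx
// Hedged sketch: "run" implemented as a refetch of registered queries.
import { useCallback } from 'react';
import { useQueryClient } from 'react-query';

export function useRunFunnel(
	funnelId: string,
	selectedTime: string,
): () => Promise<void> {
	const queryClient = useQueryClient();
	return useCallback(async () => {
		// Every component subscribed to this key re-renders with fresh data.
		await queryClient.refetchQueries([
			'GET_FUNNEL_OVERVIEW',
			funnelId,
			selectedTime,
		]);
	}, [queryClient, funnelId, selectedTime]);
}
```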
@@ -248,19 +239,14 @@ export function FunnelProvider({
  handleAddStep: addNewStep,
  handleStepRemoval,
  handleRunFunnel,
- 	handleSaveFunnel,
- 	triggerSave,
  validationResponse,
  isValidateStepsLoading: isValidationLoading || isValidationFetching,
  hasIncompleteStepFields,
  hasAllEmptyStepFields,
  handleReplaceStep,
  handleRestoreSteps,
- 	hasUnsavedChanges,
- 	setIsUpdatingFunnel,
- 	isUpdatingFunnel,
- 	lastUpdatedSteps,
- 	setLastUpdatedSteps,
+ 	hasFunnelBeenExecuted,
+ 	setHasFunnelBeenExecuted,
}),
[
  funnelId,

@@ -274,8 +260,6 @@ export function FunnelProvider({
  addNewStep,
  handleStepRemoval,
  handleRunFunnel,
- 	handleSaveFunnel,
- 	triggerSave,
  validationResponse,
  isValidationLoading,
  isValidationFetching,

@@ -283,11 +267,8 @@ export function FunnelProvider({
  hasAllEmptyStepFields,
  handleReplaceStep,
  handleRestoreSteps,
- 	hasUnsavedChanges,
- 	setIsUpdatingFunnel,
- 	isUpdatingFunnel,
- 	lastUpdatedSteps,
- 	setLastUpdatedSteps,
+ 	hasFunnelBeenExecuted,
+ 	setHasFunnelBeenExecuted,
],
);
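Note how every member added to or removed from the memoized `value` object above is mirrored in the dependency array; if the two drift apart, consumers see stale context. A reduced, self-contained sketch of the pattern:

```tsx
// Hedged sketch: memoized context value whose deps mirror its members.
import { ReactNode, createContext, useMemo, useState } from 'react';

interface CounterContextType {
	count: number;
	increment: () => void;
}

const CounterContext = createContext<CounterContextType | undefined>(undefined);

export function CounterProvider({
	children,
}: {
	children: ReactNode;
}): JSX.Element {
	const [count, setCount] = useState(0);
	const increment = (): void => setCount((c) => c + 1);

	// If `count` were missing from the deps, consumers would keep seeing the
	// value captured on the first render -- the same failure mode the
	// FunnelContext diff guards against by editing both lists together.
	const value = useMemo<CounterContextType>(() => ({ count, increment }), [
		count,
	]);

	return (
		<CounterContext.Provider value={value}>{children}</CounterContext.Provider>
	);
}
```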
@@ -4,9 +4,11 @@ import { Input } from 'antd';
import logEvent from 'api/common/logEvent';
import { AxiosError } from 'axios';
import SignozModal from 'components/SignozModal/SignozModal';
+ import { LOCALSTORAGE } from 'constants/localStorage';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
import { useCreateFunnel } from 'hooks/TracesFunnels/useFunnels';
+ import { useLocalStorage } from 'hooks/useLocalStorage';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { Check, X } from 'lucide-react';

@@ -32,6 +34,11 @@ function CreateFunnel({
  const { safeNavigate } = useSafeNavigate();
  const { pathname } = useLocation();

+ 	const [unexecutedFunnels, setUnexecutedFunnels] = useLocalStorage<string[]>(
+ 		LOCALSTORAGE.UNEXECUTED_FUNNELS,
+ 		[],
+ 	);
+
  const handleCreate = (): void => {
    createFunnelMutation.mutate(
      {

@@ -54,6 +61,9 @@ function CreateFunnel({
  queryClient.invalidateQueries([REACT_QUERY_KEY.GET_FUNNELS_LIST]);

  const funnelId = data?.payload?.funnel_id;
+ 	if (funnelId) {
+ 		setUnexecutedFunnels([...unexecutedFunnels, funnelId]);
+ 	}

  onClose(funnelId);
  if (funnelId && redirectToDetails) {
@@ -2,16 +2,13 @@ import '../RenameFunnel/RenameFunnel.styles.scss';
import './DeleteFunnel.styles.scss';

import SignozModal from 'components/SignozModal/SignozModal';
- import { LOCALSTORAGE } from 'constants/localStorage';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
import { useDeleteFunnel } from 'hooks/TracesFunnels/useFunnels';
- import { useLocalStorage } from 'hooks/useLocalStorage';
import { useNotifications } from 'hooks/useNotifications';
import { Trash2, X } from 'lucide-react';
import { useQueryClient } from 'react-query';
import { useHistory } from 'react-router-dom';
- import { FunnelStepData } from 'types/api/traceFunnels';

interface DeleteFunnelProps {
  isOpen: boolean;

@@ -32,13 +29,6 @@ function DeleteFunnel({

  const history = useHistory();
  const { pathname } = history.location;

- 	// localStorage hook for funnel steps
- 	const localStorageKey = `${LOCALSTORAGE.FUNNEL_STEPS}_${funnelId}`;
- 	const [, , clearLocalStorageSavedSteps] = useLocalStorage<
- 		FunnelStepData[] | null
- 	>(localStorageKey, null);
-
  const handleDelete = (): void => {
    deleteFunnelMutation.mutate(
      {

@@ -49,7 +39,6 @@ function DeleteFunnel({
  notifications.success({
    message: 'Funnel deleted successfully',
  });
- 	clearLocalStorageSavedSteps();
  onClose();

  if (
@@ -19,7 +19,6 @@ import {
  useState,
} from 'react';
import { useQuery } from 'react-query';
- import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
import { FeatureFlagProps as FeatureFlags } from 'types/api/features/getFeaturesFlags';
import {
  LicensePlatform,

@@ -59,7 +58,6 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
  (): boolean => getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true',
);
const [org, setOrg] = useState<Organization[] | null>(null);
- const [changelog, setChangelog] = useState<ChangelogSchema | null>(null);

// if the user.id is not present, for migration older cases then we need to logout only for current logged in users!
useEffect(() => {

@@ -255,13 +253,6 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
  [org],
);

- const updateChangelog = useCallback(
- 	(payload: ChangelogSchema): void => {
- 		setChangelog(payload);
- 	},
- 	[setChangelog],
- );
-
// global event listener for AFTER_LOGIN event to start the user fetch post all actions are complete
useGlobalEventListener('AFTER_LOGIN', (event) => {
  if (event.detail) {

@@ -305,13 +296,11 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
  featureFlagsFetchError,
  orgPreferencesFetchError,
  activeLicense,
- 	changelog,
  activeLicenseRefetch,
  updateUser,
  updateOrgPreferences,
  updateUserPreferenceInContext,
  updateOrg,
- 	updateChangelog,
  versionData: versionData?.payload || null,
}),
[

@@ -330,10 +319,8 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
  orgPreferences,
  activeLicenseRefetch,
  orgPreferencesFetchError,
- 	changelog,
  updateUserPreferenceInContext,
  updateOrg,
- 	updateChangelog,
  user,
  userFetchError,
  versionData,
@@ -1,4 +1,3 @@
- import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
import APIError from 'types/api/error';
import { FeatureFlagProps as FeatureFlags } from 'types/api/features/getFeaturesFlags';
import { LicenseResModel, TrialInfo } from 'types/api/licensesV3/getActive';

@@ -27,13 +26,11 @@ export interface IAppContext {
  activeLicenseFetchError: APIError | null;
  featureFlagsFetchError: unknown;
  orgPreferencesFetchError: unknown;
- 	changelog: ChangelogSchema | null;
  activeLicenseRefetch: () => void;
  updateUser: (user: IUser) => void;
  updateOrgPreferences: (orgPreferences: OrgPreference[]) => void;
  updateUserPreferenceInContext: (userPreference: UserPreference) => void;
  updateOrg(orgId: string, updatedOrgName: string): void;
- 	updateChangelog(payload: ChangelogSchema): void;
  versionData: PayloadProps | null;
}
@@ -143,7 +143,6 @@ export function getAppContextMock(
  },
  isFetchingActiveLicense: false,
  activeLicenseFetchError: null,
- 	changelog: null,
  user: {
    accessJwt: 'some-token',
    refreshJwt: 'some-refresh-token',

@@ -237,7 +236,6 @@ export function getAppContextMock(
  updateOrg: jest.fn(),
  updateOrgPreferences: jest.fn(),
  activeLicenseRefetch: jest.fn(),
- 	updateChangelog: jest.fn(),
  versionData: {
    version: '1.0.0',
    ee: 'Y',
@@ -1,39 +0,0 @@
- export type Media = {
- 	id: number;
- 	documentId: string;
- 	ext: string;
- 	url: string;
- 	mime: string;
- 	alternativeText: string | null;
- 	[key: string]: any; // Allow other fields (e.g., mime, size) to be flexible
- };
-
- type Feature = {
- 	id: number;
- 	documentId: string;
- 	title: string;
- 	sort_order: number | null;
- 	createdAt: string;
- 	updatedAt: string;
- 	publishedAt: string;
- 	description: string;
- 	deployment_type: string | null;
- 	media: Media | null;
- };
-
- export interface ChangelogSchema {
- 	id: number;
- 	documentId: string;
- 	version: string;
- 	release_date: string;
- 	bug_fixes: string | null;
- 	maintenance: string | null;
- 	createdAt: string;
- 	updatedAt: string;
- 	publishedAt: string;
- 	features: Feature[];
- }
-
- export const SupportedImageTypes = ['.jpg', '.jpeg', '.png', '.gif', '.webp'];
-
- export const SupportedVideoTypes = ['.mp4', '.webm'];
@@ -105,7 +105,7 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
  TRACES_FUNNELS_DETAIL: ['ADMIN', 'EDITOR', 'VIEWER'],
  API_KEYS: ['ADMIN'],
  CUSTOM_DOMAIN_SETTINGS: ['ADMIN'],
- 	LOGS_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
+ 	LOGS_BASE: [],
  OLD_LOGS_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'],
  SHORTCUTS: ['ADMIN', 'EDITOR', 'VIEWER'],
  INTEGRATIONS: ['ADMIN', 'EDITOR', 'VIEWER'],

@@ -120,7 +120,4 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
  API_MONITORING: ['ADMIN', 'EDITOR', 'VIEWER'],
  WORKSPACE_ACCESS_RESTRICTED: ['ADMIN', 'EDITOR', 'VIEWER'],
  METRICS_EXPLORER_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
- 	INFRASTRUCTURE_MONITORING_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
- 	API_MONITORING_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
- 	MESSAGING_QUEUES_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
};
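`routePermission` maps each route key to the roles allowed to open it, so the change to `LOGS_BASE: []` locks that route for every role. A hedged sketch of how such a map is typically consulted; the map contents here are a reduced copy and the guard function is illustrative, not the app's actual one:

```tsx
// Hedged sketch: consulting a role map shaped like routePermission above.
type Role = 'ADMIN' | 'EDITOR' | 'VIEWER';

const routePermission: Record<string, Role[]> = {
	LOGS_BASE: [], // locked for everyone after this change
	OLD_LOGS_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'],
};

function canAccess(routeKey: string, role: Role): boolean {
	// An empty list denies every role, which is what LOGS_BASE: [] does.
	return routePermission[routeKey]?.includes(role) ?? false;
}

// canAccess('LOGS_BASE', 'ADMIN') === false
// canAccess('OLD_LOGS_EXPLORER', 'VIEWER') === true
```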
246 go.mod

@@ -8,40 +8,41 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
- github.com/SigNoz/signoz-otel-collector v0.111.43-bump-a2ff26b
+ github.com/SigNoz/signoz-otel-collector v0.111.39
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/cespare/xxhash/v2 v2.3.0
- github.com/coreos/go-oidc/v3 v3.14.1
+ github.com/coreos/go-oidc/v3 v3.11.0
github.com/dustin/go-humanize v1.0.1
github.com/go-co-op/gocron v1.30.1
github.com/go-openapi/runtime v0.28.0
github.com/go-openapi/strfmt v0.23.0
github.com/go-redis/redis/v8 v8.11.5
github.com/go-redis/redismock/v8 v8.11.5
- github.com/go-viper/mapstructure/v2 v2.2.1
+ github.com/go-viper/mapstructure/v2 v2.1.0
github.com/gojek/heimdall/v7 v7.0.3
github.com/golang-jwt/jwt/v5 v5.2.2
github.com/google/uuid v1.6.0
github.com/gorilla/handlers v1.5.1
github.com/gorilla/mux v1.8.1
- github.com/gorilla/websocket v1.5.3
+ github.com/gorilla/websocket v1.5.0
github.com/huandu/go-sqlbuilder v1.35.0
github.com/jackc/pgx/v5 v5.7.2
github.com/jmoiron/sqlx v1.3.4
github.com/json-iterator/go v1.1.12
github.com/knadh/koanf v1.5.0
- github.com/knadh/koanf/v2 v2.2.0
+ github.com/knadh/koanf/v2 v2.1.1
github.com/mailru/easyjson v0.7.7
github.com/mattn/go-sqlite3 v1.14.24
- github.com/open-telemetry/opamp-go v0.19.0
- github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.128.0
+ github.com/open-telemetry/opamp-go v0.5.0
+ github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.111.0
github.com/opentracing/opentracing-go v1.2.0
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/pkg/errors v0.9.1
- github.com/prometheus/alertmanager v0.28.1
- github.com/prometheus/client_golang v1.22.0
- github.com/prometheus/common v0.64.0
- github.com/prometheus/prometheus v0.304.1
+ github.com/prometheus/alertmanager v0.28.0
+ github.com/prometheus/client_golang v1.20.5
+ github.com/prometheus/common v0.61.0
+ github.com/prometheus/prometheus v0.300.1
github.com/rs/cors v1.11.1
github.com/russellhaering/gosaml2 v0.9.0
github.com/russellhaering/goxmldsig v1.2.0

@@ -56,48 +57,47 @@ require (
github.com/uptrace/bun v1.2.9
github.com/uptrace/bun/dialect/pgdialect v1.2.9
github.com/uptrace/bun/dialect/sqlitedialect v1.2.9
- go.opentelemetry.io/collector/confmap v1.34.0
- go.opentelemetry.io/collector/pdata v1.34.0
- go.opentelemetry.io/collector/processor v1.34.0
+ go.opentelemetry.io/collector/confmap v1.17.0
+ go.opentelemetry.io/collector/pdata v1.17.0
+ go.opentelemetry.io/collector/processor v0.111.0
go.opentelemetry.io/contrib/config v0.10.0
- go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0
- go.opentelemetry.io/otel v1.36.0
- go.opentelemetry.io/otel/metric v1.36.0
- go.opentelemetry.io/otel/sdk v1.36.0
- go.opentelemetry.io/otel/trace v1.36.0
+ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0
+ go.opentelemetry.io/otel v1.34.0
+ go.opentelemetry.io/otel/metric v1.34.0
+ go.opentelemetry.io/otel/sdk v1.34.0
+ go.opentelemetry.io/otel/trace v1.34.0
go.uber.org/multierr v1.11.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.38.0
- golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac
- golang.org/x/oauth2 v0.30.0
+ golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842
+ golang.org/x/oauth2 v0.24.0
golang.org/x/sync v0.14.0
golang.org/x/text v0.25.0
- google.golang.org/protobuf v1.36.6
+ google.golang.org/protobuf v1.36.0
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
- k8s.io/apimachinery v0.32.3
+ k8s.io/apimachinery v0.31.3
)

require (
- cloud.google.com/go/auth v0.16.1 // indirect
- cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
- cloud.google.com/go/compute/metadata v0.7.0 // indirect
- github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
- github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 // indirect
- github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
- github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
+ cloud.google.com/go/auth v0.13.0 // indirect
+ cloud.google.com/go/auth/oauth2adapt v0.2.6 // indirect
+ cloud.google.com/go/compute/metadata v0.6.0 // indirect
+ github.com/Azure/azure-sdk-for-go/sdk/azcore v1.16.0 // indirect
+ github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
+ github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
+ github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
github.com/ClickHouse/ch-go v0.63.1 // indirect
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
github.com/andybalholm/brotli v1.1.1 // indirect
github.com/armon/go-metrics v0.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
- github.com/aws/aws-sdk-go v1.55.7 // indirect
+ github.com/aws/aws-sdk-go v1.55.5 // indirect
github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 // indirect
github.com/beevik/etree v1.1.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
- github.com/cenkalti/backoff/v5 v5.0.2 // indirect
github.com/coder/quartz v0.1.2 // indirect
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect

@@ -107,10 +107,10 @@ require (
github.com/ebitengine/purego v0.8.4 // indirect
github.com/edsrzf/mmap-go v1.2.0 // indirect
github.com/elastic/lunes v0.1.0 // indirect
- github.com/expr-lang/expr v1.17.5 // indirect
+ github.com/expr-lang/expr v1.17.0 // indirect
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
- github.com/fsnotify/fsnotify v1.9.0 // indirect
+ github.com/fsnotify/fsnotify v1.8.0 // indirect
github.com/go-faster/city v1.0.1 // indirect
github.com/go-faster/errors v0.7.1 // indirect
github.com/go-jose/go-jose/v4 v4.0.5 // indirect

@@ -125,21 +125,20 @@ require (
github.com/go-openapi/spec v0.21.0 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-openapi/validate v0.24.0 // indirect
- github.com/gobwas/glob v0.2.3 // indirect
- github.com/goccy/go-json v0.10.5 // indirect
+ github.com/goccy/go-json v0.10.3 // indirect
github.com/gofrs/uuid v4.4.0+incompatible // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf // indirect
github.com/golang/protobuf v1.5.4 // indirect
- github.com/golang/snappy v1.0.0 // indirect
+ github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
github.com/google/btree v1.0.1 // indirect
- github.com/google/go-cmp v0.7.0 // indirect
- github.com/google/s2a-go v0.1.9 // indirect
- github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
- github.com/googleapis/gax-go/v2 v2.14.2 // indirect
+ github.com/google/go-cmp v0.6.0 // indirect
+ github.com/google/s2a-go v0.1.8 // indirect
+ github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
+ github.com/googleapis/gax-go/v2 v2.14.0 // indirect
github.com/gopherjs/gopherjs v1.17.2 // indirect
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
- github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect
+ github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect

@@ -157,21 +156,21 @@ require (
github.com/jessevdk/go-flags v1.6.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
- github.com/jonboulle/clockwork v0.5.0 // indirect
+ github.com/jonboulle/clockwork v0.4.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/jpillora/backoff v1.0.0 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
- github.com/klauspost/compress v1.18.0 // indirect
+ github.com/klauspost/compress v1.17.11 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/leodido/go-syslog/v4 v4.2.0 // indirect
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect
- github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
+ github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74 // indirect
github.com/magefile/mage v1.15.0 // indirect
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mdlayher/socket v0.4.1 // indirect
github.com/mdlayher/vsock v1.2.1 // indirect
- github.com/miekg/dns v1.1.65 // indirect
+ github.com/miekg/dns v1.1.62 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect

@@ -181,38 +180,33 @@ require (
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
github.com/oklog/run v1.1.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
- github.com/oklog/ulid/v2 v2.1.0 // indirect
- github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.128.0 // indirect
- github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect
- github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect
- github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 // indirect
+ github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.111.0 // indirect
github.com/paulmach/orb v0.11.1 // indirect
- github.com/pierrec/lz4/v4 v4.1.22 // indirect
+ github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
- github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
- github.com/prometheus/client_model v0.6.2 // indirect
- github.com/prometheus/exporter-toolkit v0.14.0 // indirect
- github.com/prometheus/otlptranslator v0.0.0-20250320144820-d800c8b0eb07 // indirect
- github.com/prometheus/procfs v0.16.1 // indirect
- github.com/prometheus/sigv4 v0.1.2 // indirect
- github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
+ github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c // indirect
+ github.com/prometheus/client_model v0.6.1 // indirect
+ github.com/prometheus/common/sigv4 v0.1.0 // indirect
+ github.com/prometheus/exporter-toolkit v0.13.2 // indirect
+ github.com/prometheus/procfs v0.15.1 // indirect
+ github.com/puzpuzpuz/xsync/v3 v3.5.0 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 // indirect
github.com/segmentio/asm v1.2.0 // indirect
github.com/segmentio/backo-go v1.0.1 // indirect
- github.com/shirou/gopsutil/v4 v4.25.5 // indirect
+ github.com/shirou/gopsutil/v4 v4.24.9 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 // indirect
github.com/smarty/assertions v1.15.0 // indirect
- github.com/spf13/cobra v1.9.1 // indirect
- github.com/spf13/pflag v1.0.6 // indirect
+ github.com/spf13/cobra v1.8.1 // indirect
+ github.com/spf13/pflag v1.0.5 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.0 // indirect
- github.com/tklauser/go-sysconf v0.3.15 // indirect
- github.com/tklauser/numcpus v0.10.0 // indirect
+ github.com/tklauser/go-sysconf v0.3.13 // indirect
+ github.com/tklauser/numcpus v0.7.0 // indirect
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
github.com/trivago/tgo v1.0.7 // indirect
github.com/valyala/fastjson v1.6.4 // indirect

@@ -222,79 +216,65 @@ require (
github.com/yusufpapurcu/wmi v1.2.4 // indirect
go.mongodb.org/mongo-driver v1.17.1 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
- go.opentelemetry.io/collector/component v1.34.0 // indirect
- go.opentelemetry.io/collector/component/componentstatus v0.128.0 // indirect
- go.opentelemetry.io/collector/component/componenttest v0.128.0 // indirect
- go.opentelemetry.io/collector/config/configtelemetry v0.128.0 // indirect
- go.opentelemetry.io/collector/confmap/provider/envprovider v1.34.0 // indirect
- go.opentelemetry.io/collector/confmap/provider/fileprovider v1.34.0 // indirect
- go.opentelemetry.io/collector/confmap/xconfmap v0.128.0 // indirect
- go.opentelemetry.io/collector/connector v0.128.0 // indirect
- go.opentelemetry.io/collector/connector/connectortest v0.128.0 // indirect
- go.opentelemetry.io/collector/connector/xconnector v0.128.0 // indirect
- go.opentelemetry.io/collector/consumer v1.34.0 // indirect
- go.opentelemetry.io/collector/consumer/consumererror v0.128.0 // indirect
- go.opentelemetry.io/collector/consumer/consumertest v0.128.0 // indirect
- go.opentelemetry.io/collector/consumer/xconsumer v0.128.0 // indirect
- go.opentelemetry.io/collector/exporter v0.128.0 // indirect
- go.opentelemetry.io/collector/exporter/exportertest v0.128.0 // indirect
- go.opentelemetry.io/collector/exporter/xexporter v0.128.0 // indirect
- go.opentelemetry.io/collector/extension v1.34.0 // indirect
- go.opentelemetry.io/collector/extension/extensioncapabilities v0.128.0 // indirect
- go.opentelemetry.io/collector/extension/extensiontest v0.128.0 // indirect
- go.opentelemetry.io/collector/extension/xextension v0.128.0 // indirect
- go.opentelemetry.io/collector/featuregate v1.34.0 // indirect
- go.opentelemetry.io/collector/internal/fanoutconsumer v0.128.0 // indirect
- go.opentelemetry.io/collector/internal/telemetry v0.128.0 // indirect
- go.opentelemetry.io/collector/otelcol v0.128.0 // indirect
- go.opentelemetry.io/collector/pdata/pprofile v0.128.0 // indirect
- go.opentelemetry.io/collector/pdata/testdata v0.128.0 // indirect
- go.opentelemetry.io/collector/pipeline v0.128.0 // indirect
- go.opentelemetry.io/collector/pipeline/xpipeline v0.128.0 // indirect
- go.opentelemetry.io/collector/processor/processorhelper v0.128.0 // indirect
- go.opentelemetry.io/collector/processor/processortest v0.128.0 // indirect
- go.opentelemetry.io/collector/processor/xprocessor v0.128.0 // indirect
- go.opentelemetry.io/collector/receiver v1.34.0 // indirect
- go.opentelemetry.io/collector/receiver/receiverhelper v0.128.0 // indirect
- go.opentelemetry.io/collector/receiver/receivertest v0.128.0 // indirect
- go.opentelemetry.io/collector/receiver/xreceiver v0.128.0 // indirect
- go.opentelemetry.io/collector/semconv v0.128.0 // indirect
- go.opentelemetry.io/collector/service v0.128.0 // indirect
- go.opentelemetry.io/collector/service/hostcapabilities v0.128.0 // indirect
- go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 // indirect
- go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 // indirect
- go.opentelemetry.io/contrib/otelconf v0.16.0 // indirect
- go.opentelemetry.io/contrib/propagators/b3 v1.36.0 // indirect
- go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.12.2 // indirect
- go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.12.2 // indirect
- go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.36.0 // indirect
- go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0 // indirect
- go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.36.0 // indirect
- go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.36.0 // indirect
- go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0 // indirect
- go.opentelemetry.io/otel/exporters/prometheus v0.58.0 // indirect
- go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2 // indirect
- go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0 // indirect
- go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.36.0 // indirect
- go.opentelemetry.io/otel/log v0.12.2 // indirect
- go.opentelemetry.io/otel/sdk/log v0.12.2 // indirect
- go.opentelemetry.io/otel/sdk/metric v1.36.0 // indirect
- go.opentelemetry.io/proto/otlp v1.6.0 // indirect
+ go.opentelemetry.io/collector v0.111.0 // indirect
+ go.opentelemetry.io/collector/component v0.111.0 // indirect
+ go.opentelemetry.io/collector/component/componentprofiles v0.111.0 // indirect
+ go.opentelemetry.io/collector/component/componentstatus v0.111.0 // indirect
+ go.opentelemetry.io/collector/config/configtelemetry v0.111.0 // indirect
+ go.opentelemetry.io/collector/confmap/converter/expandconverter v0.111.0 // indirect
+ go.opentelemetry.io/collector/confmap/provider/fileprovider v1.17.0 // indirect
+ go.opentelemetry.io/collector/connector v0.111.0 // indirect
+ go.opentelemetry.io/collector/connector/connectorprofiles v0.111.0 // indirect
+ go.opentelemetry.io/collector/consumer v0.111.0 // indirect
+ go.opentelemetry.io/collector/consumer/consumerprofiles v0.111.0 // indirect
+ go.opentelemetry.io/collector/consumer/consumertest v0.111.0 // indirect
+ go.opentelemetry.io/collector/exporter v0.111.0 // indirect
+ go.opentelemetry.io/collector/exporter/exporterprofiles v0.111.0 // indirect
+ go.opentelemetry.io/collector/extension v0.111.0 // indirect
+ go.opentelemetry.io/collector/extension/experimental/storage v0.111.0 // indirect
+ go.opentelemetry.io/collector/extension/extensioncapabilities v0.111.0 // indirect
+ go.opentelemetry.io/collector/featuregate v1.17.0 // indirect
+ go.opentelemetry.io/collector/internal/globalgates v0.111.0 // indirect
+ go.opentelemetry.io/collector/internal/globalsignal v0.111.0 // indirect
+ go.opentelemetry.io/collector/otelcol v0.111.0 // indirect
+ go.opentelemetry.io/collector/pdata/pprofile v0.111.0 // indirect
+ go.opentelemetry.io/collector/pdata/testdata v0.111.0 // indirect
+ go.opentelemetry.io/collector/pipeline v0.111.0 // indirect
+ go.opentelemetry.io/collector/processor/processorprofiles v0.111.0 // indirect
+ go.opentelemetry.io/collector/receiver v0.111.0 // indirect
+ go.opentelemetry.io/collector/receiver/receiverprofiles v0.111.0 // indirect
+ go.opentelemetry.io/collector/semconv v0.116.0 // indirect
+ go.opentelemetry.io/collector/service v0.111.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.56.0 // indirect
+ go.opentelemetry.io/contrib/propagators/b3 v1.30.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.30.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 // indirect
+ go.opentelemetry.io/otel/exporters/prometheus v0.52.0 // indirect
+ go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.6.0 // indirect
+ go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.30.0 // indirect
+ go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.30.0 // indirect
+ go.opentelemetry.io/otel/log v0.10.0 // indirect
+ go.opentelemetry.io/otel/sdk/log v0.10.0 // indirect
+ go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect
+ go.opentelemetry.io/proto/otlp v1.4.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/goleak v1.3.0 // indirect
- golang.org/x/mod v0.24.0 // indirect
+ golang.org/x/mod v0.22.0 // indirect
golang.org/x/net v0.40.0 // indirect
golang.org/x/sys v0.33.0 // indirect
- golang.org/x/time v0.11.0 // indirect
- golang.org/x/tools v0.33.0 // indirect
- gonum.org/v1/gonum v0.16.0 // indirect
- google.golang.org/api v0.236.0 // indirect
- google.golang.org/genproto/googleapis/api v0.0.0-20250519155744-55703ea1f237 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20250528174236-200df99c418a // indirect
- google.golang.org/grpc v1.72.2 // indirect
+ golang.org/x/time v0.8.0 // indirect
+ golang.org/x/tools v0.28.0 // indirect
+ gonum.org/v1/gonum v0.15.1 // indirect
+ google.golang.org/api v0.213.0 // indirect
+ google.golang.org/genproto/googleapis/api v0.0.0-20241216192217-9240e9c98484 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 // indirect
+ google.golang.org/grpc v1.69.0 // indirect
gopkg.in/telebot.v3 v3.3.8 // indirect
- k8s.io/client-go v0.32.3 // indirect
+ k8s.io/client-go v0.31.3 // indirect
k8s.io/klog/v2 v2.130.1 // indirect
- k8s.io/utils v0.0.0-20250321185631-1f6e0b77f77e // indirect
- sigs.k8s.io/yaml v1.4.0 // indirect
+ k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 // indirect
)
35 go.sum

@@ -62,6 +62,8 @@ cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3f
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/AfterShip/clickhouse-sql-parser v0.4.7 h1:zbdC0UooWLwBvOi5CeyCA42AWm6lMYuBVy6XnMzmF+c=
github.com/AfterShip/clickhouse-sql-parser v0.4.7/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=

@@ -98,8 +100,8 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/signoz-otel-collector v0.111.43 h1:upWUoxDl5kCE/WI5+di2oqA/wJi2NU/PRyN8zDR078c=
github.com/SigNoz/signoz-otel-collector v0.111.43/go.mod h1:iUGoKEaNQmLNptTwEz9o5kZ0ntbCMQsrV53Y2TDd1Qg=
github.com/SigNoz/signoz-otel-collector v0.111.39 h1:Dl8QqZNAsj2atxP572OzsszPK0XPpd3LLPNPRAUJ5wo=
github.com/SigNoz/signoz-otel-collector v0.111.39/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=

@@ -310,6 +312,9 @@ github.com/go-redis/redismock/v8 v8.11.5 h1:RJFIiua58hrBrSpXhnGX3on79AU3S271H4Zh
github.com/go-redis/redismock/v8 v8.11.5/go.mod h1:UaAU9dEe1C+eGr+FHV5prCWIt0hafyPWbGMEWE0UWdA=
github.com/go-resty/resty/v2 v2.13.1 h1:x+LHXBI2nMB1vqndymf26quycC4aggYJ7DECYbiz03g=
github.com/go-resty/resty/v2 v2.13.1/go.mod h1:GznXlLxkq6Nh4sU59rPmUw3VtgpO3aS96ORAI6Q7d+0=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=

@@ -317,8 +322,8 @@ github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpG
github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-zookeeper/zk v1.0.4 h1:DPzxraQx7OrPyXq2phlGlNSIyWEsAox0RJmjTseMV6I=
github.com/go-zookeeper/zk v1.0.4/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA=

@@ -560,6 +565,8 @@ github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9Y
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/jmoiron/sqlx v1.3.4 h1:wv+0IJZfL5z0uZoUjlpKgHkgaFSYD+r9CfrXjEXsO7w=
github.com/jmoiron/sqlx v1.3.4/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ=
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8=

@@ -616,6 +623,9 @@ github.com/leodido/go-syslog/v4 v4.2.0/go.mod h1:eJ8rUfDN5OS6dOkCOBYlg2a+hbAg6pJ
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b h1:11UHH39z1RhZ5dc4y4r/4koJo6IYFgTRMe/LlwRTEw0=
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b/go.mod h1:WZxr2/6a/Ar9bMDc2rN/LJrE/hF6bXE4LPyDSIxwAfg=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/linode/linodego v1.41.0 h1:GcP7JIBr9iLRJ9FwAtb9/WCT1DuPJS/xUApapfdjtiY=
github.com/linode/linodego v1.41.0/go.mod h1:Ow4/XZ0yvWBzt3iAHwchvhSx30AyLintsSMvvQ2/SJY=
github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74 h1:1KuuSOy4ZNgW0KA2oYIngXVFhQcXxhLqCVK7cBcldkk=

@@ -643,6 +653,7 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=

@@ -747,8 +758,8 @@ github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3v
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=

@@ -1127,8 +1138,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM=
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=

@@ -1230,8 +1241,8 @@ golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ
golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE=
golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=

@@ -1414,8 +1425,8 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -12,16 +12,4 @@ type Analytics interface {

  // Sends analytics messages to an analytics backend.
  Send(context.Context, ...analyticstypes.Message)
-
- 	// Tracks an event on a group level. Input is group, event name, and attributes. The user is "stats_<org_id>".
- 	TrackGroup(context.Context, string, string, map[string]any)
-
- 	// Tracks an event on a user level and attributes it with the group. Input is group, user, event name, and attributes.
- 	TrackUser(context.Context, string, string, string, map[string]any)
-
- 	// Identifies a group. Input is group, traits.
- 	IdentifyGroup(context.Context, string, map[string]any)
-
- 	// Identifies a user. Input is group, user, traits.
- 	IdentifyUser(context.Context, string, string, map[string]any)
}
@@ -24,18 +24,6 @@ func (provider *Provider) Start(_ context.Context) error {

func (provider *Provider) Send(ctx context.Context, messages ...analyticstypes.Message) {}
-
- func (provider *Provider) TrackGroup(ctx context.Context, group, event string, attributes map[string]any) {
- }
-
- func (provider *Provider) TrackUser(ctx context.Context, group, user, event string, attributes map[string]any) {
- }
-
- func (provider *Provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
- }
-
- func (provider *Provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
- }

func (provider *Provider) Stop(_ context.Context) error {
  close(provider.stopC)
  return nil
@@ -27,25 +27,7 @@ func (provider *provider) Start(_ context.Context) error {
  return nil
}

- func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.Message) {
- 	// do nothing
- }
-
- func (provider *provider) TrackGroup(ctx context.Context, group, event string, attributes map[string]any) {
- 	// do nothing
- }
-
- func (provider *provider) TrackUser(ctx context.Context, group, user, event string, attributes map[string]any) {
- 	// do nothing
- }
-
- func (provider *provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
- 	// do nothing
- }
-
- func (provider *provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
- 	// do nothing
- }
+ func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.Message) {}

func (provider *provider) Stop(_ context.Context) error {
  close(provider.stopC)
@@ -50,100 +50,6 @@ func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.M
  }
}

- func (provider *provider) TrackGroup(ctx context.Context, group, event string, properties map[string]any) {
- 	if properties == nil {
- 		provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping event", "group", group, "event", event)
- 		return
- 	}
-
- 	err := provider.client.Enqueue(analyticstypes.Track{
- 		UserId:     "stats_" + group,
- 		Event:      event,
- 		Properties: analyticstypes.NewPropertiesFromMap(properties),
- 		Context: &analyticstypes.Context{
- 			Extra: map[string]interface{}{
- 				analyticstypes.KeyGroupID: group,
- 			},
- 		},
- 	})
- 	if err != nil {
- 		provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
- 	}
- }
-
- func (provider *provider) TrackUser(ctx context.Context, group, user, event string, properties map[string]any) {
- 	if properties == nil {
- 		provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping event", "user", user, "group", group, "event", event)
- 		return
- 	}
-
- 	err := provider.client.Enqueue(analyticstypes.Track{
- 		UserId:     user,
- 		Event:      event,
- 		Properties: analyticstypes.NewPropertiesFromMap(properties),
- 		Context: &analyticstypes.Context{
- 			Extra: map[string]interface{}{
- 				analyticstypes.KeyGroupID: group,
- 			},
- 		},
- 	})
- 	if err != nil {
- 		provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
- 	}
- }
-
- func (provider *provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
- 	if traits == nil {
- 		provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping identify", "group", group)
- 		return
- 	}
-
- 	// identify the user
- 	err := provider.client.Enqueue(analyticstypes.Identify{
- 		UserId: "stats_" + group,
- 		Traits: analyticstypes.NewTraitsFromMap(traits),
- 	})
- 	if err != nil {
- 		provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
- 	}
-
- 	// identify the group using the stats user
- 	err = provider.client.Enqueue(analyticstypes.Group{
- 		UserId:  "stats_" + group,
- 		GroupId: group,
- 		Traits:  analyticstypes.NewTraitsFromMap(traits),
- 	})
- 	if err != nil {
- 		provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
- 	}
- }
-
- func (provider *provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
- 	if traits == nil {
- 		provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping identify", "user", user, "group", group)
- 		return
- 	}
-
- 	// identify the user
- 	err := provider.client.Enqueue(analyticstypes.Identify{
- 		UserId: user,
- 		Traits: analyticstypes.NewTraitsFromMap(traits),
- 	})
- 	if err != nil {
- 		provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
- 	}
-
- 	// associate the user with the group
- 	err = provider.client.Enqueue(analyticstypes.Group{
- 		UserId:  user,
- 		GroupId: group,
- 		Traits:  analyticstypes.NewTraits().Set("id", group), // A trait is required
- 	})
- 	if err != nil {
- 		provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
- 	}
- }

func (provider *provider) Stop(ctx context.Context) error {
  if err := provider.client.Close(); err != nil {
    provider.settings.Logger().WarnContext(ctx, "unable to close segment client", "err", err)
@@ -9,6 +9,7 @@ import (
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/modules/dashboard"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types/analyticstypes"
    "github.com/SigNoz/signoz/pkg/types/dashboardtypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)
@@ -44,7 +45,19 @@ func (module *module) Create(ctx context.Context, orgID valuer.UUID, createdBy s
        return nil, err
    }

    module.analytics.TrackUser(ctx, orgID.String(), creator.String(), "Dashboard Created", dashboardtypes.NewStatsFromStorableDashboards([]*dashboardtypes.StorableDashboard{storableDashboard}))
    module.analytics.Send(ctx,
        analyticstypes.Track{
            UserId:     creator.String(),
            Event:      "Dashboard Created",
            Properties: analyticstypes.NewPropertiesFromMap(dashboardtypes.NewStatsFromStorableDashboards([]*dashboardtypes.StorableDashboard{storableDashboard})),
            Context: &analyticstypes.Context{
                Extra: map[string]interface{}{
                    analyticstypes.KeyGroupID: orgID,
                },
            },
        },
    )

    return dashboard, nil
}
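For callers migrating off TrackUser, the shape of the new call is worth spelling out. This is an illustrative sketch, not a line from the diff; the event name, properties, and the userID/orgID variables are made up:

module.analytics.Send(ctx,
    analyticstypes.Track{
        UserId:     userID,          // who performed the action
        Event:      "Example Event", // hypothetical event name
        Properties: analyticstypes.NewPropertiesFromMap(map[string]any{"k": "v"}),
        Context: &analyticstypes.Context{
            // KeyGroupID scopes the event to an org, as the dashboard module does above.
            Extra: map[string]interface{}{analyticstypes.KeyGroupID: orgID},
        },
    },
)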
@@ -33,7 +33,7 @@ FROM (
        trace_id,
        minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
        minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -42,7 +42,7 @@ FROM (
            (serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[10]s)
        )
    GROUP BY trace_id
    HAVING t1_time > 0
    HAVING t1_time > 0 AND t2_time > t1_time
)
ORDER BY t1_time
LIMIT 5;`
@@ -96,7 +96,7 @@ FROM (
        minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
        minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
        minIf(timestamp, serviceName = step3.1 AND name = step3.2) AS t3_time
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -105,7 +105,7 @@ FROM (
            OR (serviceName = step3.1 AND name = step3.2 AND (contains_error_t3 = 0 OR has_error = true) %[14]s)
        )
    GROUP BY trace_id
    HAVING t1_time > 0
    HAVING t1_time > 0 AND t2_time > t1_time AND t3_time > t2_time
)
ORDER BY t1_time
LIMIT 5;`
@@ -161,7 +161,7 @@ WITH
        minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
        toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS s1_error,
        toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS s2_error
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -170,7 +170,7 @@ WITH
            (serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[12]s)
        )
    GROUP BY trace_id
    HAVING t1_time > 0
    HAVING t1_time > 0 AND t2_time > t1_time
)

, totals AS (
@@ -179,8 +179,8 @@ WITH
    count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
    count(DISTINCT CASE WHEN s1_error = 1 THEN trace_id END) AS sum_s1_error,
    count(DISTINCT CASE WHEN s2_error = 1 THEN trace_id END) AS sum_s2_error,
    avgIf((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time) AS avg_duration,
    quantileIf(0.99)((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time) AS latency
    avg((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time)) / 1e6) AS avg_duration,
    quantile(0.99)((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time)) / 1e6) AS latency
    FROM funnel
)
@@ -252,7 +252,7 @@ WITH
        toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS s1_error,
        toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS s2_error,
        toUInt8(anyIf(has_error, serviceName = step3.1 AND name = step3.2)) AS s3_error
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -261,23 +261,23 @@ WITH
            OR (serviceName = step3.1 AND name = step3.2 AND (contains_error_t3 = 0 OR has_error = true) %[17]s)
        )
    GROUP BY trace_id
    HAVING t1_time > 0
)

, totals AS (
    SELECT
        count(DISTINCT trace_id) AS total_s1_spans,
        count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
        count(DISTINCT CASE WHEN t3_time > t2_time AND t2_time > t1_time THEN trace_id END) AS total_s3_spans,
        count(DISTINCT CASE WHEN t3_time > t2_time THEN trace_id END) AS total_s3_spans,

        count(DISTINCT CASE WHEN s1_error = 1 THEN trace_id END) AS sum_s1_error,
        count(DISTINCT CASE WHEN s2_error = 1 THEN trace_id END) AS sum_s2_error,
        count(DISTINCT CASE WHEN s3_error = 1 THEN trace_id END) AS sum_s3_error,

        avgIf((toUnixTimestamp64Nano(t3_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time AND t3_time > t2_time) AS avg_funnel_duration,
        quantileIf(0.99)((toUnixTimestamp64Nano(t3_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time AND t3_time > t2_time) AS p99_funnel_latency

        avgIf((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time) AS avg_duration_12,
        quantileIf(0.99)((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time) AS latency_12,

        avgIf((toUnixTimestamp64Nano(t3_time) - toUnixTimestamp64Nano(t2_time))/1e6, t2_time > 0 AND t3_time > t2_time) AS avg_duration_23,
        quantileIf(0.99)((toUnixTimestamp64Nano(t3_time) - toUnixTimestamp64Nano(t2_time))/1e6, t2_time > 0 AND t3_time > t2_time) AS latency_23
    FROM funnel
)

@@ -285,8 +285,8 @@ SELECT
    round(if(total_s1_spans > 0, total_s3_spans * 100.0 / total_s1_spans, 0), 2) AS conversion_rate,
    total_s3_spans / nullIf(time_window_sec, 0) AS avg_rate,
    greatest(sum_s1_error, sum_s2_error, sum_s3_error) AS errors,
    avg_funnel_duration AS avg_duration,
    p99_funnel_latency AS latency
    avg_duration_23 AS avg_duration,
    latency_23 AS latency
FROM totals;
`
return fmt.Sprintf(
@@ -345,7 +345,7 @@ FROM (
        minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
        toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS t1_error,
        toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS t2_error
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -354,7 +354,7 @@ FROM (
            (serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[10]s)
        )
    GROUP BY trace_id
    HAVING t1_time > 0
    HAVING t1_time > 0 AND t2_time > t1_time
) AS funnel;
`
return fmt.Sprintf(queryTemplate,
@@ -404,8 +404,8 @@ SELECT
    count(DISTINCT CASE WHEN t1_error = 1 THEN trace_id END) AS total_s1_errored_spans,
    count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
    count(DISTINCT CASE WHEN t2_time > t1_time AND t2_error = 1 THEN trace_id END) AS total_s2_errored_spans,
    count(DISTINCT CASE WHEN t3_time > t2_time AND t2_time > t1_time THEN trace_id END) AS total_s3_spans,
    count(DISTINCT CASE WHEN t3_time > t2_time AND t2_time > t1_time AND t3_error = 1 THEN trace_id END) AS total_s3_errored_spans
    count(DISTINCT CASE WHEN t2_time > t1_time AND t3_time > t2_time THEN trace_id END) AS total_s3_spans,
    count(DISTINCT CASE WHEN t2_time > t1_time AND t3_time > t2_time AND t3_error = 1 THEN trace_id END) AS total_s3_errored_spans
FROM (
    SELECT
        trace_id,
@@ -415,7 +415,7 @@ FROM (
        toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS t1_error,
        toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS t2_error,
        toUInt8(anyIf(has_error, serviceName = step3.1 AND name = step3.2)) AS t3_error
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -424,7 +424,7 @@ FROM (
            OR (serviceName = step3.1 AND name = step3.2 AND (contains_error_t3 = 0 OR has_error = true) %[14]s)
        )
    GROUP BY trace_id
    HAVING t1_time > 0
    HAVING t1_time > 0 AND t2_time > t1_time AND t3_time > t2_time
) AS funnel;
`
return fmt.Sprintf(queryTemplate,
@@ -477,7 +477,7 @@ FROM (
        minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
        minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
        count() AS span_count
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -539,7 +539,7 @@ FROM (
        toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS t1_error,
        toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS t2_error,
        count() AS span_count
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -625,7 +625,7 @@ FROM (
        minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
        toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS s1_error,
        toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS s2_error
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -634,7 +634,6 @@ FROM (
            (serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[12]s)
        )
    GROUP BY trace_id
    HAVING t1_time > 0
    ) AS funnel
) AS totals;
`
@@ -702,7 +701,7 @@ WITH
        toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS s1_error,
        toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS s2_error,
        toUInt8(anyIf(has_error, serviceName = step3.1 AND name = step3.2)) AS s3_error
    FROM signoz_traces.distributed_signoz_index_v3
    FROM signoz_traces.signoz_index_v3
    WHERE
        timestamp BETWEEN start_ts AND end_ts
        AND (
@@ -711,7 +710,6 @@ WITH
            OR (serviceName = step3.1 AND name = step3.2 AND (contains_error_t3 = 0 OR has_error = true) %[17]s)
        )
    GROUP BY trace_id
    HAVING t1_time > 0
)
`
@@ -1,31 +0,0 @@
package impluser

import (
    "context"

    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type getter struct {
    store types.UserStore
}

func NewGetter(store types.UserStore) user.Getter {
    return &getter{store: store}
}

func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
    gettableUsers, err := module.store.ListUsers(ctx, orgID.StringValue())
    if err != nil {
        return nil, err
    }

    users := make([]*types.User, len(gettableUsers))
    for i, user := range gettableUsers {
        users[i] = &user.User
    }

    return users, nil
}
@@ -326,7 +326,7 @@ func (h *handler) UpdateUser(w http.ResponseWriter, r *http.Request) {

    user.UpdatedAt = time.Now()

    updatedUser, err := h.module.UpdateUser(ctx, claims.OrgID, id, &user, claims.UserID)
    updatedUser, err := h.module.UpdateUser(ctx, claims.OrgID, id, &user)
    if err != nil {
        render.Error(w, err)
        return
@@ -347,7 +347,7 @@ func (h *handler) DeleteUser(w http.ResponseWriter, r *http.Request) {
        return
    }

    if err := h.module.DeleteUser(ctx, claims.OrgID, id, claims.UserID); err != nil {
    if err := h.module.DeleteUser(ctx, claims.OrgID, id); err != nil {
        render.Error(w, err)
        return
    }
@@ -18,6 +18,7 @@ import (
    "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/query-service/telemetry"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/analyticstypes"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/emailtypes"
    "github.com/SigNoz/signoz/pkg/valuer"
@@ -134,9 +135,35 @@ func (m *Module) CreateUserWithPassword(ctx context.Context, user *types.User, p
        return nil, err
    }

    traitsOrProperties := types.NewTraitsFromUser(user)
    m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traitsOrProperties)
    m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Created", traitsOrProperties)
    m.analytics.Send(ctx,
        analyticstypes.Identify{
            UserId: user.ID.String(),
            Traits: analyticstypes.
                NewTraits().
                SetName(user.DisplayName).
                SetEmail(user.Email).
                Set("role", user.Role).
                SetCreatedAt(user.CreatedAt),
        },
        analyticstypes.Group{
            UserId:  user.ID.String(),
            GroupId: user.OrgID,
        },
        analyticstypes.Track{
            UserId: user.ID.String(),
            Event:  "User Created",
            Properties: analyticstypes.NewPropertiesFromMap(map[string]any{
                "role":  user.Role,
                "email": user.Email,
                "name":  user.DisplayName,
            }),
            Context: &analyticstypes.Context{
                Extra: map[string]interface{}{
                    analyticstypes.KeyGroupID: user.OrgID,
                },
            },
        },
    )

    return user, nil
}
@@ -146,9 +173,35 @@ func (m *Module) CreateUser(ctx context.Context, user *types.User) error {
        return err
    }

    traitsOrProperties := types.NewTraitsFromUser(user)
    m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traitsOrProperties)
    m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Created", traitsOrProperties)
    m.analytics.Send(ctx,
        analyticstypes.Identify{
            UserId: user.ID.String(),
            Traits: analyticstypes.
                NewTraits().
                SetName(user.DisplayName).
                SetEmail(user.Email).
                Set("role", user.Role).
                SetCreatedAt(user.CreatedAt),
        },
        analyticstypes.Group{
            UserId:  user.ID.String(),
            GroupId: user.OrgID,
        },
        analyticstypes.Track{
            UserId: user.ID.String(),
            Event:  "User Created",
            Properties: analyticstypes.NewPropertiesFromMap(map[string]any{
                "role":  user.Role,
                "email": user.Email,
                "name":  user.DisplayName,
            }),
            Context: &analyticstypes.Context{
                Extra: map[string]interface{}{
                    analyticstypes.KeyGroupID: user.OrgID,
                },
            },
        },
    )

    return nil
}
@@ -173,22 +226,11 @@ func (m *Module) ListUsers(ctx context.Context, orgID string) ([]*types.Gettable
    return m.store.ListUsers(ctx, orgID)
}

func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error) {
    user, err := m.store.UpdateUser(ctx, orgID, id, user)
    if err != nil {
        return nil, err
    }

    traits := types.NewTraitsFromUser(user)
    m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traits)

    traits["updated_by"] = updatedBy
    m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Updated", traits)

    return user, nil
func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error) {
    return m.store.UpdateUser(ctx, orgID, id, user)
}

func (m *Module) DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error {
func (m *Module) DeleteUser(ctx context.Context, orgID string, id string) error {
    user, err := m.store.GetUserByID(ctx, orgID, id)
    if err != nil {
        return err
@@ -208,15 +250,7 @@ func (m *Module) DeleteUser(ctx context.Context, orgID string, id string, delete
        return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
    }

    if err := m.store.DeleteUser(ctx, orgID, user.ID.StringValue()); err != nil {
        return err
    }

    m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Deleted", map[string]any{
        "deleted_by": deletedBy,
    })

    return nil
    return m.store.DeleteUser(ctx, orgID, user.ID.StringValue())
}

func (m *Module) CreateResetPasswordToken(ctx context.Context, userID string) (*types.ResetPasswordRequest, error) {
@@ -610,16 +644,10 @@ func (m *Module) Register(ctx context.Context, req *types.PostableRegisterOrgAnd
}

func (m *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
    stats := make(map[string]any)
    count, err := m.store.CountByOrgID(ctx, orgID)
    if err == nil {
        stats["user.count"] = count
    if err != nil {
        return nil, err
    }

    count, err = m.store.CountAPIKeyByOrgID(ctx, orgID)
    if err == nil {
        stats["factor.api_key.count"] = count
    }

    return stats, nil
    return map[string]any{"user.count": count}, nil
}
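Note on the Collect hunk: the two variants differ in failure semantics, not just in what they count. The older body records whichever counters succeed and never returns an error; the newer one fails fast and reports only the user count. A minimal sketch of the fail-fast shape, with the store dependency narrowed to an assumed single-method interface (the helper itself is hypothetical):

type userCounter interface {
    CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error)
}

func collectUserStats(ctx context.Context, store userCounter, orgID valuer.UUID) (map[string]any, error) {
    count, err := store.CountByOrgID(ctx, orgID)
    if err != nil {
        return nil, err // fail fast: never return a partial stats map
    }
    return map[string]any{"user.count": count}, nil
}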
@@ -826,21 +826,3 @@ func (store *store) CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64,

    return int64(count), nil
}

func (store *store) CountAPIKeyByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) {
    apiKey := new(types.StorableAPIKey)

    count, err := store.
        sqlstore.
        BunDB().
        NewSelect().
        Model(apiKey).
        Join("JOIN users ON users.id = storable_api_key.user_id").
        Where("org_id = ?", orgID).
        Count(ctx)
    if err != nil {
        return 0, err
    }

    return int64(count), nil
}
@@ -28,8 +28,8 @@ type Module interface {
    GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*types.GettableUser, error)
    GetUsersByRoleInOrg(ctx context.Context, orgID string, role types.Role) ([]*types.GettableUser, error)
    ListUsers(ctx context.Context, orgID string) ([]*types.GettableUser, error)
    UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error)
    DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error
    UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error)
    DeleteUser(ctx context.Context, orgID string, id string) error

    // login
    GetAuthenticatedUser(ctx context.Context, orgID, email, password, refreshToken string) (*types.User, error)
@@ -70,11 +70,6 @@ type Module interface {
    statsreporter.StatsCollector
}

type Getter interface {
    // Get gets the users based on the given id
    ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)
}

type Handler interface {
    // invite
    CreateInvite(http.ResponseWriter, *http.Request)
@@ -2,7 +2,7 @@ package clickhouseprometheus

import (
    "encoding/json"
    "github.com/prometheus/common/model"

    "github.com/prometheus/prometheus/prompb"
)

@@ -17,12 +17,7 @@ func unmarshalLabels(s string) ([]prompb.Label, string, error) {
    for n, v := range m {
        if n == "__name__" {
            metricName = v
        } else {
            if !model.IsValidLegacyMetricName(n) {
                n = `"` + n + `"`
            }
        }

        res = append(res, prompb.Label{
            Name:  n,
            Value: v,
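The removed else branch implements Prometheus's quoting rule for non-legacy (UTF-8) label names. A standalone sketch of the same rule; the helper itself is hypothetical, but model.IsValidLegacyMetricName is the exact check the diff uses:

// quoteIfUTF8 wraps a label name in quotes when it is not a valid legacy
// Prometheus name (e.g. it contains dots or unicode), so UTF-8 names
// survive serialization.
func quoteIfUTF8(name string) string {
    if !model.IsValidLegacyMetricName(name) {
        return `"` + name + `"`
    }
    return name
}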
@@ -33,12 +33,6 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
        return
    }

    // Validate the query request
    if err := queryRangeRequest.Validate(); err != nil {
        render.Error(rw, err)
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(rw, err)
@@ -117,7 +117,7 @@ func (bc *bucketCache) GetMissRanges(
}

// Put stores fresh query results in the cache
func (bc *bucketCache) Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step, fresh *qbtypes.Result) {
func (bc *bucketCache) Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, fresh *qbtypes.Result) {
    // Get query window
    startMs, endMs := q.Window()

@@ -159,36 +159,8 @@ func (bc *bucketCache) Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Que
        return
    }

    // Adjust start and end times to only cache complete intervals
    cachableStartMs := startMs
    stepMs := uint64(step.Duration.Milliseconds())

    // If we have a step interval, adjust boundaries to only cache complete intervals
    if stepMs > 0 {
        // If start is not aligned, round up to next step boundary (first complete interval)
        if startMs%stepMs != 0 {
            cachableStartMs = ((startMs / stepMs) + 1) * stepMs
        }

        // If end is not aligned, round down to previous step boundary (last complete interval)
        if cachableEndMs%stepMs != 0 {
            cachableEndMs = (cachableEndMs / stepMs) * stepMs
        }

        // If after adjustment we have no complete intervals, don't cache
        if cachableStartMs >= cachableEndMs {
            bc.logger.DebugContext(ctx, "no complete intervals to cache",
                "original_start", startMs,
                "original_end", endMs,
                "adjusted_start", cachableStartMs,
                "adjusted_end", cachableEndMs,
                "step", stepMs)
            return
        }
    }

    // Convert trimmed result to buckets with adjusted boundaries
    freshBuckets := bc.resultToBuckets(ctx, trimmedResult, cachableStartMs, cachableEndMs)
    // Convert trimmed result to buckets
    freshBuckets := bc.resultToBuckets(ctx, trimmedResult, startMs, cachableEndMs)

    // If no fresh buckets and no existing data, don't cache
    if len(freshBuckets) == 0 && len(existingData.Buckets) == 0 {
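The alignment arithmetic deleted above is easy to misread in diff form, so here it is as a self-contained sketch with worked numbers (the function is illustrative; the diff inlines this logic in Put):

// alignToStep shrinks [startMs, endMs) to complete step intervals.
// With stepMs = 300000 (5m), a window of [12:02, 12:58] becomes
// [12:05, 12:55]: the partial leading and trailing intervals are
// dropped so only complete buckets are cached. ok is false when
// nothing remains after the adjustment.
func alignToStep(startMs, endMs, stepMs uint64) (alignedStart, alignedEnd uint64, ok bool) {
    if stepMs == 0 {
        return startMs, endMs, true // no step: cache the whole window
    }
    if startMs%stepMs != 0 {
        startMs = ((startMs / stepMs) + 1) * stepMs // round start up
    }
    endMs = (endMs / stepMs) * stepMs // round end down
    return startMs, endMs, startMs < endMs
}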
@@ -513,12 +485,6 @@ func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cac
    }

    if existingSeries, ok := seriesMap[key]; ok {
        // Merge values, avoiding duplicate timestamps
        timestampMap := make(map[int64]bool)
        for _, v := range existingSeries.Values {
            timestampMap[v.Timestamp] = true
        }

        // Pre-allocate capacity for merged values
        newCap := len(existingSeries.Values) + len(series.Values)
        if cap(existingSeries.Values) < newCap {
@@ -526,13 +492,7 @@ func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cac
            copy(newValues, existingSeries.Values)
            existingSeries.Values = newValues
        }

        // Only add values with new timestamps
        for _, v := range series.Values {
            if !timestampMap[v.Timestamp] {
                existingSeries.Values = append(existingSeries.Values, v)
            }
        }
        existingSeries.Values = append(existingSeries.Values, series.Values...)
    } else {
        // New series
        seriesMap[key] = series
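The two merge strategies in this hunk differ when cached buckets overlap at a boundary: the map-based variant keeps one value per timestamp, while the plain append keeps both copies. A simplified sketch (point is a stand-in for *qbtypes.TimeSeriesValue):

type point struct {
    ts  int64
    val float64
}

// mergeDedup keeps the first value seen per timestamp, as the map-based
// variant above does; append(existing, incoming...) would duplicate any
// timestamp present in both slices.
func mergeDedup(existing, incoming []point) []point {
    seen := make(map[int64]bool, len(existing))
    for _, p := range existing {
        seen[p.ts] = true
    }
    for _, p := range incoming {
        if !seen[p.ts] {
            existing = append(existing, p)
        }
    }
    return existing
}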
@@ -737,7 +697,7 @@ func (bc *bucketCache) trimResultToFluxBoundary(result *qbtypes.Result, fluxBoun
    switch result.Type {
    case qbtypes.RequestTypeTimeSeries:
        // Trim time series data
        if tsData, ok := result.Value.(*qbtypes.TimeSeriesData); ok && tsData != nil {
        if tsData, ok := result.Value.(*qbtypes.TimeSeriesData); ok {
            trimmedData := &qbtypes.TimeSeriesData{
                QueryName: tsData.QueryName,
            }
@@ -30,7 +30,7 @@ func BenchmarkBucketCache_GetMissRanges(b *testing.B) {
            endMs:       uint64((i + 1) * 10000),
        }
        result := createBenchmarkResult(query.startMs, query.endMs, 1000)
        bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
        bc.Put(ctx, orgID, query, result)
    }

    // Create test queries with varying cache hit patterns
@@ -121,7 +121,7 @@ func BenchmarkBucketCache_Put(b *testing.B) {

            for i := 0; i < b.N; i++ {
                for j := 0; j < tc.numQueries; j++ {
                    bc.Put(ctx, orgID, queries[j], qbtypes.Step{Duration: 1000 * time.Millisecond}, results[j])
                    bc.Put(ctx, orgID, queries[j], results[j])
                }
            }
        })
@@ -259,7 +259,7 @@ func BenchmarkBucketCache_ConcurrentOperations(b *testing.B) {
            endMs:       uint64((i + 1) * 10000),
        }
        result := createBenchmarkResult(query.startMs, query.endMs, 1000)
        bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
        bc.Put(ctx, orgID, query, result)
    }

    b.ResetTimer()
@@ -284,7 +284,7 @@ func BenchmarkBucketCache_ConcurrentOperations(b *testing.B) {
                endMs:       uint64((i + 1) * 10000),
            }
            result := createBenchmarkResult(query.startMs, query.endMs, 1000)
            bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
            bc.Put(ctx, orgID, query, result)
        case 2: // Partial read
            query := &mockQuery{
                fingerprint: fmt.Sprintf("concurrent-query-%d", i%100),
@@ -1,117 +0,0 @@
package querier

import (
    "context"
    "testing"
    "time"

    "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestBucketCacheStepAlignment(t *testing.T) {
    ctx := context.Background()
    orgID := valuer.UUID{}
    cache := createTestCache(t)
    bc := NewBucketCache(instrumentationtest.New().ToProviderSettings(), cache, time.Hour, 5*time.Minute)

    // Test with 5-minute step
    step := qbtypes.Step{Duration: 5 * time.Minute}

    // Query from 12:02 to 12:58 (both unaligned)
    // Complete intervals: 12:05 to 12:55
    query := &mockQuery{
        fingerprint: "test-step-alignment",
        startMs:     1672563720000, // 12:02
        endMs:       1672567080000, // 12:58
    }

    result := &qbtypes.Result{
        Type: qbtypes.RequestTypeTimeSeries,
        Value: &qbtypes.TimeSeriesData{
            QueryName: "test",
            Aggregations: []*qbtypes.AggregationBucket{
                {
                    Index: 0,
                    Series: []*qbtypes.TimeSeries{
                        {
                            Labels: []*qbtypes.Label{
                                {Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"},
                            },
                            Values: []*qbtypes.TimeSeriesValue{
                                {Timestamp: 1672563720000, Value: 1, Partial: true}, // 12:02
                                {Timestamp: 1672563900000, Value: 2},                // 12:05
                                {Timestamp: 1672564200000, Value: 2.5},              // 12:10
                                {Timestamp: 1672564500000, Value: 2.6},              // 12:15
                                {Timestamp: 1672566600000, Value: 2.9},              // 12:50
                                {Timestamp: 1672566900000, Value: 3},                // 12:55
                                {Timestamp: 1672567080000, Value: 4, Partial: true}, // 12:58
                            },
                        },
                    },
                },
            },
        },
    }

    // Put result in cache
    bc.Put(ctx, orgID, query, step, result)

    // Get cached data
    cached, missing := bc.GetMissRanges(ctx, orgID, query, step)

    // Should have cached data
    require.NotNil(t, cached)

    // Log the missing ranges to debug
    t.Logf("Missing ranges: %v", missing)
    for i, r := range missing {
        t.Logf("Missing range %d: From=%d, To=%d", i, r.From, r.To)
    }

    // Should have 2 missing ranges for partial intervals
    require.Len(t, missing, 2)

    // First partial: 12:02 to 12:05
    assert.Equal(t, uint64(1672563720000), missing[0].From)
    assert.Equal(t, uint64(1672563900000), missing[0].To)

    // Second partial: 12:55 to 12:58
    assert.Equal(t, uint64(1672566900000), missing[1].From, "Second missing range From")
    assert.Equal(t, uint64(1672567080000), missing[1].To, "Second missing range To")
}

func TestBucketCacheNoStepInterval(t *testing.T) {
    ctx := context.Background()
    orgID := valuer.UUID{}
    cache := createTestCache(t)
    bc := NewBucketCache(instrumentationtest.New().ToProviderSettings(), cache, time.Hour, 5*time.Minute)

    // Test with no step (stepMs = 0)
    step := qbtypes.Step{Duration: 0}

    query := &mockQuery{
        fingerprint: "test-no-step",
        startMs:     1672563720000,
        endMs:       1672567080000,
    }

    result := &qbtypes.Result{
        Type: qbtypes.RequestTypeTimeSeries,
        Value: &qbtypes.TimeSeriesData{
            QueryName:    "test",
            Aggregations: []*qbtypes.AggregationBucket{{Index: 0, Series: []*qbtypes.TimeSeries{}}},
        },
    }

    // Should cache the entire range when step is 0
    bc.Put(ctx, orgID, query, step, result)

    cached, missing := bc.GetMissRanges(ctx, orgID, query, step)
    assert.NotNil(t, cached)
    assert.Len(t, missing, 0)
}
@@ -128,7 +128,7 @@ func TestBucketCache_GetMissRanges_EmptyCache(t *testing.T) {
        endMs:       5000,
    }

    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000})

    assert.Nil(t, cached)
    assert.Len(t, missing, 1)
@@ -159,13 +159,13 @@ func TestBucketCache_Put_And_Get(t *testing.T) {
    }

    // Store in cache
    bc.Put(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(context.Background(), valuer.UUID{}, query, result)

    // Wait a bit for cache to be written
    time.Sleep(10 * time.Millisecond)

    // Retrieve from cache
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000})

    assert.NotNil(t, cached.Value)
    assert.Len(t, missing, 0)
@@ -193,7 +193,7 @@ func TestBucketCache_PartialHit(t *testing.T) {
        Type:  qbtypes.RequestTypeTimeSeries,
        Value: createTestTimeSeries("A", 1000, 3000, 1000),
    }
    bc.Put(context.Background(), valuer.UUID{}, query1, qbtypes.Step{Duration: 1000 * time.Millisecond}, result1)
    bc.Put(context.Background(), valuer.UUID{}, query1, result1)

    // Wait for cache write
    time.Sleep(10 * time.Millisecond)
@@ -205,7 +205,7 @@ func TestBucketCache_PartialHit(t *testing.T) {
        endMs:       5000,
    }

    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query2, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query2, qbtypes.Step{Duration: 1000})

    // Should have cached data
    assert.NotNil(t, cached.Value)
@@ -226,7 +226,7 @@ func TestBucketCache_MultipleBuckets(t *testing.T) {
        startMs:     1000,
        endMs:       2000,
    }
    bc.Put(context.Background(), valuer.UUID{}, query1, qbtypes.Step{Duration: 100 * time.Millisecond}, &qbtypes.Result{
    bc.Put(context.Background(), valuer.UUID{}, query1, &qbtypes.Result{
        Type:  qbtypes.RequestTypeTimeSeries,
        Value: createTestTimeSeries("A", 1000, 2000, 100),
    })
@@ -236,7 +236,7 @@ func TestBucketCache_MultipleBuckets(t *testing.T) {
        startMs:     3000,
        endMs:       4000,
    }
    bc.Put(context.Background(), valuer.UUID{}, query2, qbtypes.Step{Duration: 100 * time.Millisecond}, &qbtypes.Result{
    bc.Put(context.Background(), valuer.UUID{}, query2, &qbtypes.Result{
        Type:  qbtypes.RequestTypeTimeSeries,
        Value: createTestTimeSeries("A", 3000, 4000, 100),
    })
@@ -251,7 +251,7 @@ func TestBucketCache_MultipleBuckets(t *testing.T) {
        endMs:       4500,
    }

    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query3, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query3, qbtypes.Step{Duration: 1000})

    // Should have cached data
    assert.NotNil(t, cached.Value)
@@ -284,13 +284,13 @@ func TestBucketCache_FluxInterval(t *testing.T) {
    }

    // This should not be cached due to flux interval
    bc.Put(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(context.Background(), valuer.UUID{}, query, result)

    // Wait a bit
    time.Sleep(10 * time.Millisecond)

    // Try to get the data
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000})

    // Should have no cached data
    assert.Nil(t, cached)
@@ -354,7 +354,7 @@ func TestBucketCache_MergeTimeSeriesResults(t *testing.T) {
        startMs:     1000,
        endMs:       3000,
    }
    bc.Put(context.Background(), valuer.UUID{}, query1, qbtypes.Step{Duration: 1000 * time.Millisecond}, &qbtypes.Result{
    bc.Put(context.Background(), valuer.UUID{}, query1, &qbtypes.Result{
        Type: qbtypes.RequestTypeTimeSeries,
        Value: &qbtypes.TimeSeriesData{
            QueryName: "A",
@@ -370,7 +370,7 @@ func TestBucketCache_MergeTimeSeriesResults(t *testing.T) {
        startMs:     3000,
        endMs:       5000,
    }
    bc.Put(context.Background(), valuer.UUID{}, query2, qbtypes.Step{Duration: 1000 * time.Millisecond}, &qbtypes.Result{
    bc.Put(context.Background(), valuer.UUID{}, query2, &qbtypes.Result{
        Type: qbtypes.RequestTypeTimeSeries,
        Value: &qbtypes.TimeSeriesData{
            QueryName: "A",
@@ -390,7 +390,7 @@ func TestBucketCache_MergeTimeSeriesResults(t *testing.T) {
        endMs:       5000,
    }

    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query3, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query3, qbtypes.Step{Duration: 1000})

    // Should have no missing ranges
    assert.Len(t, missing, 0)
@@ -445,10 +445,10 @@ func TestBucketCache_RawData(t *testing.T) {
        Value: rawData,
    }

    bc.Put(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(context.Background(), valuer.UUID{}, query, result)
    time.Sleep(10 * time.Millisecond)

    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000})

    // Raw data should not be cached
    assert.Nil(t, cached)
@@ -485,10 +485,10 @@ func TestBucketCache_ScalarData(t *testing.T) {
        Value: scalarData,
    }

    bc.Put(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(context.Background(), valuer.UUID{}, query, result)
    time.Sleep(10 * time.Millisecond)

    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000})

    // Scalar data should not be cached
    assert.Nil(t, cached)
@@ -513,11 +513,11 @@ func TestBucketCache_EmptyFingerprint(t *testing.T) {
        Value: createTestTimeSeries("A", 1000, 5000, 1000),
    }

    bc.Put(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(context.Background(), valuer.UUID{}, query, result)
    time.Sleep(10 * time.Millisecond)

    // Should still be able to retrieve
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000})
    assert.NotNil(t, cached.Value)
    assert.Len(t, missing, 0)
}
@@ -568,7 +568,7 @@ func TestBucketCache_ConcurrentAccess(t *testing.T) {
                Type:  qbtypes.RequestTypeTimeSeries,
                Value: createTestTimeSeries(fmt.Sprintf("Q%d", id), query.startMs, query.endMs, 100),
            }
            bc.Put(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 100 * time.Microsecond}, result)
            bc.Put(context.Background(), valuer.UUID{}, query, result)
            done <- true
        }(i)
    }
@@ -581,7 +581,7 @@ func TestBucketCache_ConcurrentAccess(t *testing.T) {
                startMs:     uint64(id * 1000),
                endMs:       uint64((id + 1) * 1000),
            }
            bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
            bc.GetMissRanges(context.Background(), valuer.UUID{}, query, qbtypes.Step{Duration: 1000})
            done <- true
        }(i)
    }
@@ -628,10 +628,10 @@ func TestBucketCache_GetMissRanges_FluxInterval(t *testing.T) {
        },
    }

    bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, cachedResult)
    bc.Put(ctx, orgID, query, cachedResult)

    // Get miss ranges
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000})
    assert.NotNil(t, cached)
    t.Logf("Missing ranges: %+v, query range: %d-%d", missing, query.startMs, query.endMs)

@@ -690,10 +690,10 @@ func TestBucketCache_Put_FluxIntervalTrimming(t *testing.T) {
    }

    // Put the result
    bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(ctx, orgID, query, result)

    // Retrieve cached data
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000})

    // Should have cached data
    assert.NotNil(t, cached)
@@ -760,10 +760,10 @@ func TestBucketCache_Put_EntireRangeInFluxInterval(t *testing.T) {
    }

    // Put the result - should not cache anything
    bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(ctx, orgID, query, result)

    // Try to get cached data - should have no cached data
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000})

    // Should have no cached value
    assert.Nil(t, cached)
@@ -785,6 +785,18 @@ func TestBucketCache_EmptyDataHandling(t *testing.T) {
        shouldCache bool
        description string
    }{
        {
            name: "truly_empty_time_series",
            result: &qbtypes.Result{
                Type: qbtypes.RequestTypeTimeSeries,
                Value: &qbtypes.TimeSeriesData{
                    QueryName:    "A",
                    Aggregations: []*qbtypes.AggregationBucket{},
                },
            },
            shouldCache: false,
            description: "No aggregations means truly empty - should not cache",
        },
        {
            name: "filtered_empty_time_series",
            result: &qbtypes.Result{
@@ -866,16 +878,17 @@ func TestBucketCache_EmptyDataHandling(t *testing.T) {
            }

            // Put the result
            bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, tt.result)
            bc.Put(ctx, orgID, query, tt.result)

            // Wait a bit for cache to be written
            time.Sleep(10 * time.Millisecond)

            // Try to get cached data
            cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
            cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000})

            if tt.shouldCache {
                assert.NotNil(t, cached, tt.description)
                assert.Len(t, missing, 0, "Should have no missing ranges when data is cached")
            } else {
                assert.Nil(t, cached, tt.description)
                assert.Len(t, missing, 1, "Should have entire range as missing when data is not cached")
@@ -931,13 +944,13 @@ func TestBucketCache_PartialValues(t *testing.T) {
    }

    // Put the result
    bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(ctx, orgID, query, result)

    // Wait for cache to be written
    time.Sleep(10 * time.Millisecond)

    // Get cached data
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000})

    // Should have cached data
    assert.NotNil(t, cached)
@@ -1001,13 +1014,13 @@ func TestBucketCache_AllPartialValues(t *testing.T) {
    }

    // Put the result
    bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(ctx, orgID, query, result)

    // Wait for cache to be written
    time.Sleep(10 * time.Millisecond)

    // Get cached data
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000})

    // When all values are partial and filtered out, the result is cached as empty
    // This prevents re-querying for the same misaligned time range
@@ -1062,7 +1075,7 @@ func TestBucketCache_FilteredCachedResults(t *testing.T) {
    }

    // Cache the wide range
    bc.Put(ctx, orgID, query1, qbtypes.Step{Duration: 1000 * time.Millisecond}, result1)
    bc.Put(ctx, orgID, query1, result1)
    time.Sleep(10 * time.Millisecond)

    // Now query for a smaller range (2000-3500ms)
@@ -1073,7 +1086,7 @@ func TestBucketCache_FilteredCachedResults(t *testing.T) {
    }

    // Get cached data - should be filtered to requested range
    cached, missing := bc.GetMissRanges(ctx, orgID, query2, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(ctx, orgID, query2, qbtypes.Step{Duration: 1000})

    // Should have no missing ranges
    assert.Len(t, missing, 0)
@@ -1233,7 +1246,7 @@ func TestBucketCache_PartialValueDetection(t *testing.T) {
        }

        // Put the result
        bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
        bc.Put(ctx, orgID, query, result)
        time.Sleep(10 * time.Millisecond)

        // Get cached data
@@ -1287,7 +1300,7 @@ func TestBucketCache_PartialValueDetection(t *testing.T) {
        }

        // Put the result
        bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
        bc.Put(ctx, orgID, query, result)
        time.Sleep(10 * time.Millisecond)

        // Get cached data
@@ -1339,7 +1352,7 @@ func TestBucketCache_PartialValueDetection(t *testing.T) {
        }

        // Put the result
        bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
        bc.Put(ctx, orgID, query, result)
        time.Sleep(10 * time.Millisecond)

        // Get cached data
@@ -1396,11 +1409,11 @@ func TestBucketCache_NoCache(t *testing.T) {
    }

    // Put the result in cache
    bc.Put(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond}, result)
    bc.Put(ctx, orgID, query, result)
    time.Sleep(10 * time.Millisecond)

    // Verify data is cached
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000 * time.Millisecond})
    cached, missing := bc.GetMissRanges(ctx, orgID, query, qbtypes.Step{Duration: 1000})
    assert.NotNil(t, cached)
    assert.Len(t, missing, 0)
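One thing to keep in mind when reading the Step edits in these tests: assuming Step.Duration is a time.Duration (which the 1000 * time.Millisecond spelling implies), a bare integer literal counts nanoseconds, so the two forms are not equivalent:

d1 := qbtypes.Step{Duration: 1000 * time.Millisecond} // one second
d2 := qbtypes.Step{Duration: 1000}                    // 1000ns, i.e. 1µs
fmt.Println(d1.Duration, d2.Duration)                 // prints "1s 1µs"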
@@ -118,10 +118,6 @@ func (q *builderQuery[T]) Fingerprint() string {
        parts = append(parts, fmt.Sprintf("having=%s", q.spec.Having.Expression))
    }

    if q.spec.ShiftBy != 0 {
        parts = append(parts, fmt.Sprintf("shiftby=%d", q.spec.ShiftBy))
    }

    return strings.Join(parts, "&")
}
@@ -208,14 +204,7 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,

    // Pass query window and step for partial value detection
    queryWindow := &qbtypes.TimeRange{From: q.fromMS, To: q.toMS}

    kind := q.kind
    // all metric queries are time series then reduced if required
    if q.spec.Signal == telemetrytypes.SignalMetrics {
        kind = qbtypes.RequestTypeTimeSeries
    }

    payload, err := consume(rows, kind, queryWindow, q.spec.StepInterval, q.spec.Name)
    payload, err := consume(rows, q.kind, queryWindow, q.spec.StepInterval, q.spec.Name)
    if err != nil {
        return nil, err
    }
@@ -235,18 +224,16 @@ func (q *builderQuery[T]) executeWindowList(ctx context.Context) (*qbtypes.Resul
    isAsc := len(q.spec.Order) > 0 &&
        strings.ToLower(string(q.spec.Order[0].Direction.StringValue())) == "asc"

    fromMS, toMS := q.fromMS, q.toMS

    // Adjust [fromMS,toMS] window if a cursor was supplied
    if cur := strings.TrimSpace(q.spec.Cursor); cur != "" {
        if ts, err := decodeCursor(cur); err == nil {
            if isAsc {
                if uint64(ts) >= fromMS {
                    fromMS = uint64(ts + 1)
                if uint64(ts) >= q.fromMS {
                    q.fromMS = uint64(ts + 1)
                }
            } else { // DESC
                if uint64(ts) <= toMS {
                    toMS = uint64(ts - 1)
                if uint64(ts) <= q.toMS {
                    q.toMS = uint64(ts - 1)
                }
            }
        }
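The hunk above also changes where the cursor adjustment is stored: one variant narrows a local copy of the window, the other writes back into q.fromMS/q.toMS. The underlying keyset-pagination rule is the same either way; a sketch (names illustrative, cursorTS assumed to be the last delivered timestamp in milliseconds):

// nextWindow excludes the row the cursor points at, so the next page
// resumes strictly after (asc) or strictly before (desc) it.
func nextWindow(fromMS, toMS, cursorTS uint64, asc bool) (uint64, uint64) {
    if asc && cursorTS >= fromMS {
        fromMS = cursorTS + 1
    }
    if !asc && cursorTS <= toMS {
        toMS = cursorTS - 1
    }
    return fromMS, toMS
}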
@@ -265,16 +252,7 @@ func (q *builderQuery[T]) executeWindowList(ctx context.Context) (*qbtypes.Resul
    totalBytes := uint64(0)
    start := time.Now()

    // Get buckets and reverse them for ascending order
    buckets := makeBuckets(fromMS, toMS)
    if isAsc {
        // Reverse the buckets for ascending order
        for i, j := 0, len(buckets)-1; i < j; i, j = i+1, j-1 {
            buckets[i], buckets[j] = buckets[j], buckets[i]
        }
    }

    for _, r := range buckets {
    for _, r := range makeBuckets(q.fromMS, q.toMS) {
        q.spec.Offset = 0
        q.spec.Limit = need
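Per TestMakeBucketsOrder in the deleted file below, makeBuckets yields buckets newest-to-oldest, so the deleted branch reversed them before walking an ascending list query. The reversal it used is the standard in-place swap, shown here as a generic helper for clarity (the deleted code inlined the same loop over its bucket slice):

func reverse[T any](s []T) {
    for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 {
        s[i], s[j] = s[j], s[i]
    }
}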
@@ -1,131 +0,0 @@
package querier

import (
    "strings"
    "testing"

    "github.com/SigNoz/signoz/pkg/querybuilder"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/stretchr/testify/assert"
)

func TestBuilderQueryFingerprint(t *testing.T) {
    tests := []struct {
        name           string
        query          *builderQuery[qbtypes.MetricAggregation]
        expectInKey    []string
        notExpectInKey []string
    }{
        {
            name: "fingerprint includes shiftby when ShiftBy field is set",
            query: &builderQuery[qbtypes.MetricAggregation]{
                kind: qbtypes.RequestTypeTimeSeries,
                spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                    Signal:  telemetrytypes.SignalMetrics,
                    ShiftBy: 3600,
                    Functions: []qbtypes.Function{
                        {
                            Name: qbtypes.FunctionNameTimeShift,
                            Args: []qbtypes.FunctionArg{
                                {Value: "3600"},
                            },
                        },
                    },
                },
            },
            expectInKey:    []string{"shiftby=3600"},
            notExpectInKey: []string{"functions=", "timeshift", "absolute"},
        },
        {
            name: "fingerprint includes shiftby but not other functions",
            query: &builderQuery[qbtypes.MetricAggregation]{
                kind: qbtypes.RequestTypeTimeSeries,
                spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                    Signal:  telemetrytypes.SignalMetrics,
                    ShiftBy: 3600,
                    Functions: []qbtypes.Function{
                        {
                            Name: qbtypes.FunctionNameTimeShift,
                            Args: []qbtypes.FunctionArg{
                                {Value: "3600"},
                            },
                        },
                        {
                            Name: qbtypes.FunctionNameAbsolute,
                        },
                    },
                },
            },
            expectInKey:    []string{"shiftby=3600"},
            notExpectInKey: []string{"functions=", "absolute"},
        },
        {
            name: "no shiftby in fingerprint when ShiftBy is zero",
            query: &builderQuery[qbtypes.MetricAggregation]{
                kind: qbtypes.RequestTypeTimeSeries,
                spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                    Signal:  telemetrytypes.SignalMetrics,
                    ShiftBy: 0,
                    Functions: []qbtypes.Function{
                        {
                            Name: qbtypes.FunctionNameAbsolute,
                        },
                    },
                },
            },
            expectInKey:    []string{},
            notExpectInKey: []string{"shiftby=", "functions=", "absolute"},
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            fingerprint := tt.query.Fingerprint()
            for _, expected := range tt.expectInKey {
                assert.True(t, strings.Contains(fingerprint, expected),
                    "Expected fingerprint to contain '%s', got: %s", expected, fingerprint)
            }
            for _, notExpected := range tt.notExpectInKey {
                assert.False(t, strings.Contains(fingerprint, notExpected),
                    "Expected fingerprint NOT to contain '%s', got: %s", notExpected, fingerprint)
            }
        })
    }
}

func TestMakeBucketsOrder(t *testing.T) {
    // Test that makeBuckets returns buckets in reverse chronological order by default
    // Using milliseconds as input - need > 1 hour range to get multiple buckets
    now := uint64(1700000000000) // Some timestamp in ms
    startMS := now
    endMS := now + uint64(10*60*60*1000) // 10 hours later

    buckets := makeBuckets(startMS, endMS)

    // Should have multiple buckets for a 10 hour range
    assert.True(t, len(buckets) > 1, "Should have multiple buckets for 10 hour range, got %d", len(buckets))

    // Log buckets for debugging
    t.Logf("Generated %d buckets:", len(buckets))
    for i, b := range buckets {
        durationMs := (b.toNS - b.fromNS) / 1e6
        t.Logf("Bucket %d: duration=%dms", i, durationMs)
    }

    // Verify buckets are in reverse chronological order (newest to oldest)
    for i := 0; i < len(buckets)-1; i++ {
        assert.True(t, buckets[i].toNS > buckets[i+1].toNS,
            "Bucket %d end should be after bucket %d end", i, i+1)
        assert.Equal(t, buckets[i].fromNS, buckets[i+1].toNS,
            "Bucket %d start should equal bucket %d end (continuous buckets)", i, i+1)
    }

    // First bucket should end at endNS (converted to nanoseconds)
    expectedEndNS := querybuilder.ToNanoSecs(endMS)
    assert.Equal(t, expectedEndNS, buckets[0].toNS)

    // Last bucket should start at startNS (converted to nanoseconds)
    expectedStartNS := querybuilder.ToNanoSecs(startMS)
    assert.Equal(t, expectedStartNS, buckets[len(buckets)-1].fromNS)
}
@@ -176,7 +176,7 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
            lblVals = append(lblVals, *val)
            lblObjs = append(lblObjs, &qbtypes.Label{
                Key:   telemetrytypes.TelemetryFieldKey{Name: name},
                Value: *val,
                Value: val,
            })

        default:
@@ -227,9 +227,8 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
        }
    }
    if maxAgg < 0 {
        return &qbtypes.TimeSeriesData{
            QueryName: queryName,
        }, nil
        //nolint:nilnil
        return nil, nil // empty result-set
    }

    buckets := make([]*qbtypes.AggregationBucket, maxAgg+1)
@@ -320,9 +319,8 @@ func readAsScalar(rows driver.Rows, queryName string) (*qbtypes.ScalarData, erro
    }

    return &qbtypes.ScalarData{
        QueryName: queryName,
        Columns:   cd,
        Data:      data,
        Columns: cd,
        Data:    data,
    }, nil
}
@@ -17,5 +17,5 @@ type BucketCache interface {
     // cached portion + list of gaps to fetch
     GetMissRanges(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step) (cached *qbtypes.Result, missing []*qbtypes.TimeRange)
     // store fresh buckets for future hits
-    Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step, fresh *qbtypes.Result)
-}
+    Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, fresh *qbtypes.Result)
+}

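To make the contract concrete, here is a minimal cache-aside sketch built on this interface as it looks after the change. `executeOne` and `mergeAll` are hypothetical helpers invented for illustration, not the repository's implementation; the querier's real flow lives in executeWithCache further below.

func queryWithCache(ctx context.Context, cache BucketCache, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step) (*qbtypes.Result, error) {
    // Serve as much as possible from cached buckets.
    cached, missing := cache.GetMissRanges(ctx, orgID, q, step)
    if len(missing) == 0 {
        return cached, nil // full cache hit
    }
    // Execute the query only for the uncovered time ranges.
    fresh := make([]*qbtypes.Result, 0, len(missing))
    for _, tr := range missing {
        r, err := executeOne(ctx, q, tr) // hypothetical per-range executor
        if err != nil {
            return nil, err
        }
        fresh = append(fresh, r)
    }
    // Merge cached and fresh buckets, then store for future hits.
    result := mergeAll(cached, fresh) // hypothetical merge helper
    cache.Put(ctx, orgID, q, result)  // Put no longer takes a step argument after this change
    return result, nil
}
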
@@ -1,652 +0,0 @@
package querier

import (
    "context"
    "fmt"
    "sort"

    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

// queryInfo holds common query properties
type queryInfo struct {
    Name     string
    Disabled bool
    Step     qbtypes.Step
}

// getqueryInfo extracts common info from any query type
func getqueryInfo(spec any) queryInfo {
    switch s := spec.(type) {
    case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
        return queryInfo{Name: s.Name, Disabled: s.Disabled, Step: s.StepInterval}
    case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
        return queryInfo{Name: s.Name, Disabled: s.Disabled, Step: s.StepInterval}
    case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
        return queryInfo{Name: s.Name, Disabled: s.Disabled, Step: s.StepInterval}
    case qbtypes.QueryBuilderFormula:
        return queryInfo{Name: s.Name, Disabled: false}
    case qbtypes.PromQuery:
        return queryInfo{Name: s.Name, Disabled: s.Disabled, Step: s.Step}
    case qbtypes.ClickHouseQuery:
        return queryInfo{Name: s.Name, Disabled: s.Disabled}
    }
    return queryInfo{}
}

// getQueryName is a convenience function when only name is needed
func getQueryName(spec any) string {
    return getqueryInfo(spec).Name
}

func (q *querier) postProcessResults(ctx context.Context, results map[string]any, req *qbtypes.QueryRangeRequest) (map[string]any, error) {
    // Convert results to typed format for processing
    typedResults := make(map[string]*qbtypes.Result)
    for name, result := range results {
        typedResults[name] = &qbtypes.Result{
            Value: result,
        }
    }

    for _, query := range req.CompositeQuery.Queries {
        switch spec := query.Spec.(type) {
        case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
            if result, ok := typedResults[spec.Name]; ok {
                result = postProcessBuilderQuery(q, result, spec, req)
                typedResults[spec.Name] = result
            }
        case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
            if result, ok := typedResults[spec.Name]; ok {
                result = postProcessBuilderQuery(q, result, spec, req)
                typedResults[spec.Name] = result
            }
        case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
            if result, ok := typedResults[spec.Name]; ok {
                result = postProcessMetricQuery(q, result, spec, req)
                typedResults[spec.Name] = result
            }
        }
    }

    // Apply formula calculations
    typedResults = q.applyFormulas(ctx, typedResults, req)

    // Filter out disabled queries
    typedResults = q.filterDisabledQueries(typedResults, req)

    // Apply table formatting for UI if requested
    if req.FormatOptions != nil && req.FormatOptions.FormatTableResultForUI && req.RequestType == qbtypes.RequestTypeScalar {
        // Format results as a table - this merges all queries into a single table
        tableResult := q.formatScalarResultsAsTable(typedResults, req)

        // Return the table under the first query's name so it gets included in results
        if len(req.CompositeQuery.Queries) > 0 {
            firstQueryName := getQueryName(req.CompositeQuery.Queries[0].Spec)
            if firstQueryName != "" && tableResult["table"] != nil {
                // Return table under first query name
                return map[string]any{firstQueryName: tableResult["table"]}, nil
            }
        }

        return tableResult, nil
    }

    // Convert back to map[string]any
    finalResults := make(map[string]any)
    for name, result := range typedResults {
        finalResults[name] = result.Value
    }

    return finalResults, nil
}

// postProcessBuilderQuery applies postprocessing to a single builder query result
func postProcessBuilderQuery[T any](
    q *querier,
    result *qbtypes.Result,
    query qbtypes.QueryBuilderQuery[T],
    _ *qbtypes.QueryRangeRequest,
) *qbtypes.Result {

    // Apply functions
    if len(query.Functions) > 0 {
        result = q.applyFunctions(result, query.Functions)
    }

    return result
}

// postProcessMetricQuery applies postprocessing to a metric query result
func postProcessMetricQuery(
    q *querier,
    result *qbtypes.Result,
    query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
    req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {

    if query.Limit > 0 {
        result = q.applySeriesLimit(result, query.Limit, query.Order)
    }

    if len(query.Functions) > 0 {
        result = q.applyFunctions(result, query.Functions)
    }

    // Apply reduce to for scalar request type
    if req.RequestType == qbtypes.RequestTypeScalar {
        if len(query.Aggregations) > 0 && query.Aggregations[0].ReduceTo != qbtypes.ReduceToUnknown {
            result = q.applyMetricReduceTo(result, query.Aggregations[0].ReduceTo)
        }
    }

    return result
}

// applyMetricReduceTo applies reduce to operation using the metric's ReduceTo field
func (q *querier) applyMetricReduceTo(result *qbtypes.Result, reduceOp qbtypes.ReduceTo) *qbtypes.Result {
    tsData, ok := result.Value.(*qbtypes.TimeSeriesData)
    if !ok {
        return result
    }

    if tsData != nil {
        for _, agg := range tsData.Aggregations {
            for i, series := range agg.Series {
                // Use the FunctionReduceTo helper
                reducedSeries := qbtypes.FunctionReduceTo(series, reduceOp)
                agg.Series[i] = reducedSeries
            }
        }
    }

    scalarData := convertTimeSeriesDataToScalar(tsData, tsData.QueryName)
    result.Value = scalarData

    return result
}

// applySeriesLimit limits the number of series in the result
func (q *querier) applySeriesLimit(result *qbtypes.Result, limit int, orderBy []qbtypes.OrderBy) *qbtypes.Result {
    tsData, ok := result.Value.(*qbtypes.TimeSeriesData)
    if !ok {
        return result
    }

    if tsData != nil {
        for _, agg := range tsData.Aggregations {
            // Use the ApplySeriesLimit function from querybuildertypes
            agg.Series = qbtypes.ApplySeriesLimit(agg.Series, orderBy, limit)
        }
    }

    return result
}

// applyFunctions applies functions to time series data
func (q *querier) applyFunctions(result *qbtypes.Result, functions []qbtypes.Function) *qbtypes.Result {
    tsData, ok := result.Value.(*qbtypes.TimeSeriesData)
    if !ok {
        return result
    }

    if tsData != nil {
        for _, agg := range tsData.Aggregations {
            for i, series := range agg.Series {
                agg.Series[i] = qbtypes.ApplyFunctions(functions, series)
            }
        }
    }

    return result
}

// applyFormulas processes formula queries in the composite query
func (q *querier) applyFormulas(ctx context.Context, results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]*qbtypes.Result {
    // Collect formula queries
    formulaQueries := make(map[string]qbtypes.QueryBuilderFormula)

    for _, query := range req.CompositeQuery.Queries {
        if query.Type == qbtypes.QueryTypeFormula {
            if formula, ok := query.Spec.(qbtypes.QueryBuilderFormula); ok {
                formulaQueries[formula.Name] = formula
            }
        }
    }

    // Process each formula
    for name, formula := range formulaQueries {
        // Check if we're dealing with time series or scalar data
        if req.RequestType == qbtypes.RequestTypeTimeSeries {
            result := q.processTimeSeriesFormula(ctx, results, formula, req)
            if result != nil {
                results[name] = result
            }
        }
    }

    return results
}

// processTimeSeriesFormula handles formula evaluation for time series data
func (q *querier) processTimeSeriesFormula(
    ctx context.Context,
    results map[string]*qbtypes.Result,
    formula qbtypes.QueryBuilderFormula,
    _ *qbtypes.QueryRangeRequest,
) *qbtypes.Result {
    // Prepare time series data for formula evaluation
    timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)

    // Extract time series data from results
    for queryName, result := range results {
        if tsData, ok := result.Value.(*qbtypes.TimeSeriesData); ok {
            timeSeriesData[queryName] = tsData
        }
    }

    // Create formula evaluator
    // TODO(srikanthccv): add conditional default zero
    canDefaultZero := make(map[string]bool)
    evaluator, err := qbtypes.NewFormulaEvaluator(formula.Expression, canDefaultZero)
    if err != nil {
        q.logger.ErrorContext(ctx, "failed to create formula evaluator", "error", err, "formula", formula.Name)
        return nil
    }

    // Evaluate the formula
    formulaSeries, err := evaluator.EvaluateFormula(timeSeriesData)
    if err != nil {
        q.logger.ErrorContext(ctx, "failed to evaluate formula", "error", err, "formula", formula.Name)
        return nil
    }

    // Create result for formula
    formulaResult := &qbtypes.TimeSeriesData{
        QueryName: formula.Name,
        Aggregations: []*qbtypes.AggregationBucket{
            {
                Index:  0,
                Series: formulaSeries,
            },
        },
    }

    // Apply functions if any
    result := &qbtypes.Result{
        Value: formulaResult,
    }

    if len(formula.Functions) > 0 {
        result = q.applyFunctions(result, formula.Functions)
    }

    return result
}

// filterDisabledQueries removes results for disabled queries
func (q *querier) filterDisabledQueries(results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]*qbtypes.Result {
    filtered := make(map[string]*qbtypes.Result)

    for _, query := range req.CompositeQuery.Queries {
        info := getqueryInfo(query.Spec)
        if !info.Disabled {
            if result, ok := results[info.Name]; ok {
                filtered[info.Name] = result
            }
        }
    }

    return filtered
}

// formatScalarResultsAsTable formats scalar results as a unified table for UI display
func (q *querier) formatScalarResultsAsTable(results map[string]*qbtypes.Result, _ *qbtypes.QueryRangeRequest) map[string]any {
    if len(results) == 0 {
        return map[string]any{"table": &qbtypes.ScalarData{}}
    }

    // Convert all results to ScalarData first
    scalarResults := make(map[string]*qbtypes.ScalarData)
    for name, result := range results {
        if sd, ok := result.Value.(*qbtypes.ScalarData); ok {
            scalarResults[name] = sd
        } else if tsData, ok := result.Value.(*qbtypes.TimeSeriesData); ok {
            scalarResults[name] = convertTimeSeriesDataToScalar(tsData, name)
        }
    }

    // If single result already has multiple queries, just deduplicate
    if len(scalarResults) == 1 {
        for _, sd := range scalarResults {
            if hasMultipleQueries(sd) {
                return map[string]any{"table": deduplicateRows(sd)}
            }
        }
    }

    // Otherwise merge all results
    merged := mergeScalarData(scalarResults)
    return map[string]any{"table": merged}
}

// convertTimeSeriesDataToScalar converts time series to scalar format
func convertTimeSeriesDataToScalar(tsData *qbtypes.TimeSeriesData, queryName string) *qbtypes.ScalarData {
    if tsData == nil || len(tsData.Aggregations) == 0 {
        return &qbtypes.ScalarData{QueryName: queryName}
    }

    columns := []*qbtypes.ColumnDescriptor{}

    // Add group columns from first series
    if len(tsData.Aggregations[0].Series) > 0 {
        for _, label := range tsData.Aggregations[0].Series[0].Labels {
            columns = append(columns, &qbtypes.ColumnDescriptor{
                TelemetryFieldKey: label.Key,
                QueryName:         queryName,
                Type:              qbtypes.ColumnTypeGroup,
            })
        }
    }

    // Add aggregation columns
    for _, agg := range tsData.Aggregations {
        name := agg.Alias
        if name == "" {
            name = fmt.Sprintf("__result_%d", agg.Index)
        }
        columns = append(columns, &qbtypes.ColumnDescriptor{
            TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: name},
            QueryName:         queryName,
            AggregationIndex:  int64(agg.Index),
            Meta:              agg.Meta,
            Type:              qbtypes.ColumnTypeAggregation,
        })
    }

    // Build rows
    data := [][]any{}
    for seriesIdx, series := range tsData.Aggregations[0].Series {
        row := make([]any, len(columns))

        // Add group values
        for i, label := range series.Labels {
            row[i] = label.Value
        }

        // Add aggregation values (last value)
        groupColCount := len(series.Labels)
        for aggIdx, agg := range tsData.Aggregations {
            if seriesIdx < len(agg.Series) && len(agg.Series[seriesIdx].Values) > 0 {
                lastValue := agg.Series[seriesIdx].Values[len(agg.Series[seriesIdx].Values)-1].Value
                row[groupColCount+aggIdx] = lastValue
            } else {
                row[groupColCount+aggIdx] = "n/a"
            }
        }

        data = append(data, row)
    }

    return &qbtypes.ScalarData{
        QueryName: queryName,
        Columns:   columns,
        Data:      data,
    }
}

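To see what the conversion produces, a single grouped series reduces to one table row whose aggregation cell holds the series' last value. A hedged sketch follows; the literal struct shapes mirror the code above but are assumptions about the qbtypes types:

// Hypothetical input: one aggregation bucket, one series grouped by service.name.
ts := &qbtypes.TimeSeriesData{
    QueryName: "A",
    Aggregations: []*qbtypes.AggregationBucket{{
        Index: 0,
        Series: []*qbtypes.TimeSeries{{
            Labels: []*qbtypes.Label{{
                Key:   telemetrytypes.TelemetryFieldKey{Name: "service.name"},
                Value: "api",
            }},
            Values: []*qbtypes.TimeSeriesValue{
                {Timestamp: 1000, Value: 40},
                {Timestamp: 2000, Value: 42}, // only this last value survives
            },
        }},
    }},
}
sd := convertTimeSeriesDataToScalar(ts, "A")
// Expected shape:
//   Columns: service.name (group), __result_0 (aggregation)
//   Data:    [["api", 42]]
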
// hasMultipleQueries checks if ScalarData contains columns from multiple queries
func hasMultipleQueries(sd *qbtypes.ScalarData) bool {
    queries := make(map[string]bool)
    for _, col := range sd.Columns {
        if col.Type == qbtypes.ColumnTypeAggregation && col.QueryName != "" {
            queries[col.QueryName] = true
        }
    }
    return len(queries) > 1
}

// deduplicateRows removes duplicate rows based on group columns
func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
    // Find group column indices
    groupIndices := []int{}
    for i, col := range sd.Columns {
        if col.Type == qbtypes.ColumnTypeGroup {
            groupIndices = append(groupIndices, i)
        }
    }

    // Build unique rows map
    uniqueRows := make(map[string][]any)
    for _, row := range sd.Data {
        key := buildRowKey(row, groupIndices)
        if existing, found := uniqueRows[key]; found {
            // Merge non-n/a values
            for i, val := range row {
                if existing[i] == "n/a" && val != "n/a" {
                    existing[i] = val
                }
            }
        } else {
            rowCopy := make([]any, len(row))
            copy(rowCopy, row)
            uniqueRows[key] = rowCopy
        }
    }

    // Convert back to slice
    data := make([][]any, 0, len(uniqueRows))
    for _, row := range uniqueRows {
        data = append(data, row)
    }

    // Sort by first aggregation column
    sortByFirstAggregation(data, sd.Columns)

    return &qbtypes.ScalarData{
        Columns: sd.Columns,
        Data:    data,
    }
}

// mergeScalarData merges multiple scalar data results
func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData {
    // Collect unique group columns
    groupCols := []string{}
    groupColMap := make(map[string]*qbtypes.ColumnDescriptor)

    for _, sd := range results {
        for _, col := range sd.Columns {
            if col.Type == qbtypes.ColumnTypeGroup {
                if _, exists := groupColMap[col.Name]; !exists {
                    groupColMap[col.Name] = col
                    groupCols = append(groupCols, col.Name)
                }
            }
        }
    }

    // Build final columns
    columns := []*qbtypes.ColumnDescriptor{}

    // Add group columns
    for _, name := range groupCols {
        columns = append(columns, groupColMap[name])
    }

    // Add aggregation columns from each query (sorted by query name)
    queryNames := make([]string, 0, len(results))
    for name := range results {
        queryNames = append(queryNames, name)
    }
    sort.Strings(queryNames)

    for _, queryName := range queryNames {
        sd := results[queryName]
        for _, col := range sd.Columns {
            if col.Type == qbtypes.ColumnTypeAggregation {
                columns = append(columns, col)
            }
        }
    }

    // Merge rows
    rowMap := make(map[string][]any)

    for queryName, sd := range results {
        // Create index mappings
        groupMap := make(map[string]int)
        for i, col := range sd.Columns {
            if col.Type == qbtypes.ColumnTypeGroup {
                groupMap[col.Name] = i
            }
        }

        // Process each row
        for _, row := range sd.Data {
            key := buildKeyFromGroupCols(row, groupMap, groupCols)

            if _, exists := rowMap[key]; !exists {
                // Initialize new row
                newRow := make([]any, len(columns))
                // Set group values
                for i, colName := range groupCols {
                    if idx, ok := groupMap[colName]; ok && idx < len(row) {
                        newRow[i] = row[idx]
                    } else {
                        newRow[i] = "n/a"
                    }
                }
                // Initialize all aggregations to n/a
                for i := len(groupCols); i < len(columns); i++ {
                    newRow[i] = "n/a"
                }
                rowMap[key] = newRow
            }

            // Set aggregation values for this query
            mergedRow := rowMap[key]
            colIdx := len(groupCols)
            for _, col := range columns[len(groupCols):] {
                if col.QueryName == queryName {
                    // Find the value in the original row
                    for i, origCol := range sd.Columns {
                        if origCol.Type == qbtypes.ColumnTypeAggregation &&
                            origCol.AggregationIndex == col.AggregationIndex {
                            if i < len(row) {
                                mergedRow[colIdx] = row[i]
                            }
                            break
                        }
                    }
                }
                colIdx++
            }
        }
    }

    // Convert to slice
    data := make([][]any, 0, len(rowMap))
    for _, row := range rowMap {
        data = append(data, row)
    }

    // Sort by first aggregation column
    sortByFirstAggregation(data, columns)

    return &qbtypes.ScalarData{
        Columns: columns,
        Data:    data,
    }
}

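For intuition, the merge joins per-query tables on their group columns and back-fills gaps with "n/a". A hedged walkthrough with assumed values:

// Query A yields columns [service.name, count]   with rows ["api", 10], ["web", 3]
// Query B yields columns [service.name, p99]     with rows ["api", 2.5]
// mergeScalarData(map[string]*qbtypes.ScalarData{"A": a, "B": b}) then produces:
//   Columns: [service.name, count (A), p99 (B)]
//   Data:    [["api", 10, 2.5],
//             ["web", 3, "n/a"]]  // "web" never appeared in B, so its cell stays "n/a"
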
// buildRowKey builds a unique key from row values at specified indices
func buildRowKey(row []any, indices []int) string {
    parts := make([]string, len(indices))
    for i, idx := range indices {
        if idx < len(row) {
            parts[i] = fmt.Sprintf("%v", row[idx])
        } else {
            parts[i] = "n/a"
        }
    }
    return fmt.Sprintf("%v", parts)
}

// buildKeyFromGroupCols builds a key from group column values
func buildKeyFromGroupCols(row []any, groupMap map[string]int, groupCols []string) string {
    parts := make([]string, len(groupCols))
    for i, colName := range groupCols {
        if idx, ok := groupMap[colName]; ok && idx < len(row) {
            parts[i] = fmt.Sprintf("%v", row[idx])
        } else {
            parts[i] = "n/a"
        }
    }
    return fmt.Sprintf("%v", parts)
}

// sortByFirstAggregation sorts data by the first aggregation column (descending)
func sortByFirstAggregation(data [][]any, columns []*qbtypes.ColumnDescriptor) {
    // Find first aggregation column
    aggIdx := -1
    for i, col := range columns {
        if col.Type == qbtypes.ColumnTypeAggregation {
            aggIdx = i
            break
        }
    }

    if aggIdx < 0 {
        return
    }

    sort.SliceStable(data, func(i, j int) bool {
        return compareValues(data[i][aggIdx], data[j][aggIdx]) > 0
    })
}

// compareValues compares two values for sorting (handles n/a and numeric types)
func compareValues(a, b any) int {
    // Handle n/a values
    if a == "n/a" && b == "n/a" {
        return 0
    }
    if a == "n/a" {
        return -1
    }
    if b == "n/a" {
        return 1
    }

    // Compare numeric values
    aFloat, aOk := toFloat64(a)
    bFloat, bOk := toFloat64(b)

    if aOk && bOk {
        if aFloat > bFloat {
            return 1
        } else if aFloat < bFloat {
            return -1
        }
        return 0
    }

    // Non-numeric or mixed types are treated as equal (no string comparison is attempted)
    return 0
}

// toFloat64 attempts to convert a value to float64
func toFloat64(v any) (float64, bool) {
    switch val := v.(type) {
    case float64:
        return val, true
    case int64:
        return float64(val), true
    case int:
        return float64(val), true
    case int32:
        return float64(val), true
    }
    return 0, false
}

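Taken together, these helpers give a stable descending sort in which "n/a" cells sink to the bottom. A small hedged check; the sample rows and columns are assumptions:

data := [][]any{
    {"svc-a", "n/a"},
    {"svc-b", float64(7)},
    {"svc-c", float64(12)},
}
columns := []*qbtypes.ColumnDescriptor{
    {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}, Type: qbtypes.ColumnTypeGroup},
    {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "count"}, Type: qbtypes.ColumnTypeAggregation},
}
sortByFirstAggregation(data, columns)
// data is now: ["svc-c", 12], ["svc-b", 7], ["svc-a", "n/a"]
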
@@ -5,14 +5,12 @@ import (
 "fmt"
 "log/slog"
 "slices"
 "strconv"
 "sync"

 "github.com/SigNoz/signoz/pkg/errors"
 "github.com/SigNoz/signoz/pkg/factory"
 "github.com/SigNoz/signoz/pkg/prometheus"
 "github.com/SigNoz/signoz/pkg/telemetrystore"
 "github.com/SigNoz/signoz/pkg/types/metrictypes"
 "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 "golang.org/x/exp/maps"

@@ -56,82 +54,8 @@ func New(
     }
 }

-// extractShiftFromBuilderQuery extracts the shift value from timeShift function if present
-func extractShiftFromBuilderQuery[T any](spec qbtypes.QueryBuilderQuery[T]) int64 {
-    for _, fn := range spec.Functions {
-        if fn.Name == qbtypes.FunctionNameTimeShift && len(fn.Args) > 0 {
-            switch v := fn.Args[0].Value.(type) {
-            case float64:
-                return int64(v)
-            case int64:
-                return v
-            case int:
-                return int64(v)
-            case string:
-                if shiftFloat, err := strconv.ParseFloat(v, 64); err == nil {
-                    return int64(shiftFloat)
-                }
-            }
-        }
-    }
-    return 0
-}
-
-// adjustTimeRangeForShift adjusts the time range based on the shift value from timeShift function
-func adjustTimeRangeForShift[T any](spec qbtypes.QueryBuilderQuery[T], tr qbtypes.TimeRange, kind qbtypes.RequestType) qbtypes.TimeRange {
-    // Only apply time shift for time series and scalar queries
-    // Raw/list queries don't support timeshift
-    if kind != qbtypes.RequestTypeTimeSeries && kind != qbtypes.RequestTypeScalar {
-        return tr
-    }
-
-    // Use the ShiftBy field if it's already populated, otherwise extract it
-    shiftBy := spec.ShiftBy
-    if shiftBy == 0 {
-        shiftBy = extractShiftFromBuilderQuery(spec)
-    }
-
-    if shiftBy == 0 {
-        return tr
-    }
-
-    // ShiftBy is in seconds, convert to milliseconds and shift backward in time
-    shiftMS := shiftBy * 1000
-    return qbtypes.TimeRange{
-        From: tr.From - uint64(shiftMS),
-        To:   tr.To - uint64(shiftMS),
-    }
-}

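For a quick worked example of the arithmetic in the helper removed above, using the same numbers as the scalar test case in the test file removed further below:

// timeShift(3600) on a scalar query over [From: 10000000 ms, To: 20000000 ms]:
// shiftMS  = 3600 * 1000 = 3600000
// adjusted = [From: 10000000 - 3600000, To: 20000000 - 3600000]
//          = [From: 6400000, To: 16400000]
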
func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtypes.QueryRangeRequest) (*qbtypes.QueryRangeResponse, error) {
    // First pass: collect all metric names that need temporality
    metricNames := make([]string, 0)
    for _, query := range req.CompositeQuery.Queries {
        if query.Type == qbtypes.QueryTypeBuilder {
            if spec, ok := query.Spec.(qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]); ok {
                for _, agg := range spec.Aggregations {
                    if agg.MetricName != "" {
                        metricNames = append(metricNames, agg.MetricName)
                    }
                }
            }
        }
    }

    // Fetch temporality for all metrics at once
    var metricTemporality map[string]metrictypes.Temporality
    if len(metricNames) > 0 {
        var err error
        metricTemporality, err = q.metadataStore.FetchTemporalityMulti(ctx, metricNames...)
        if err != nil {
            q.logger.WarnContext(ctx, "failed to fetch metric temporality", "error", err, "metrics", metricNames)
            // Continue without temporality - statement builder will handle unspecified
            metricTemporality = make(map[string]metrictypes.Temporality)
        }
        q.logger.DebugContext(ctx, "fetched metric temporalities", "metric_temporality", metricTemporality)
    }

    queries := make(map[string]qbtypes.Query)
    steps := make(map[string]qbtypes.Step)

@@ -155,28 +79,15 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
 case qbtypes.QueryTypeBuilder:
     switch spec := query.Spec.(type) {
     case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
-        spec.ShiftBy = extractShiftFromBuilderQuery(spec)
-        timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
-        bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType)
+        bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
         queries[spec.Name] = bq
         steps[spec.Name] = spec.StepInterval
     case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
-        spec.ShiftBy = extractShiftFromBuilderQuery(spec)
-        timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
-        bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType)
+        bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
         queries[spec.Name] = bq
         steps[spec.Name] = spec.StepInterval
     case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
         for i := range spec.Aggregations {
             if spec.Aggregations[i].MetricName != "" && spec.Aggregations[i].Temporality == metrictypes.Unknown {
                 if temp, ok := metricTemporality[spec.Aggregations[i].MetricName]; ok && temp != metrictypes.Unknown {
                     spec.Aggregations[i].Temporality = temp
                 }
             }
         }
-        spec.ShiftBy = extractShiftFromBuilderQuery(spec)
-        timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
-        bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType)
+        bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
         queries[spec.Name] = bq
         steps[spec.Name] = spec.StepInterval
     default:

@@ -222,18 +133,13 @@ func (q *querier) run(ctx context.Context, orgID valuer.UUID, qs map[string]qbty
     }
 }

-processedResults, err := q.postProcessResults(ctx, results, req)
-if err != nil {
-    return nil, err
-}
-
 return &qbtypes.QueryRangeResponse{
     Type: req.RequestType,
     Data: struct {
         Results  []any    `json:"results"`
         Warnings []string `json:"warnings"`
     }{
-        Results:  maps.Values(processedResults),
+        Results:  maps.Values(results),
         Warnings: warnings,
     },
     Meta: struct {

@@ -267,7 +173,7 @@ func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query
         return nil, err
     }
     // Store in cache for future use
-    q.bucketCache.Put(ctx, orgID, query, step, result)
+    q.bucketCache.Put(ctx, orgID, query, result)
     return result, nil
 }
}

@@ -277,10 +183,6 @@ func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query
 errors := make([]error, len(missingRanges))
 totalStats := qbtypes.ExecStats{}

-q.logger.DebugContext(ctx, "executing queries for missing ranges",
-    "missing_ranges_count", len(missingRanges),
-    "ranges", missingRanges)
-
 sem := make(chan struct{}, 4)
 var wg sync.WaitGroup

@@ -322,7 +224,7 @@ func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query
     if err != nil {
         return nil, err
     }
-    q.bucketCache.Put(ctx, orgID, query, step, result)
+    q.bucketCache.Put(ctx, orgID, query, result)
     return result, nil
 }
}

@@ -346,7 +248,7 @@ func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query
 mergedResult.Stats.DurationMS += totalStats.DurationMS

 // Store merged result in cache
-q.bucketCache.Put(ctx, orgID, query, step, mergedResult)
+q.bucketCache.Put(ctx, orgID, query, mergedResult)

 return mergedResult, nil
}

@@ -359,17 +261,11 @@ func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtyp
 case *chSQLQuery:
     return newchSQLQuery(q.telemetryStore, qt.query, qt.args, timeRange, qt.kind)
 case *builderQuery[qbtypes.TraceAggregation]:
-    qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
-    adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
-    return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
+    return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, timeRange, qt.kind)
 case *builderQuery[qbtypes.LogAggregation]:
-    qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
-    adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
-    return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
+    return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, timeRange, qt.kind)
 case *builderQuery[qbtypes.MetricAggregation]:
-    qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
-    adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
-    return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
+    return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, timeRange, qt.kind)
 default:
     return nil
 }

@@ -377,29 +273,8 @@

 // mergeResults merges cached result with fresh results
 func (q *querier) mergeResults(cached *qbtypes.Result, fresh []*qbtypes.Result) *qbtypes.Result {
-    if cached == nil {
-        if len(fresh) == 1 {
-            return fresh[0]
-        }
-        if len(fresh) == 0 {
-            return nil
-        }
-        // If cached is nil but we have multiple fresh results, we need to merge them
-        // We need to merge all fresh results properly to avoid duplicates
-        merged := &qbtypes.Result{
-            Type:     fresh[0].Type,
-            Stats:    fresh[0].Stats,
-            Warnings: fresh[0].Warnings,
-        }
-
-        // Merge all fresh results including the first one
-        switch merged.Type {
-        case qbtypes.RequestTypeTimeSeries:
-            // Pass nil as cached value to ensure proper merging of all fresh results
-            merged.Value = q.mergeTimeSeriesResults(nil, fresh)
-        }
-
-        return merged
-    }
+    if cached == nil && len(fresh) == 1 {
+        return fresh[0]
+    }

     // Start with cached result

@@ -440,52 +315,23 @@
 // mergeTimeSeriesResults merges time series data
 func (q *querier) mergeTimeSeriesResults(cachedValue *qbtypes.TimeSeriesData, freshResults []*qbtypes.Result) *qbtypes.TimeSeriesData {

-    // Map to store merged series by aggregation index and series key
+    // Map to store merged series by query name and series key
     seriesMap := make(map[int]map[string]*qbtypes.TimeSeries)
-    // Map to store aggregation bucket metadata
-    bucketMetadata := make(map[int]*qbtypes.AggregationBucket)

     // Process cached data if available
     if cachedValue != nil && cachedValue.Aggregations != nil {
-        for _, aggBucket := range cachedValue.Aggregations {
-            if seriesMap[aggBucket.Index] == nil {
-                seriesMap[aggBucket.Index] = make(map[string]*qbtypes.TimeSeries)
-            }
-            if bucketMetadata[aggBucket.Index] == nil {
-                bucketMetadata[aggBucket.Index] = aggBucket
-            }
-            for _, series := range aggBucket.Series {
-                key := qbtypes.GetUniqueSeriesKey(series.Labels)
-                if existingSeries, ok := seriesMap[aggBucket.Index][key]; ok {
-                    // Merge values from duplicate series in cached data, avoiding duplicate timestamps
-                    timestampMap := make(map[int64]bool)
-                    for _, v := range existingSeries.Values {
-                        timestampMap[v.Timestamp] = true
-                    }
-
-                    // Only add values with new timestamps
-                    for _, v := range series.Values {
-                        if !timestampMap[v.Timestamp] {
-                            existingSeries.Values = append(existingSeries.Values, v)
-                        }
-                    }
-                } else {
-                    // Create a copy to avoid modifying the cached data
-                    seriesCopy := &qbtypes.TimeSeries{
-                        Labels: series.Labels,
-                        Values: make([]*qbtypes.TimeSeriesValue, len(series.Values)),
-                    }
-                    copy(seriesCopy.Values, series.Values)
-                    seriesMap[aggBucket.Index][key] = seriesCopy
-                }
-            }
-        }
+        for _, aggBucket := range cachedValue.Aggregations {
+            if seriesMap[aggBucket.Index] == nil {
+                seriesMap[aggBucket.Index] = make(map[string]*qbtypes.TimeSeries)
+            }
+            for _, series := range aggBucket.Series {
+                key := qbtypes.GetUniqueSeriesKey(series.Labels)
+                seriesMap[aggBucket.Index][key] = series
+            }
+        }
     }

     // Add fresh series
     for _, result := range freshResults {
         freshTS, ok := result.Value.(*qbtypes.TimeSeriesData)
-        if !ok || freshTS == nil || freshTS.Aggregations == nil {
+        if !ok {
             continue
         }

@@ -493,12 +339,6 @@ func (q *querier) mergeTimeSeriesResults(cachedValue *qbtypes.TimeSeriesData, fr
         if seriesMap[aggBucket.Index] == nil {
             seriesMap[aggBucket.Index] = make(map[string]*qbtypes.TimeSeries)
         }
-        // Prefer fresh metadata over cached metadata
-        if aggBucket.Alias != "" || aggBucket.Meta.Unit != "" {
-            bucketMetadata[aggBucket.Index] = aggBucket
-        } else if bucketMetadata[aggBucket.Index] == nil {
-            bucketMetadata[aggBucket.Index] = aggBucket
-        }
     }

     for _, aggBucket := range freshTS.Aggregations {

@@ -506,19 +346,8 @@ func (q *querier) mergeTimeSeriesResults(cachedValue *qbtypes.TimeSeriesData, fr
         key := qbtypes.GetUniqueSeriesKey(series.Labels)

         if existingSeries, ok := seriesMap[aggBucket.Index][key]; ok {
-            // Merge values, avoiding duplicate timestamps
-            // Create a map to track existing timestamps
-            timestampMap := make(map[int64]bool)
-            for _, v := range existingSeries.Values {
-                timestampMap[v.Timestamp] = true
-            }
-
-            // Only add values with new timestamps
-            for _, v := range series.Values {
-                if !timestampMap[v.Timestamp] {
-                    existingSeries.Values = append(existingSeries.Values, v)
-                }
-            }
+            // Merge values
+            existingSeries.Values = append(existingSeries.Values, series.Values...)
         } else {
             // New series
             seriesMap[aggBucket.Index][key] = series

@@ -528,18 +357,10 @@ func (q *querier) mergeTimeSeriesResults(cachedValue *qbtypes.TimeSeriesData, fr
     }

     result := &qbtypes.TimeSeriesData{
+        QueryName:    cachedValue.QueryName,
         Aggregations: []*qbtypes.AggregationBucket{},
     }

-    // Set QueryName from cached or first fresh result
-    if cachedValue != nil {
-        result.QueryName = cachedValue.QueryName
-    } else if len(freshResults) > 0 {
-        if freshTS, ok := freshResults[0].Value.(*qbtypes.TimeSeriesData); ok && freshTS != nil {
-            result.QueryName = freshTS.QueryName
-        }
-    }
-
     for index, series := range seriesMap {
         var aggSeries []*qbtypes.TimeSeries
         for _, s := range series {

@@ -556,17 +377,10 @@ func (q *querier) mergeTimeSeriesResults(cachedValue *qbtypes.TimeSeriesData, fr
         aggSeries = append(aggSeries, s)
     }

-    // Preserve bucket metadata from either cached or fresh results
-    bucket := &qbtypes.AggregationBucket{
+    result.Aggregations = append(result.Aggregations, &qbtypes.AggregationBucket{
         Index:  index,
         Series: aggSeries,
-    }
-    if metadata, ok := bucketMetadata[index]; ok {
-        bucket.Alias = metadata.Alias
-        bucket.Meta = metadata.Meta
-    }
-
-    result.Aggregations = append(result.Aggregations, bucket)
+    })
 }

 return result

@@ -1,229 +0,0 @@
package querier

import (
    "testing"

    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/stretchr/testify/assert"
)

// TestAdjustTimeRangeForShift tests the time range adjustment logic
func TestAdjustTimeRangeForShift(t *testing.T) {
    tests := []struct {
        name           string
        spec           qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]
        timeRange      qbtypes.TimeRange
        requestType    qbtypes.RequestType
        expectedFromMS uint64
        expectedToMS   uint64
    }{
        {
            name: "no shift",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{},
            },
            timeRange: qbtypes.TimeRange{
                From: 1000000,
                To:   2000000,
            },
            requestType:    qbtypes.RequestTypeTimeSeries,
            expectedFromMS: 1000000,
            expectedToMS:   2000000,
        },
        {
            name: "shift by 60 seconds using timeShift function",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: "60"},
                        },
                    },
                },
            },
            timeRange: qbtypes.TimeRange{
                From: 1000000,
                To:   2000000,
            },
            requestType:    qbtypes.RequestTypeTimeSeries,
            expectedFromMS: 940000,  // 1000000 - 60000
            expectedToMS:   1940000, // 2000000 - 60000
        },
        {
            name: "shift by negative 30 seconds (future shift)",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: "-30"},
                        },
                    },
                },
            },
            timeRange: qbtypes.TimeRange{
                From: 1000000,
                To:   2000000,
            },
            requestType:    qbtypes.RequestTypeTimeSeries,
            expectedFromMS: 1030000, // 1000000 - (-30000)
            expectedToMS:   2030000, // 2000000 - (-30000)
        },
        {
            name: "no shift for raw request type even with timeShift function",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: "3600"},
                        },
                    },
                },
            },
            timeRange: qbtypes.TimeRange{
                From: 1000000,
                To:   2000000,
            },
            requestType:    qbtypes.RequestTypeRaw,
            expectedFromMS: 1000000, // No shift for raw queries
            expectedToMS:   2000000,
        },
        {
            name: "shift applied for scalar request type with timeShift function",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: "3600"},
                        },
                    },
                },
            },
            timeRange: qbtypes.TimeRange{
                From: 10000000,
                To:   20000000,
            },
            requestType:    qbtypes.RequestTypeScalar,
            expectedFromMS: 6400000,  // 10000000 - 3600000
            expectedToMS:   16400000, // 20000000 - 3600000
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            result := adjustTimeRangeForShift(tt.spec, tt.timeRange, tt.requestType)
            assert.Equal(t, tt.expectedFromMS, result.From, "fromMS mismatch")
            assert.Equal(t, tt.expectedToMS, result.To, "toMS mismatch")
        })
    }
}

// TestExtractShiftFromBuilderQuery tests the shift extraction logic
func TestExtractShiftFromBuilderQuery(t *testing.T) {
    tests := []struct {
        name            string
        spec            qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]
        expectedShiftBy int64
    }{
        {
            name: "extract from timeShift function with float64",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: float64(3600)},
                        },
                    },
                },
            },
            expectedShiftBy: 3600,
        },
        {
            name: "extract from timeShift function with int64",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: int64(3600)},
                        },
                    },
                },
            },
            expectedShiftBy: 3600,
        },
        {
            name: "extract from timeShift function with string",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: "3600"},
                        },
                    },
                },
            },
            expectedShiftBy: 3600,
        },
        {
            name: "no timeShift function",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameAbsolute,
                    },
                },
            },
            expectedShiftBy: 0,
        },
        {
            name: "invalid timeShift value",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: "invalid"},
                        },
                    },
                },
            },
            expectedShiftBy: 0,
        },
        {
            name: "multiple functions with timeShift",
            spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                Functions: []qbtypes.Function{
                    {
                        Name: qbtypes.FunctionNameAbsolute,
                    },
                    {
                        Name: qbtypes.FunctionNameTimeShift,
                        Args: []qbtypes.FunctionArg{
                            {Value: "1800"},
                        },
                    },
                    {
                        Name: qbtypes.FunctionNameClampMax,
                        Args: []qbtypes.FunctionArg{
                            {Value: "100"},
                        },
                    },
                },
            },
            expectedShiftBy: 1800,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            shiftBy := extractShiftFromBuilderQuery(tt.spec)
            assert.Equal(t, tt.expectedShiftBy, shiftBy)
        })
    }
}
Some files were not shown because too many files have changed in this diff.