Compare commits
14 Commits
v0.55.0-cl ... v0.55.0-cl

| Author | SHA1 | Date |
|---|---|---|
|  | 72cbc1a9e7 |  |
|  | a9841755a7 |  |
|  | 03e6c33f82 |  |
|  | 3c5aa86ee2 |  |
|  | 06a89b21da |  |
|  | 8c891f0e87 |  |
|  | 49dd5f2ef7 |  |
|  | 83d01e7a0d |  |
|  | f8e97c9c5c |  |
|  | b78ade2cf2 |  |
|  | 1b59719891 |  |
|  | 481c4e1271 |  |
|  | fe0d2a967f |  |
|  | e77a6f4d7a |  |
.github/ISSUE_TEMPLATE/request_dashboard.md (vendored, new file, 58 lines)

@@ -0,0 +1,58 @@
+---
+name: Request Dashboard
+about: Request a new dashboard for the SigNoz Dashboards repository
+title: ''
+labels: 'dashboard-template'
+assignees: ''
+
+---
+
+## 📝 Dashboard Request Template
+
+*Use this template to request a new dashboard for the SigNoz Dashboards repository. Please provide as much detail as possible to help us understand your needs.*
+
+---
+
+### 1. Dashboard Name
+
+Name of the requested dashboard (e.g., MySQL Monitoring Dashboard):
+
+---
+
+### 2. Expected Dashboard Sections and Panels
+
+#### Section Name
+
+Brief description of the section (e.g., "Resource usage metrics for MySQL database").
+
+#### Panel Name
+
+Panel description (e.g., "Value-type panels displaying current CPU usage, memory usage, etc.").
+
+- **Example:**
+  - **Section**: Resource Metrics
+  - **Panel**: CPU Usage - Displays the current CPU usage across all database instances.
+  - **Panel**: Memory Usage - Displays the total memory used by the MySQL process.
+
+(Repeat this format for additional sections and panels)
+
+---
+
+### 3. Expected Variables
+
+List any variables you expect to use in the dashboard (e.g., `deployment.environment`, `hostname`, etc.).
+
+---
+
+### 4. Additional Comments or Requirements
+
+Any additional details or special requirements for the dashboard?
+
+---
+
+### 📋 Notes
+
+Please review the [CONTRIBUTING.md](https://github.com/SigNoz/dashboards/blob/main/CONTRIBUTING.md) for guidelines on dashboard structure, naming conventions, and how to submit a pull request.
+
+---
+Thank you for your request! We will review it and provide feedback or guidance as necessary.
@@ -2,6 +2,9 @@ package anomaly

 import (
 	"context"
+
+	querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
+	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
 )

 type DailyProvider struct {
@@ -24,9 +27,18 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvider {
 		opt(dp)
 	}

+	dp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
+		Reader:        dp.reader,
+		Cache:         dp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  dp.fluxInterval,
+		FeatureLookup: dp.ff,
+	})
+
 	return dp
 }

 func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
-	return nil, nil
+	req.Seasonality = SeasonalityDaily
+	return p.getAnomalies(ctx, req)
 }
@@ -2,6 +2,9 @@ package anomaly

 import (
 	"context"
+
+	querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
+	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
 )

 type HourlyProvider struct {
@@ -24,9 +27,18 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyProvider {
 		opt(hp)
 	}

+	hp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
+		Reader:        hp.reader,
+		Cache:         hp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  hp.fluxInterval,
+		FeatureLookup: hp.ff,
+	})
+
 	return hp
 }

 func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
-	return nil, nil
+	req.Seasonality = SeasonalityHourly
+	return p.getAnomalies(ctx, req)
 }
@@ -16,6 +16,13 @@ const (
 	SeasonalityWeekly Seasonality = "weekly"
 )

+var (
+	oneWeekOffset = 24 * 7 * time.Hour.Milliseconds()
+	oneDayOffset  = 24 * time.Hour.Milliseconds()
+	oneHourOffset = time.Hour.Milliseconds()
+	fiveMinOffset = 5 * time.Minute.Milliseconds()
+)
+
 func (s Seasonality) IsValid() bool {
 	switch s {
 	case SeasonalityHourly, SeasonalityDaily, SeasonalityWeekly:
@@ -35,7 +42,7 @@ type GetAnomaliesResponse struct {
 }

 // anomalyParams is the params for anomaly detection
-// prediction = avg(past_period_query) + avg(current_season_query) - avg(past_season_query)
+// prediction = avg(past_period_query) + avg(current_season_query) - mean(past_season_query, past2_season_query, past3_season_query)
 //
 //	^			^
 //	|			|
@@ -49,9 +56,9 @@ type anomalyQueryParams struct {
 	// and to detect anomalies
 	CurrentPeriodQuery *v3.QueryRangeParamsV3
 	// PastPeriodQuery is the query range params for past seasonal period
-	// Example: For weekly seasonality, (now-1w-4h-5m, now-1w)
-	//        : For daily seasonality, (now-1d-2h-5m, now-1d)
-	//        : For hourly seasonality, (now-1h-30m-5m, now-1h)
+	// Example: For weekly seasonality, (now-1w-5m, now-1w)
+	//        : For daily seasonality, (now-1d-5m, now-1d)
+	//        : For hourly seasonality, (now-1h-5m, now-1h)
 	PastPeriodQuery *v3.QueryRangeParamsV3
 	// CurrentSeasonQuery is the query range params for current period (seasonal)
 	// Example: For weekly seasonality, this is the query range params for the (now-1w-5m, now)
@@ -63,16 +70,17 @@ type anomalyQueryParams struct {
 	//        : For daily seasonality, this is the query range params for the (now-2d-5m, now-1d)
 	//        : For hourly seasonality, this is the query range params for the (now-2h-5m, now-1h)
 	PastSeasonQuery *v3.QueryRangeParamsV3
-}
-
-func copyCompositeQuery(req *v3.QueryRangeParamsV3) *v3.CompositeQuery {
-	deepCopyCompositeQuery := *req.CompositeQuery
-	deepCopyCompositeQuery.BuilderQueries = make(map[string]*v3.BuilderQuery)
-	for k, v := range req.CompositeQuery.BuilderQueries {
-		query := *v
-		deepCopyCompositeQuery.BuilderQueries[k] = &query
-	}
-	return &deepCopyCompositeQuery
+	// Past2SeasonQuery is the query range params for past 2 seasonal period to the current season
+	// Example: For weekly seasonality, this is the query range params for the (now-3w-5m, now-2w)
+	//        : For daily seasonality, this is the query range params for the (now-3d-5m, now-2d)
+	//        : For hourly seasonality, this is the query range params for the (now-3h-5m, now-2h)
+	Past2SeasonQuery *v3.QueryRangeParamsV3
+	// Past3SeasonQuery is the query range params for past 3 seasonal period to the current season
+	// Example: For weekly seasonality, this is the query range params for the (now-4w-5m, now-3w)
+	//        : For daily seasonality, this is the query range params for the (now-4d-5m, now-3d)
+	//        : For hourly seasonality, this is the query range params for the (now-4h-5m, now-3h)
+	Past3SeasonQuery *v3.QueryRangeParamsV3
 }

 func updateStepInterval(req *v3.QueryRangeParamsV3) {
@@ -95,7 +103,7 @@ func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonality) *anomalyQueryParams {
 	currentPeriodQuery := &v3.QueryRangeParamsV3{
 		Start:          start,
 		End:            end,
-		CompositeQuery: req.CompositeQuery,
+		CompositeQuery: req.CompositeQuery.Clone(),
 		Variables:      make(map[string]interface{}, 0),
 		NoCache:        false,
 	}
@@ -104,24 +112,24 @@ func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonality) *anomalyQueryParams {
 	var pastPeriodStart, pastPeriodEnd int64

 	switch seasonality {
-	// for one week period, we fetch the data from the past week with 4 hours offset
+	// for one week period, we fetch the data from the past week with 5 min offset
 	case SeasonalityWeekly:
-		pastPeriodStart = start - 166*time.Hour.Milliseconds() - 4*time.Hour.Milliseconds()
-		pastPeriodEnd = end - 166*time.Hour.Milliseconds()
-	// for one day period, we fetch the data from the past day with 2 hours offset
+		pastPeriodStart = start - oneWeekOffset - fiveMinOffset
+		pastPeriodEnd = end - oneWeekOffset
+	// for one day period, we fetch the data from the past day with 5 min offset
 	case SeasonalityDaily:
-		pastPeriodStart = start - 23*time.Hour.Milliseconds() - 2*time.Hour.Milliseconds()
-		pastPeriodEnd = end - 23*time.Hour.Milliseconds()
-	// for one hour period, we fetch the data from the past hour with 30 minutes offset
+		pastPeriodStart = start - oneDayOffset - fiveMinOffset
+		pastPeriodEnd = end - oneDayOffset
+	// for one hour period, we fetch the data from the past hour with 5 min offset
 	case SeasonalityHourly:
-		pastPeriodStart = start - 1*time.Hour.Milliseconds() - 30*time.Minute.Milliseconds()
-		pastPeriodEnd = end - 1*time.Hour.Milliseconds()
+		pastPeriodStart = start - oneHourOffset - fiveMinOffset
+		pastPeriodEnd = end - oneHourOffset
 	}

 	pastPeriodQuery := &v3.QueryRangeParamsV3{
 		Start:          pastPeriodStart,
 		End:            pastPeriodEnd,
-		CompositeQuery: copyCompositeQuery(req),
+		CompositeQuery: req.CompositeQuery.Clone(),
 		Variables:      make(map[string]interface{}, 0),
 		NoCache:        false,
 	}
@@ -131,20 +139,20 @@ func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonality) *anomalyQueryParams {
 	var currentGrowthPeriodStart, currentGrowthPeriodEnd int64
 	switch seasonality {
 	case SeasonalityWeekly:
-		currentGrowthPeriodStart = start - 7*24*time.Hour.Milliseconds()
+		currentGrowthPeriodStart = start - oneWeekOffset
 		currentGrowthPeriodEnd = end
 	case SeasonalityDaily:
-		currentGrowthPeriodStart = start - 23*time.Hour.Milliseconds()
+		currentGrowthPeriodStart = start - oneDayOffset
 		currentGrowthPeriodEnd = end
 	case SeasonalityHourly:
-		currentGrowthPeriodStart = start - 1*time.Hour.Milliseconds()
+		currentGrowthPeriodStart = start - oneHourOffset
 		currentGrowthPeriodEnd = end
 	}

 	currentGrowthQuery := &v3.QueryRangeParamsV3{
 		Start:          currentGrowthPeriodStart,
 		End:            currentGrowthPeriodEnd,
-		CompositeQuery: copyCompositeQuery(req),
+		CompositeQuery: req.CompositeQuery.Clone(),
 		Variables:      make(map[string]interface{}, 0),
 		NoCache:        false,
 	}
@@ -153,30 +161,76 @@ func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonality) *anomalyQueryParams {
 	var pastGrowthPeriodStart, pastGrowthPeriodEnd int64
 	switch seasonality {
 	case SeasonalityWeekly:
-		pastGrowthPeriodStart = start - 14*24*time.Hour.Milliseconds()
-		pastGrowthPeriodEnd = start - 7*24*time.Hour.Milliseconds()
+		pastGrowthPeriodStart = start - 2*oneWeekOffset
+		pastGrowthPeriodEnd = start - 1*oneWeekOffset
 	case SeasonalityDaily:
-		pastGrowthPeriodStart = start - 2*time.Hour.Milliseconds()
-		pastGrowthPeriodEnd = start - 1*time.Hour.Milliseconds()
+		pastGrowthPeriodStart = start - 2*oneDayOffset
+		pastGrowthPeriodEnd = start - 1*oneDayOffset
 	case SeasonalityHourly:
-		pastGrowthPeriodStart = start - 2*time.Hour.Milliseconds()
-		pastGrowthPeriodEnd = start - 1*time.Hour.Milliseconds()
+		pastGrowthPeriodStart = start - 2*oneHourOffset
+		pastGrowthPeriodEnd = start - 1*oneHourOffset
 	}

 	pastGrowthQuery := &v3.QueryRangeParamsV3{
 		Start:          pastGrowthPeriodStart,
 		End:            pastGrowthPeriodEnd,
-		CompositeQuery: copyCompositeQuery(req),
+		CompositeQuery: req.CompositeQuery.Clone(),
 		Variables:      make(map[string]interface{}, 0),
 		NoCache:        false,
 	}
 	updateStepInterval(pastGrowthQuery)

+	var past2GrowthPeriodStart, past2GrowthPeriodEnd int64
+	switch seasonality {
+	case SeasonalityWeekly:
+		past2GrowthPeriodStart = start - 3*oneWeekOffset
+		past2GrowthPeriodEnd = start - 2*oneWeekOffset
+	case SeasonalityDaily:
+		past2GrowthPeriodStart = start - 3*oneDayOffset
+		past2GrowthPeriodEnd = start - 2*oneDayOffset
+	case SeasonalityHourly:
+		past2GrowthPeriodStart = start - 3*oneHourOffset
+		past2GrowthPeriodEnd = start - 2*oneHourOffset
+	}
+
+	past2GrowthQuery := &v3.QueryRangeParamsV3{
+		Start:          past2GrowthPeriodStart,
+		End:            past2GrowthPeriodEnd,
+		CompositeQuery: req.CompositeQuery.Clone(),
+		Variables:      make(map[string]interface{}, 0),
+		NoCache:        false,
+	}
+	updateStepInterval(past2GrowthQuery)
+
+	var past3GrowthPeriodStart, past3GrowthPeriodEnd int64
+	switch seasonality {
+	case SeasonalityWeekly:
+		past3GrowthPeriodStart = start - 4*oneWeekOffset
+		past3GrowthPeriodEnd = start - 3*oneWeekOffset
+	case SeasonalityDaily:
+		past3GrowthPeriodStart = start - 4*oneDayOffset
+		past3GrowthPeriodEnd = start - 3*oneDayOffset
+	case SeasonalityHourly:
+		past3GrowthPeriodStart = start - 4*oneHourOffset
+		past3GrowthPeriodEnd = start - 3*oneHourOffset
+	}
+
+	past3GrowthQuery := &v3.QueryRangeParamsV3{
+		Start:          past3GrowthPeriodStart,
+		End:            past3GrowthPeriodEnd,
+		CompositeQuery: req.CompositeQuery.Clone(),
+		Variables:      make(map[string]interface{}, 0),
+		NoCache:        false,
+	}
+	updateStepInterval(past3GrowthQuery)
+
 	return &anomalyQueryParams{
 		CurrentPeriodQuery: currentPeriodQuery,
 		PastPeriodQuery:    pastPeriodQuery,
 		CurrentSeasonQuery: currentGrowthQuery,
 		PastSeasonQuery:    pastGrowthQuery,
+		Past2SeasonQuery:   past2GrowthQuery,
+		Past3SeasonQuery:   past3GrowthQuery,
 	}
 }

@@ -185,4 +239,6 @@ type anomalyQueryResults struct {
 	PastPeriodResults    []*v3.Result
 	CurrentSeasonResults []*v3.Result
 	PastSeasonResults    []*v3.Result
+	Past2SeasonResults   []*v3.Result
+	Past3SeasonResults   []*v3.Result
 }
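The comparison windows above are pure epoch-millisecond arithmetic on the new offset constants. Here is a standalone sketch (illustrative only, not part of the diff; the variable names outside the constants are mine) that prints the ranges `prepareAnomalyQueryParams` would build for daily seasonality:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Same constants as the diff, in epoch milliseconds.
	oneDayOffset := 24 * time.Hour.Milliseconds()
	fiveMinOffset := 5 * time.Minute.Milliseconds()

	end := time.Now().UnixMilli()
	start := end - 30*time.Minute.Milliseconds() // a 30-minute evaluation window

	// The ranges built for SeasonalityDaily.
	fmt.Println("current period:", start, "->", end)
	fmt.Println("past period:   ", start-oneDayOffset-fiveMinOffset, "->", end-oneDayOffset)
	fmt.Println("current season:", start-oneDayOffset, "->", end)
	fmt.Println("past season:   ", start-2*oneDayOffset, "->", start-1*oneDayOffset)
	fmt.Println("past2 season:  ", start-3*oneDayOffset, "->", start-2*oneDayOffset)
	fmt.Println("past3 season:  ", start-4*oneDayOffset, "->", start-3*oneDayOffset)
}
```

Note how each past-period window carries the extra `fiveMinOffset` on its start, matching the "(now-1d-5m, now-1d)" examples in the struct comments.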
@@ -3,14 +3,21 @@ package anomaly

 import (
 	"context"
 	"math"
 	"time"

 	"go.signoz.io/signoz/pkg/query-service/cache"
 	"go.signoz.io/signoz/pkg/query-service/interfaces"
 	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+	"go.signoz.io/signoz/pkg/query-service/postprocess"
 	"go.signoz.io/signoz/pkg/query-service/utils/labels"
 	"go.uber.org/zap"
 )

+var (
+	// TODO(srikanthccv): make this configurable?
+	movingAvgWindowSize = 7
+)
+
 // BaseProvider is an interface that includes common methods for all provider types
 type BaseProvider interface {
 	GetBaseSeasonalProvider() *BaseSeasonalProvider
@@ -46,6 +53,7 @@ func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
 type BaseSeasonalProvider struct {
 	querierV2    interfaces.Querier
+	reader       interfaces.Reader
 	fluxInterval time.Duration
 	cache        cache.Cache
 	keyGenerator cache.KeyGenerator
 	ff           interfaces.FeatureLookup
@@ -53,28 +61,68 @@ type BaseSeasonalProvider struct {

 func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
 	if !req.Seasonality.IsValid() {
-		req.Seasonality = SeasonalityWeekly
+		req.Seasonality = SeasonalityDaily
 	}
 	return prepareAnomalyQueryParams(req.Params, req.Seasonality)
 }

 func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
-	currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery, nil)
+	currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery)
 	if err != nil {
 		return nil, err
 	}

-	pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery, nil)
+	currentPeriodResults, err = postprocess.PostProcessResult(currentPeriodResults, params.CurrentPeriodQuery)
 	if err != nil {
 		return nil, err
 	}

-	currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery, nil)
+	pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery)
 	if err != nil {
 		return nil, err
 	}

-	pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery, nil)
+	pastPeriodResults, err = postprocess.PostProcessResult(pastPeriodResults, params.PastPeriodQuery)
 	if err != nil {
 		return nil, err
 	}
+
+	currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	currentSeasonResults, err = postprocess.PostProcessResult(currentSeasonResults, params.CurrentSeasonQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	pastSeasonResults, err = postprocess.PostProcessResult(pastSeasonResults, params.PastSeasonQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past2SeasonQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	past2SeasonResults, err = postprocess.PostProcessResult(past2SeasonResults, params.Past2SeasonQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past3SeasonQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	past3SeasonResults, err = postprocess.PostProcessResult(past3SeasonResults, params.Past3SeasonQuery)
+	if err != nil {
+		return nil, err
+	}
@@ -84,10 +132,18 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
 		PastPeriodResults:    pastPeriodResults,
 		CurrentSeasonResults: currentSeasonResults,
 		PastSeasonResults:    pastSeasonResults,
+		Past2SeasonResults:   past2SeasonResults,
+		Past3SeasonResults:   past3SeasonResults,
 	}, nil
 }

+// getMatchingSeries gets the matching series from the query result
+// for the given series
 func (p *BaseSeasonalProvider) getMatchingSeries(queryResult *v3.Result, series *v3.Series) *v3.Series {
+	if queryResult == nil || len(queryResult.Series) == 0 {
+		return nil
+	}
+
 	for _, curr := range queryResult.Series {
 		currLabels := labels.FromMap(curr.Labels)
 		seriesLabels := labels.FromMap(series.Labels)
@@ -99,6 +155,9 @@ func (p *BaseSeasonalProvider) getMatchingSeries(queryResult *v3.Result, series *v3.Series) *v3.Series {
 }

 func (p *BaseSeasonalProvider) getAvg(series *v3.Series) float64 {
+	if series == nil || len(series.Points) == 0 {
+		return 0
+	}
 	var sum float64
 	for _, smpl := range series.Points {
 		sum += smpl.Value
@@ -107,6 +166,9 @@ func (p *BaseSeasonalProvider) getAvg(series *v3.Series) float64 {
 }

 func (p *BaseSeasonalProvider) getStdDev(series *v3.Series) float64 {
+	if series == nil || len(series.Points) == 0 {
+		return 0
+	}
 	avg := p.getAvg(series)
 	var sum float64
 	for _, smpl := range series.Points {
@@ -115,15 +177,65 @@ func (p *BaseSeasonalProvider) getStdDev(series *v3.Series) float64 {
 	return math.Sqrt(sum / float64(len(series.Points)))
 }

-func (p *BaseSeasonalProvider) getPredictedSeries(series, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) *v3.Series {
+// getMovingAvg gets the moving average for the given series
+// for the given window size and start index
+func (p *BaseSeasonalProvider) getMovingAvg(series *v3.Series, movingAvgWindowSize, startIdx int) float64 {
+	if series == nil || len(series.Points) == 0 {
+		return 0
+	}
+	if startIdx >= len(series.Points)-movingAvgWindowSize {
+		startIdx = len(series.Points) - movingAvgWindowSize
+	}
+	var sum float64
+	points := series.Points[startIdx:]
+	for i := 0; i < movingAvgWindowSize && i < len(points); i++ {
+		sum += points[i].Value
+	}
+	avg := sum / float64(movingAvgWindowSize)
+	return avg
+}
+
+func (p *BaseSeasonalProvider) getMean(floats ...float64) float64 {
+	if len(floats) == 0 {
+		return 0
+	}
+	var sum float64
+	for _, f := range floats {
+		sum += f
+	}
+	return sum / float64(len(floats))
+}
+
+func (p *BaseSeasonalProvider) getPredictedSeries(
+	series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series,
+) *v3.Series {
 	predictedSeries := &v3.Series{
 		Labels:      series.Labels,
 		LabelsArray: series.LabelsArray,
 		Points:      []v3.Point{},
 	}

-	for _, curr := range series.Points {
-		predictedValue := p.getAvg(prevSeries) + p.getAvg(currentSeasonSeries) - p.getAvg(pastSeasonSeries)
+	// for each point in the series, get the predicted value
+	// the predicted value is the moving average (with window size = 7) of the previous period series
+	// plus the average of the current season series
+	// minus the mean of the past season series, past2 season series and past3 season series
+	for idx, curr := range series.Points {
+		predictedValue :=
+			p.getMovingAvg(prevSeries, movingAvgWindowSize, idx) +
+				p.getAvg(currentSeasonSeries) -
+				p.getMean(p.getAvg(pastSeasonSeries), p.getAvg(past2SeasonSeries), p.getAvg(past3SeasonSeries))
+
+		if predictedValue < 0 {
+			predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
+		}
+
+		zap.L().Info("predictedSeries",
+			zap.Float64("movingAvg", p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)),
+			zap.Float64("avg", p.getAvg(currentSeasonSeries)),
+			zap.Float64("mean", p.getMean(p.getAvg(pastSeasonSeries), p.getAvg(past2SeasonSeries), p.getAvg(past3SeasonSeries))),
+			zap.Any("labels", series.Labels),
+			zap.Float64("predictedValue", predictedValue),
+		)
 		predictedSeries.Points = append(predictedSeries.Points, v3.Point{
 			Timestamp: curr.Timestamp,
 			Value:     predictedValue,
@@ -133,33 +245,80 @@ func (p *BaseSeasonalProvider) getPredictedSeries(series, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) *v3.Series {
 	return predictedSeries
 }

-func (p *BaseSeasonalProvider) getExpectedValue(_, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) float64 {
-	prevSeriesAvg := p.getAvg(prevSeries)
-	currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
-	pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
-	zap.L().Debug("getExpectedValue",
-		zap.Float64("prevSeriesAvg", prevSeriesAvg),
-		zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg),
-		zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg),
-		zap.Float64("expectedValue", prevSeriesAvg+currentSeasonSeriesAvg-pastSeasonSeriesAvg),
-	)
-	return prevSeriesAvg + currentSeasonSeriesAvg - pastSeasonSeriesAvg
+// getBounds gets the upper and lower bounds for the given series
+// for the given z score threshold
+// moving avg of the previous period series + z score threshold * std dev of the series
+// moving avg of the previous period series - z score threshold * std dev of the series
+func (p *BaseSeasonalProvider) getBounds(
+	series, prevSeries, _, _, _, _ *v3.Series,
+	zScoreThreshold float64,
+) (*v3.Series, *v3.Series) {
+	upperBoundSeries := &v3.Series{
+		Labels:      series.Labels,
+		LabelsArray: series.LabelsArray,
+		Points:      []v3.Point{},
+	}
+
+	lowerBoundSeries := &v3.Series{
+		Labels:      series.Labels,
+		LabelsArray: series.LabelsArray,
+		Points:      []v3.Point{},
+	}
+
+	for idx, curr := range series.Points {
+		upperBound := p.getMovingAvg(prevSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
+		lowerBound := p.getMovingAvg(prevSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
+		upperBoundSeries.Points = append(upperBoundSeries.Points, v3.Point{
+			Timestamp: curr.Timestamp,
+			Value:     upperBound,
+		})
+		lowerBoundSeries.Points = append(lowerBoundSeries.Points, v3.Point{
+			Timestamp: curr.Timestamp,
+			Value:     math.Max(lowerBound, 0),
+		})
+	}
+
+	return upperBoundSeries, lowerBoundSeries
 }

-func (p *BaseSeasonalProvider) getScore(series, prevSeries, weekSeries, weekPrevSeries *v3.Series, value float64) float64 {
-	expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries)
+// getExpectedValue gets the expected value for the given series
+// for the given index
+// prevSeriesAvg + currentSeasonSeriesAvg - mean of past season series, past2 season series and past3 season series
+func (p *BaseSeasonalProvider) getExpectedValue(
+	_, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, idx int,
+) float64 {
+	prevSeriesAvg := p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
+	currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
+	pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
+	past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
+	past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
+	return prevSeriesAvg + currentSeasonSeriesAvg - p.getMean(pastSeasonSeriesAvg, past2SeasonSeriesAvg, past3SeasonSeriesAvg)
+}
+
+// getScore gets the anomaly score for the given series
+// for the given index
+// (value - expectedValue) / std dev of the series
+func (p *BaseSeasonalProvider) getScore(
+	series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
+) float64 {
+	expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
 	return (value - expectedValue) / p.getStdDev(weekSeries)
 }

-func (p *BaseSeasonalProvider) getAnomalyScores(series, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) *v3.Series {
+// getAnomalyScores gets the anomaly scores for the given series
+// for the given index
+// (value - expectedValue) / std dev of the series
+func (p *BaseSeasonalProvider) getAnomalyScores(
+	series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *v3.Series,
+) *v3.Series {
 	anomalyScoreSeries := &v3.Series{
 		Labels:      series.Labels,
 		LabelsArray: series.LabelsArray,
 		Points:      []v3.Point{},
 	}

-	for _, curr := range series.Points {
-		anomalyScore := p.getScore(series, prevSeries, currentSeasonSeries, pastSeasonSeries, curr.Value)
+	for idx, curr := range series.Points {
+		anomalyScore := p.getScore(series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries, curr.Value, idx)
 		anomalyScoreSeries.Points = append(anomalyScoreSeries.Points, v3.Point{
 			Timestamp: curr.Timestamp,
 			Value:     anomalyScore,
@@ -169,7 +328,7 @@ func (p *BaseSeasonalProvider) getAnomalyScores(series, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) *v3.Series {
 	return anomalyScoreSeries
 }

-func (p *BaseSeasonalProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
+func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
 	anomalyParams := p.getQueryParams(req)
 	anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
 	if err != nil {
@@ -196,7 +355,32 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
 		pastSeasonResultsMap[result.QueryName] = result
 	}

+	past2SeasonResultsMap := make(map[string]*v3.Result)
+	for _, result := range anomalyQueryResults.Past2SeasonResults {
+		past2SeasonResultsMap[result.QueryName] = result
+	}
+
+	past3SeasonResultsMap := make(map[string]*v3.Result)
+	for _, result := range anomalyQueryResults.Past3SeasonResults {
+		past3SeasonResultsMap[result.QueryName] = result
+	}
+
 	for _, result := range currentPeriodResultsMap {
+		funcs := req.Params.CompositeQuery.BuilderQueries[result.QueryName].Functions
+
+		var zScoreThreshold float64
+		for _, f := range funcs {
+			if f.Name == v3.FunctionNameAnomaly {
+				value, ok := f.NamedArgs["z_score_threshold"]
+				if ok {
+					zScoreThreshold = value.(float64)
+				} else {
+					zScoreThreshold = 3
+				}
+				break
+			}
+		}
+
 		pastPeriodResult, ok := pastPeriodResultsMap[result.QueryName]
 		if !ok {
 			continue
@@ -209,21 +393,72 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
 		if !ok {
 			continue
 		}
+		past2SeasonResult, ok := past2SeasonResultsMap[result.QueryName]
+		if !ok {
+			continue
+		}
+		past3SeasonResult, ok := past3SeasonResultsMap[result.QueryName]
+		if !ok {
+			continue
+		}

 		for _, series := range result.Series {
+			stdDev := p.getStdDev(series)
+			zap.L().Info("stdDev", zap.Float64("stdDev", stdDev), zap.Any("labels", series.Labels))
+
 			pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
 			currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
 			pastSeasonSeries := p.getMatchingSeries(pastSeasonResult, series)
+			past2SeasonSeries := p.getMatchingSeries(past2SeasonResult, series)
+			past3SeasonSeries := p.getMatchingSeries(past3SeasonResult, series)

-			predictedSeries := p.getPredictedSeries(series, pastPeriodSeries, currentSeasonSeries, pastSeasonSeries)
+			prevSeriesAvg := p.getAvg(pastPeriodSeries)
+			currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
+			pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
+			past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
+			past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
+			zap.L().Info("getAvg", zap.Float64("prevSeriesAvg", prevSeriesAvg), zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg), zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg), zap.Float64("past2SeasonSeriesAvg", past2SeasonSeriesAvg), zap.Float64("past3SeasonSeriesAvg", past3SeasonSeriesAvg), zap.Any("labels", series.Labels))
+
+			predictedSeries := p.getPredictedSeries(
+				series,
+				pastPeriodSeries,
+				currentSeasonSeries,
+				pastSeasonSeries,
+				past2SeasonSeries,
+				past3SeasonSeries,
+			)
 			result.PredictedSeries = append(result.PredictedSeries, predictedSeries)

-			anomalyScoreSeries := p.getAnomalyScores(series, pastPeriodSeries, currentSeasonSeries, pastSeasonSeries)
+			upperBoundSeries, lowerBoundSeries := p.getBounds(
+				series,
+				pastPeriodSeries,
+				currentSeasonSeries,
+				pastSeasonSeries,
+				past2SeasonSeries,
+				past3SeasonSeries,
+				zScoreThreshold,
+			)
+			result.UpperBoundSeries = append(result.UpperBoundSeries, upperBoundSeries)
+			result.LowerBoundSeries = append(result.LowerBoundSeries, lowerBoundSeries)
+
+			anomalyScoreSeries := p.getAnomalyScores(
+				series,
+				pastPeriodSeries,
+				currentSeasonSeries,
+				pastSeasonSeries,
+				past2SeasonSeries,
+				past3SeasonSeries,
+			)
 			result.AnomalyScores = append(result.AnomalyScores, anomalyScoreSeries)
 		}
 	}

+	results := make([]*v3.Result, 0, len(currentPeriodResultsMap))
+	for _, result := range currentPeriodResultsMap {
+		results = append(results, result)
+	}
+
 	return &GetAnomaliesResponse{
-		Results: anomalyQueryResults.CurrentPeriodResults,
+		Results: results,
 	}, nil
 }
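The per-point arithmetic the provider applies is easy to sanity-check with plain floats. Below is a minimal sketch, assuming toy slices stand in for `*v3.Series` and a fixed `stdDev` stands in for `getStdDev`; note the real `getScore` divides by the standard deviation of the current-season series, and `z_score_threshold` defaults to 3 when the anomaly function supplies none:

```go
package main

import (
	"fmt"
	"math"
)

// avg mirrors getAvg: plain arithmetic mean.
func avg(xs []float64) float64 {
	if len(xs) == 0 {
		return 0
	}
	var sum float64
	for _, x := range xs {
		sum += x
	}
	return sum / float64(len(xs))
}

// movingAvg mirrors getMovingAvg: a window of size w starting at idx,
// clamped so the window does not run past the end of the series.
func movingAvg(xs []float64, w, idx int) float64 {
	if len(xs) == 0 {
		return 0
	}
	if idx >= len(xs)-w {
		idx = len(xs) - w
	}
	if idx < 0 {
		idx = 0 // extra guard for series shorter than the window
	}
	var sum float64
	for i := idx; i < len(xs) && i < idx+w; i++ {
		sum += xs[i]
	}
	return sum / float64(w)
}

func main() {
	prev := []float64{10, 11, 12, 11, 10, 11, 12, 13} // past period
	currentSeason := []float64{11, 12, 13}
	past1, past2, past3 := 10.0, 11.0, 12.0 // per-season averages

	const w = 7          // movingAvgWindowSize in the diff
	const zThreshold = 3 // default z_score_threshold

	idx, value := 0, 25.0
	predicted := movingAvg(prev, w, idx) + avg(currentSeason) - (past1+past2+past3)/3
	if predicted < 0 {
		predicted = movingAvg(prev, w, idx) // fallback, as in getPredictedSeries
	}

	stdDev := 1.2 // stand-in for getStdDev of the relevant series
	score := (value - predicted) / stdDev
	upper := movingAvg(prev, w, idx) + zThreshold*stdDev
	lower := math.Max(movingAvg(prev, w, idx)-zThreshold*stdDev, 0)

	fmt.Printf("predicted=%.2f score=%.2f bounds=[%.2f, %.2f]\n", predicted, score, lower, upper)
}
```

A point is anomalous when its score exceeds the z-score threshold, which is exactly when the value leaves the band `[lower, upper]` printed above.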
@@ -2,6 +2,9 @@ package anomaly

 import (
 	"context"
+
+	querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
+	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
 )

 type WeeklyProvider struct {
@@ -23,9 +26,18 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyProvider {
 		opt(wp)
 	}

+	wp.querierV2 = querierV2.NewQuerier(querierV2.QuerierOptions{
+		Reader:        wp.reader,
+		Cache:         wp.cache,
+		KeyGenerator:  queryBuilder.NewKeyGenerator(),
+		FluxInterval:  wp.fluxInterval,
+		FeatureLookup: wp.ff,
+	})
+
 	return wp
 }

 func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
-	return nil, nil
+	req.Seasonality = SeasonalityWeekly
+	return p.getAnomalies(ctx, req)
 }
@@ -207,7 +207,6 @@
 		"eslint-plugin-sonarjs": "^0.12.0",
 		"husky": "^7.0.4",
 		"is-ci": "^3.0.1",
-		"jest-playwright-preset": "^1.7.2",
 		"jest-styled-components": "^7.0.8",
 		"lint-staged": "^12.5.0",
 		"msw": "1.3.2",
@@ -65,7 +65,7 @@ export const logAlertDefaults: AlertDef = {
 	chQueries: {
 		A: {
 			name: 'A',
-			query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
+			query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs_v2 \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
 			legend: '',
 			disabled: false,
 		},
@@ -133,6 +133,9 @@ function LogsExplorerViews({
 	// State
 	const [page, setPage] = useState<number>(1);
 	const [logs, setLogs] = useState<ILog[]>([]);
+	const [lastLogLineTimestamp, setLastLogLineTimestamp] = useState<
+		number | string | null
+	>();
 	const [requestData, setRequestData] = useState<Query | null>(null);
 	const [showFormatMenuItems, setShowFormatMenuItems] = useState(false);
 	const [queryId, setQueryId] = useState<string>(v4());
@@ -270,6 +273,14 @@ function LogsExplorerViews({
 			start: minTime,
 			end: maxTime,
 		}),
+		// send the lastLogTimeStamp only when the panel type is list and the orderBy is timestamp and the order is desc
+		lastLogLineTimestamp:
+			panelType === PANEL_TYPES.LIST &&
+			requestData?.builder?.queryData?.[0]?.orderBy?.[0]?.columnName ===
+				'timestamp' &&
+			requestData?.builder?.queryData?.[0]?.orderBy?.[0]?.order === 'desc'
+				? lastLogLineTimestamp
+				: undefined,
 	},
 	undefined,
 	listQueryKeyRef,
@@ -347,6 +358,10 @@ function LogsExplorerViews({
 			pageSize: nextPageSize,
 		});

+		// initialise the last log timestamp to null as we don't have the logs.
+		// as soon as we scroll to the end of the logs we set the lastLogLineTimestamp to the last log timestamp.
+		setLastLogLineTimestamp(lastLog.timestamp);
+
 		setPage((prevPage) => prevPage + 1);

 		setRequestData(newRequestData);
@@ -539,6 +554,11 @@ function LogsExplorerViews({
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, [data]);

+	useEffect(() => {
+		// clear the lastLogLineTimestamp when the data changes
+		setLastLogLineTimestamp(null);
+	}, [data]);
+
 	useEffect(() => {
 		if (
 			requestData?.id !== stagedQuery?.id ||
@@ -1,6 +1,7 @@
 import getStartEndRangeTime from 'lib/getStartEndRangeTime';
 import getStep from 'lib/getStep';
 import { mapQueryDataToApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi';
+import { isUndefined } from 'lodash-es';
 import store from 'store';
 import { QueryRangePayload } from 'types/api/metrics/getQueryRange';
 import { EQueryType } from 'types/common/dashboard';
@@ -24,7 +25,11 @@ export const prepareQueryRangePayload = ({
 	fillGaps = false,
 }: GetQueryResultsProps): PrepareQueryRangePayload => {
 	let legendMap: Record<string, string> = {};
-	const { allowSelectedIntervalForStepGen, ...restParams } = params;
+	const {
+		allowSelectedIntervalForStepGen,
+		lastLogLineTimestamp,
+		...restParams
+	} = params;

 	const compositeQuery: QueryRangePayload['compositeQuery'] = {
 		queryType: query.queryType,
@@ -90,9 +95,13 @@ export const prepareQueryRangePayload = ({
 		interval: globalSelectedInterval,
 	});

+	const endLogTimeStamp = !isUndefined(lastLogLineTimestamp)
+		? new Date(lastLogLineTimestamp as string | number)?.getTime() || undefined
+		: undefined;
+
 	const queryPayload: QueryRangePayload = {
 		start: parseInt(start, 10) * 1e3,
-		end: parseInt(end, 10) * 1e3,
+		end: endLogTimeStamp || parseInt(end, 10) * 1e3,
 		step: getStep({
 			start: allowSelectedIntervalForStepGen
 				? start
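Reduced to its core, the frontend change above clamps the query range's end to the last rendered log line when paginating a descending list, so the next page never re-fetches newer rows. A minimal sketch of that decision in Go (the section's backend language; the function and names are illustrative, not SigNoz API):

```go
package main

import "fmt"

// effectiveEnd mirrors prepareQueryRangePayload's end computation:
// prefer the last rendered log line's timestamp (epoch ms) when one
// exists, otherwise fall back to the selected range's end.
func effectiveEnd(selectedEndMs int64, lastLogLineTimestampMs *int64) int64 {
	if lastLogLineTimestampMs != nil && *lastLogLineTimestampMs != 0 {
		return *lastLogLineTimestampMs
	}
	return selectedEndMs
}

func main() {
	selectedEnd := int64(1726000000000)
	lastLog := int64(1725999990000)

	fmt.Println(effectiveEnd(selectedEnd, nil))      // first page: no anchor yet
	fmt.Println(effectiveEnd(selectedEnd, &lastLog)) // next page: anchored to last log
}
```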
@@ -12,6 +12,10 @@
 		font-weight: 400;
 		line-height: 18px;
 		letter-spacing: -0.005em;
+		&,
+		&:hover {
+			color: var(--text-vanilla-400);
+		}
 	}
 	&__key {
 		background: var(--bg-ink-400);
@@ -20,13 +24,15 @@
 	&__value {
 		background: var(--bg-slate-400);
 	}
-	color: var(--text-vanilla-400);
 }

 .lightMode {
 	.key-value-label {
 		border-color: var(--bg-vanilla-400);
-		color: var(--text-ink-400);
+		&__key,
+		&__value {
+			color: var(--text-ink-400);
+		}
 		&__key {
 			background: var(--bg-vanilla-300);
 		}
@@ -1,6 +1,7 @@
 import './KeyValueLabel.styles.scss';

 import { Tooltip } from 'antd';
+import { useMemo } from 'react';

 import TrimmedText from '../TrimmedText/TrimmedText';

@@ -15,19 +16,33 @@ export default function KeyValueLabel({
 	badgeValue,
 	maxCharacters = 20,
 }: KeyValueLabelProps): JSX.Element | null {
+	const isUrl = useMemo(() => /^https?:\/\//.test(badgeValue), [badgeValue]);
+
 	if (!badgeKey || !badgeValue) {
 		return null;
 	}
+
 	return (
 		<div className="key-value-label">
 			<div className="key-value-label__key">
 				<TrimmedText text={badgeKey} maxCharacters={maxCharacters} />
 			</div>
-			<Tooltip title={badgeValue}>
-				<div className="key-value-label__value">
-					<TrimmedText text={badgeValue} maxCharacters={maxCharacters} />
-				</div>
-			</Tooltip>
+			{isUrl ? (
+				<a
+					href={badgeValue}
+					target="_blank"
+					rel="noopener noreferrer"
+					className="key-value-label__value"
+				>
+					<TrimmedText text={badgeValue} maxCharacters={maxCharacters} />
+				</a>
+			) : (
+				<Tooltip title={badgeValue}>
+					<div className="key-value-label__value">
+						<TrimmedText text={badgeValue} maxCharacters={maxCharacters} />
+					</div>
+				</Tooltip>
+			)}
 		</div>
 	);
 }
@@ -169,7 +169,7 @@
|
||||
resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98"
|
||||
integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw==
|
||||
|
||||
"@babel/core@^7.1.0", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.7.5", "@babel/core@^7.8.0":
|
||||
"@babel/core@^7.1.0", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0":
|
||||
version "7.21.4"
|
||||
resolved "https://registry.npmjs.org/@babel/core/-/core-7.21.4.tgz"
|
||||
integrity sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA==
|
||||
@@ -2659,18 +2659,6 @@
|
||||
dependencies:
|
||||
tslib "2.5.0"
|
||||
|
||||
"@hapi/hoek@^9.0.0":
|
||||
version "9.3.0"
|
||||
resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz"
|
||||
integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==
|
||||
|
||||
"@hapi/topo@^5.0.0":
|
||||
version "5.1.0"
|
||||
resolved "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz"
|
||||
integrity sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==
|
||||
dependencies:
|
||||
"@hapi/hoek" "^9.0.0"
|
||||
|
||||
"@humanwhocodes/config-array@^0.5.0":
|
||||
version "0.5.0"
|
||||
resolved "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz"
|
||||
@@ -3751,23 +3739,6 @@
|
||||
unplugin "1.0.1"
|
||||
uuid "^9.0.0"
|
||||
|
||||
"@sideway/address@^4.1.3":
|
||||
version "4.1.4"
|
||||
resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz"
|
||||
integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==
|
||||
dependencies:
|
||||
"@hapi/hoek" "^9.0.0"
|
||||
|
||||
"@sideway/formula@^3.0.1":
|
||||
version "3.0.1"
|
||||
resolved "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.1.tgz"
|
||||
integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==
|
||||
|
||||
"@sideway/pinpoint@^2.0.0":
|
||||
version "2.0.0"
|
||||
resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz"
|
||||
integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==
|
||||
|
||||
"@signozhq/design-tokens@0.0.8":
|
||||
version "0.0.8"
|
||||
resolved "https://registry.yarnpkg.com/@signozhq/design-tokens/-/design-tokens-0.0.8.tgz#368dc92cfe01d0cd893df140445c5d9dfd944a88"
|
||||
@@ -4591,13 +4562,6 @@
|
||||
resolved "https://registry.npmjs.org/@types/uuid/-/uuid-8.3.4.tgz"
|
||||
integrity sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw==
|
||||
|
||||
"@types/wait-on@^5.2.0":
|
||||
version "5.3.1"
|
||||
resolved "https://registry.npmjs.org/@types/wait-on/-/wait-on-5.3.1.tgz"
|
||||
integrity sha512-2FFOKCF/YydrMUaqg+fkk49qf0e5rDgwt6aQsMzFQzbS419h2gNOXyiwp/o2yYy27bi/C1z+HgfncryjGzlvgQ==
|
||||
dependencies:
|
||||
"@types/node" "*"
|
||||
|
||||
"@types/webpack-dev-server@^4.7.2":
|
||||
version "4.7.2"
|
||||
resolved "https://registry.yarnpkg.com/@types/webpack-dev-server/-/webpack-dev-server-4.7.2.tgz#a12d9881aa23cdd4cecbb2d31fa784a45c4967e0"
|
||||
@@ -5428,18 +5392,6 @@ anymatch@^3.0.3, anymatch@~3.1.2:
|
||||
normalize-path "^3.0.0"
|
||||
picomatch "^2.0.4"
|
||||
|
||||
append-transform@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz"
|
||||
integrity sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==
|
||||
dependencies:
|
||||
default-require-extensions "^3.0.0"
|
||||
|
||||
archy@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz"
|
||||
integrity sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==
|
||||
|
||||
arg@^4.1.0:
|
||||
version "4.1.3"
|
||||
resolved "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz"
|
||||
@@ -5635,13 +5587,6 @@ axios@1.7.4:
|
||||
form-data "^4.0.0"
|
||||
proxy-from-env "^1.1.0"
|
||||
|
||||
axios@^0.21.1:
|
||||
version "0.21.4"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575"
|
||||
integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==
|
||||
dependencies:
|
||||
follow-redirects "^1.14.0"
|
||||
|
||||
axobject-query@^3.1.1:
|
||||
version "3.1.1"
|
||||
resolved "https://registry.npmjs.org/axobject-query/-/axobject-query-3.1.1.tgz"
|
||||
@@ -6315,16 +6260,6 @@ bytes@3.1.2:
|
||||
resolved "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz"
|
||||
integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==
|
||||
|
||||
caching-transform@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz"
|
||||
integrity sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==
|
||||
dependencies:
|
||||
hasha "^5.0.0"
|
||||
make-dir "^3.0.0"
|
||||
package-hash "^4.0.0"
|
||||
write-file-atomic "^3.0.0"
|
||||
|
||||
call-bind@^1.0.0, call-bind@^1.0.2:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz"
|
||||
@@ -6355,7 +6290,7 @@ camelcase-keys@^6.2.2:
|
||||
map-obj "^4.0.0"
|
||||
quick-lru "^4.0.1"
|
||||
|
||||
camelcase@^5.0.0, camelcase@^5.3.1:
|
||||
camelcase@^5.3.1:
|
||||
version "5.3.1"
|
||||
resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz"
|
||||
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
|
||||
@@ -6620,15 +6555,6 @@ cli-width@^3.0.0:
|
||||
resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6"
|
||||
integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==
|
||||
|
||||
cliui@^6.0.0:
|
||||
version "6.0.0"
|
||||
resolved "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz"
|
||||
integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
|
||||
dependencies:
|
||||
string-width "^4.2.0"
|
||||
strip-ansi "^6.0.0"
|
||||
wrap-ansi "^6.2.0"
|
||||
|
||||
cliui@^7.0.2:
|
||||
version "7.0.4"
|
||||
resolved "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz"
|
||||
@@ -6772,16 +6698,6 @@ commander@^10.0.0:
|
||||
resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06"
|
||||
integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==
|
||||
|
||||
commander@^3.0.2:
|
||||
version "3.0.2"
|
||||
resolved "https://registry.npmjs.org/commander/-/commander-3.0.2.tgz"
|
||||
integrity sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow==
|
||||
|
||||
commander@^5.1.0:
|
||||
version "5.1.0"
|
||||
resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae"
|
||||
integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==
|
||||
|
||||
commander@^7.0.0, commander@^7.2.0:
|
||||
version "7.2.0"
|
||||
resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz"
|
||||
@@ -6802,11 +6718,6 @@ common-path-prefix@^3.0.0:
|
||||
resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0"
|
||||
integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==
|
||||
|
||||
commondir@^1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz"
|
||||
integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==
|
||||
|
||||
compare-func@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz"
|
||||
@@ -7073,7 +6984,7 @@ cross-spawn@^6.0.5:
|
||||
shebang-command "^1.2.0"
|
||||
which "^1.2.9"
|
||||
|
||||
cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
|
||||
cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
|
||||
version "7.0.3"
|
||||
resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz"
|
||||
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
|
||||
@@ -7303,14 +7214,6 @@ custom-event-polyfill@^1.0.6:
|
||||
resolved "https://registry.npmjs.org/custom-event-polyfill/-/custom-event-polyfill-1.0.7.tgz"
|
||||
integrity sha512-TDDkd5DkaZxZFM8p+1I3yAlvM3rSr1wbrOliG4yJiwinMZN8z/iGL7BTlDkrJcYTmgUSb4ywVCc3ZaUtOtC76w==
|
||||
|
||||
cwd@^0.10.0:
|
||||
version "0.10.0"
|
||||
resolved "https://registry.npmjs.org/cwd/-/cwd-0.10.0.tgz"
|
||||
integrity sha512-YGZxdTTL9lmLkCUTpg4j0zQ7IhRB5ZmqNBbGCl3Tg6MP/d5/6sY7L5mmTjzbc6JKgVZYiqTQTNhPFsbXNGlRaA==
|
||||
dependencies:
|
||||
find-pkg "^0.1.2"
|
||||
fs-exists-sync "^0.1.0"
|
||||
|
||||
"d3-array@1 - 3", "d3-array@2 - 3", "d3-array@2.10.0 - 3":
|
||||
version "3.2.3"
|
||||
resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.3.tgz"
|
||||
@@ -7555,7 +7458,7 @@ decamelize-keys@^1.1.0:
|
||||
decamelize "^1.1.0"
|
||||
map-obj "^1.0.0"
|
||||
|
||||
decamelize@^1.1.0, decamelize@^1.2.0:
|
||||
decamelize@^1.1.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
|
||||
integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==
|
||||
@@ -7637,13 +7540,6 @@ default-gateway@^6.0.3:
|
||||
dependencies:
|
||||
execa "^5.0.0"
|
||||
|
||||
default-require-extensions@^3.0.0:
|
||||
version "3.0.1"
|
||||
resolved "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.1.tgz"
|
||||
integrity sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==
|
||||
dependencies:
|
||||
strip-bom "^4.0.0"
|
||||
|
||||
defaults@^1.0.3:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a"
|
||||
@@ -8163,11 +8059,6 @@ es-to-primitive@^1.2.1:
|
||||
is-date-object "^1.0.1"
|
||||
is-symbol "^1.0.2"
|
||||
|
||||
es6-error@^4.0.1:
|
||||
version "4.1.1"
|
||||
resolved "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz"
|
||||
integrity sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==
|
||||
|
||||
escalade@^3.1.1:
|
||||
version "3.1.1"
|
||||
resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz"
|
||||
@@ -8608,18 +8499,6 @@ exit@^0.1.2:
|
||||
resolved "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz"
|
||||
integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==
|
||||
|
||||
expand-tilde@^1.2.2:
|
||||
version "1.2.2"
|
||||
resolved "https://registry.npmjs.org/expand-tilde/-/expand-tilde-1.2.2.tgz"
|
||||
integrity sha512-rtmc+cjLZqnu9dSYosX9EWmSJhTwpACgJQTfj4hgg2JjOD/6SIQalZrt4a3aQeh++oNxkazcaxrhPUj6+g5G/Q==
|
||||
dependencies:
|
||||
os-homedir "^1.0.1"
|
||||
|
||||
expect-playwright@^0.8.0:
|
||||
version "0.8.0"
|
||||
resolved "https://registry.npmjs.org/expect-playwright/-/expect-playwright-0.8.0.tgz"
|
||||
integrity sha512-+kn8561vHAY+dt+0gMqqj1oY+g5xWrsuGMk4QGxotT2WS545nVqqjs37z6hrYfIuucwqthzwJfCJUEYqixyljg==
|
||||
|
||||
expect@^27.5.1:
|
||||
version "27.5.1"
|
||||
resolved "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz"
|
||||
@@ -8828,15 +8707,6 @@ finalhandler@1.2.0:
|
||||
statuses "2.0.1"
|
||||
unpipe "~1.0.0"
|
||||
|
||||
find-cache-dir@^3.2.0:
|
||||
version "3.3.2"
|
||||
resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz"
|
||||
integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==
|
||||
dependencies:
|
||||
commondir "^1.0.1"
|
||||
make-dir "^3.0.2"
|
||||
pkg-dir "^4.1.0"
|
||||
|
||||
find-cache-dir@^4.0.0:
  version "4.0.0"
  resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-4.0.0.tgz#a30ee0448f81a3990708f6453633c733e2f6eec2"
@@ -8845,30 +8715,6 @@ find-cache-dir@^4.0.0:
    common-path-prefix "^3.0.0"
    pkg-dir "^7.0.0"

-find-file-up@^0.1.2:
-  version "0.1.3"
-  resolved "https://registry.npmjs.org/find-file-up/-/find-file-up-0.1.3.tgz"
-  integrity sha512-mBxmNbVyjg1LQIIpgO8hN+ybWBgDQK8qjht+EbrTCGmmPV/sc7RF1i9stPTD6bpvXZywBdrwRYxhSdJv867L6A==
-  dependencies:
-    fs-exists-sync "^0.1.0"
-    resolve-dir "^0.1.0"

-find-pkg@^0.1.2:
-  version "0.1.2"
-  resolved "https://registry.npmjs.org/find-pkg/-/find-pkg-0.1.2.tgz"
-  integrity sha512-0rnQWcFwZr7eO0513HahrWafsc3CTFioEB7DRiEYCUM/70QXSY8f3mCST17HXLcPvEhzH/Ty/Bxd72ZZsr/yvw==
-  dependencies:
-    find-file-up "^0.1.2"

-find-process@^1.4.4:
-  version "1.4.7"
-  resolved "https://registry.npmjs.org/find-process/-/find-process-1.4.7.tgz"
-  integrity sha512-/U4CYp1214Xrp3u3Fqr9yNynUrr5Le4y0SsJh2lMDDSbpwYSz3M2SMWQC+wqcx79cN8PQtHQIL8KnuY9M66fdg==
-  dependencies:
-    chalk "^4.0.0"
-    commander "^5.1.0"
-    debug "^4.1.1"

find-up@^4.0.0, find-up@^4.1.0:
  version "4.1.0"
  resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz"
@@ -8925,7 +8771,7 @@ flubber@^0.4.2:
    svgpath "^2.2.1"
    topojson-client "^3.0.0"

-follow-redirects@^1.0.0, follow-redirects@^1.14.0, follow-redirects@^1.15.6:
+follow-redirects@^1.0.0, follow-redirects@^1.15.6:
  version "1.15.6"
  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b"
  integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==
@@ -8962,14 +8808,6 @@ force-graph@1:
    kapsule "^1.14"
    lodash-es "4"

-foreground-child@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz"
-  integrity sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==
-  dependencies:
-    cross-spawn "^7.0.0"
-    signal-exit "^3.0.2"

form-data@^3.0.0:
  version "3.0.1"
  resolved "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz"
@@ -9008,16 +8846,11 @@ fresh@0.5.2:
  resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz"
  integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==

-fromentries@^1.2.0, fromentries@^1.3.2:
+fromentries@^1.3.2:
  version "1.3.2"
  resolved "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz"
  integrity sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==

-fs-exists-sync@^0.1.0:
-  version "0.1.0"
-  resolved "https://registry.npmjs.org/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz"
-  integrity sha512-cR/vflFyPZtrN6b38ZyWxpWdhlXrzZEBawlpBQMq7033xVY7/kg0GDMBK5jg8lDYQckdJ5x/YC88lM3C7VMsLg==

fs-extra@^10.0.0:
  version "10.1.0"
  resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz"
@@ -9104,7 +8937,7 @@ geotiff@^2.0.7:
    web-worker "^1.2.0"
    xml-utils "^1.0.2"

-get-caller-file@^2.0.1, get-caller-file@^2.0.5:
+get-caller-file@^2.0.5:
  version "2.0.5"
  resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz"
  integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
@@ -9208,24 +9041,6 @@ global-dirs@^0.1.1:
  dependencies:
    ini "^1.3.4"

-global-modules@^0.2.3:
-  version "0.2.3"
-  resolved "https://registry.npmjs.org/global-modules/-/global-modules-0.2.3.tgz"
-  integrity sha512-JeXuCbvYzYXcwE6acL9V2bAOeSIGl4dD+iwLY9iUx2VBJJ80R18HCn+JCwHM9Oegdfya3lEkGCdaRkSyc10hDA==
-  dependencies:
-    global-prefix "^0.1.4"
-    is-windows "^0.2.0"

-global-prefix@^0.1.4:
-  version "0.1.5"
-  resolved "https://registry.npmjs.org/global-prefix/-/global-prefix-0.1.5.tgz"
-  integrity sha512-gOPiyxcD9dJGCEArAhF4Hd0BAqvAe/JzERP7tYumE4yIkmIedPUVXcJFWbV3/p/ovIIvKjkrTk+f1UVkq7vvbw==
-  dependencies:
-    homedir-polyfill "^1.0.0"
-    ini "^1.3.4"
-    is-windows "^0.2.0"
-    which "^1.2.12"

global@^4.3.0, global@~4.4.0:
  version "4.4.0"
  resolved "https://registry.npmjs.org/global/-/global-4.4.0.tgz"
@@ -9272,7 +9087,7 @@ gopd@^1.0.1:
  dependencies:
    get-intrinsic "^1.1.3"

-graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9:
+graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9:
  version "4.2.11"
  resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz"
  integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
@@ -9345,14 +9160,6 @@ has@^1.0.3:
  dependencies:
    function-bind "^1.1.1"

-hasha@^5.0.0:
-  version "5.2.2"
-  resolved "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz"
-  integrity sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==
-  dependencies:
-    is-stream "^2.0.0"
-    type-fest "^0.8.0"

hasown@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c"
@@ -9633,13 +9440,6 @@ hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.1.0, hoist-non-react-
  dependencies:
    react-is "^16.7.0"

-homedir-polyfill@^1.0.0:
-  version "1.0.3"
-  resolved "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz"
-  integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==
-  dependencies:
-    parse-passwd "^1.0.0"

hosted-git-info@^2.1.4:
  version "2.8.9"
  resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz"
@@ -10376,16 +10176,6 @@ is-what@^3.14.1:
  resolved "https://registry.npmjs.org/is-what/-/is-what-3.14.1.tgz"
  integrity sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==

-is-windows@^0.2.0:
-  version "0.2.0"
-  resolved "https://registry.npmjs.org/is-windows/-/is-windows-0.2.0.tgz"
-  integrity sha512-n67eJYmXbniZB7RF4I/FTjK1s6RPOCTxhYrVYLRaCt3lF0mpWZPKr3T2LSZAqyjQsxR2qMmGYXXzK0YWwcPM1Q==

-is-windows@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz"
-  integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==

is-wsl@^2.2.0:
  version "2.2.0"
  resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271"
@@ -10423,23 +10213,6 @@ istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0:
  resolved "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz"
  integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==

-istanbul-lib-hook@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz"
-  integrity sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==
-  dependencies:
-    append-transform "^2.0.0"

-istanbul-lib-instrument@^4.0.0:
-  version "4.0.3"
-  resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz"
-  integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==
-  dependencies:
-    "@babel/core" "^7.7.5"
-    "@istanbuljs/schema" "^0.1.2"
-    istanbul-lib-coverage "^3.0.0"
-    semver "^6.3.0"

istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0:
  version "5.2.1"
  resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz"
@@ -10451,18 +10224,6 @@ istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0:
    istanbul-lib-coverage "^3.2.0"
    semver "^6.3.0"

-istanbul-lib-processinfo@^2.0.2:
-  version "2.0.3"
-  resolved "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz"
-  integrity sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==
-  dependencies:
-    archy "^1.0.0"
-    cross-spawn "^7.0.3"
-    istanbul-lib-coverage "^3.2.0"
-    p-map "^3.0.0"
-    rimraf "^3.0.0"
-    uuid "^8.3.2"

istanbul-lib-report@^3.0.0:
  version "3.0.0"
  resolved "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz"
@@ -10481,7 +10242,7 @@ istanbul-lib-source-maps@^4.0.0:
    istanbul-lib-coverage "^3.0.0"
    source-map "^0.6.1"

-istanbul-reports@^3.0.2, istanbul-reports@^3.1.3:
+istanbul-reports@^3.1.3:
  version "3.1.5"
  resolved "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz"
  integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==
@@ -10777,39 +10538,11 @@ jest-mock@^27.5.1:
    "@jest/types" "^27.5.1"
    "@types/node" "*"

-jest-playwright-preset@^1.7.2:
-  version "1.7.2"
-  resolved "https://registry.yarnpkg.com/jest-playwright-preset/-/jest-playwright-preset-1.7.2.tgz#708942c4dcc1edc85429079d2b47a9382298c454"
-  integrity sha512-0M7M3z342bdKQLnS70cIptlJsW+uuGptbPnqIMg4K5Vp/L/DhqdTKZK7WM4n6miAUnZdUcjXKOdQWfZW/aBo7w==
-  dependencies:
-    expect-playwright "^0.8.0"
-    jest-process-manager "^0.3.1"
-    nyc "^15.1.0"
-    playwright-core ">=1.2.0"
-    rimraf "^3.0.2"
-    uuid "^8.3.2"

jest-pnp-resolver@^1.2.2:
  version "1.2.3"
  resolved "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz"
  integrity sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==

-jest-process-manager@^0.3.1:
-  version "0.3.1"
-  resolved "https://registry.npmjs.org/jest-process-manager/-/jest-process-manager-0.3.1.tgz"
-  integrity sha512-x9W54UgZ7IkzUHgXtnI1x4GKOVjxtwW0CA/7yGbTHtT/YhENO0Lic2yfVyC/gekn7OIEMcQmy0L1r9WLQABfqw==
-  dependencies:
-    "@types/wait-on" "^5.2.0"
-    chalk "^4.1.0"
-    cwd "^0.10.0"
-    exit "^0.1.2"
-    find-process "^1.4.4"
-    prompts "^2.4.1"
-    signal-exit "^3.0.3"
-    spawnd "^5.0.0"
-    tree-kill "^1.2.2"
-    wait-on "^5.3.0"

jest-regex-util@^27.5.1:
  version "27.5.1"
  resolved "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.5.1.tgz"
@@ -11037,17 +10770,6 @@ jju@~1.4.0:
  resolved "https://registry.yarnpkg.com/jju/-/jju-1.4.0.tgz#a3abe2718af241a2b2904f84a625970f389ae32a"
  integrity sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==

-joi@^17.3.0:
-  version "17.9.2"
-  resolved "https://registry.npmjs.org/joi/-/joi-17.9.2.tgz"
-  integrity sha512-Itk/r+V4Dx0V3c7RLFdRh12IOjySm2/WGPMubBT92cQvRfYZhPM2W0hZlctjj72iES8jsRCwp7S/cRmWBnJ4nw==
-  dependencies:
-    "@hapi/hoek" "^9.0.0"
-    "@hapi/topo" "^5.0.0"
-    "@sideway/address" "^4.1.3"
-    "@sideway/formula" "^3.0.1"
-    "@sideway/pinpoint" "^2.0.0"

js-base64@^3.7.2:
  version "3.7.5"
  resolved "https://registry.npmjs.org/js-base64/-/js-base64-3.7.5.tgz"
@@ -11475,11 +11197,6 @@ lodash.debounce@^4.0.8:
  resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz"
  integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==

-lodash.flattendeep@^4.4.0:
-  version "4.4.0"
-  resolved "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz"
-  integrity sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==

lodash.get@^4.4.2:
  version "4.4.2"
  resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
@@ -11614,7 +11331,7 @@ make-dir@^2.1.0:
    pify "^4.0.1"
    semver "^5.6.0"

-make-dir@^3.0.0, make-dir@^3.0.2:
+make-dir@^3.0.0:
  version "3.1.0"
  resolved "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz"
  integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==
@@ -12456,7 +12173,7 @@ minimist-options@4.1.0:
    is-plain-obj "^1.1.0"
    kind-of "^6.0.3"

-minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6:
+minimist@^1.2.0, minimist@^1.2.6:
  version "1.2.8"
  resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz"
  integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
@@ -12691,13 +12408,6 @@ node-int64@^0.4.0:
  resolved "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz"
  integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==

-node-preload@^0.2.1:
-  version "0.2.1"
-  resolved "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz"
-  integrity sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==
-  dependencies:
-    process-on-spawn "^1.0.0"

node-releases@^2.0.13:
  version "2.0.13"
  resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.13.tgz#d5ed1627c23e3461e819b02e57b75e4899b1c81d"
@@ -12787,39 +12497,6 @@ nwsapi@^2.2.0:
  resolved "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.4.tgz"
  integrity sha512-NHj4rzRo0tQdijE9ZqAx6kYDcoRwYwSYzCA8MY3JzfxlrvEU0jhnhJT9BhqhJs7I/dKcrDm6TyulaRqZPIhN5g==

-nyc@^15.1.0:
-  version "15.1.0"
-  resolved "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz"
-  integrity sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==
-  dependencies:
-    "@istanbuljs/load-nyc-config" "^1.0.0"
-    "@istanbuljs/schema" "^0.1.2"
-    caching-transform "^4.0.0"
-    convert-source-map "^1.7.0"
-    decamelize "^1.2.0"
-    find-cache-dir "^3.2.0"
-    find-up "^4.1.0"
-    foreground-child "^2.0.0"
-    get-package-type "^0.1.0"
-    glob "^7.1.6"
-    istanbul-lib-coverage "^3.0.0"
-    istanbul-lib-hook "^3.0.0"
-    istanbul-lib-instrument "^4.0.0"
-    istanbul-lib-processinfo "^2.0.2"
-    istanbul-lib-report "^3.0.0"
-    istanbul-lib-source-maps "^4.0.0"
-    istanbul-reports "^3.0.2"
-    make-dir "^3.0.0"
-    node-preload "^0.2.1"
-    p-map "^3.0.0"
-    process-on-spawn "^1.0.0"
-    resolve-from "^5.0.0"
-    rimraf "^3.0.0"
-    signal-exit "^3.0.2"
-    spawn-wrap "^2.0.0"
-    test-exclude "^6.0.0"
-    yargs "^15.0.2"

object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
  version "4.1.1"
  resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz"
@@ -13013,11 +12690,6 @@ ora@^5.4.1:
    strip-ansi "^6.0.0"
    wcwidth "^1.0.1"

-os-homedir@^1.0.1:
-  version "1.0.2"
-  resolved "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz"
-  integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==

os-tmpdir@~1.0.2:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
@@ -13080,13 +12752,6 @@ p-locate@^6.0.0:
  dependencies:
    p-limit "^4.0.0"

-p-map@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz"
-  integrity sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==
-  dependencies:
-    aggregate-error "^3.0.0"

p-map@^4.0.0:
  version "4.0.0"
  resolved "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz"
@@ -13107,16 +12772,6 @@ p-try@^2.0.0:
  resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz"
  integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==

-package-hash@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz"
-  integrity sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==
-  dependencies:
-    graceful-fs "^4.1.15"
-    hasha "^5.0.0"
-    lodash.flattendeep "^4.4.0"
-    release-zalgo "^1.0.0"

pako@^2.0.4:
  version "2.1.0"
  resolved "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz"
@@ -13229,11 +12884,6 @@ parse-numeric-range@^1.3.0:
  resolved "https://registry.yarnpkg.com/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz#7c63b61190d61e4d53a1197f0c83c47bb670ffa3"
  integrity sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==

-parse-passwd@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz"
-  integrity sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q==

parse5-htmlparser2-tree-adapter@^6.0.1:
  version "6.0.1"
  resolved "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz"
@@ -13410,7 +13060,7 @@ pirates@^4.0.4:
  resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz"
  integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==

-pkg-dir@^4.1.0, pkg-dir@^4.2.0:
+pkg-dir@^4.2.0:
  version "4.2.0"
  resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz"
  integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==
@@ -13424,7 +13074,7 @@ pkg-dir@^7.0.0:
  dependencies:
    find-up "^6.3.0"

-playwright-core@1.33.0, playwright-core@>=1.2.0:
+playwright-core@1.33.0:
  version "1.33.0"
  resolved "https://registry.npmjs.org/playwright-core/-/playwright-core-1.33.0.tgz"
  integrity sha512-aizyPE1Cj62vAECdph1iaMILpT0WUDCq3E6rW6I+dleSbBoGbktvJtzS6VHkZ4DKNEOG9qJpiom/ZxO+S15LAw==
@@ -13807,13 +13457,6 @@ process-nextick-args@~2.0.0:
  resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz"
  integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==

-process-on-spawn@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz"
-  integrity sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==
-  dependencies:
-    fromentries "^1.2.0"

process@^0.11.10:
  version "0.11.10"
  resolved "https://registry.npmjs.org/process/-/process-0.11.10.tgz"
@@ -13829,7 +13472,7 @@ promise-polyfill@^3.1.0:
  resolved "https://registry.npmjs.org/promise-polyfill/-/promise-polyfill-3.1.0.tgz"
  integrity sha512-t20OwHJ4ZOUj5fV+qms67oczphAVkRC6Rrjcrne+V1FJkQMym7n69xJmYyXHulm9OUQ0Ie5KSzg0QhOYgaxy+w==

-prompts@^2.0.1, prompts@^2.4.1:
+prompts@^2.0.1:
  version "2.4.2"
  resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz"
  integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==
@@ -15010,13 +14653,6 @@ relateurl@^0.2.7:
  resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz"
  integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==

-release-zalgo@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz"
-  integrity sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==
-  dependencies:
-    es6-error "^4.0.1"

remark-gfm@~3.0.1:
  version "3.0.1"
  resolved "https://registry.yarnpkg.com/remark-gfm/-/remark-gfm-3.0.1.tgz#0b180f095e3036545e9dddac0e8df3fa5cfee54f"
@@ -15080,11 +14716,6 @@ require-from-string@^2.0.2:
  resolved "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz"
  integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==

-require-main-filename@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz"
-  integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==

requires-port@^1.0.0:
  version "1.0.0"
  resolved "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz"
@@ -15107,14 +14738,6 @@ resolve-cwd@^3.0.0:
  dependencies:
    resolve-from "^5.0.0"

-resolve-dir@^0.1.0:
-  version "0.1.1"
-  resolved "https://registry.npmjs.org/resolve-dir/-/resolve-dir-0.1.1.tgz"
-  integrity sha512-QxMPqI6le2u0dCLyiGzgy92kjkkL6zO0XyvHzjdTNH3zM6e5Hz3BwG6+aEyNgiQ5Xz6PwTwgQEj3U50dByPKIA==
-  dependencies:
-    expand-tilde "^1.2.2"
-    global-modules "^0.2.3"

resolve-from@5.0.0, resolve-from@^5.0.0:
  version "5.0.0"
  resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz"
@@ -15250,13 +14873,6 @@ rxjs@7.8.0:
  dependencies:
    tslib "^2.1.0"

-rxjs@^6.6.3:
-  version "6.6.7"
-  resolved "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz"
-  integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==
-  dependencies:
-    tslib "^1.9.0"

rxjs@^7.5.5:
  version "7.8.1"
  resolved "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz"
@@ -15483,11 +15099,6 @@ serve-static@1.15.0:
    parseurl "~1.3.3"
    send "0.18.0"

-set-blocking@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz"
-  integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==

set-cookie-parser@^2.4.6:
  version "2.6.0"
  resolved "https://registry.yarnpkg.com/set-cookie-parser/-/set-cookie-parser-2.6.0.tgz#131921e50f62ff1a66a461d7d62d7b21d5d15a51"
@@ -15722,28 +15333,6 @@ space-separated-tokens@^2.0.0:
  resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz#1ecd9d2350a3844572c3f4a312bceb018348859f"
  integrity sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==

-spawn-wrap@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz"
-  integrity sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==
-  dependencies:
-    foreground-child "^2.0.0"
-    is-windows "^1.0.2"
-    make-dir "^3.0.0"
-    rimraf "^3.0.0"
-    signal-exit "^3.0.2"
-    which "^2.0.1"

-spawnd@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.npmjs.org/spawnd/-/spawnd-5.0.0.tgz"
-  integrity sha512-28+AJr82moMVWolQvlAIv3JcYDkjkFTEmfDc503wxrF5l2rQ3dFz6DpbXp3kD4zmgGGldfM4xM4v1sFj/ZaIOA==
-  dependencies:
-    exit "^0.1.2"
-    signal-exit "^3.0.3"
-    tree-kill "^1.2.2"
-    wait-port "^0.2.9"

spdx-correct@^3.0.0:
  version "3.2.0"
  resolved "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz"
@@ -16450,11 +16039,6 @@ tr46@~0.0.3:
  resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz"
  integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==

-tree-kill@^1.2.2:
-  version "1.2.2"
-  resolved "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz"
-  integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==

trim-lines@^3.0.0:
  version "3.0.1"
  resolved "https://registry.yarnpkg.com/trim-lines/-/trim-lines-3.0.1.tgz#d802e332a07df861c48802c04321017b1bd87338"
@@ -16541,7 +16125,7 @@ tslib@2.5.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0:
  resolved "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz"
  integrity sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==

-tslib@^1.8.1, tslib@^1.9.0:
+tslib@^1.8.1:
  version "1.14.1"
  resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz"
  integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
@@ -16614,7 +16198,7 @@ type-fest@^0.6.0:
  resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz"
  integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==

-type-fest@^0.8.0, type-fest@^0.8.1:
+type-fest@^0.8.1:
  version "0.8.1"
  resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz"
  integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
@@ -17161,26 +16745,6 @@ w3c-xmlserializer@^2.0.0:
  dependencies:
    xml-name-validator "^3.0.0"

-wait-on@^5.3.0:
-  version "5.3.0"
-  resolved "https://registry.npmjs.org/wait-on/-/wait-on-5.3.0.tgz"
-  integrity sha512-DwrHrnTK+/0QFaB9a8Ol5Lna3k7WvUR4jzSKmz0YaPBpuN2sACyiPVKVfj6ejnjcajAcvn3wlbTyMIn9AZouOg==
-  dependencies:
-    axios "^0.21.1"
-    joi "^17.3.0"
-    lodash "^4.17.21"
-    minimist "^1.2.5"
-    rxjs "^6.6.3"

-wait-port@^0.2.9:
-  version "0.2.14"
-  resolved "https://registry.npmjs.org/wait-port/-/wait-port-0.2.14.tgz"
-  integrity sha512-kIzjWcr6ykl7WFbZd0TMae8xovwqcqbx6FM9l+7agOgUByhzdjfzZBPK2CPufldTOMxbUivss//Sh9MFawmPRQ==
-  dependencies:
-    chalk "^2.4.2"
-    commander "^3.0.2"
-    debug "^4.1.1"

walker@^1.0.7, walker@^1.0.8:
  version "1.0.8"
  resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f"
@@ -17549,11 +17113,6 @@ which-collection@^1.0.1:
    is-weakmap "^2.0.1"
    is-weakset "^2.0.1"

-which-module@^2.0.0:
-  version "2.0.1"
-  resolved "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz"
-  integrity sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==

which-typed-array@^1.1.10, which-typed-array@^1.1.11, which-typed-array@^1.1.2:
  version "1.1.11"
  resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.11.tgz#99d691f23c72aab6768680805a271b69761ed61a"
@@ -17577,7 +17136,7 @@ which-typed-array@^1.1.9:
    has-tostringtag "^1.0.0"
    is-typed-array "^1.1.10"

-which@^1.2.12, which@^1.2.9:
+which@^1.2.9:
  version "1.3.1"
  resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
  integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
@@ -17731,11 +17290,6 @@ xtend@^4.0.0:
  resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz"
  integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==

-y18n@^4.0.0:
-  version "4.0.3"
-  resolved "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz"
-  integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==

y18n@^5.0.5:
  version "5.0.8"
  resolved "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz"
@@ -17761,36 +17315,11 @@ yargs-parser@20.x, yargs-parser@^20.2.2, yargs-parser@^20.2.3:
  resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz"
  integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==

-yargs-parser@^18.1.2:
-  version "18.1.3"
-  resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz"
-  integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
-  dependencies:
-    camelcase "^5.0.0"
-    decamelize "^1.2.0"

yargs-parser@^21.1.1:
  version "21.1.1"
  resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz"
  integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==

-yargs@^15.0.2:
-  version "15.4.1"
-  resolved "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz"
-  integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==
-  dependencies:
-    cliui "^6.0.0"
-    decamelize "^1.2.0"
-    find-up "^4.1.0"
-    get-caller-file "^2.0.1"
-    require-directory "^2.1.1"
-    require-main-filename "^2.0.0"
-    set-blocking "^2.0.0"
-    string-width "^4.2.0"
-    which-module "^2.0.0"
-    y18n "^4.0.0"
-    yargs-parser "^18.1.2"

yargs@^16.2.0:
  version "16.2.0"
  resolved "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz"

@@ -1,15 +1,12 @@
 package clickhouseReader

 import (
-	"bytes"
 	"context"
 	"database/sql"
 	"encoding/json"
 	"fmt"
-	"io"
 	"math"
 	"math/rand"
-	"net/http"
 	"os"
 	"reflect"
 	"regexp"
@@ -136,8 +133,9 @@ type ClickHouseReader struct {
 	liveTailRefreshSeconds int
 	cluster                string

-	useLogsNewSchema bool
-	logsTableName    string
+	useLogsNewSchema   bool
+	logsTableName      string
+	logsLocalTableName string
 }

 // NewTraceReader returns a TraceReader for the database
@@ -172,7 +170,7 @@ func NewReaderFromClickhouseConnection(
 	cluster string,
 	useLogsNewSchema bool,
 ) *ClickHouseReader {
-	alertManager, err := am.New("")
+	alertManager, err := am.New()
 	if err != nil {
 		zap.L().Error("failed to initialize alert manager", zap.Error(err))
 		zap.L().Error("check if the alert manager URL is correctly set and valid")
@@ -202,8 +200,10 @@ func NewReaderFromClickhouseConnection(
 	}

 	logsTableName := options.primary.LogsTable
+	logsLocalTableName := options.primary.LogsLocalTable
 	if useLogsNewSchema {
 		logsTableName = options.primary.LogsTableV2
+		logsLocalTableName = options.primary.LogsLocalTableV2
 	}

 	return &ClickHouseReader{
@@ -240,6 +240,7 @@ func NewReaderFromClickhouseConnection(
 		logsResourceTableV2:      options.primary.LogsResourceTableV2,
 		logsResourceLocalTableV2: options.primary.LogsResourceLocalTableV2,
 		logsTableName:            logsTableName,
+		logsLocalTableName:       logsLocalTableName,
 	}
 }

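The constructor change above resolves both logs table names once, up front, so every later query formats against a single pair of fields. A minimal standalone sketch of that selection logic (type and field names are abbreviated here for illustration, not taken verbatim from the commit):

```go
package main

import "fmt"

type tableOptions struct {
	LogsTable, LogsLocalTable     string
	LogsTableV2, LogsLocalTableV2 string
}

// pickLogsTables mirrors the constructor change: resolve the distributed
// and local table names once, based on the schema flag.
func pickLogsTables(opts tableOptions, useLogsNewSchema bool) (table, localTable string) {
	table, localTable = opts.LogsTable, opts.LogsLocalTable
	if useLogsNewSchema {
		table, localTable = opts.LogsTableV2, opts.LogsLocalTableV2
	}
	return table, localTable
}

func main() {
	opts := tableOptions{"distributed_logs", "logs", "distributed_logs_v2", "logs_v2"}
	fmt.Println(pickLogsTables(opts, true)) // distributed_logs_v2 logs_v2
}
```
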
@@ -410,267 +411,6 @@ func (r *ClickHouseReader) GetConn() clickhouse.Conn {
 	return r.db
 }

-func (r *ClickHouseReader) LoadChannel(channel *model.ChannelItem) *model.ApiError {
-
-	receiver := &am.Receiver{}
-	if err := json.Unmarshal([]byte(channel.Data), receiver); err != nil { // Parse []byte to go struct pointer
-		return &model.ApiError{Typ: model.ErrorBadData, Err: err}
-	}
-
-	response, err := http.Post(constants.GetAlertManagerApiPrefix()+"v1/receivers", "application/json", bytes.NewBuffer([]byte(channel.Data)))
-
-	if err != nil {
-		zap.L().Error("Error in getting response of API call to alertmanager/v1/receivers", zap.Error(err))
-		return &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-	if response.StatusCode > 299 {
-		responseData, _ := io.ReadAll(response.Body)
-
-		err := fmt.Errorf("error in getting 2xx response in API call to alertmanager/v1/receivers")
-		zap.L().Error("Error in getting 2xx response in API call to alertmanager/v1/receivers", zap.String("Status", response.Status), zap.String("Data", string(responseData)))
-
-		return &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	return nil
-}
-
-func (r *ClickHouseReader) GetChannel(id string) (*model.ChannelItem, *model.ApiError) {
-
-	idInt, _ := strconv.Atoi(id)
-	channel := model.ChannelItem{}
-
-	query := "SELECT id, created_at, updated_at, name, type, data data FROM notification_channels WHERE id=? "
-
-	stmt, err := r.localDB.Preparex(query)
-
-	if err != nil {
-		zap.L().Error("Error in preparing sql query for GetChannel", zap.Error(err))
-		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	err = stmt.Get(&channel, idInt)
-
-	if err != nil {
-		zap.L().Error("Error in getting channel with id", zap.Int("id", idInt), zap.Error(err))
-		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	return &channel, nil
-
-}
-
-func (r *ClickHouseReader) DeleteChannel(id string) *model.ApiError {
-
-	idInt, _ := strconv.Atoi(id)
-
-	channelToDelete, apiErrorObj := r.GetChannel(id)
-
-	if apiErrorObj != nil {
-		return apiErrorObj
-	}
-
-	tx, err := r.localDB.Begin()
-	if err != nil {
-		return &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	{
-		stmt, err := tx.Prepare(`DELETE FROM notification_channels WHERE id=$1;`)
-		if err != nil {
-			zap.L().Error("Error in preparing statement for INSERT to notification_channels", zap.Error(err))
-			tx.Rollback()
-			return &model.ApiError{Typ: model.ErrorInternal, Err: err}
-		}
-		defer stmt.Close()
-
-		if _, err := stmt.Exec(idInt); err != nil {
-			zap.L().Error("Error in Executing prepared statement for INSERT to notification_channels", zap.Error(err))
-			tx.Rollback() // return an error too, we may want to wrap them
-			return &model.ApiError{Typ: model.ErrorInternal, Err: err}
-		}
-	}
-
-	apiError := r.alertManager.DeleteRoute(channelToDelete.Name)
-	if apiError != nil {
-		tx.Rollback()
-		return apiError
-	}
-
-	err = tx.Commit()
-	if err != nil {
-		zap.L().Error("Error in committing transaction for DELETE command to notification_channels", zap.Error(err))
-		return &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	return nil
-
-}
-
-func (r *ClickHouseReader) GetChannels() (*[]model.ChannelItem, *model.ApiError) {
-
-	channels := []model.ChannelItem{}
-
-	query := "SELECT id, created_at, updated_at, name, type, data data FROM notification_channels"
-
-	err := r.localDB.Select(&channels, query)
-
-	zap.L().Info(query)
-
-	if err != nil {
-		zap.L().Error("Error in processing sql query", zap.Error(err))
-		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	return &channels, nil
-
-}
-
-func getChannelType(receiver *am.Receiver) string {
-
-	if receiver.EmailConfigs != nil {
-		return "email"
-	}
-	if receiver.OpsGenieConfigs != nil {
-		return "opsgenie"
-	}
-	if receiver.PagerdutyConfigs != nil {
-		return "pagerduty"
-	}
-	if receiver.PushoverConfigs != nil {
-		return "pushover"
-	}
-	if receiver.SNSConfigs != nil {
-		return "sns"
-	}
-	if receiver.SlackConfigs != nil {
-		return "slack"
-	}
-	if receiver.VictorOpsConfigs != nil {
-		return "victorops"
-	}
-	if receiver.WebhookConfigs != nil {
-		return "webhook"
-	}
-	if receiver.WechatConfigs != nil {
-		return "wechat"
-	}
-	if receiver.MSTeamsConfigs != nil {
-		return "msteams"
-	}
-	return ""
-}
-
-func (r *ClickHouseReader) EditChannel(receiver *am.Receiver, id string) (*am.Receiver, *model.ApiError) {
-
-	idInt, _ := strconv.Atoi(id)
-
-	channel, apiErrObj := r.GetChannel(id)
-
-	if apiErrObj != nil {
-		return nil, apiErrObj
-	}
-	if channel.Name != receiver.Name {
-		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("channel name cannot be changed")}
-	}
-
-	tx, err := r.localDB.Begin()
-	if err != nil {
-		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	channel_type := getChannelType(receiver)
-
-	// check if channel type is supported in the current user plan
-	if err := r.featureFlags.CheckFeature(fmt.Sprintf("ALERT_CHANNEL_%s", strings.ToUpper(channel_type))); err != nil {
-		zap.L().Warn("an unsupported feature was blocked", zap.Error(err))
-		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("unsupported feature. please upgrade your plan to access this feature")}
-	}
-
-	receiverString, _ := json.Marshal(receiver)
-
-	{
-		stmt, err := tx.Prepare(`UPDATE notification_channels SET updated_at=$1, type=$2, data=$3 WHERE id=$4;`)
-
-		if err != nil {
-			zap.L().Error("Error in preparing statement for UPDATE to notification_channels", zap.Error(err))
-			tx.Rollback()
-			return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-		}
-		defer stmt.Close()
-
-		if _, err := stmt.Exec(time.Now(), channel_type, string(receiverString), idInt); err != nil {
-			zap.L().Error("Error in Executing prepared statement for UPDATE to notification_channels", zap.Error(err))
-			tx.Rollback() // return an error too, we may want to wrap them
-			return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-		}
-	}
-
-	apiError := r.alertManager.EditRoute(receiver)
-	if apiError != nil {
-		tx.Rollback()
-		return nil, apiError
-	}
-
-	err = tx.Commit()
-	if err != nil {
-		zap.L().Error("Error in committing transaction for INSERT to notification_channels", zap.Error(err))
-		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	return receiver, nil
-
-}
-
-func (r *ClickHouseReader) CreateChannel(receiver *am.Receiver) (*am.Receiver, *model.ApiError) {
-
-	channel_type := getChannelType(receiver)
-
-	// check if channel type is supported in the current user plan
-	if err := r.featureFlags.CheckFeature(fmt.Sprintf("ALERT_CHANNEL_%s", strings.ToUpper(channel_type))); err != nil {
-		zap.L().Warn("an unsupported feature was blocked", zap.Error(err))
-		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("unsupported feature. please upgrade your plan to access this feature")}
-	}
-
-	receiverString, _ := json.Marshal(receiver)
-
-	tx, err := r.localDB.Begin()
-	if err != nil {
-		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	{
-		stmt, err := tx.Prepare(`INSERT INTO notification_channels (created_at, updated_at, name, type, data) VALUES($1,$2,$3,$4,$5);`)
-		if err != nil {
-			zap.L().Error("Error in preparing statement for INSERT to notification_channels", zap.Error(err))
-			tx.Rollback()
-			return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-		}
-		defer stmt.Close()
-
-		if _, err := stmt.Exec(time.Now(), time.Now(), receiver.Name, channel_type, string(receiverString)); err != nil {
-			zap.L().Error("Error in Executing prepared statement for INSERT to notification_channels", zap.Error(err))
-			tx.Rollback() // return an error too, we may want to wrap them
-			return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-		}
-	}
-
-	apiError := r.alertManager.AddRoute(receiver)
-	if apiError != nil {
-		tx.Rollback()
-		return nil, apiError
-	}
-
-	err = tx.Commit()
-	if err != nil {
-		zap.L().Error("Error in committing transaction for INSERT to notification_channels", zap.Error(err))
-		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
-	}
-
-	return receiver, nil
-
-}
-
 func (r *ClickHouseReader) GetInstantQueryMetricsResult(ctx context.Context, queryParams *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError) {
 	qry, err := r.queryEngine.NewInstantQuery(ctx, r.remoteStorage, nil, queryParams.Query, queryParams.Time)
 	if err != nil {
|
||||
return &serviceOverviewItems, nil
|
||||
}
|
||||
|
||||
func buildFilterArrayQuery(ctx context.Context, excludeMap map[string]struct{}, params []string, filter string, query *string, args []interface{}) []interface{} {
|
||||
func buildFilterArrayQuery(_ context.Context, excludeMap map[string]struct{}, params []string, filter string, query *string, args []interface{}) []interface{} {
|
||||
for i, e := range params {
|
||||
filterKey := filter + String(5)
|
||||
if i == 0 && i == len(params)-1 {
|
||||
@@ -1497,7 +1237,7 @@ func String(length int) string {
|
||||
return StringWithCharset(length, charset)
|
||||
}
|
||||
|
||||
func buildQueryWithTagParams(ctx context.Context, tags []model.TagQuery) (string, []interface{}, *model.ApiError) {
|
||||
func buildQueryWithTagParams(_ context.Context, tags []model.TagQuery) (string, []interface{}, *model.ApiError) {
|
||||
query := ""
|
||||
var args []interface{}
|
||||
for _, item := range tags {
|
||||
@@ -1707,7 +1447,7 @@ func (r *ClickHouseReader) GetTagFilters(ctx context.Context, queryParams *model
|
||||
return &tagFiltersResult, nil
|
||||
}
|
||||
|
||||
func excludeTags(ctx context.Context, tags []string) []string {
|
||||
func excludeTags(_ context.Context, tags []string) []string {
|
||||
excludedTagsMap := map[string]bool{
|
||||
"http.code": true,
|
||||
"http.route": true,
|
||||
@@ -2461,7 +2201,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context,
|
||||
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) deleteTtlTransactions(ctx context.Context, numberOfTransactionsStore int) {
|
||||
func (r *ClickHouseReader) deleteTtlTransactions(_ context.Context, numberOfTransactionsStore int) {
|
||||
_, err := r.localDB.Exec("DELETE FROM ttl_status WHERE transaction_id NOT IN (SELECT distinct transaction_id FROM ttl_status ORDER BY created_at DESC LIMIT ?)", numberOfTransactionsStore)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing ttl_status delete sql query", zap.Error(err))
|
||||
@@ -2469,7 +2209,7 @@ func (r *ClickHouseReader) deleteTtlTransactions(ctx context.Context, numberOfTr
|
||||
}
|
||||
|
||||
// checkTTLStatusItem checks if ttl_status table has an entry for the given table name
|
||||
func (r *ClickHouseReader) checkTTLStatusItem(ctx context.Context, tableName string) (model.TTLStatusItem, *model.ApiError) {
|
||||
func (r *ClickHouseReader) checkTTLStatusItem(_ context.Context, tableName string) (model.TTLStatusItem, *model.ApiError) {
|
||||
statusItem := []model.TTLStatusItem{}
|
||||
|
||||
query := `SELECT id, status, ttl, cold_storage_ttl FROM ttl_status WHERE table_name = ? ORDER BY created_at DESC`
|
||||
@@ -3268,23 +3008,23 @@ func (r *ClickHouseReader) GetLogFields(ctx context.Context) (*model.GetFieldsRe
|
||||
resources = removeUnderscoreDuplicateFields(resources)
|
||||
|
||||
statements := []model.ShowCreateTableStatement{}
|
||||
query = fmt.Sprintf("SHOW CREATE TABLE %s.%s", r.logsDB, r.logsTable)
|
||||
query = fmt.Sprintf("SHOW CREATE TABLE %s.%s", r.logsDB, r.logsLocalTableName)
|
||||
err = r.db.Select(ctx, &statements, query)
|
||||
if err != nil {
|
||||
return nil, &model.ApiError{Err: err, Typ: model.ErrorInternal}
|
||||
}
|
||||
|
||||
extractSelectedAndInterestingFields(statements[0].Statement, constants.Attributes, &attributes, &response)
|
||||
extractSelectedAndInterestingFields(statements[0].Statement, constants.Resources, &resources, &response)
|
||||
r.extractSelectedAndInterestingFields(statements[0].Statement, constants.Attributes, &attributes, &response)
|
||||
r.extractSelectedAndInterestingFields(statements[0].Statement, constants.Resources, &resources, &response)
|
||||
|
||||
return &response, nil
|
||||
}
|
||||
|
||||
func extractSelectedAndInterestingFields(tableStatement string, fieldType string, fields *[]model.LogField, response *model.GetFieldsResponse) {
|
||||
func (r *ClickHouseReader) extractSelectedAndInterestingFields(tableStatement string, fieldType string, fields *[]model.LogField, response *model.GetFieldsResponse) {
|
||||
for _, field := range *fields {
|
||||
field.Type = fieldType
|
||||
// all static fields are assumed to be selected as we don't allow changing them
|
||||
if isSelectedField(tableStatement, field) {
|
||||
if isColumn(r.useLogsNewSchema, tableStatement, field.Type, field.Name, field.DataType) {
|
||||
response.Selected = append(response.Selected, field)
|
||||
} else {
|
||||
response.Interesting = append(response.Interesting, field)
|
||||
@@ -3292,13 +3032,6 @@ func extractSelectedAndInterestingFields(tableStatement string, fieldType string
|
||||
}
|
||||
}
|
||||
|
||||
func isSelectedField(tableStatement string, field model.LogField) bool {
|
||||
// in case of attributes and resources, if there is a materialized column present then it is selected
|
||||
// TODO: handle partial change complete eg:- index is removed but materialized column is still present
|
||||
name := utils.GetClickhouseColumnName(field.Type, field.DataType, field.Name)
|
||||
return strings.Contains(tableStatement, name)
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) UpdateLogFieldV2(ctx context.Context, field *model.UpdateField) *model.ApiError {
|
||||
if !field.Selected {
|
||||
return model.ForbiddenError(errors.New("removing a selected field is not allowed, please reach out to support."))
|
||||
@@ -3974,7 +3707,8 @@ func isColumn(useLogsNewSchema bool, tableStatement, attrType, field, datType st
|
||||
// value of attrType will be `resource` or `tag`, if `tag` change it to `attribute`
|
||||
var name string
|
||||
if useLogsNewSchema {
|
||||
name = utils.GetClickhouseColumnNameV2(attrType, datType, field)
|
||||
// adding explict '`'
|
||||
name = fmt.Sprintf("`%s`", utils.GetClickhouseColumnNameV2(attrType, datType, field))
|
||||
} else {
|
||||
name = utils.GetClickhouseColumnName(attrType, datType, field)
|
||||
}
|
||||
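A note on the backtick change in `isColumn`: V2 column names are derived from attribute keys and can contain characters such as dots, so matching the bare name against the `SHOW CREATE TABLE` output can hit substrings of longer column names. Wrapping the name in backticks matches the exact quoted identifier instead. A toy illustration with hypothetical column names (the real ones come from `utils.GetClickhouseColumnNameV2`):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// SHOW CREATE TABLE emits materialized columns as backtick-quoted identifiers.
	tableStatement := "CREATE TABLE signoz_logs.logs_v2 (`attribute_string_user.id_str` String)"

	// The bare name also matches inside the longer column name: a false positive.
	fmt.Println(strings.Contains(tableStatement, "attribute_string_user.id")) // true

	// The quoted form only matches the exact identifier.
	fmt.Println(strings.Contains(tableStatement, fmt.Sprintf("`%s`", "attribute_string_user.id"))) // false
}
```
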
@@ -4033,7 +3767,7 @@ func (r *ClickHouseReader) GetLogAggregateAttributes(ctx context.Context, req *v
 	defer rows.Close()

 	statements := []model.ShowCreateTableStatement{}
-	query = fmt.Sprintf("SHOW CREATE TABLE %s.%s", r.logsDB, r.logsTable)
+	query = fmt.Sprintf("SHOW CREATE TABLE %s.%s", r.logsDB, r.logsLocalTableName)
 	err = r.db.Select(ctx, &statements, query)
 	if err != nil {
 		return nil, fmt.Errorf("error while fetching logs schema: %s", err.Error())
@@ -4087,7 +3821,7 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
 	defer rows.Close()

 	statements := []model.ShowCreateTableStatement{}
-	query = fmt.Sprintf("SHOW CREATE TABLE %s.%s", r.logsDB, r.logsTable)
+	query = fmt.Sprintf("SHOW CREATE TABLE %s.%s", r.logsDB, r.logsLocalTableName)
 	err = r.db.Select(ctx, &statements, query)
 	if err != nil {
 		return nil, fmt.Errorf("error while fetching logs schema: %s", err.Error())
@@ -4174,10 +3908,10 @@ func (r *ClickHouseReader) GetLogAttributeValues(ctx context.Context, req *v3.Fi

 		// prepare the query and run
 		if len(req.SearchText) != 0 {
-			query = fmt.Sprintf("select distinct %s from %s.%s where timestamp >= toInt64(toUnixTimestamp(now() - INTERVAL 48 HOUR)*1000000000) and %s ILIKE $1 limit $2", selectKey, r.logsDB, r.logsTable, filterValueColumnWhere)
+			query = fmt.Sprintf("select distinct %s from %s.%s where timestamp >= toInt64(toUnixTimestamp(now() - INTERVAL 48 HOUR)*1000000000) and %s ILIKE $1 limit $2", selectKey, r.logsDB, r.logsLocalTableName, filterValueColumnWhere)
 			rows, err = r.db.Query(ctx, query, searchText, req.Limit)
 		} else {
-			query = fmt.Sprintf("select distinct %s from %s.%s where timestamp >= toInt64(toUnixTimestamp(now() - INTERVAL 48 HOUR)*1000000000) limit $1", selectKey, r.logsDB, r.logsTable)
+			query = fmt.Sprintf("select distinct %s from %s.%s where timestamp >= toInt64(toUnixTimestamp(now() - INTERVAL 48 HOUR)*1000000000) limit $1", selectKey, r.logsDB, r.logsLocalTableName)
 			rows, err = r.db.Query(ctx, query, req.Limit)
 		}
 	} else if len(req.SearchText) != 0 {
@@ -4364,41 +4098,65 @@ func (r *ClickHouseReader) GetQBFilterSuggestionsForLogs(
 func (r *ClickHouseReader) getValuesForLogAttributes(
 	ctx context.Context, attributes []v3.AttributeKey, limit uint64,
 ) ([][]any, *model.ApiError) {
-	// query top `limit` distinct values seen for `tagKey`s of interest
-	// ordered by timestamp when the value was seen
-	query := fmt.Sprintf(
-		`
-		select tagKey, stringTagValue, int64TagValue, float64TagValue
-		from (
-			select
-				tagKey,
-				stringTagValue,
-				int64TagValue,
-				float64TagValue,
-				row_number() over (partition by tagKey order by ts desc) as rank
-			from (
-				select
-					tagKey,
-					stringTagValue,
-					int64TagValue,
-					float64TagValue,
-					max(timestamp) as ts
-				from %s.%s
-				where tagKey in $1
-				group by (tagKey, stringTagValue, int64TagValue, float64TagValue)
-			)
-		)
-		where rank <= %d
-		`,
-		r.logsDB, r.logsTagAttributeTable, limit,
-	)
+	/*
+		The query used here needs to be as cheap as possible, and while uncommon, it is possible for
+		a tag to have 100s of millions of values (eg: message, request_id)

-	attribNames := []string{}
-	for _, attrib := range attributes {
-		attribNames = append(attribNames, attrib.Key)
+		Construct a query to UNION the result of querying first `limit` values for each attribute. For example:
+		```
+		select * from (
+			(
+				select tagKey, stringTagValue, int64TagValue, float64TagValue
+				from signoz_logs.distributed_tag_attributes
+				where tagKey = $1 and (
+					stringTagValue != '' or int64TagValue is not null or float64TagValue is not null
+				)
+				limit 2
+			) UNION DISTINCT (
+				select tagKey, stringTagValue, int64TagValue, float64TagValue
+				from signoz_logs.distributed_tag_attributes
+				where tagKey = $2 and (
+					stringTagValue != '' or int64TagValue is not null or float64TagValue is not null
+				)
+				limit 2
+			)
+		) settings max_threads=2
+		```
+		Since tag_attributes table uses ReplacingMergeTree, the values would be distinct and no order by
+		is being used to ensure the `limit` clause minimizes the amount of data scanned.
+
+		This query scanned ~30k rows per attribute on fiscalnote-v2 for attributes like `message` and `time`
+		that had >~110M values each
+	*/
+
+	if len(attributes) > 10 {
+		zap.L().Error(
+			"log attribute values requested for too many attributes. This can lead to slow and costly queries",
+			zap.Int("count", len(attributes)),
+		)
+		attributes = attributes[:10]
 	}

-	rows, err := r.db.Query(ctx, query, attribNames)
+	tagQueries := []string{}
+	tagKeyQueryArgs := []any{}
+	for idx, attrib := range attributes {
+		tagQueries = append(tagQueries, fmt.Sprintf(`(
+			select tagKey, stringTagValue, int64TagValue, float64TagValue
+			from %s.%s
+			where tagKey = $%d and (
+				stringTagValue != '' or int64TagValue is not null or float64TagValue is not null
+			)
+			limit %d
+		)`, r.logsDB, r.logsTagAttributeTable, idx+1, limit))
+
+		tagKeyQueryArgs = append(tagKeyQueryArgs, attrib.Key)
+	}
+
+	query := fmt.Sprintf(`select * from (
+		%s
+	) settings max_threads=2`, strings.Join(tagQueries, " UNION DISTINCT "))
+
+	rows, err := r.db.Query(ctx, query, tagKeyQueryArgs...)
 	if err != nil {
 		zap.L().Error("couldn't query attrib values for suggestions", zap.Error(err))
 		return nil, model.InternalError(fmt.Errorf(
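The construction the new code performs can also be read in isolation. The sketch below mirrors the hunk: one LIMIT-bounded subquery per tag key, merged with UNION DISTINCT, with each key passed as a positional argument. `buildTagValuesQuery` is an illustrative standalone helper, not a function from the commit:

```go
package main

import (
	"fmt"
	"strings"
)

// buildTagValuesQuery mirrors the pattern in the hunk above: one cheap,
// LIMIT-bounded subquery per tag key, merged with UNION DISTINCT so the
// ReplacingMergeTree table never needs an ORDER BY to deduplicate.
func buildTagValuesQuery(db, table string, keys []string, limit int) (string, []any) {
	subQueries := make([]string, 0, len(keys))
	args := make([]any, 0, len(keys))
	for idx, key := range keys {
		subQueries = append(subQueries, fmt.Sprintf(`(
	select tagKey, stringTagValue, int64TagValue, float64TagValue
	from %s.%s
	where tagKey = $%d and (
		stringTagValue != '' or int64TagValue is not null or float64TagValue is not null
	)
	limit %d
)`, db, table, idx+1, limit))
		args = append(args, key)
	}
	query := fmt.Sprintf("select * from (\n%s\n) settings max_threads=2",
		strings.Join(subQueries, " UNION DISTINCT "))
	return query, args
}

func main() {
	q, args := buildTagValuesQuery("signoz_logs", "distributed_tag_attributes",
		[]string{"message", "request_id"}, 2)
	fmt.Println(q)
	fmt.Println(args) // pass these as variadic args to the driver's Query
}
```

The per-key LIMIT is what keeps the scan bounded even for keys with hundreds of millions of values, which the old single window-function query could not guarantee.
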
@@ -453,6 +453,7 @@ func GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error) {
 	totalDashboardsWithPanelAndName := 0
 	var dashboardNames []string
 	count := 0
+	logChQueriesCount := 0
 	for _, dashboard := range dashboardsData {
 		if isDashboardWithPanelAndName(dashboard.Data) {
 			totalDashboardsWithPanelAndName = totalDashboardsWithPanelAndName + 1
@@ -468,12 +469,16 @@ func GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error) {
 		if isDashboardWithTSV2(dashboard.Data) {
 			count = count + 1
 		}
+		if isDashboardWithLogsClickhouseQuery(dashboard.Data) {
+			logChQueriesCount = logChQueriesCount + 1
+		}
 	}

 	dashboardsInfo.DashboardNames = dashboardNames
 	dashboardsInfo.TotalDashboards = len(dashboardsData)
 	dashboardsInfo.TotalDashboardsWithPanelAndName = totalDashboardsWithPanelAndName
 	dashboardsInfo.QueriesWithTSV2 = count
+	dashboardsInfo.DashboardsWithLogsChQuery = logChQueriesCount
 	return &dashboardsInfo, nil
 }

@@ -485,6 +490,16 @@ func isDashboardWithTSV2(data map[string]interface{}) bool {
 	return strings.Contains(string(jsonData), "time_series_v2")
 }

+func isDashboardWithLogsClickhouseQuery(data map[string]interface{}) bool {
+	jsonData, err := json.Marshal(data)
+	if err != nil {
+		return false
+	}
+	result := strings.Contains(string(jsonData), "signoz_logs.distributed_logs ") ||
+		strings.Contains(string(jsonData), "signoz_logs.logs ")
+	return result
+}
+
 func isDashboardWithPanelAndName(data map[string]interface{}) bool {
 	isDashboardName := false
 	isDashboardWithPanelAndName := false

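Note the trailing space in both needles of `isDashboardWithLogsClickhouseQuery`; a plausible reading (not stated in the commit) is that it avoids matching table names that merely share the prefix, such as `signoz_logs.distributed_logs_v2`. A toy check:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// The v2 table name does not end in a space after "distributed_logs",
	// so the needle with a trailing space does not match it.
	v2Query := "select count() from signoz_logs.distributed_logs_v2 where true"
	fmt.Println(strings.Contains(v2Query, "signoz_logs.distributed_logs ")) // false

	// A query against the old table does match.
	v1Query := "select count() from signoz_logs.distributed_logs where true"
	fmt.Println(strings.Contains(v1Query, "signoz_logs.distributed_logs ")) // true
}
```
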
@@ -41,6 +41,7 @@ import (
|
||||
"go.signoz.io/signoz/pkg/query-service/cache"
|
||||
"go.signoz.io/signoz/pkg/query-service/common"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
"go.signoz.io/signoz/pkg/query-service/contextlinks"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/postprocess"
|
||||
|
||||
@@ -151,7 +152,7 @@ type APIHandlerOpts struct {
|
||||
// NewAPIHandler returns an APIHandler
|
||||
func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
|
||||
alertManager, err := am.New("")
|
||||
alertManager, err := am.New()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -767,6 +768,48 @@ func (aH *APIHandler) getOverallStateTransitions(w http.ResponseWriter, r *http.
aH.Respond(w, stateItems)
}

func (aH *APIHandler) metaForLinks(ctx context.Context, rule *rules.GettableRule) ([]v3.FilterItem, []v3.AttributeKey, map[string]v3.AttributeKey) {
filterItems := []v3.FilterItem{}
groupBy := []v3.AttributeKey{}
keys := make(map[string]v3.AttributeKey)

if rule.AlertType == rules.AlertTypeLogs {
logFields, err := aH.reader.GetLogFields(ctx)
if err == nil {
params := &v3.QueryRangeParamsV3{
CompositeQuery: rule.RuleCondition.CompositeQuery,
}
keys = model.GetLogFieldsV3(ctx, params, logFields)
} else {
zap.L().Error("failed to get log fields; using empty keys, the link might not work as expected", zap.Error(err))
}
} else if rule.AlertType == rules.AlertTypeTraces {
traceFields, err := aH.reader.GetSpanAttributeKeys(ctx)
if err == nil {
keys = traceFields
} else {
zap.L().Error("failed to get span attributes; using empty keys, the link might not work as expected", zap.Error(err))
}
}

if rule.AlertType == rules.AlertTypeLogs || rule.AlertType == rules.AlertTypeTraces {
if rule.RuleCondition.CompositeQuery != nil {
if rule.RuleCondition.QueryType() == v3.QueryTypeBuilder {
selectedQuery := rule.RuleCondition.GetSelectedQueryName()
if rule.RuleCondition.CompositeQuery.BuilderQueries[selectedQuery] != nil &&
rule.RuleCondition.CompositeQuery.BuilderQueries[selectedQuery].Filters != nil {
filterItems = rule.RuleCondition.CompositeQuery.BuilderQueries[selectedQuery].Filters.Items
}
if rule.RuleCondition.CompositeQuery.BuilderQueries[selectedQuery] != nil &&
rule.RuleCondition.CompositeQuery.BuilderQueries[selectedQuery].GroupBy != nil {
groupBy = rule.RuleCondition.CompositeQuery.BuilderQueries[selectedQuery].GroupBy
}
}
}
}
return filterItems, groupBy, keys
}

func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request) {
ruleID := mux.Vars(r)["id"]
params := model.QueryRuleStateHistory{}
@@ -794,24 +837,18 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
if err != nil {
continue
}
filterItems := []v3.FilterItem{}
if rule.AlertType == rules.AlertTypeLogs || rule.AlertType == rules.AlertTypeTraces {
if rule.RuleCondition.CompositeQuery != nil {
if rule.RuleCondition.QueryType() == v3.QueryTypeBuilder {
for _, query := range rule.RuleCondition.CompositeQuery.BuilderQueries {
if query.Filters != nil && len(query.Filters.Items) > 0 {
filterItems = append(filterItems, query.Filters.Items...)
}
}
}
}
}
newFilters := common.PrepareFilters(lbls, filterItems)
ts := time.Unix(res.Items[idx].UnixMilli/1000, 0)
filterItems, groupBy, keys := aH.metaForLinks(r.Context(), rule)
newFilters := contextlinks.PrepareFilters(lbls, filterItems, groupBy, keys)
end := time.Unix(res.Items[idx].UnixMilli/1000, 0)
// why are we subtracting 3 minutes?
// the query range is calculated based on the rule's evalWindow and evalDelay
// alerts have a 2 minute delay built in, so we need to subtract that from the start time
// to get the correct query range
start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
if rule.AlertType == rules.AlertTypeLogs {
res.Items[idx].RelatedLogsLink = common.PrepareLinksToLogs(ts, newFilters)
res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
} else if rule.AlertType == rules.AlertTypeTraces {
res.Items[idx].RelatedTracesLink = common.PrepareLinksToTraces(ts, newFilters)
res.Items[idx].RelatedTracesLink = contextlinks.PrepareLinksToTraces(start, end, newFilters)
}
}
}
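The start/end pair above replaces the fixed ±15-minute window the removed `common` helpers used: the link window is derived from the rule's own eval window plus the built-in alert delay. A minimal, self-contained sketch of that arithmetic, with hypothetical values:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Hypothetical inputs: a 5-minute eval window and a state
	// transition recorded at this unix-millisecond timestamp.
	evalWindow := 5 * time.Minute
	unixMilli := int64(1_700_000_000_000)

	end := time.Unix(unixMilli/1000, 0)
	// Widen the start by the eval window plus the built-in delay so
	// the linked logs/traces view covers the data that actually
	// produced the state transition.
	start := end.Add(-evalWindow).Add(-3 * time.Minute)
	fmt.Println(start.UTC(), "to", end.UTC())
}
```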
@@ -842,12 +879,14 @@ func (aH *APIHandler) getRuleStateHistoryTopContributors(w http.ResponseWriter,
if err != nil {
continue
}
ts := time.Unix(params.End/1000, 0)
filters := common.PrepareFilters(lbls, nil)
filterItems, groupBy, keys := aH.metaForLinks(r.Context(), rule)
newFilters := contextlinks.PrepareFilters(lbls, filterItems, groupBy, keys)
end := time.Unix(params.End/1000, 0)
start := time.Unix(params.Start/1000, 0)
if rule.AlertType == rules.AlertTypeLogs {
res[idx].RelatedLogsLink = common.PrepareLinksToLogs(ts, filters)
res[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
} else if rule.AlertType == rules.AlertTypeTraces {
res[idx].RelatedTracesLink = common.PrepareLinksToTraces(ts, filters)
res[idx].RelatedTracesLink = contextlinks.PrepareLinksToTraces(start, end, newFilters)
}
}
}
@@ -1051,23 +1090,6 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {

}

func (aH *APIHandler) saveAndReturn(w http.ResponseWriter, r *http.Request, signozDashboard model.DashboardData) {
toSave := make(map[string]interface{})
toSave["title"] = signozDashboard.Title
toSave["description"] = signozDashboard.Description
toSave["tags"] = signozDashboard.Tags
toSave["layout"] = signozDashboard.Layout
toSave["widgets"] = signozDashboard.Widgets
toSave["variables"] = signozDashboard.Variables

dashboard, apiError := dashboards.CreateDashboard(r.Context(), toSave, aH.featureFlags)
if apiError != nil {
RespondError(w, apiError, nil)
return
}
aH.Respond(w, dashboard)
}

func (aH *APIHandler) createDashboards(w http.ResponseWriter, r *http.Request) {

var postData map[string]interface{}
@@ -1182,7 +1204,7 @@ func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) {

func (aH *APIHandler) getChannel(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
channel, apiErrorObj := aH.reader.GetChannel(id)
channel, apiErrorObj := aH.ruleManager.RuleDB().GetChannel(id)
if apiErrorObj != nil {
RespondError(w, apiErrorObj, nil)
return
@@ -1192,7 +1214,7 @@ func (aH *APIHandler) getChannel(w http.ResponseWriter, r *http.Request) {

func (aH *APIHandler) deleteChannel(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
apiErrorObj := aH.reader.DeleteChannel(id)
apiErrorObj := aH.ruleManager.RuleDB().DeleteChannel(id)
if apiErrorObj != nil {
RespondError(w, apiErrorObj, nil)
return
@@ -1201,7 +1223,7 @@ func (aH *APIHandler) deleteChannel(w http.ResponseWriter, r *http.Request) {
}

func (aH *APIHandler) listChannels(w http.ResponseWriter, r *http.Request) {
channels, apiErrorObj := aH.reader.GetChannels()
channels, apiErrorObj := aH.ruleManager.RuleDB().GetChannels()
if apiErrorObj != nil {
RespondError(w, apiErrorObj, nil)
return
@@ -1254,7 +1276,7 @@ func (aH *APIHandler) editChannel(w http.ResponseWriter, r *http.Request) {
return
}

_, apiErrorObj := aH.reader.EditChannel(receiver, id)
_, apiErrorObj := aH.ruleManager.RuleDB().EditChannel(receiver, id)

if apiErrorObj != nil {
RespondError(w, apiErrorObj, nil)
@@ -1282,7 +1304,7 @@ func (aH *APIHandler) createChannel(w http.ResponseWriter, r *http.Request) {
return
}

_, apiErrorObj := aH.reader.CreateChannel(receiver)
_, apiErrorObj := aH.ruleManager.RuleDB().CreateChannel(receiver)

if apiErrorObj != nil {
RespondError(w, apiErrorObj, nil)
@@ -3527,55 +3549,6 @@ func (aH *APIHandler) autoCompleteAttributeValues(w http.ResponseWriter, r *http
aH.Respond(w, response)
}

func (aH *APIHandler) getLogFieldsV3(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3) (map[string]v3.AttributeKey, error) {
data := map[string]v3.AttributeKey{}
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
if query.DataSource == v3.DataSourceLogs {
fields, apiError := aH.reader.GetLogFields(ctx)
if apiError != nil {
return nil, apiError.Err
}

// top level fields meta will always be present in the frontend. (support for that can be added as an enhancement)
getType := func(t string) (v3.AttributeKeyType, bool) {
if t == "attributes" {
return v3.AttributeKeyTypeTag, false
} else if t == "resources" {
return v3.AttributeKeyTypeResource, false
}
return "", true
}

for _, selectedField := range fields.Selected {
fieldType, pass := getType(selectedField.Type)
if pass {
continue
}
data[selectedField.Name] = v3.AttributeKey{
Key: selectedField.Name,
Type: fieldType,
DataType: v3.AttributeKeyDataType(strings.ToLower(selectedField.DataType)),
IsColumn: true,
}
}
for _, interestingField := range fields.Interesting {
fieldType, pass := getType(interestingField.Type)
if pass {
continue
}
data[interestingField.Name] = v3.AttributeKey{
Key: interestingField.Name,
Type: fieldType,
DataType: v3.AttributeKeyDataType(strings.ToLower(interestingField.DataType)),
IsColumn: false,
}
}
break
}
}
return data, nil
}

func (aH *APIHandler) getSpanKeysV3(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3) (map[string]v3.AttributeKey, error) {
data := map[string]v3.AttributeKey{}
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
@@ -3617,14 +3590,14 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(queryRangeParams) {
// get the fields if any logs query is present
var fields map[string]v3.AttributeKey
fields, err = aH.getLogFieldsV3(ctx, queryRangeParams)
logsFields, err := aH.reader.GetLogFields(ctx)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
return
}
// get the fields if any logs query is present
fields := model.GetLogFieldsV3(ctx, queryRangeParams, logsFields)
logsv3.Enrich(queryRangeParams, fields)
}

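With `getLogFieldsV3` removed from the handler, the raw field metadata is fetched once and converted through the shared `model.GetLogFieldsV3` helper before enrichment. A sketch of the resulting flow, assuming the reader interface and signatures shown in this diff:

```go
// Sketch only: mirrors the new enrichment path in queryRangeV3.
func enrichLogsParams(ctx context.Context, reader interfaces.Reader, params *v3.QueryRangeParamsV3) error {
	if !logsv3.EnrichmentRequired(params) {
		return nil // nothing to enrich
	}
	// One round trip for the field metadata...
	logsFields, err := reader.GetLogFields(ctx)
	if err != nil {
		return err
	}
	// ...converted to attribute keys, then applied in place.
	fields := model.GetLogFieldsV3(ctx, params, logsFields)
	logsv3.Enrich(params, fields)
	return nil
}
```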
@@ -3656,15 +3629,19 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
// Hook up query progress tracking if requested
queryIdHeader := r.Header.Get("X-SIGNOZ-QUERY-ID")
if len(queryIdHeader) > 0 {
ctx = context.WithValue(ctx, "queryId", queryIdHeader)

onQueryFinished, err := aH.reader.ReportQueryStartForProgressTracking(queryIdHeader)

if err != nil {
zap.L().Error(
"couldn't report query start for progress tracking",
zap.String("queryId", queryIdHeader), zap.Error(err),
)

} else {
// Adding queryId to the context signals clickhouse queries to report progress
//lint:ignore SA1029 ignore for now
ctx = context.WithValue(ctx, "queryId", queryIdHeader)

defer func() {
go onQueryFinished()
}()
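Progress tracking is opt-in per request: the client picks its own query id, sends it in the header, and the server registers the query for the duration of the request. A sketch of a client that opts in (the endpoint path and payload are hypothetical; the header name comes from the handler above):

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"

	"github.com/google/uuid"
)

func main() {
	body := bytes.NewBufferString(`{"start": 1700000000000, "end": 1700003600000}`)
	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v3/query_range", body)
	if err != nil {
		panic(err)
	}
	queryID := uuid.NewString()
	// Setting the header opts this query into server-side progress
	// tracking; the same id can be used to poll progress elsewhere.
	req.Header.Set("X-SIGNOZ-QUERY-ID", queryID)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(queryID, resp.Status)
}
```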
@@ -3915,13 +3892,13 @@ func (aH *APIHandler) liveTailLogsV2(w http.ResponseWriter, r *http.Request) {
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(queryRangeParams) {
// get the fields if any logs query is present
var fields map[string]v3.AttributeKey
fields, err = aH.getLogFieldsV3(r.Context(), queryRangeParams)
logsFields, err := aH.reader.GetLogFields(r.Context())
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, nil)
return
}
fields := model.GetLogFieldsV3(r.Context(), queryRangeParams, logsFields)
logsv3.Enrich(queryRangeParams, fields)
}

@@ -4000,14 +3977,14 @@ func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) {
case v3.QueryTypeBuilder:
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(queryRangeParams) {
// get the fields if any logs query is present
var fields map[string]v3.AttributeKey
fields, err = aH.getLogFieldsV3(r.Context(), queryRangeParams)
logsFields, err := aH.reader.GetLogFields(r.Context())
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, nil)
return
}
// get the fields if any logs query is present
fields := model.GetLogFieldsV3(r.Context(), queryRangeParams, logsFields)
logsv3.Enrich(queryRangeParams, fields)
}

@@ -4084,13 +4061,13 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(queryRangeParams) {
// get the fields if any logs query is present
var fields map[string]v3.AttributeKey
fields, err = aH.getLogFieldsV3(ctx, queryRangeParams)
logsFields, err := aH.reader.GetLogFields(r.Context())
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, nil)
return
}
fields := model.GetLogFieldsV3(r.Context(), queryRangeParams, logsFields)
logsv3.Enrich(queryRangeParams, fields)
}

@@ -94,11 +94,11 @@ func Enrich(params *v3.QueryRangeParamsV3, fields map[string]v3.AttributeKey) {
if query.Expression != queryName && query.DataSource != v3.DataSourceLogs {
continue
}
enrichLogsQuery(query, fields)
EnrichLogsQuery(query, fields)
}
}

func enrichLogsQuery(query *v3.BuilderQuery, fields map[string]v3.AttributeKey) error {
func EnrichLogsQuery(query *v3.BuilderQuery, fields map[string]v3.AttributeKey) error {
// enrich aggregation attribute
if query.AggregateAttribute.Key != "" {
query.AggregateAttribute = enrichFieldWithMetadata(query.AggregateAttribute, fields)

@@ -545,25 +545,29 @@ func Enrich(params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) {
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
for _, query := range params.CompositeQuery.BuilderQueries {
if query.DataSource == v3.DataSourceTraces {
// enrich aggregate attribute
query.AggregateAttribute = enrichKeyWithMetadata(query.AggregateAttribute, keys)
// enrich filter items
if query.Filters != nil && len(query.Filters.Items) > 0 {
for idx, filter := range query.Filters.Items {
query.Filters.Items[idx].Key = enrichKeyWithMetadata(filter.Key, keys)
}
}
// enrich group by
for idx, groupBy := range query.GroupBy {
query.GroupBy[idx] = enrichKeyWithMetadata(groupBy, keys)
}
// enrich order by
query.OrderBy = enrichOrderBy(query.OrderBy, keys)
// enrich select columns
for idx, selectColumn := range query.SelectColumns {
query.SelectColumns[idx] = enrichKeyWithMetadata(selectColumn, keys)
}
EnrichTracesQuery(query, keys)
}
}
}
}

func EnrichTracesQuery(query *v3.BuilderQuery, keys map[string]v3.AttributeKey) {
// enrich aggregate attribute
query.AggregateAttribute = enrichKeyWithMetadata(query.AggregateAttribute, keys)
// enrich filter items
if query.Filters != nil && len(query.Filters.Items) > 0 {
for idx, filter := range query.Filters.Items {
query.Filters.Items[idx].Key = enrichKeyWithMetadata(filter.Key, keys)
}
}
// enrich group by
for idx, groupBy := range query.GroupBy {
query.GroupBy[idx] = enrichKeyWithMetadata(groupBy, keys)
}
// enrich order by
query.OrderBy = enrichOrderBy(query.OrderBy, keys)
// enrich select columns
for idx, selectColumn := range query.SelectColumns {
query.SelectColumns[idx] = enrichKeyWithMetadata(selectColumn, keys)
}
}

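Extracting the loop body into an exported `EnrichTracesQuery` lets callers outside this package (the alerting code, for instance) enrich a single builder query without constructing a full `QueryRangeParamsV3`. A hedged sketch of such a call site, assuming `keys` came from `GetSpanAttributeKeys` as in the handler changes above:

```go
// Sketch: enrich one traces query in isolation.
func enrichSingleTracesQuery(query *v3.BuilderQuery, keys map[string]v3.AttributeKey) {
	if query == nil || query.DataSource != v3.DataSourceTraces {
		return // only traces queries carry span attribute metadata
	}
	tracesV3.EnrichTracesQuery(query, keys)
}
```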
@@ -8,6 +8,39 @@ import (
"go.uber.org/zap"
)

var TracesListViewDefaultSelectedColumns = []v3.AttributeKey{
{
Key: "serviceName",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
{
Key: "name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
{
Key: "durationNano",
DataType: v3.AttributeKeyDataTypeArrayFloat64,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
{
Key: "httpMethod",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
{
Key: "responseStatusCode",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
}

// check if traceId filter is used in traces query and return the list of traceIds
func TraceIdFilterUsedWithEqual(params *v3.QueryRangeParamsV3) (bool, []string) {
compositeQuery := params.CompositeQuery

@@ -1,10 +1,7 @@
package common

import (
"encoding/json"
"fmt"
"math"
"net/url"
"time"

"go.signoz.io/signoz/pkg/query-service/constants"
@@ -73,183 +70,3 @@ func LCMList(nums []int64) int64 {
}
return result
}

// TODO(srikanthccv): move the custom function in threshold_rule.go to here
func PrepareLinksToTraces(ts time.Time, filterItems []v3.FilterItem) string {

start := ts.Add(-time.Minute * 15)
end := ts.Add(time.Minute * 15)

// Traces list view expects time in nanoseconds
tr := v3.URLShareableTimeRange{
Start: start.UnixNano(),
End: end.UnixNano(),
PageSize: 100,
}

options := v3.URLShareableOptions{
MaxLines: 2,
Format: "list",
SelectColumns: constants.TracesListViewDefaultSelectedColumns,
}

period, _ := json.Marshal(tr)
urlEncodedTimeRange := url.QueryEscape(string(period))

urlData := v3.URLShareableCompositeQuery{
QueryType: string(v3.QueryTypeBuilder),
Builder: v3.URLShareableBuilderQuery{
QueryData: []v3.BuilderQuery{
{
DataSource: v3.DataSourceTraces,
QueryName: "A",
AggregateOperator: v3.AggregateOperatorNoOp,
AggregateAttribute: v3.AttributeKey{},
Filters: &v3.FilterSet{
Items: filterItems,
Operator: "AND",
},
Expression: "A",
Disabled: false,
Having: []v3.Having{},
StepInterval: 60,
OrderBy: []v3.OrderBy{
{
ColumnName: "timestamp",
Order: "desc",
},
},
},
},
QueryFormulas: make([]string, 0),
},
}

data, _ := json.Marshal(urlData)
compositeQuery := url.QueryEscape(url.QueryEscape(string(data)))

optionsData, _ := json.Marshal(options)
urlEncodedOptions := url.QueryEscape(string(optionsData))

return fmt.Sprintf("compositeQuery=%s&timeRange=%s&startTime=%d&endTime=%d&options=%s", compositeQuery, urlEncodedTimeRange, tr.Start, tr.End, urlEncodedOptions)
}

func PrepareLinksToLogs(ts time.Time, filterItems []v3.FilterItem) string {
start := ts.Add(-time.Minute * 15)
end := ts.Add(time.Minute * 15)

// Logs list view expects time in milliseconds
tr := v3.URLShareableTimeRange{
Start: start.UnixMilli(),
End: end.UnixMilli(),
PageSize: 100,
}

options := v3.URLShareableOptions{
MaxLines: 2,
Format: "list",
SelectColumns: []v3.AttributeKey{},
}

period, _ := json.Marshal(tr)
urlEncodedTimeRange := url.QueryEscape(string(period))

urlData := v3.URLShareableCompositeQuery{
QueryType: string(v3.QueryTypeBuilder),
Builder: v3.URLShareableBuilderQuery{
QueryData: []v3.BuilderQuery{
{
DataSource: v3.DataSourceLogs,
QueryName: "A",
AggregateOperator: v3.AggregateOperatorNoOp,
AggregateAttribute: v3.AttributeKey{},
Filters: &v3.FilterSet{
Items: filterItems,
Operator: "AND",
},
Expression: "A",
Disabled: false,
Having: []v3.Having{},
StepInterval: 60,
OrderBy: []v3.OrderBy{
{
ColumnName: "timestamp",
Order: "desc",
},
},
},
},
QueryFormulas: make([]string, 0),
},
}

data, _ := json.Marshal(urlData)
compositeQuery := url.QueryEscape(url.QueryEscape(string(data)))

optionsData, _ := json.Marshal(options)
urlEncodedOptions := url.QueryEscape(string(optionsData))

return fmt.Sprintf("compositeQuery=%s&timeRange=%s&startTime=%d&endTime=%d&options=%s", compositeQuery, urlEncodedTimeRange, tr.Start, tr.End, urlEncodedOptions)
}

// The following function is used to prepare the where clause for the query
// `lbls` contains the key value pairs of the labels from the result of the query
// We iterate over the where clause and replace the labels with the actual values
// There are two cases:
// 1. The label is present in the where clause
// 2. The label is not present in the where clause
//
// Example for case 2:
// Latency by serviceName without any filter
// In this case, for each service with latency > threshold we send a notification
// The expectation will be that clicking on the related traces for service A, will
// take us to the traces page with the filter serviceName=A
// So for all the missing labels in the where clause, we add them as key = value
//
// Example for case 1:
// Severity text IN (WARN, ERROR)
// In this case, the Severity text will appear in the `lbls` if it were part of the group
// by clause, in which case we replace it with the actual value for the notification
// i.e Severity text = WARN
// If the Severity text is not part of the group by clause, then we add it as it is
func PrepareFilters(labels map[string]string, filters []v3.FilterItem) []v3.FilterItem {
var filterItems []v3.FilterItem

added := make(map[string]struct{})

for _, item := range filters {
exists := false
for key, value := range labels {
if item.Key.Key == key {
// if the label is present in the where clause, replace it with key = value
filterItems = append(filterItems, v3.FilterItem{
Key: item.Key,
Operator: v3.FilterOperatorEqual,
Value: value,
})
exists = true
added[key] = struct{}{}
break
}
}

if !exists {
// if the label is not present in the where clause, add it as it is
filterItems = append(filterItems, item)
}
}

// add the labels which are not present in the where clause
for key, value := range labels {
if _, ok := added[key]; !ok {
filterItems = append(filterItems, v3.FilterItem{
Key: v3.AttributeKey{Key: key},
Operator: v3.FilterOperatorEqual,
Value: value,
})
}
}

return filterItems
}

@@ -401,39 +401,6 @@ const TIMESTAMP = "timestamp"
const FirstQueryGraphLimit = "first_query_graph_limit"
const SecondQueryGraphLimit = "second_query_graph_limit"

var TracesListViewDefaultSelectedColumns = []v3.AttributeKey{
{
Key: "serviceName",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
{
Key: "name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
{
Key: "durationNano",
DataType: v3.AttributeKeyDataTypeArrayFloat64,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
{
Key: "httpMethod",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
{
Key: "responseStatusCode",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
}

const DefaultFilterSuggestionsAttributesLimit = 50
const MaxFilterSuggestionsAttributesLimit = 100
const DefaultFilterSuggestionsExamplesLimit = 2

203
pkg/query-service/contextlinks/links.go
Normal file
@@ -0,0 +1,203 @@
package contextlinks

import (
"encoding/json"
"fmt"
"net/url"
"time"

tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func PrepareLinksToTraces(start, end time.Time, filterItems []v3.FilterItem) string {

// Traces list view expects time in nanoseconds
tr := v3.URLShareableTimeRange{
Start: start.UnixNano(),
End: end.UnixNano(),
PageSize: 100,
}

options := v3.URLShareableOptions{
MaxLines: 2,
Format: "list",
SelectColumns: tracesV3.TracesListViewDefaultSelectedColumns,
}

period, _ := json.Marshal(tr)
urlEncodedTimeRange := url.QueryEscape(string(period))

builderQuery := v3.BuilderQuery{
DataSource: v3.DataSourceTraces,
QueryName: "A",
AggregateOperator: v3.AggregateOperatorNoOp,
AggregateAttribute: v3.AttributeKey{},
Filters: &v3.FilterSet{
Items: filterItems,
Operator: "AND",
},
Expression: "A",
Disabled: false,
Having: []v3.Having{},
StepInterval: 60,
OrderBy: []v3.OrderBy{
{
ColumnName: "timestamp",
Order: "desc",
},
},
}

urlData := v3.URLShareableCompositeQuery{
QueryType: string(v3.QueryTypeBuilder),
Builder: v3.URLShareableBuilderQuery{
QueryData: []v3.BuilderQuery{
builderQuery,
},
QueryFormulas: make([]string, 0),
},
}

data, _ := json.Marshal(urlData)
compositeQuery := url.QueryEscape(url.QueryEscape(string(data)))

optionsData, _ := json.Marshal(options)
urlEncodedOptions := url.QueryEscape(string(optionsData))

return fmt.Sprintf("compositeQuery=%s&timeRange=%s&startTime=%d&endTime=%d&options=%s", compositeQuery, urlEncodedTimeRange, tr.Start, tr.End, urlEncodedOptions)
}

func PrepareLinksToLogs(start, end time.Time, filterItems []v3.FilterItem) string {

// Logs list view expects time in milliseconds
tr := v3.URLShareableTimeRange{
Start: start.UnixMilli(),
End: end.UnixMilli(),
PageSize: 100,
}

options := v3.URLShareableOptions{
MaxLines: 2,
Format: "list",
SelectColumns: []v3.AttributeKey{},
}

period, _ := json.Marshal(tr)
urlEncodedTimeRange := url.QueryEscape(string(period))

builderQuery := v3.BuilderQuery{
DataSource: v3.DataSourceLogs,
QueryName: "A",
AggregateOperator: v3.AggregateOperatorNoOp,
AggregateAttribute: v3.AttributeKey{},
Filters: &v3.FilterSet{
Items: filterItems,
Operator: "AND",
},
Expression: "A",
Disabled: false,
Having: []v3.Having{},
StepInterval: 60,
OrderBy: []v3.OrderBy{
{
ColumnName: "timestamp",
Order: "desc",
},
},
}

urlData := v3.URLShareableCompositeQuery{
QueryType: string(v3.QueryTypeBuilder),
Builder: v3.URLShareableBuilderQuery{
QueryData: []v3.BuilderQuery{
builderQuery,
},
QueryFormulas: make([]string, 0),
},
}

data, _ := json.Marshal(urlData)
compositeQuery := url.QueryEscape(url.QueryEscape(string(data)))

optionsData, _ := json.Marshal(options)
urlEncodedOptions := url.QueryEscape(string(optionsData))

return fmt.Sprintf("compositeQuery=%s&timeRange=%s&startTime=%d&endTime=%d&options=%s", compositeQuery, urlEncodedTimeRange, tr.Start, tr.End, urlEncodedOptions)
}

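Both helpers return only the query-string fragment; the caller prepends the explorer route. A sketch with a hypothetical base URL:

```go
package main

import (
	"fmt"
	"time"

	"go.signoz.io/signoz/pkg/query-service/contextlinks"
)

func main() {
	end := time.Now()
	start := end.Add(-time.Hour)
	// nil filters: link to the last hour of logs, unfiltered.
	fragment := contextlinks.PrepareLinksToLogs(start, end, nil)
	// The route below is a hypothetical frontend path.
	fmt.Printf("https://signoz.example.com/logs/logs-explorer?%s\n", fragment)
}
```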
// The following function is used to prepare the where clause for the query
// `lbls` contains the key value pairs of the labels from the result of the query
// We iterate over the where clause and replace the labels with the actual values
// There are two cases:
// 1. The label is present in the where clause
// 2. The label is not present in the where clause
//
// Example for case 2:
// Latency by serviceName without any filter
// In this case, for each service with latency > threshold we send a notification
// The expectation will be that clicking on the related traces for service A, will
// take us to the traces page with the filter serviceName=A
// So for all the missing labels in the where clause, we add them as key = value
//
// Example for case 1:
// Severity text IN (WARN, ERROR)
// In this case, the Severity text will appear in the `lbls` if it were part of the group
// by clause, in which case we replace it with the actual value for the notification
// i.e Severity text = WARN
// If the Severity text is not part of the group by clause, then we add it as it is
func PrepareFilters(labels map[string]string, whereClauseItems []v3.FilterItem, groupByItems []v3.AttributeKey, keys map[string]v3.AttributeKey) []v3.FilterItem {
var filterItems []v3.FilterItem

added := make(map[string]struct{})

for _, item := range whereClauseItems {
exists := false
for key, value := range labels {
if item.Key.Key == key {
// if the label is present in the where clause, replace it with key = value
filterItems = append(filterItems, v3.FilterItem{
Key: item.Key,
Operator: v3.FilterOperatorEqual,
Value: value,
})
exists = true
added[key] = struct{}{}
break
}
}

if !exists {
// if there is no label for the filter item, add it as it is
filterItems = append(filterItems, item)
}
}

// if there are labels which are not part of the where clause, but
// exist in the result, then they could be part of the group by clause
for key, value := range labels {
if _, ok := added[key]; !ok {
// start by taking the attribute key from the keys map, if not present, create a new one
attributeKey, ok := keys[key]
if !ok {
attributeKey = v3.AttributeKey{Key: key}
}

// if there is a group by item with the same key, use that instead
for _, groupByItem := range groupByItems {
if groupByItem.Key == key {
attributeKey = groupByItem
break
}
}

filterItems = append(filterItems, v3.FilterItem{
Key: attributeKey,
Operator: v3.FilterOperatorEqual,
Value: value,
})
}
}

return filterItems
}
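A worked example of the two cases from the comment above: the where-clause item passes through untouched (no matching label), while the group-by label is appended as an equality filter carrying the group-by item's metadata. Attribute values here are hypothetical:

```go
package main

import (
	"fmt"

	"go.signoz.io/signoz/pkg/query-service/contextlinks"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func main() {
	labels := map[string]string{"service.name": "checkout"}
	where := []v3.FilterItem{{
		Key:      v3.AttributeKey{Key: "severity_text"},
		Operator: v3.FilterOperatorIn,
		Value:    []interface{}{"WARN", "ERROR"},
	}}
	groupBy := []v3.AttributeKey{{
		Key:      "service.name",
		Type:     v3.AttributeKeyTypeResource,
		DataType: v3.AttributeKeyDataTypeString,
	}}
	// keys may be nil here; the group-by item supplies the metadata.
	filters := contextlinks.PrepareFilters(labels, where, groupBy, nil)
	// Expected: severity_text IN (WARN, ERROR) kept as-is, plus
	// service.name = "checkout" taken from the group-by metadata.
	fmt.Printf("%+v\n", filters)
}
```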
@@ -24,42 +24,62 @@ type Manager interface {
TestReceiver(receiver *Receiver) *model.ApiError
}

func New(url string) (Manager, error) {
func defaultOptions() []ManagerOptions {
return []ManagerOptions{
WithURL(constants.GetAlertManagerApiPrefix()),
WithChannelApiPath(constants.AmChannelApiPath),
}
}

if url == "" {
url = constants.GetAlertManagerApiPrefix()
type ManagerOptions func(m *manager) error

func New(opts ...ManagerOptions) (Manager, error) {
m := &manager{}

newOpts := defaultOptions()
newOpts = append(newOpts, opts...)

for _, opt := range newOpts {
err := opt(m)
if err != nil {
return nil, err
}
}

urlParsed, err := neturl.Parse(url)
if err != nil {
return nil, err
}
return m, nil
}

return &manager{
url: url,
parsedURL: urlParsed,
}, nil
func WithURL(url string) ManagerOptions {
return func(m *manager) error {
m.url = url
parsedURL, err := neturl.Parse(url)
if err != nil {
return err
}
m.parsedURL = parsedURL
return nil
}
}

func WithChannelApiPath(path string) ManagerOptions {
return func(m *manager) error {
m.channelApiPath = path
return nil
}
}

type manager struct {
url string
parsedURL *neturl.URL
channelApiPath string
}

func prepareAmChannelApiURL() string {
basePath := constants.GetAlertManagerApiPrefix()
AmChannelApiPath := constants.AmChannelApiPath

if len(AmChannelApiPath) > 0 && rune(AmChannelApiPath[0]) == rune('/') {
AmChannelApiPath = AmChannelApiPath[1:]
}

return fmt.Sprintf("%s%s", basePath, AmChannelApiPath)
func (m *manager) prepareAmChannelApiURL() string {
return fmt.Sprintf("%s%s", m.url, m.channelApiPath)
}

func prepareTestApiURL() string {
basePath := constants.GetAlertManagerApiPrefix()
return fmt.Sprintf("%s%s", basePath, "v1/testReceiver")
func (m *manager) prepareTestApiURL() string {
return fmt.Sprintf("%s%s", m.url, "v1/testReceiver")
}

func (m *manager) URL() *neturl.URL {
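The constructor is now variadic: defaults are applied first and explicit options override them, which is what lets `newAlertmanagerSet` below pass per-instance URLs. A sketch of both call shapes, assuming `am` aliases this package as the call sites in this diff do:

```go
func buildManagers() error {
	// Default manager: URL and channel API path come from constants,
	// matching what the old New("") did.
	def, err := am.New()
	if err != nil {
		return err
	}
	// Per-instance override, as newAlertmanagerSet does per URL.
	custom, err := am.New(am.WithURL("http://alertmanager-1:9093/api/"))
	if err != nil {
		return err
	}
	_, _ = def, custom
	return nil
}
```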
@@ -79,7 +99,7 @@ func (m *manager) AddRoute(receiver *Receiver) *model.ApiError {

receiverString, _ := json.Marshal(receiver)

amURL := prepareAmChannelApiURL()
amURL := m.prepareAmChannelApiURL()
response, err := http.Post(amURL, contentType, bytes.NewBuffer(receiverString))

if err != nil {
@@ -97,7 +117,7 @@ func (m *manager) AddRoute(receiver *Receiver) *model.ApiError {
func (m *manager) EditRoute(receiver *Receiver) *model.ApiError {
receiverString, _ := json.Marshal(receiver)

amURL := prepareAmChannelApiURL()
amURL := m.prepareAmChannelApiURL()
req, err := http.NewRequest(http.MethodPut, amURL, bytes.NewBuffer(receiverString))

if err != nil {
@@ -126,7 +146,7 @@ func (m *manager) DeleteRoute(name string) *model.ApiError {
values := map[string]string{"name": name}
requestData, _ := json.Marshal(values)

amURL := prepareAmChannelApiURL()
amURL := m.prepareAmChannelApiURL()
req, err := http.NewRequest(http.MethodDelete, amURL, bytes.NewBuffer(requestData))

if err != nil {
@@ -156,7 +176,7 @@ func (m *manager) TestReceiver(receiver *Receiver) *model.ApiError {

receiverBytes, _ := json.Marshal(receiver)

amTestURL := prepareTestApiURL()
amTestURL := m.prepareTestApiURL()
response, err := http.Post(amTestURL, contentType, bytes.NewBuffer(receiverBytes))

if err != nil {

@@ -295,7 +295,7 @@ func newAlertmanagerSet(urls []string, timeout time.Duration, logger log.Logger)

ams := []Manager{}
for _, u := range urls {
am, err := New(u)
am, err := New(WithURL(u))
if err != nil {
level.Error(s.logger).Log(fmt.Sprintf("invalid alert manager url %s: %s", u, err))
} else {

@@ -8,18 +8,11 @@ import (
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/util/stats"
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

type Reader interface {
GetChannel(id string) (*model.ChannelItem, *model.ApiError)
GetChannels() (*[]model.ChannelItem, *model.ApiError)
DeleteChannel(id string) *model.ApiError
CreateChannel(receiver *am.Receiver) (*am.Receiver, *model.ApiError)
EditChannel(receiver *am.Receiver, id string) (*am.Receiver, *model.ApiError)

GetInstantQueryMetricsResult(ctx context.Context, query *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
GetQueryRangeResult(ctx context.Context, query *model.QueryRangeParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams, skipConfig *model.SkipConfig) (*[]model.ServiceOverviewItem, *model.ApiError)

@@ -1,5 +1,12 @@
package model

import (
"context"
"strings"

v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

type LogsLiveTailClientV2 struct {
Name string
Logs chan *SignozLogV2
@@ -21,3 +28,48 @@ type QueryProgress struct {

ElapsedMs uint64 `json:"elapsed_ms"`
}

func GetLogFieldsV3(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3, fields *GetFieldsResponse) map[string]v3.AttributeKey {
data := map[string]v3.AttributeKey{}
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
if query.DataSource == v3.DataSourceLogs {

// top level fields meta will always be present in the frontend. (support for that can be added as an enhancement)
getType := func(t string) (v3.AttributeKeyType, bool) {
if t == "attributes" {
return v3.AttributeKeyTypeTag, false
} else if t == "resources" {
return v3.AttributeKeyTypeResource, false
}
return "", true
}

for _, selectedField := range fields.Selected {
fieldType, pass := getType(selectedField.Type)
if pass {
continue
}
data[selectedField.Name] = v3.AttributeKey{
Key: selectedField.Name,
Type: fieldType,
DataType: v3.AttributeKeyDataType(strings.ToLower(selectedField.DataType)),
IsColumn: true,
}
}
for _, interestingField := range fields.Interesting {
fieldType, pass := getType(interestingField.Type)
if pass {
continue
}
data[interestingField.Name] = v3.AttributeKey{
Key: interestingField.Name,
Type: fieldType,
DataType: v3.AttributeKeyDataType(strings.ToLower(interestingField.DataType)),
IsColumn: false,
}
}
break
}
}
return data
}

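Moving this helper into `model` makes the conversion usable from both the API handlers and the alerting code. A hedged usage sketch; the `GetFieldsResponse`/`LogField` shapes are assumed from how the function above accesses them:

```go
// Sketch: how the relocated helper converts field metadata into
// enrichment keys.
func exampleKeys(ctx context.Context, params *v3.QueryRangeParamsV3) map[string]v3.AttributeKey {
	fields := &model.GetFieldsResponse{
		Selected: []model.LogField{
			{Name: "trace_id", Type: "attributes", DataType: "String"},
		},
		Interesting: []model.LogField{
			{Name: "k8s.pod.name", Type: "resources", DataType: "String"},
		},
	}
	keys := model.GetLogFieldsV3(ctx, params, fields)
	// keys["trace_id"]     -> tag attribute, IsColumn=true (selected)
	// keys["k8s.pod.name"] -> resource attribute, IsColumn=false
	return keys
}
```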
@@ -618,6 +618,7 @@ type AlertsInfo struct {
LogsBasedAlerts int `json:"logsBasedAlerts"`
MetricBasedAlerts int `json:"metricBasedAlerts"`
TracesBasedAlerts int `json:"tracesBasedAlerts"`
TotalChannels int `json:"totalChannels"`
SlackChannels int `json:"slackChannels"`
WebHookChannels int `json:"webHookChannels"`
PagerDutyChannels int `json:"pagerDutyChannels"`

@@ -370,6 +370,22 @@ type QueryRangeParamsV3 struct {
FormatForWeb bool `json:"formatForWeb,omitempty"`
}

func (q *QueryRangeParamsV3) Clone() *QueryRangeParamsV3 {
if q == nil {
return nil
}
return &QueryRangeParamsV3{
Start: q.Start,
End: q.End,
Step: q.Step,
CompositeQuery: q.CompositeQuery.Clone(),
Variables: q.Variables,
NoCache: q.NoCache,
Version: q.Version,
FormatForWeb: q.FormatForWeb,
}
}

type PromQuery struct {
Query string `json:"query"`
Stats string `json:"stats,omitempty"`
@@ -377,6 +393,18 @@ type PromQuery struct {
Legend string `json:"legend,omitempty"`
}

func (p *PromQuery) Clone() *PromQuery {
if p == nil {
return nil
}
return &PromQuery{
Query: p.Query,
Stats: p.Stats,
Disabled: p.Disabled,
Legend: p.Legend,
}
}

func (p *PromQuery) Validate() error {
if p == nil {
return nil
@@ -395,6 +423,16 @@ type ClickHouseQuery struct {
Legend string `json:"legend,omitempty"`
}

func (c *ClickHouseQuery) Clone() *ClickHouseQuery {
if c == nil {
return nil
}
return &ClickHouseQuery{
Query: c.Query,
Disabled: c.Disabled,
Legend: c.Legend,
}
}
func (c *ClickHouseQuery) Validate() error {
if c == nil {
return nil
@@ -420,6 +458,43 @@ type CompositeQuery struct {
FillGaps bool `json:"fillGaps,omitempty"`
}

func (c *CompositeQuery) Clone() *CompositeQuery {
if c == nil {
return nil
}
var builderQueries map[string]*BuilderQuery
if c.BuilderQueries != nil {
builderQueries = make(map[string]*BuilderQuery)
for name, query := range c.BuilderQueries {
builderQueries[name] = query.Clone()
}
}
var clickHouseQueries map[string]*ClickHouseQuery
if c.ClickHouseQueries != nil {
clickHouseQueries = make(map[string]*ClickHouseQuery)
for name, query := range c.ClickHouseQueries {
clickHouseQueries[name] = query.Clone()
}
}
var promQueries map[string]*PromQuery
if c.PromQueries != nil {
promQueries = make(map[string]*PromQuery)
for name, query := range c.PromQueries {
promQueries[name] = query.Clone()
}
}
return &CompositeQuery{
BuilderQueries: builderQueries,
ClickHouseQueries: clickHouseQueries,
PromQueries: promQueries,
PanelType: c.PanelType,
QueryType: c.QueryType,
Unit: c.Unit,
FillGaps: c.FillGaps,
}

}

func (c *CompositeQuery) EnabledQueries() int {
count := 0
switch c.QueryType {
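These `Clone` methods exist so a request can be copied and mutated per sub-query (for example, re-running the same composite query over a shifted window for a baseline or anomaly pass) without touching the caller's struct. A sketch of that usage:

```go
// Sketch: run the same composite query against yesterday's window.
// `params` is assumed to be a populated *v3.QueryRangeParamsV3.
shifted := params.Clone()
day := (24 * time.Hour).Milliseconds()
shifted.Start -= day // Start/End are unix milliseconds
shifted.End -= day
// `params` is untouched; `shifted` can be executed independently.
```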
@@ -645,6 +720,7 @@ const (
FunctionNameMedian5 FunctionName = "median5"
FunctionNameMedian7 FunctionName = "median7"
FunctionNameTimeShift FunctionName = "timeShift"
FunctionNameAnomaly FunctionName = "anomaly"
)

func (f FunctionName) Validate() error {
@@ -664,7 +740,8 @@ func (f FunctionName) Validate() error {
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift:
FunctionNameTimeShift,
FunctionNameAnomaly:
return nil
default:
return fmt.Errorf("invalid function name: %s", f)
@@ -672,33 +749,68 @@
}

type Function struct {
Name FunctionName `json:"name"`
Args []interface{} `json:"args,omitempty"`
NamedArgs map[string]interface{} `json:"namedArgs,omitempty"`
}

type BuilderQuery struct {
QueryName string `json:"queryName"`
StepInterval int64 `json:"stepInterval"`
DataSource DataSource `json:"dataSource"`
AggregateOperator AggregateOperator `json:"aggregateOperator"`
AggregateAttribute AttributeKey `json:"aggregateAttribute,omitempty"`
Temporality Temporality `json:"temporality,omitempty"`
Filters *FilterSet `json:"filters,omitempty"`
GroupBy []AttributeKey `json:"groupBy,omitempty"`
Expression string `json:"expression"`
Disabled bool `json:"disabled"`
Having []Having `json:"having,omitempty"`
Legend string `json:"legend,omitempty"`
Limit uint64 `json:"limit"`
Offset uint64 `json:"offset"`
PageSize uint64 `json:"pageSize"`
OrderBy []OrderBy `json:"orderBy,omitempty"`
ReduceTo ReduceToOperator `json:"reduceTo,omitempty"`
SelectColumns []AttributeKey `json:"selectColumns,omitempty"`
TimeAggregation TimeAggregation `json:"timeAggregation,omitempty"`
SpaceAggregation SpaceAggregation `json:"spaceAggregation,omitempty"`
Functions []Function `json:"functions,omitempty"`
ShiftBy int64
IsAnomaly bool
QueriesUsedInFormula []string
}

func (b *BuilderQuery) Clone() *BuilderQuery {
if b == nil {
return nil
}
return &BuilderQuery{
QueryName: b.QueryName,
StepInterval: b.StepInterval,
DataSource: b.DataSource,
AggregateOperator: b.AggregateOperator,
AggregateAttribute: b.AggregateAttribute,
Temporality: b.Temporality,
Filters: b.Filters.Clone(),
GroupBy: b.GroupBy,
Expression: b.Expression,
Disabled: b.Disabled,
Having: b.Having,
Legend: b.Legend,
Limit: b.Limit,
Offset: b.Offset,
PageSize: b.PageSize,
OrderBy: b.OrderBy,
ReduceTo: b.ReduceTo,
SelectColumns: b.SelectColumns,
TimeAggregation: b.TimeAggregation,
SpaceAggregation: b.SpaceAggregation,
Functions: b.Functions,
ShiftBy: b.ShiftBy,
IsAnomaly: b.IsAnomaly,
QueriesUsedInFormula: b.QueriesUsedInFormula,
}
}

// CanDefaultZero returns true if the missing value can be substituted by zero
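One caveat worth noting about `Clone` above: it copies scalars and clones the `Filters` wrapper, but the slice fields (`GroupBy`, `OrderBy`, `Having`, and `Filters.Items`) still share their backing arrays, so mutating an element through the clone is visible through the original. A small demonstration:

```go
package main

import (
	"fmt"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func main() {
	q := &v3.BuilderQuery{
		QueryName: "A",
		GroupBy:   []v3.AttributeKey{{Key: "service.name"}},
	}
	clone := q.Clone()
	clone.QueryName = "A_shifted"      // scalar: independent copy
	clone.GroupBy[0].Key = "host.name" // element: shared backing array
	fmt.Println(q.QueryName, q.GroupBy[0].Key) // prints: A host.name
}
```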
@@ -877,6 +989,16 @@ type FilterSet struct {
Items []FilterItem `json:"items"`
}

func (f *FilterSet) Clone() *FilterSet {
if f == nil {
return nil
}
return &FilterSet{
Operator: f.Operator,
Items: f.Items,
}
}

func (f *FilterSet) Validate() error {
if f == nil {
return nil
@@ -1028,12 +1150,15 @@ type Table struct {
}

type Result struct {
QueryName string `json:"queryName,omitempty"`
Series []*Series `json:"series,omitempty"`
PredictedSeries []*Series `json:"predictedSeries,omitempty"`
UpperBoundSeries []*Series `json:"upperBoundSeries,omitempty"`
LowerBoundSeries []*Series `json:"lowerBoundSeries,omitempty"`
AnomalyScores []*Series `json:"anomalyScores,omitempty"`
List []*Row `json:"list,omitempty"`
Table *Table `json:"table,omitempty"`
IsAnomaly bool `json:"isAnomaly,omitempty"`
}

type Series struct {

@@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"net/url"
"sort"
"strings"
"time"

@@ -124,6 +125,47 @@ type RuleCondition struct {
SelectedQuery string `json:"selectedQueryName,omitempty"`
}

func (rc *RuleCondition) GetSelectedQueryName() string {
if rc != nil {
if rc.SelectedQuery != "" {
return rc.SelectedQuery
}

queryNames := map[string]struct{}{}

if rc.CompositeQuery != nil {
if rc.QueryType() == v3.QueryTypeBuilder {
for name := range rc.CompositeQuery.BuilderQueries {
queryNames[name] = struct{}{}
}
} else if rc.QueryType() == v3.QueryTypeClickHouseSQL {
for name := range rc.CompositeQuery.ClickHouseQueries {
queryNames[name] = struct{}{}
}
}
}

// The following logic exists for backward compatibility
// If there is no selected query, then
// - check if F1 is present, if yes, return F1
// - else return the query with max ascii value
// this logic is not really correct. we should be considering
// whether the query is enabled or not. but this is a temporary
// fix to support backward compatibility
if _, ok := queryNames["F1"]; ok {
return "F1"
}
keys := make([]string, 0, len(queryNames))
for k := range queryNames {
keys = append(keys, k)
}
sort.Strings(keys)
return keys[len(keys)-1]
}
// This should never happen
return ""
}

func (rc *RuleCondition) IsValid() bool {

if rc.CompositeQuery == nil {

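The fallback described in the comment, worked through: with builder queries A, B and a formula F1 and no explicit selection, F1 wins; with only A and B, the lexicographically last name is returned. A sketch (package qualifiers assumed as in this diff):

```go
cond := &rules.RuleCondition{
	CompositeQuery: &v3.CompositeQuery{
		QueryType: v3.QueryTypeBuilder,
		BuilderQueries: map[string]*v3.BuilderQuery{
			"A": {}, "B": {}, "F1": {},
		},
	},
}
fmt.Println(cond.GetSelectedQueryName()) // "F1"

delete(cond.CompositeQuery.BuilderQueries, "F1")
fmt.Println(cond.GetSelectedQueryName()) // "B" (last after sorting)
```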
@@ -202,6 +202,21 @@ func (r *BaseRule) Unit() string {
return ""
}

func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) {
start := ts.Add(-time.Duration(r.evalWindow)).UnixMilli()
end := ts.UnixMilli()

if r.evalDelay > 0 {
start = start - int64(r.evalDelay.Milliseconds())
end = end - int64(r.evalDelay.Milliseconds())
}
// round to minute otherwise we could potentially miss data
start = start - (start % (60 * 1000))
end = end - (end % (60 * 1000))

return time.UnixMilli(start), time.UnixMilli(end)
}

func (r *BaseRule) SetLastError(err error) {
r.mtx.Lock()
defer r.mtx.Unlock()

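A worked example of `Timestamps`: with a 5-minute eval window, a 2-minute eval delay, and an evaluation at 10:07:45, the delay first shifts the raw window 10:02:45–10:07:45 back to 10:00:45–10:05:45, and the minute rounding then yields 10:00:00–10:05:00:

```go
package main

import (
	"fmt"
	"time"
)

// timestamps mirrors BaseRule.Timestamps above, for illustration.
func timestamps(ts time.Time, evalWindow, evalDelay time.Duration) (time.Time, time.Time) {
	start := ts.Add(-evalWindow).UnixMilli()
	end := ts.UnixMilli()
	if evalDelay > 0 {
		start -= evalDelay.Milliseconds()
		end -= evalDelay.Milliseconds()
	}
	// round down to the minute so partially ingested data is not missed
	start -= start % (60 * 1000)
	end -= end % (60 * 1000)
	return time.UnixMilli(start), time.UnixMilli(end)
}

func main() {
	ts := time.Date(2024, 8, 1, 10, 7, 45, 0, time.UTC)
	start, end := timestamps(ts, 5*time.Minute, 2*time.Minute)
	fmt.Println(start.UTC(), end.UTC()) // 10:00:00 and 10:05:00 UTC
}
```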
@@ -11,6 +11,7 @@ import (
"github.com/jmoiron/sqlx"
"go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/common"
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.uber.org/zap"
@@ -18,6 +19,12 @@ import (

// Data store to capture user alert rule settings
type RuleDB interface {
GetChannel(id string) (*model.ChannelItem, *model.ApiError)
GetChannels() (*[]model.ChannelItem, *model.ApiError)
DeleteChannel(id string) *model.ApiError
CreateChannel(receiver *am.Receiver) (*am.Receiver, *model.ApiError)
EditChannel(receiver *am.Receiver, id string) (*am.Receiver, *model.ApiError)

// CreateRuleTx stores rule in the db and returns tx and group name (on success)
CreateRuleTx(ctx context.Context, rule string) (int64, Tx, error)

@@ -68,13 +75,15 @@ type Tx interface {

type ruleDB struct {
*sqlx.DB
alertManager am.Manager
}

// todo: move init methods for creating tables

func NewRuleDB(db *sqlx.DB) RuleDB {
func NewRuleDB(db *sqlx.DB, alertManager am.Manager) RuleDB {
return &ruleDB{
db,
alertManager,
}
}

@@ -303,6 +312,229 @@ func (r *ruleDB) EditPlannedMaintenance(ctx context.Context, maintenance Planned
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func getChannelType(receiver *am.Receiver) string {
|
||||
|
||||
if receiver.EmailConfigs != nil {
|
||||
return "email"
|
||||
}
|
||||
if receiver.OpsGenieConfigs != nil {
|
||||
return "opsgenie"
|
||||
}
|
||||
if receiver.PagerdutyConfigs != nil {
|
||||
return "pagerduty"
|
||||
}
|
||||
if receiver.PushoverConfigs != nil {
|
||||
return "pushover"
|
||||
}
|
||||
if receiver.SNSConfigs != nil {
|
||||
return "sns"
|
||||
}
|
||||
if receiver.SlackConfigs != nil {
|
||||
return "slack"
|
||||
}
|
||||
if receiver.VictorOpsConfigs != nil {
|
||||
return "victorops"
|
||||
}
|
||||
if receiver.WebhookConfigs != nil {
|
||||
return "webhook"
|
||||
}
|
||||
if receiver.WechatConfigs != nil {
|
||||
return "wechat"
|
||||
}
|
||||
if receiver.MSTeamsConfigs != nil {
|
||||
return "msteams"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (r *ruleDB) GetChannel(id string) (*model.ChannelItem, *model.ApiError) {
|
||||
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
channel := model.ChannelItem{}
|
||||
|
||||
query := "SELECT id, created_at, updated_at, name, type, data data FROM notification_channels WHERE id=?;"
|
||||
|
||||
stmt, err := r.Preparex(query)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in preparing sql query for GetChannel", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
err = stmt.Get(&channel, idInt)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in getting channel with id", zap.Int("id", idInt), zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
return &channel, nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) DeleteChannel(id string) *model.ApiError {
|
||||
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
|
||||
channelToDelete, apiErrorObj := r.GetChannel(id)
|
||||
|
||||
if apiErrorObj != nil {
|
||||
return apiErrorObj
|
||||
}
|
||||
|
||||
tx, err := r.Begin()
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
{
|
||||
stmt, err := tx.Prepare(`DELETE FROM notification_channels WHERE id=$1;`)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in preparing statement for INSERT to notification_channels", zap.Error(err))
|
||||
tx.Rollback()
|
||||
return &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
if _, err := stmt.Exec(idInt); err != nil {
|
||||
zap.L().Error("Error in Executing prepared statement for INSERT to notification_channels", zap.Error(err))
|
||||
tx.Rollback() // return an error too, we may want to wrap them
|
||||
return &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
}
|
||||
|
||||
apiError := r.alertManager.DeleteRoute(channelToDelete.Name)
|
||||
if apiError != nil {
|
||||
tx.Rollback()
|
||||
return apiError
|
||||
}
|
||||
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
zap.L().Error("Error in committing transaction for DELETE command to notification_channels", zap.Error(err))
|
||||
return &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
}
|
||||
|
||||
func (r *ruleDB) GetChannels() (*[]model.ChannelItem, *model.ApiError) {
|
||||
|
||||
channels := []model.ChannelItem{}
|
||||
|
||||
query := "SELECT id, created_at, updated_at, name, type, data data FROM notification_channels"
|
||||
|
||||
err := r.Select(&channels, query)
|
||||
|
||||
zap.L().Info(query)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
return &channels, nil
|
||||
|
||||
}
|
||||
|
||||
func (r *ruleDB) EditChannel(receiver *am.Receiver, id string) (*am.Receiver, *model.ApiError) {
|
||||
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
|
||||
channel, apiErrObj := r.GetChannel(id)
|
||||
|
||||
if apiErrObj != nil {
|
||||
return nil, apiErrObj
|
||||
}
|
||||
if channel.Name != receiver.Name {
|
||||
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("channel name cannot be changed")}
|
||||
}
|
||||
|
||||
tx, err := r.Begin()
|
||||
if err != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
channel_type := getChannelType(receiver)
|
||||
|
||||
receiverString, _ := json.Marshal(receiver)
|
||||
|
||||
{
|
||||
stmt, err := tx.Prepare(`UPDATE notification_channels SET updated_at=$1, type=$2, data=$3 WHERE id=$4;`)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in preparing statement for UPDATE to notification_channels", zap.Error(err))
|
||||
tx.Rollback()
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
if _, err := stmt.Exec(time.Now(), channel_type, string(receiverString), idInt); err != nil {
|
||||
zap.L().Error("Error in Executing prepared statement for UPDATE to notification_channels", zap.Error(err))
|
||||
tx.Rollback() // return an error too, we may want to wrap them
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
}
|
||||
|
||||
apiError := r.alertManager.EditRoute(receiver)
|
||||
if apiError != nil {
|
||||
tx.Rollback()
|
||||
return nil, apiError
|
||||
}
|
||||
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
zap.L().Error("Error in committing transaction for INSERT to notification_channels", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
return receiver, nil
|
||||
|
||||
}

func (r *ruleDB) CreateChannel(receiver *am.Receiver) (*am.Receiver, *model.ApiError) {

	channel_type := getChannelType(receiver)

	receiverString, _ := json.Marshal(receiver)

	tx, err := r.Begin()
	if err != nil {
		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
	}

	{
		stmt, err := tx.Prepare(`INSERT INTO notification_channels (created_at, updated_at, name, type, data) VALUES($1,$2,$3,$4,$5);`)
		if err != nil {
			zap.L().Error("Error in preparing statement for INSERT to notification_channels", zap.Error(err))
			tx.Rollback()
			return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
		}
		defer stmt.Close()

		if _, err := stmt.Exec(time.Now(), time.Now(), receiver.Name, channel_type, string(receiverString)); err != nil {
			zap.L().Error("Error in Executing prepared statement for INSERT to notification_channels", zap.Error(err))
			tx.Rollback() // return an error too, we may want to wrap them
			return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
		}
	}

	apiError := r.alertManager.AddRoute(receiver)
	if apiError != nil {
		tx.Rollback()
		return nil, apiError
	}

	err = tx.Commit()
	if err != nil {
		zap.L().Error("Error in committing transaction for INSERT to notification_channels", zap.Error(err))
		return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
	}

	return receiver, nil

}
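CreateChannel, EditChannel, and DeleteChannel all follow the same two-phase shape: stage the SQL write inside a transaction, apply the matching alertmanager route change, and commit only when both succeed. A minimal sketch of that pattern, with hypothetical `saveRow` and `applyRoute` callbacks standing in for the prepared statement and the alertmanager call:

```go
package rules

import "database/sql"

// upsertChannel sketches the two-phase commit pattern above. saveRow and
// applyRoute are hypothetical stand-ins for the prepared statement and the
// alertmanager route call; the row write is rolled back if either fails.
func upsertChannel(tx *sql.Tx, saveRow func(*sql.Tx) error, applyRoute func() error) error {
	if err := saveRow(tx); err != nil {
		tx.Rollback()
		return err
	}
	if err := applyRoute(); err != nil {
		tx.Rollback() // undo the staged row write if the route change failed
		return err
	}
	return tx.Commit()
}
```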

func (r *ruleDB) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
	alertsInfo := model.AlertsInfo{}
	// fetch alerts from rules db
@@ -353,5 +585,31 @@ func (r *ruleDB) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
		alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
	}
	alertsInfo.AlertNames = alertNames

	channels, _ := r.GetChannels()
	if channels != nil {
		alertsInfo.TotalChannels = len(*channels)
		for _, channel := range *channels {
			if channel.Type == "slack" {
				alertsInfo.SlackChannels = alertsInfo.SlackChannels + 1
			}
			if channel.Type == "webhook" {
				alertsInfo.WebHookChannels = alertsInfo.WebHookChannels + 1
			}
			if channel.Type == "email" {
				alertsInfo.EmailChannels = alertsInfo.EmailChannels + 1
			}
			if channel.Type == "pagerduty" {
				alertsInfo.PagerDutyChannels = alertsInfo.PagerDutyChannels + 1
			}
			if channel.Type == "opsgenie" {
				alertsInfo.OpsGenieChannels = alertsInfo.OpsGenieChannels + 1
			}
			if channel.Type == "msteams" {
				alertsInfo.MSTeamsChannels = alertsInfo.MSTeamsChannels + 1
			}
		}
	}

	return &alertsInfo, nil
}
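The per-type tallying above is a chain of independent `if` checks; the same logic appears later in this diff (in createTelemetry) as a `switch`. An equivalent standalone sketch:

```go
package main

import "fmt"

// tally mirrors the channel-type counting in GetAlertsInfo as a switch.
func tally(types []string) map[string]int {
	counts := map[string]int{}
	for _, t := range types {
		switch t {
		case "slack", "webhook", "email", "pagerduty", "opsgenie", "msteams":
			counts[t]++
		}
	}
	return counts
}

func main() {
	fmt.Println(tally([]string{"slack", "email", "slack"})) // map[email:1 slack:2]
}
```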

@@ -190,7 +190,12 @@ func NewManager(o *ManagerOptions) (*Manager, error) {
		return nil, err
	}

	db := NewRuleDB(o.DBConn)
	amManager, err := am.New()
	if err != nil {
		return nil, err
	}

	db := NewRuleDB(o.DBConn, amManager)

	telemetry.GetInstance().SetAlertsInfoCallback(db.GetAlertsInfo)

@@ -6,9 +6,7 @@ import (
	"encoding/json"
	"fmt"
	"math"
	"net/url"
	"regexp"
	"sort"
	"text/template"
	"time"
	"unicode"
@@ -16,6 +14,7 @@ import (
	"go.uber.org/zap"

	"go.signoz.io/signoz/pkg/query-service/common"
	"go.signoz.io/signoz/pkg/query-service/contextlinks"
	"go.signoz.io/signoz/pkg/query-service/model"
	"go.signoz.io/signoz/pkg/query-service/postprocess"

@@ -31,6 +30,7 @@ import (
	"go.signoz.io/signoz/pkg/query-service/utils/timestamp"

	logsv3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
	tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
	"go.signoz.io/signoz/pkg/query-service/formatter"

	yaml "gopkg.in/yaml.v2"
@@ -53,6 +53,10 @@ type ThresholdRule struct {
	querier interfaces.Querier
	// querierV2 is used for alerts created after the introduction of new metrics query builder
	querierV2 interfaces.Querier

	// used for attribute metadata enrichment for logs and traces
	logsKeys  map[string]v3.AttributeKey
	spansKeys map[string]v3.AttributeKey
}

func NewThresholdRule(
@@ -164,16 +168,8 @@ func (r *ThresholdRule) prepareQueryRange(ts time.Time) (*v3.QueryRangeParamsV3,

	zap.L().Info("prepareQueryRange", zap.Int64("ts", ts.UnixMilli()), zap.Int64("evalWindow", r.evalWindow.Milliseconds()), zap.Int64("evalDelay", r.evalDelay.Milliseconds()))

	start := ts.Add(-time.Duration(r.evalWindow)).UnixMilli()
	end := ts.UnixMilli()

	if r.evalDelay > 0 {
		start = start - int64(r.evalDelay.Milliseconds())
		end = end - int64(r.evalDelay.Milliseconds())
	}
	// round to minute otherwise we could potentially miss data
	start = start - (start % (60 * 1000))
	end = end - (end % (60 * 1000))
	startTs, endTs := r.Timestamps(ts)
	start, end := startTs.UnixMilli(), endTs.UnixMilli()

	if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
		params := &v3.QueryRangeParamsV3{
@@ -239,245 +235,76 @@ func (r *ThresholdRule) prepareQueryRange(ts time.Time) (*v3.QueryRangeParamsV3,
	}, nil
}
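The minute rounding being replaced by `r.Timestamps(ts)` exists because a window that starts or ends mid-minute can evaluate against a partially ingested minute. A small sketch of the truncation the old inline code performed:

```go
package main

import (
	"fmt"
	"time"
)

// roundDownToMinute mirrors the start/end alignment in prepareQueryRange:
// truncating both bounds to the minute avoids evaluating against a
// partially ingested minute of data.
func roundDownToMinute(ms int64) int64 {
	return ms - (ms % (60 * 1000))
}

func main() {
	ts := time.Date(2024, 8, 1, 10, 3, 45, 0, time.UTC).UnixMilli()
	fmt.Println(time.UnixMilli(roundDownToMinute(ts)).UTC()) // 2024-08-01 10:03:00 +0000 UTC
}
```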

// The following function is used to prepare the where clause for the query
// `lbls` contains the key value pairs of the labels from the result of the query
// We iterate over the where clause and replace the labels with the actual values
// There are two cases:
// 1. The label is present in the where clause
// 2. The label is not present in the where clause
//
// Example for case 2:
// Latency by serviceName without any filter
// In this case, for each service with latency > threshold we send a notification
// The expectation will be that clicking on the related traces for service A, will
// take us to the traces page with the filter serviceName=A
// So for all the missing labels in the where clause, we add them as key = value
//
// Example for case 1:
// Severity text IN (WARN, ERROR)
// In this case, the Severity text will appear in the `lbls` if it were part of the group
// by clause, in which case we replace it with the actual value for the notification
// i.e Severity text = WARN
// If the Severity text is not part of the group by clause, then we add it as it is
func (r *ThresholdRule) fetchFilters(selectedQuery string, lbls labels.Labels) []v3.FilterItem {
	var filterItems []v3.FilterItem

	added := make(map[string]struct{})

	if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder &&
		r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery] != nil &&
		r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery].Filters != nil {

		for _, item := range r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery].Filters.Items {
			exists := false
			for _, label := range lbls {
				if item.Key.Key == label.Name {
					// if the label is present in the where clause, replace it with key = value
					filterItems = append(filterItems, v3.FilterItem{
						Key:      item.Key,
						Operator: v3.FilterOperatorEqual,
						Value:    label.Value,
					})
					exists = true
					added[label.Name] = struct{}{}
					break
				}
			}

			if !exists {
				// if the label is not present in the where clause, add it as it is
				filterItems = append(filterItems, item)
			}
		}
	}

	// add the labels which are not present in the where clause
	for _, label := range lbls {
		if _, ok := added[label.Name]; !ok {
			filterItems = append(filterItems, v3.FilterItem{
				Key:      v3.AttributeKey{Key: label.Name},
				Operator: v3.FilterOperatorEqual,
				Value:    label.Value,
			})
		}
	}

	return filterItems
}
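To make the two cases concrete (inputs here are illustrative, not taken from the tests): a rule filtered on `severity_text IN (WARN, ERROR)` and grouped by `serviceName`, firing for `serviceName=frontend`, keeps the severity filter as-is and appends `serviceName = frontend`. A simplified standalone sketch of the merge, with plain structs standing in for `v3.FilterItem` and `labels.Labels`:

```go
package main

import "fmt"

type filter struct{ Key, Op, Value string }

// mergeFilters sketches fetchFilters: filters whose key appears in the
// result labels are replaced with key = value (case 1); labels missing
// from the filter list are appended as key = value (case 2).
func mergeFilters(existing []filter, lbls map[string]string) []filter {
	out := []filter{}
	added := map[string]bool{}
	for _, f := range existing {
		if v, ok := lbls[f.Key]; ok {
			out = append(out, filter{f.Key, "=", v})
			added[f.Key] = true
		} else {
			out = append(out, f) // not in the group by: keep it as it is
		}
	}
	for k, v := range lbls {
		if !added[k] {
			out = append(out, filter{k, "=", v})
		}
	}
	return out
}

func main() {
	got := mergeFilters(
		[]filter{{"severity_text", "IN", "WARN,ERROR"}},
		map[string]string{"serviceName": "frontend"},
	)
	fmt.Println(got) // [{severity_text IN WARN,ERROR} {serviceName = frontend}]
}
```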

func (r *ThresholdRule) prepareLinksToLogs(ts time.Time, lbls labels.Labels) string {
	selectedQuery := r.GetSelectedQuery()

	qr, err := r.prepareQueryRange(ts)
	if err != nil {
		return ""
	}
	start := time.UnixMilli(qr.Start)
	end := time.UnixMilli(qr.End)

	// TODO(srikanthccv): handle formula queries
	if selectedQuery < "A" || selectedQuery > "Z" {
		return ""
	}

	q, err := r.prepareQueryRange(ts)
	if err != nil {
	q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
	if q == nil {
		return ""
	}
	// Logs list view expects time in milliseconds
	tr := v3.URLShareableTimeRange{
		Start:    q.Start,
		End:      q.End,
		PageSize: 100,

	if q.DataSource != v3.DataSourceLogs {
		return ""
	}

	options := v3.URLShareableOptions{
		MaxLines:      2,
		Format:        "list",
		SelectColumns: []v3.AttributeKey{},
	queryFilter := []v3.FilterItem{}
	if q.Filters != nil {
		queryFilter = q.Filters.Items
	}

	period, _ := json.Marshal(tr)
	urlEncodedTimeRange := url.QueryEscape(string(period))
	filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.logsKeys)

	filterItems := r.fetchFilters(selectedQuery, lbls)
	urlData := v3.URLShareableCompositeQuery{
		QueryType: string(v3.QueryTypeBuilder),
		Builder: v3.URLShareableBuilderQuery{
			QueryData: []v3.BuilderQuery{
				{
					DataSource:         v3.DataSourceLogs,
					QueryName:          "A",
					AggregateOperator:  v3.AggregateOperatorNoOp,
					AggregateAttribute: v3.AttributeKey{},
					Filters: &v3.FilterSet{
						Items:    filterItems,
						Operator: "AND",
					},
					Expression:   "A",
					Disabled:     false,
					Having:       []v3.Having{},
					StepInterval: 60,
					OrderBy: []v3.OrderBy{
						{
							ColumnName: "timestamp",
							Order:      "desc",
						},
					},
				},
			},
			QueryFormulas: make([]string, 0),
		},
	}

	data, _ := json.Marshal(urlData)
	compositeQuery := url.QueryEscape(url.QueryEscape(string(data)))

	optionsData, _ := json.Marshal(options)
	urlEncodedOptions := url.QueryEscape(string(optionsData))

	return fmt.Sprintf("compositeQuery=%s&timeRange=%s&startTime=%d&endTime=%d&options=%s", compositeQuery, urlEncodedTimeRange, tr.Start, tr.End, urlEncodedOptions)
	return contextlinks.PrepareLinksToLogs(start, end, filterItems)
}
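The legacy link builder above (and its traces counterpart below) escapes the serialized composite query twice before embedding it in the query string. A minimal sketch of that step; the assumption, not confirmed by this diff, is that the UI decodes the parameter twice, once during navigation and once when parsing:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Double-escaping: the inner escape protects the JSON payload, the
	// outer one survives the extra decode the frontend is assumed to do.
	data := `{"queryType":"builder"}`
	fmt.Println(url.QueryEscape(url.QueryEscape(data)))
	// %257B%2522queryType%2522%253A%2522builder%2522%257D
}
```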

func (r *ThresholdRule) prepareLinksToTraces(ts time.Time, lbls labels.Labels) string {
	selectedQuery := r.GetSelectedQuery()

	qr, err := r.prepareQueryRange(ts)
	if err != nil {
		return ""
	}
	start := time.UnixMilli(qr.Start)
	end := time.UnixMilli(qr.End)

	// TODO(srikanthccv): handle formula queries
	if selectedQuery < "A" || selectedQuery > "Z" {
		return ""
	}

	q, err := r.prepareQueryRange(ts)
	if err != nil {
	q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
	if q == nil {
		return ""
	}
	// Traces list view expects time in nanoseconds
	tr := v3.URLShareableTimeRange{
		Start:    q.Start * time.Second.Microseconds(),
		End:      q.End * time.Second.Microseconds(),
		PageSize: 100,

	if q.DataSource != v3.DataSourceTraces {
		return ""
	}

	options := v3.URLShareableOptions{
		MaxLines:      2,
		Format:        "list",
		SelectColumns: constants.TracesListViewDefaultSelectedColumns,
	queryFilter := []v3.FilterItem{}
	if q.Filters != nil {
		queryFilter = q.Filters.Items
	}

	period, _ := json.Marshal(tr)
	urlEncodedTimeRange := url.QueryEscape(string(period))
	filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.spansKeys)

	filterItems := r.fetchFilters(selectedQuery, lbls)
	urlData := v3.URLShareableCompositeQuery{
		QueryType: string(v3.QueryTypeBuilder),
		Builder: v3.URLShareableBuilderQuery{
			QueryData: []v3.BuilderQuery{
				{
					DataSource:         v3.DataSourceTraces,
					QueryName:          "A",
					AggregateOperator:  v3.AggregateOperatorNoOp,
					AggregateAttribute: v3.AttributeKey{},
					Filters: &v3.FilterSet{
						Items:    filterItems,
						Operator: "AND",
					},
					Expression:   "A",
					Disabled:     false,
					Having:       []v3.Having{},
					StepInterval: 60,
					OrderBy: []v3.OrderBy{
						{
							ColumnName: "timestamp",
							Order:      "desc",
						},
					},
				},
			},
			QueryFormulas: make([]string, 0),
		},
	}

	data, _ := json.Marshal(urlData)
	compositeQuery := url.QueryEscape(url.QueryEscape(string(data)))

	optionsData, _ := json.Marshal(options)
	urlEncodedOptions := url.QueryEscape(string(optionsData))

	return fmt.Sprintf("compositeQuery=%s&timeRange=%s&startTime=%d&endTime=%d&options=%s", compositeQuery, urlEncodedTimeRange, tr.Start, tr.End, urlEncodedOptions)
	return contextlinks.PrepareLinksToTraces(start, end, filterItems)
}

func (r *ThresholdRule) GetSelectedQuery() string {
	if r.ruleCondition != nil {
		if r.ruleCondition.SelectedQuery != "" {
			return r.ruleCondition.SelectedQuery
		}

		queryNames := map[string]struct{}{}

		if r.ruleCondition.CompositeQuery != nil {
			if r.ruleCondition.QueryType() == v3.QueryTypeBuilder {
				for name := range r.ruleCondition.CompositeQuery.BuilderQueries {
					queryNames[name] = struct{}{}
				}
			} else if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
				for name := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
					queryNames[name] = struct{}{}
				}
			}
		}

		// The following logic exists for backward compatibility
		// If there is no selected query, then
		// - check if F1 is present, if yes, return F1
		// - else return the query with max ascii value
		// this logic is not really correct. we should be considering
		// whether the query is enabled or not. but this is a temporary
		// fix to support backward compatibility
		if _, ok := queryNames["F1"]; ok {
			return "F1"
		}
		keys := make([]string, 0, len(queryNames))
		for k := range queryNames {
			keys = append(keys, k)
		}
		sort.Strings(keys)
		return keys[len(keys)-1]
	}
	// This should never happen
	return ""
	return r.ruleCondition.GetSelectedQueryName()
}
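A worked example of the legacy fallback that `GetSelectedQueryName()` replaces (query names here are hypothetical): with builder queries A, B, and F1, F1 is selected; with only A and B, the lexicographically last name B wins.

```go
package main

import (
	"fmt"
	"sort"
)

// pickDefaultQuery sketches the backward-compatibility fallback above:
// prefer F1 when present, otherwise the query name with the highest
// ASCII value. Assumes names is non-empty.
func pickDefaultQuery(names []string) string {
	for _, n := range names {
		if n == "F1" {
			return "F1"
		}
	}
	sort.Strings(names)
	return names[len(names)-1]
}

func main() {
	fmt.Println(pickDefaultQuery([]string{"A", "B", "F1"})) // F1
	fmt.Println(pickDefaultQuery([]string{"A", "B"}))       // B
}
```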

func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time) (Vector, error) {
@@ -492,11 +319,37 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time) (Vec
	}

	if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
		// check if any enrichment is required for logs if yes then enrich them
		if logsv3.EnrichmentRequired(params) {
			// Note: Sending empty fields key because enrichment is only needed for json
			// TODO: Add support for attribute enrichment later
			logsv3.Enrich(params, map[string]v3.AttributeKey{})
		hasLogsQuery := false
		hasTracesQuery := false
		for _, query := range params.CompositeQuery.BuilderQueries {
			if query.DataSource == v3.DataSourceLogs {
				hasLogsQuery = true
			}
			if query.DataSource == v3.DataSourceTraces {
				hasTracesQuery = true
			}
		}

		if hasLogsQuery {
			// check if any enrichment is required for logs if yes then enrich them
			if logsv3.EnrichmentRequired(params) {
				logsFields, err := r.reader.GetLogFields(ctx)
				if err != nil {
					return nil, err
				}
				logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
				r.logsKeys = logsKeys
				logsv3.Enrich(params, logsKeys)
			}
		}

		if hasTracesQuery {
			spanKeys, err := r.reader.GetSpanAttributeKeys(ctx)
			if err != nil {
				return nil, err
			}
			r.spansKeys = spanKeys
			tracesV3.Enrich(params, spanKeys)
		}
	}

@@ -654,11 +507,13 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
	if r.typ == AlertTypeTraces {
		link := r.prepareLinksToTraces(ts, smpl.MetricOrig)
		if link != "" && r.hostFromSource() != "" {
			zap.L().Info("adding traces link to annotations", zap.String("link", fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)))
			annotations = append(annotations, labels.Label{Name: "related_traces", Value: fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)})
		}
	} else if r.typ == AlertTypeLogs {
		link := r.prepareLinksToLogs(ts, smpl.MetricOrig)
		if link != "" && r.hostFromSource() != "" {
			zap.L().Info("adding logs link to annotations", zap.String("link", fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)))
			annotations = append(annotations, labels.Label{Name: "related_logs", Value: fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)})
		}
	}
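For reference, the annotation value is just the explorer path with the prepared query string appended; an illustration with a hypothetical host and a truncated link:

```go
package main

import "fmt"

func main() {
	// Hypothetical values standing in for r.hostFromSource() and the
	// prepared link; the real link carries the full composite query.
	host := "https://signoz.example.com"
	link := "compositeQuery=...&startTime=1722500000000&endTime=1722500300000"
	fmt.Printf("%s/traces-explorer?%s\n", host, link)
}
```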

@@ -1303,12 +1303,23 @@ func TestThresholdRuleTracesLink(t *testing.T) {
		t.Errorf("an error '%s' was not expected when opening a stub database connection", err)
	}

	metaCols := make([]cmock.ColumnType, 0)
	metaCols = append(metaCols, cmock.ColumnType{Name: "DISTINCT(tagKey)", Type: "String"})
	metaCols = append(metaCols, cmock.ColumnType{Name: "tagType", Type: "String"})
	metaCols = append(metaCols, cmock.ColumnType{Name: "dataType", Type: "String"})
	metaCols = append(metaCols, cmock.ColumnType{Name: "isColumn", Type: "Bool"})

	cols := make([]cmock.ColumnType, 0)
	cols = append(cols, cmock.ColumnType{Name: "value", Type: "Float64"})
	cols = append(cols, cmock.ColumnType{Name: "attr", Type: "String"})
	cols = append(cols, cmock.ColumnType{Name: "timestamp", Type: "String"})

	for idx, c := range testCases {
		metaRows := cmock.NewRows(metaCols, c.metaValues)
		mock.
			ExpectQuery("SELECT DISTINCT(tagKey), tagType, dataType, isColumn FROM archiveNamespace.span_attributes_keys").
			WillReturnRows(metaRows)

		rows := cmock.NewRows(cols, c.values)

		// We are testing the eval logic after the query is run
@@ -1402,12 +1413,38 @@ func TestThresholdRuleLogsLink(t *testing.T) {
		t.Errorf("an error '%s' was not expected when opening a stub database connection", err)
	}

	attrMetaCols := make([]cmock.ColumnType, 0)
	attrMetaCols = append(attrMetaCols, cmock.ColumnType{Name: "name", Type: "String"})
	attrMetaCols = append(attrMetaCols, cmock.ColumnType{Name: "dataType", Type: "String"})

	resourceMetaCols := make([]cmock.ColumnType, 0)
	resourceMetaCols = append(resourceMetaCols, cmock.ColumnType{Name: "name", Type: "String"})
	resourceMetaCols = append(resourceMetaCols, cmock.ColumnType{Name: "dataType", Type: "String"})

	createTableCols := make([]cmock.ColumnType, 0)
	createTableCols = append(createTableCols, cmock.ColumnType{Name: "statement", Type: "String"})

	cols := make([]cmock.ColumnType, 0)
	cols = append(cols, cmock.ColumnType{Name: "value", Type: "Float64"})
	cols = append(cols, cmock.ColumnType{Name: "attr", Type: "String"})
	cols = append(cols, cmock.ColumnType{Name: "timestamp", Type: "String"})

	for idx, c := range testCases {
		attrMetaRows := cmock.NewRows(attrMetaCols, c.attrMetaValues)
		mock.
			ExpectSelect("SELECT DISTINCT name, datatype from signoz_logs.distributed_logs_attribute_keys group by name, datatype").
			WillReturnRows(attrMetaRows)

		resourceMetaRows := cmock.NewRows(resourceMetaCols, c.resourceMetaValues)
		mock.
			ExpectSelect("SELECT DISTINCT name, datatype from signoz_logs.distributed_logs_resource_keys group by name, datatype").
			WillReturnRows(resourceMetaRows)

		createTableRows := cmock.NewRows(createTableCols, c.createTableValues)
		mock.
			ExpectSelect("SHOW CREATE TABLE signoz_logs.logs").
			WillReturnRows(createTableRows)

		rows := cmock.NewRows(cols, c.values)

		// We are testing the eval logic after the query is run

@@ -4,14 +4,18 @@ import "time"

var (
	testCases = []struct {
		targetUnit   string
		yAxisUnit    string
		values       [][]interface{}
		expectAlerts int
		compareOp    string
		matchType    string
		target       float64
		summaryAny   []string
		targetUnit         string
		yAxisUnit          string
		values             [][]interface{}
		metaValues         [][]interface{}
		attrMetaValues     [][]interface{}
		resourceMetaValues [][]interface{}
		createTableValues  [][]interface{}
		expectAlerts       int
		compareOp          string
		matchType          string
		target             float64
		summaryAny         []string
	}{
		{
			targetUnit: "s",
@@ -23,10 +27,16 @@ var (
				{float64(299316000), "attr", time.Now().Add(3 * time.Second)},         // 0.3 seconds
				{float64(66640400.00000001), "attr", time.Now().Add(4 * time.Second)}, // 0.06 seconds
			},
			expectAlerts: 0,
			compareOp:    "1", // Above
			matchType:    "1", // Once
			target:       1,   // 1 second
			metaValues:   [][]interface{}{},
			createTableValues: [][]interface{}{
				{"statement"},
			},
			attrMetaValues:     [][]interface{}{},
			resourceMetaValues: [][]interface{}{},
			expectAlerts:       0,
			compareOp:          "1", // Above
			matchType:          "1", // Once
			target:             1,   // 1 second
		},
		{
			targetUnit: "ms",
@@ -38,10 +48,16 @@ var (
				{float64(299316000), "attr", time.Now().Add(3 * time.Second)},         // 299.31 ms
				{float64(66640400.00000001), "attr", time.Now().Add(4 * time.Second)}, // 66.64 ms
			},
			expectAlerts: 4,
			compareOp:    "1", // Above
			matchType:    "1", // Once
			target:       200, // 200 ms
			metaValues:   [][]interface{}{},
			createTableValues: [][]interface{}{
				{"statement"},
			},
			attrMetaValues:     [][]interface{}{},
			resourceMetaValues: [][]interface{}{},
			expectAlerts:       4,
			compareOp:          "1", // Above
			matchType:          "1", // Once
			target:             200, // 200 ms
			summaryAny: []string{
				"observed metric value is 299 ms",
				"the observed metric value is 573 ms",
@@ -59,10 +75,16 @@ var (
				{float64(299316000), "attr", time.Now().Add(3 * time.Second)},         // 0.3 GB
				{float64(66640400.00000001), "attr", time.Now().Add(4 * time.Second)}, // 66.64 MB
			},
			expectAlerts: 0,
			compareOp:    "1", // Above
			matchType:    "1", // Once
			target:       200, // 200 GB
			metaValues:   [][]interface{}{},
			createTableValues: [][]interface{}{
				{"statement"},
			},
			attrMetaValues:     [][]interface{}{},
			resourceMetaValues: [][]interface{}{},
			expectAlerts:       0,
			compareOp:          "1", // Above
			matchType:          "1", // Once
			target:             200, // 200 GB
		},
	}
)

@@ -325,65 +325,46 @@ func createTelemetry() {
	if err == nil {
		dashboardsInfo, err := telemetry.dashboardsInfoCallback(ctx)
		if err == nil {
			channels, err := telemetry.reader.GetChannels()
			savedViewsInfo, err := telemetry.savedViewsInfoCallback(ctx)
			if err == nil {
				for _, channel := range *channels {
					switch channel.Type {
					case "slack":
						alertsInfo.SlackChannels++
					case "webhook":
						alertsInfo.WebHookChannels++
					case "pagerduty":
						alertsInfo.PagerDutyChannels++
					case "opsgenie":
						alertsInfo.OpsGenieChannels++
					case "email":
						alertsInfo.EmailChannels++
					case "msteams":
						alertsInfo.MSTeamsChannels++
					}
				dashboardsAlertsData := map[string]interface{}{
					"totalDashboards":                 dashboardsInfo.TotalDashboards,
					"totalDashboardsWithPanelAndName": dashboardsInfo.TotalDashboardsWithPanelAndName,
					"dashboardNames":                  dashboardsInfo.DashboardNames,
					"alertNames":                      alertsInfo.AlertNames,
					"logsBasedPanels":                 dashboardsInfo.LogsBasedPanels,
					"metricBasedPanels":               dashboardsInfo.MetricBasedPanels,
					"tracesBasedPanels":               dashboardsInfo.TracesBasedPanels,
					"dashboardsWithTSV2":              dashboardsInfo.QueriesWithTSV2,
					"dashboardWithLogsChQuery":        dashboardsInfo.DashboardsWithLogsChQuery,
					"totalAlerts":                     alertsInfo.TotalAlerts,
					"alertsWithTSV2":                  alertsInfo.AlertsWithTSV2,
					"logsBasedAlerts":                 alertsInfo.LogsBasedAlerts,
					"metricBasedAlerts":               alertsInfo.MetricBasedAlerts,
					"tracesBasedAlerts":               alertsInfo.TracesBasedAlerts,
					"totalChannels":                   alertsInfo.TotalChannels,
					"totalSavedViews":                 savedViewsInfo.TotalSavedViews,
					"logsSavedViews":                  savedViewsInfo.LogsSavedViews,
					"tracesSavedViews":                savedViewsInfo.TracesSavedViews,
					"slackChannels":                   alertsInfo.SlackChannels,
					"webHookChannels":                 alertsInfo.WebHookChannels,
					"pagerDutyChannels":               alertsInfo.PagerDutyChannels,
					"opsGenieChannels":                alertsInfo.OpsGenieChannels,
					"emailChannels":                   alertsInfo.EmailChannels,
					"msteamsChannels":                 alertsInfo.MSTeamsChannels,
					"metricsBuilderQueries":           alertsInfo.MetricsBuilderQueries,
					"metricsClickHouseQueries":        alertsInfo.MetricsClickHouseQueries,
					"metricsPrometheusQueries":        alertsInfo.MetricsPrometheusQueries,
					"spanMetricsPrometheusQueries":    alertsInfo.SpanMetricsPrometheusQueries,
					"alertsWithLogsChQuery":           alertsInfo.AlertsWithLogsChQuery,
				}
				savedViewsInfo, err := telemetry.savedViewsInfoCallback(ctx)
				if err == nil {
					dashboardsAlertsData := map[string]interface{}{
						"totalDashboards":                 dashboardsInfo.TotalDashboards,
						"totalDashboardsWithPanelAndName": dashboardsInfo.TotalDashboardsWithPanelAndName,
						"dashboardNames":                  dashboardsInfo.DashboardNames,
						"alertNames":                      alertsInfo.AlertNames,
						"logsBasedPanels":                 dashboardsInfo.LogsBasedPanels,
						"metricBasedPanels":               dashboardsInfo.MetricBasedPanels,
						"tracesBasedPanels":               dashboardsInfo.TracesBasedPanels,
						"dashboardsWithTSV2":              dashboardsInfo.QueriesWithTSV2,
						"dashboardWithLogsChQuery":        dashboardsInfo.DashboardsWithLogsChQuery,
						"totalAlerts":                     alertsInfo.TotalAlerts,
						"alertsWithTSV2":                  alertsInfo.AlertsWithTSV2,
						"logsBasedAlerts":                 alertsInfo.LogsBasedAlerts,
						"metricBasedAlerts":               alertsInfo.MetricBasedAlerts,
						"tracesBasedAlerts":               alertsInfo.TracesBasedAlerts,
						"totalChannels":                   len(*channels),
						"totalSavedViews":                 savedViewsInfo.TotalSavedViews,
						"logsSavedViews":                  savedViewsInfo.LogsSavedViews,
						"tracesSavedViews":                savedViewsInfo.TracesSavedViews,
						"slackChannels":                   alertsInfo.SlackChannels,
						"webHookChannels":                 alertsInfo.WebHookChannels,
						"pagerDutyChannels":               alertsInfo.PagerDutyChannels,
						"opsGenieChannels":                alertsInfo.OpsGenieChannels,
						"emailChannels":                   alertsInfo.EmailChannels,
						"msteamsChannels":                 alertsInfo.MSTeamsChannels,
						"metricsBuilderQueries":           alertsInfo.MetricsBuilderQueries,
						"metricsClickHouseQueries":        alertsInfo.MetricsClickHouseQueries,
						"metricsPrometheusQueries":        alertsInfo.MetricsPrometheusQueries,
						"spanMetricsPrometheusQueries":    alertsInfo.SpanMetricsPrometheusQueries,
						"alertsWithLogsChQuery":           alertsInfo.AlertsWithLogsChQuery,
					}
					// send event only if there are dashboards or alerts or channels
					if (dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 || len(*channels) > 0 || savedViewsInfo.TotalSavedViews > 0) && apiErr == nil {
						for _, user := range users {
							if user.Email == DEFAULT_CLOUD_EMAIL {
								continue
							}
							telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, user.Email, false, false)
				// send event only if there are dashboards or alerts or channels
				if (dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 || alertsInfo.TotalChannels > 0 || savedViewsInfo.TotalSavedViews > 0) && apiErr == nil {
					for _, user := range users {
						if user.Email == DEFAULT_CLOUD_EMAIL {
							continue
						}
						telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, user.Email, false, false)
					}
				}
			}

@@ -467,11 +448,9 @@ func getOutboundIP() string {
	}

	defer resp.Body.Close()
	ipBody, err := io.ReadAll(resp.Body)
	if err == nil {
		ipBody, err := io.ReadAll(resp.Body)
		if err == nil {
			ip = ipBody
		}
		ip = ipBody
	}

	return string(ip)

@@ -186,7 +186,7 @@ func (tb *FilterSuggestionsTestBed) mockAttribValuesQueryResponse(
		{Type: "Nullable(Float64)", Name: "float64TagValue"},
	}

	expectedAttribKeysInQuery := []string{}
	expectedAttribKeysInQuery := []any{}
	mockResultRows := [][]any{}
	for idx, attrib := range expectedAttribs {
		expectedAttribKeysInQuery = append(expectedAttribKeysInQuery, attrib.Key)
@@ -198,8 +198,8 @@ func (tb *FilterSuggestionsTestBed) mockAttribValuesQueryResponse(
	}

	tb.mockClickhouse.ExpectQuery(
		"select.*tagKey.*stringTagValue.*int64TagValue.*float64TagValue.*distributed_tag_attributes.*tagKey.*in.*",
	).WithArgs(expectedAttribKeysInQuery).WillReturnRows(mockhouse.NewRows(resultCols, mockResultRows))
		"select.*tagKey.*stringTagValue.*int64TagValue.*float64TagValue.*distributed_tag_attributes.*tagKey",
	).WithArgs(expectedAttribKeysInQuery...).WillReturnRows(mockhouse.NewRows(resultCols, mockResultRows))
}
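The test change above swaps `[]string{}` for `[]any{}` so the keys can be spread into the mock's variadic argument matcher with `expectedAttribKeysInQuery...`. A minimal sketch of why the element type must change, with a stand-in for the matcher:

```go
package main

import "fmt"

// withArgs stands in for the mock's variadic matcher. A []string value
// cannot be spread into a ...any parameter, so the test builds []any.
func withArgs(args ...any) { fmt.Println(len(args), "args") }

func main() {
	keys := []any{"service.name", "http.method"}
	withArgs(keys...) // spreads to two arguments; []string{...}... would not compile
}
```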

type FilterSuggestionsTestBed struct {
Submodule signoz-core-ui added at f8c925d842