Compare commits

..

43 Commits

Author SHA1 Message Date
Abhishek Kumar Singh
694d9958db improv: integrated origin field extraction and updated tests to check for origin fields 2025-11-18 15:03:24 +05:30
Abhishek Kumar Singh
addee4c0a5 feat: added origin field extractor for ch query 2025-11-18 14:36:03 +05:30
Abhishek Kumar Singh
f10cf7ac04 refactor: code organisation 2025-11-17 16:27:17 +05:30
Abhishek Kumar Singh
b336678639 fix: CH test cases 2025-11-17 15:01:32 +05:30
Abhishek Kumar Singh
c438b3444e refactor: removed GroupBy from FilterResult 2025-11-17 14:34:46 +05:30
Abhishek Kumar Singh
b624414507 feat: extract column origin from subquery and join before searching directly 2025-11-17 13:42:47 +05:30
Abhishek Kumar Singh
bde7963444 feat: implemented extractOriginFromSelectItem which will find the given columnName till the very end to return the origin column with given name 2025-11-17 09:00:18 +05:30
Abhishek Kumar Singh
2df93ff217 feat: extract column origin from query and add in column info 2025-11-16 10:20:38 +05:30
Abhishek Kumar Singh
f496a6ecde improv: updated result for queryfilterextractor to return column with alias 2025-11-16 08:58:33 +05:30
Abhishek Kumar Singh
599e230a72 feat: added NewExtractor function for creating extractor 2025-11-13 13:52:32 +05:30
Abhishek Kumar Singh
9a0e32ff3b refactor: removed redundant non nil checks 2025-11-13 13:41:51 +05:30
Abhishek Kumar Singh
5fe2732698 refactor: removed unused extractFromAnyFunction 2025-11-13 13:20:59 +05:30
Abhishek Kumar Singh
4993a44ecc refactor: removed unused cases + added comments 2025-11-13 12:59:35 +05:30
Abhishek Kumar Singh
ebd575a16b chore: comments + remove usage of seen map in extractGroupFromGroupByClause 2025-11-12 19:26:44 +05:30
Abhishek Kumar Singh
666582337e feat: support for CTE in clickhouse queryfilterextractor 2025-11-12 18:58:30 +05:30
Abhishek Kumar Singh
23512ab05c feat: added support for promql in queryfilterextractor 2025-11-10 20:50:42 +05:30
Abhishek Kumar Singh
1423749529 feat: added filter extractor interface and clickhouse impl with tests 2025-11-10 20:05:39 +05:30
Vikrant Gupta
4437630127 fix(tokenizer): do not retry 401 email_password session request (#9541) 2025-11-10 14:04:16 +00:00
Yunus M
89639b239e feat: convert duration ms to string to be passed to getYAxisFormattedValue (#9539) 2025-11-10 18:03:32 +05:30
Yunus M
785ae9f0bd feat: pass email if username is not set - pylon (#9526) 2025-11-10 17:30:32 +05:30
Abhi kumar
8752022cef fix: updated dashboard panel colors for better contrast ratio (#9500)
* fix: updated dashboard panel colors for better contrast ratio

* chore: preetier fix

* feat: added changes for the tooltip to follow cursor
2025-11-06 17:17:33 +05:30
Aditya Singh
c7e4a9c45d Fix: uplot dense points selection (#9469)
* feat: fix uplot focused series logic selection

* fix: stop propogation only if drilldown enabled

* feat: minor refactor

* feat: minor refactor

* feat: minor refactor

* feat: minor refactor

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-11-06 11:14:02 +00:00
primus-bot[bot]
bf92c92204 chore(release): bump to v0.100.1 (#9499)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-11-06 13:22:09 +05:30
Srikanth Chekuri
bd63633be7 fix: do not format for non aggregation columns (#9492) 2025-11-05 19:24:56 +05:30
Nikhil Mantri
1158e1199b Fix: filter with time in span scope condition builder (#9426) 2025-11-05 13:11:36 +05:30
primus-bot[bot]
0a60c49314 chore(release): bump to v0.100.0 (#9488)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
Co-authored-by: Priyanshu Shrivastava <priyanshu@signoz.io>
2025-11-05 12:06:42 +05:30
Ekansh Gupta
c25e3beb81 feat: changed descirption of span percentile calculation (#9487) 2025-11-05 06:23:24 +00:00
SagarRajput-7
c9e0f2b9ca fix: removed cleanup variable url function to avoid url reseting (#9449) 2025-11-05 00:33:11 +05:30
Abhi kumar
6d831849c1 perf: optimize tooltip plugin with caching, memoization, and improved… (#9421)
* perf: optimize tooltip plugin with caching, memoization, and improved DOM operations

* perf(uplot): optimize tooltip with focused sorting and O(n²) to O(n) reduction

* perf(uplot): optimize threshold rendering with batched canvas operations

* chore: pr review changes

* chore: removed last index check for tooltip generation

* chore: shifted to rendering only one points when hovered

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-11-04 17:34:15 +00:00
aniketio-ctrl
83eeb46f99 feat(sqlstore): added sql formatter for json (#9420)
* chore: added sql formatter for json

* chore: updated json extract columns

* chore: added apend ident

* chore: resolved pr comments

* chore: resolved pr comments

* chore: resolved pr comments

* chore: resolved pr comments

* chore: minor changes

* chore: minor changes

* chore: minor changes

* chore: minor changes

* chore: resolve comments

* chore: added append value

* chore: added append value

* chore: added append value

* chore: added append value

* chore: added append value

* chore: added append value

* chore: added append value

* chore: added append value

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-11-04 22:05:23 +05:30
Shaheer Kochai
287558dc9d refactor: migrate External API's top 10 errors query_range request to v5 (#9476)
* feat: migrate top 10 errors query_range request to v5

* chore: remove unnecessary tests

* chore: improve the top error tests

* fix: send status_message EXISTS only if the toggle is on

* fix: get the count value and simplify the null check

* fix: send has_error = true

* chore: fall back to url.full if url.path doesn't exist

* refactor: address the PR review requested changes

* chore: add test to check if we're sending the correct filters

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2025-11-04 20:09:32 +05:30
Yunus M
83aad793c2 fix: alignment issues in home page (#9459) 2025-11-04 13:13:01 +05:30
Shaheer Kochai
3eff689c85 fix: fix the issue of save button incorrectly enabled when cold_storage_ttl_days is -1 (#9458)
* fix: logs retention save button enabled when S3 disabled

* test: add test for save button state when S3 is disabled
2025-11-04 12:10:17 +05:30
Yunus M
f5bcd65e2e feat: update styles for percentile value (#9477)
* feat: update styles for percentile value

* feat: reset data on span change, remove unnecessary useMemo
2025-11-03 23:40:02 +05:30
Yunus M
e7772d93af fix: flaky multi ingestion settings test (#9478) 2025-11-03 22:21:13 +05:30
swapnil-signoz
bbf987ebd7 fix: removing duplicate creation of user if user does not exist already (#9455)
* fix: removing duplicate creation of user if user does not exist already

* test: adding api test case

* fix: updated test cases

* fix: remove unnecessary logging and clean up connection params API

* feat: add gateway fixture and integrate with signoz for connection parameters

* feat: add cloudintegrations to the test job matrix in integrationci.yaml

* fix: remove outdated comments from make_http_mocks fixture

* fix: remove deprecated ZeusURL from build configurations
2025-11-03 16:45:08 +05:30
Nityananda Gohain
105c3a3b8c fix: return coldstorage -1 if not set for logs (#9471) 2025-11-03 08:10:53 +00:00
Aditya Singh
c1a4a5b8db Log Details minor ui fix (#9463)
* feat: fix copy btn styles

* feat: minor refactor
2025-11-03 11:59:06 +05:30
aniketio-ctrl
c9591f4341 fix: formatted threshold unit in description and summary (#9350) 2025-11-02 14:27:21 +00:00
Yunus M
fd216fdee1 feat(meter): add ability to query meter data across product modules (#9142)
* feat: enable users to query meter specific data in alerts

* feat: enable metrics / meter selection in alerts and dashboards

* feat: enable setting alerts for ingestion limits

* feat: set where clause when setting alert for ingestion key

* feat(meter): handle the where clause changes

* feat: remove add alert for infinite values

* feat: add unit test cases for set alert flow

* feat: handle inital and onchange state for meter source

* feat: pass thresholds array from ingestion settings

* feat: derive source from value change rather than local state

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-11-02 19:02:56 +05:30
Yunus M
f5bf4293a1 feat: span percentile - UI (#9397)
* feat: show span percentile in span details

* feat: resource attribute selection for span percentile

* feat: wait for 2 secs for the first fetch of span percentile

* feat: add unit test cases for span percentiles

* feat: use style tokens

* feat: remove redundant test assertion

* chore: resolve conflicts

* feat: reset initial wait state on span change

* feat: update payload , endpoint as per new backend changes

* feat: address review comments

* feat: fetch span percentile without specific resource attributes - first time
2025-11-01 22:57:36 +05:30
Shaheer Kochai
155a44a25d feat: add support for infra metrics in trace details (#8911)
* feat: add support for infra metrics in trace details v2

* fix: adjust the empty state if the data source is traces

* refactor: logLineTimestamp prop to timestamp

* chore: write tests for span infra metrics

* chore: return search from useLocation mock

* chore: address review changes to move inline options to useMemo

* refactor: simplify infrastructure metadata extraction logic in SpanRelatedSignals

* refactor: extract infrastructure metadata logic into utility function

* test(infraMetrics): club the similar tests

* fix: improve logs and infra tabs switching assertions

* feat: update Infra option icon to Metrics in SpanDetailsDrawer

* chore: change infra to metrics in span details drawer

* fix: fix the failing tests

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2025-11-01 21:26:05 +04:30
Vishal Sharma
4b21c9d5f9 feat: add result count to data source search analytics event (#9444) 2025-10-31 12:35:24 +00:00
141 changed files with 9277 additions and 1639 deletions

View File

@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.8
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.8
container_name: schema-migrator-async
command:
- async

View File

@@ -107,7 +107,6 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'

View File

@@ -106,7 +106,6 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'

View File

@@ -17,6 +17,7 @@ jobs:
- bootstrap
- passwordauthn
- callbackauthn
- cloudintegrations
- querier
- ttl
sqlstore-provider:

View File

@@ -31,7 +31,6 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
mod_timestamp: "{{ .CommitTimestamp }}"

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.99.0
image: signoz/signoz:v0.100.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.8
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.8
deploy:
restart_policy:
condition: on-failure

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.99.0
image: signoz/signoz:v0.100.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.8
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.8
deploy:
restart_policy:
condition: on-failure

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.99.0}
image: signoz/signoz:${VERSION:-v0.100.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.8}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +250,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
container_name: schema-migrator-async
command:
- async

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.99.0}
image: signoz/signoz:${VERSION:-v0.100.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.8}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
container_name: schema-migrator-async
command:
- async

View File

@@ -10,7 +10,6 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -77,7 +76,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
@@ -186,48 +185,37 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
return cloudIntegrationUser, nil
}
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, string, *basemodel.ApiError,
) {
url := fmt.Sprintf(
"%s%s",
strings.TrimSuffix(constants.ZeusURL, "/"),
"/v2/deployments/me",
)
// TODO: remove this struct from here
type deploymentResponse struct {
Status string `json:"status"`
Error string `json:"error"`
Data struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
} `json:"data"`
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}
resp, apiErr := requestAndParseResponse[deploymentResponse](
ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
)
if apiErr != nil {
return "", "", basemodel.WrapApiError(
apiErr, "couldn't query for deployment info",
)
}
if resp.Status != "success" {
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: status: %s, error: %s",
resp.Status, resp.Error,
"couldn't query for deployment info: error: %w", err,
))
}
regionDns := resp.Data.ClusterInfo.Region.DNS
deploymentName := resp.Data.Name
resp := new(deploymentResponse)
err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
}
regionDns := resp.ClusterInfo.Region.DNS
deploymentName := resp.Name
if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge

View File

@@ -10,9 +10,6 @@ var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
// this is set via build time variable
var ZeusURL = "https://api.signoz.cloud"
func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)
if len(v) == 0 {

View File

@@ -0,0 +1,153 @@
package postgressqlstore
import (
"strings"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun/schema"
)
type formatter struct {
bunf schema.Formatter
}
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
return &formatter{bunf: schema.NewFormatter(dialect)}
}
func (f *formatter) JSONExtractString(column, path string) []byte {
var sql []byte
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgres(path)...)
return sql
}
func (f *formatter) JSONType(column, path string) []byte {
var sql []byte
sql = append(sql, "jsonb_typeof("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ')')
return sql
}
func (f *formatter) JSONIsArray(column, path string) []byte {
var sql []byte
sql = append(sql, f.JSONType(column, path)...)
sql = append(sql, " = "...)
sql = schema.Append(f.bunf, sql, "array")
return sql
}
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_array_elements("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, []byte(alias)
}
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_array_elements_text("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, append([]byte(alias), "::text"...)
}
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_each("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, append([]byte(alias), ".key"...)
}
func (f *formatter) JSONArrayAgg(expression string) []byte {
var sql []byte
sql = append(sql, "jsonb_agg("...)
sql = append(sql, expression...)
sql = append(sql, ')')
return sql
}
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
var sql []byte
sql = append(sql, "jsonb_build_array("...)
for idx, value := range values {
if idx > 0 {
sql = append(sql, ", "...)
}
sql = schema.Append(f.bunf, sql, value)
}
sql = append(sql, ')')
return sql
}
func (f *formatter) TextToJsonColumn(column string) []byte {
var sql []byte
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, "::jsonb"...)
return sql
}
func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}
func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")
if path == "" {
return nil
}
parts := strings.Split(path, ".")
var validParts []string
for _, part := range parts {
if part != "" {
validParts = append(validParts, part)
}
}
if len(validParts) == 0 {
return nil
}
var result []byte
for idx, part := range validParts {
if idx == len(validParts)-1 {
if asText {
result = append(result, "->>"...)
} else {
result = append(result, "->"...)
}
result = schema.Append(f.bunf, result, part)
return result
}
result = append(result, "->"...)
result = schema.Append(f.bunf, result, part)
}
return result
}
func (f *formatter) LowerExpression(expression string) []byte {
var sql []byte
sql = append(sql, "lower("...)
sql = append(sql, expression...)
sql = append(sql, ')')
return sql
}

View File

@@ -0,0 +1,500 @@
package postgressqlstore
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/uptrace/bun/dialect/pgdialect"
)
func TestJSONExtractString(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.field",
expected: `"data"->>'field'`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.name",
expected: `"metadata"->'user'->>'name'`,
},
{
name: "deeply nested path",
column: "json_col",
path: "$.level1.level2.level3",
expected: `"json_col"->'level1'->'level2'->>'level3'`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `"json_col"`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `"data"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONExtractString(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONType(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.field",
expected: `jsonb_typeof("data"->'field')`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.age",
expected: `jsonb_typeof("metadata"->'user'->'age')`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `jsonb_typeof("json_col")`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `jsonb_typeof("data")`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONType(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONIsArray(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.items",
expected: `jsonb_typeof("data"->'items') = 'array'`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.tags",
expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `jsonb_typeof("json_col") = 'array'`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `jsonb_typeof("data") = 'array'`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONIsArray(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONArrayElements(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "elem",
expected: `jsonb_array_elements("data") AS "elem"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "elem",
expected: `jsonb_array_elements("data") AS "elem"`,
},
{
name: "nested path",
column: "metadata",
path: "$.items",
alias: "item",
expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
},
{
name: "deeply nested path",
column: "json_col",
path: "$.user.tags",
alias: "tag",
expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONArrayOfStrings(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "str",
expected: `jsonb_array_elements_text("data") AS "str"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "str",
expected: `jsonb_array_elements_text("data") AS "str"`,
},
{
name: "nested path",
column: "metadata",
path: "$.strings",
alias: "s",
expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONKeys(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "k",
expected: `jsonb_each("data") AS "k"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "k",
expected: `jsonb_each("data") AS "k"`,
},
{
name: "nested path",
column: "metadata",
path: "$.object",
alias: "key",
expected: `jsonb_each("metadata"->'object') AS "key"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONArrayAgg(t *testing.T) {
tests := []struct {
name string
expression string
expected string
}{
{
name: "simple column",
expression: "id",
expected: "jsonb_agg(id)",
},
{
name: "expression with function",
expression: "DISTINCT name",
expected: "jsonb_agg(DISTINCT name)",
},
{
name: "complex expression",
expression: "data->>'field'",
expected: "jsonb_agg(data->>'field')",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONArrayAgg(tt.expression))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONArrayLiteral(t *testing.T) {
tests := []struct {
name string
values []string
expected string
}{
{
name: "empty array",
values: []string{},
expected: "jsonb_build_array()",
},
{
name: "single value",
values: []string{"value1"},
expected: "jsonb_build_array('value1')",
},
{
name: "multiple values",
values: []string{"value1", "value2", "value3"},
expected: "jsonb_build_array('value1', 'value2', 'value3')",
},
{
name: "values with special characters",
values: []string{"test", "with space", "with-dash"},
expected: "jsonb_build_array('test', 'with space', 'with-dash')",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONArrayLiteral(tt.values...))
assert.Equal(t, tt.expected, got)
})
}
}
func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
tests := []struct {
name string
jsonPath string
asText bool
expected string
}{
{
name: "simple path as text",
jsonPath: "$.field",
asText: true,
expected: "->>'field'",
},
{
name: "simple path as json",
jsonPath: "$.field",
asText: false,
expected: "->'field'",
},
{
name: "nested path as text",
jsonPath: "$.user.name",
asText: true,
expected: "->'user'->>'name'",
},
{
name: "nested path as json",
jsonPath: "$.user.name",
asText: false,
expected: "->'user'->'name'",
},
{
name: "deeply nested as text",
jsonPath: "$.a.b.c.d",
asText: true,
expected: "->'a'->'b'->'c'->>'d'",
},
{
name: "root path",
jsonPath: "$",
asText: true,
expected: "",
},
{
name: "empty path",
jsonPath: "",
asText: true,
expected: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New()).(*formatter)
got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
assert.Equal(t, tt.expected, got)
})
}
}
func TestTextToJsonColumn(t *testing.T) {
tests := []struct {
name string
column string
expected string
}{
{
name: "simple column name",
column: "data",
expected: `"data"::jsonb`,
},
{
name: "column with underscore",
column: "user_data",
expected: `"user_data"::jsonb`,
},
{
name: "column with special characters",
column: "json-col",
expected: `"json-col"::jsonb`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.TextToJsonColumn(tt.column))
assert.Equal(t, tt.expected, got)
})
}
}
func TestLowerExpression(t *testing.T) {
tests := []struct {
name string
expr string
expected string
}{
{
name: "simple column name",
expr: "name",
expected: "lower(name)",
},
{
name: "quoted column identifier",
expr: `"column_name"`,
expected: `lower("column_name")`,
},
{
name: "jsonb text extraction",
expr: "data->>'field'",
expected: "lower(data->>'field')",
},
{
name: "nested jsonb extraction",
expr: "metadata->'user'->>'name'",
expected: "lower(metadata->'user'->>'name')",
},
{
name: "jsonb_typeof expression",
expr: "jsonb_typeof(data->'field')",
expected: "lower(jsonb_typeof(data->'field'))",
},
{
name: "string concatenation",
expr: "first_name || ' ' || last_name",
expected: "lower(first_name || ' ' || last_name)",
},
{
name: "CAST expression",
expr: "CAST(value AS TEXT)",
expected: "lower(CAST(value AS TEXT))",
},
{
name: "COALESCE expression",
expr: "COALESCE(name, 'default')",
expected: "lower(COALESCE(name, 'default'))",
},
{
name: "subquery column",
expr: "users.email",
expected: "lower(users.email)",
},
{
name: "quoted identifier with special chars",
expr: `"user-name"`,
expected: `lower("user-name")`,
},
{
name: "jsonb to text cast",
expr: "data::text",
expected: "lower(data::text)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.LowerExpression(tt.expr))
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -15,10 +15,11 @@ import (
)
type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
formatter sqlstore.SQLFormatter
}
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -55,11 +56,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
sqldb := stdlib.OpenDBFromPool(pool)
pgDialect := pgdialect.New()
bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
return &provider{
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
dialect: new(dialect),
settings: settings,
sqldb: sqldb,
bundb: bunDB,
dialect: new(dialect),
formatter: newFormatter(bunDB.Dialect()),
}, nil
}
@@ -75,6 +79,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}
func (provider *provider) Formatter() sqlstore.SQLFormatter {
return provider.formatter
}
func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
return provider.bundb.BunDBCtx(ctx)
}

View File

@@ -274,7 +274,7 @@ function App(): JSX.Element {
chat_settings: {
app_id: process.env.PYLON_APP_ID,
email: user.email,
name: user.displayName,
name: user.displayName || user.email,
},
};
}

View File

@@ -86,8 +86,9 @@ const interceptorRejected = async (
if (
response.status === 401 &&
// if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
// if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
response.config.url !== '/sessions/rotate' &&
response.config.url !== '/sessions/email_password' &&
!(
response.config.url === '/sessions' && response.config.method === 'delete'
)
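
The hunk above shows only the changed guard; below is a minimal, self-contained sketch of the retry-skip pattern it implements. The endpoint paths come from the diff, while the helper name rotateSessionAndRetry and the surrounding client setup are illustrative assumptions, not the actual SigNoz frontend code.

import axios, { AxiosError } from 'axios';

// 401 responses from these session endpoints must not trigger a retry,
// mirroring the condition in the interceptor diff above.
const shouldSkipRetryOn401 = (url?: string, method?: string): boolean =>
  url === '/sessions/rotate' ||
  url === '/sessions/email_password' ||
  (url === '/sessions' && method === 'delete');

const api = axios.create({ baseURL: '/api/v1' }); // base URL is an assumption

api.interceptors.response.use(
  (response) => response,
  async (error: AxiosError) => {
    const { response } = error;
    if (
      response &&
      response.status === 401 &&
      !shouldSkipRetryOn401(response.config.url, response.config.method)
    ) {
      // hypothetical refresh-and-replay step; the real interceptor rotates
      // the session and retries the original request here
      await rotateSessionAndRetry(response.config);
    }
    return Promise.reject(error);
  },
);

// placeholder for the session-rotation logic (assumed, not the real API)
async function rotateSessionAndRetry(config: unknown): Promise<void> {
  void config;
}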

View File

@@ -0,0 +1,28 @@
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
GetSpanPercentilesProps,
GetSpanPercentilesResponseDataProps,
} from 'types/api/trace/getSpanPercentiles';
const getSpanPercentiles = async (
props: GetSpanPercentilesProps,
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
try {
const response = await ApiBaseInstance.post('/span_percentile', {
...props,
});
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};
export default getSpanPercentiles;
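
A hedged sketch of how a component might consume this helper through react-query; the query key mirrors the GET_SPAN_PERCENTILES constant added elsewhere in this diff, but the hook name, import paths, and option values are assumptions for illustration only.

import { useQuery, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import {
  GetSpanPercentilesProps,
  GetSpanPercentilesResponseDataProps,
} from 'types/api/trace/getSpanPercentiles';

import getSpanPercentiles from './getSpanPercentiles'; // path assumed

// Hypothetical wrapper hook around the API helper shown above.
export function useSpanPercentiles(
  props: GetSpanPercentilesProps,
  enabled: boolean,
): UseQueryResult<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> {
  return useQuery({
    queryKey: ['GET_SPAN_PERCENTILES', props],
    queryFn: () => getSpanPercentiles(props),
    enabled,
  });
}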

View File

@@ -11,7 +11,7 @@ import {
export const getQueryRangeV5 = async (
props: QueryRangePayloadV5,
version: string,
signal: AbortSignal,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
try {

View File

@@ -132,9 +132,9 @@
justify-content: center;
}
.json-action-btn {
.log-detail-drawer__actions {
display: flex;
gap: 8px;
gap: 4px;
}
}

View File

@@ -319,31 +319,35 @@ function LogDetailInner({
</Radio.Button>
</Radio.Group>
{selectedView === VIEW_TYPES.JSON && (
<div className="json-action-btn">
<div className="log-detail-drawer__actions">
{selectedView === VIEW_TYPES.CONTEXT && (
<Tooltip
title="Show Filters"
placement="topLeft"
aria-label="Show Filters"
>
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
</Tooltip>
)}
<Tooltip
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
placement="topLeft"
aria-label={
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
}
>
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={handleJSONCopy}
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
/>
</div>
)}
{selectedView === VIEW_TYPES.CONTEXT && (
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
)}
<Tooltip title="Copy Log Link" placement="left" aria-label="Copy Log Link">
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={onLogCopy}
/>
</Tooltip>
</Tooltip>
</div>
</div>
{isFilterVisible && contextQuery?.builder.queryData[0] && (
<div className="log-detail-drawer-query-container">
@@ -383,7 +387,8 @@ function LogDetailInner({
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
logLineTimestamp={log.timestamp.toString()}
timestamp={log.timestamp.toString()}
dataSource={DataSource.LOGS}
/>
)}
</Drawer>

View File

@@ -398,7 +398,7 @@
}
.qb-search-container {
.metrics-select-container {
.metrics-container {
margin-bottom: 12px;
}
}

View File

@@ -22,6 +22,8 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
showOnlyWhereClause = false,
showTraceOperator = false,
version,
onSignalSourceChange,
signalSourceChangeEnabled = false,
}: QueryBuilderProps): JSX.Element {
const {
currentQuery,
@@ -175,6 +177,8 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
queryVariant={config?.queryVariant || 'dropdown'}
showOnlyWhereClause={showOnlyWhereClause}
isListViewPanel={isListViewPanel}
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
signalSourceChangeEnabled={signalSourceChangeEnabled}
/>
) : (
currentQuery.builder.queryData.map((query, index) => (
@@ -193,7 +197,9 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
queryVariant={config?.queryVariant || 'dropdown'}
showOnlyWhereClause={showOnlyWhereClause}
isListViewPanel={isListViewPanel}
signalSource={config?.signalSource || ''}
signalSource={query.source as 'meter' | ''}
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
signalSourceChangeEnabled={signalSourceChangeEnabled}
/>
))
)}

View File

@@ -1,5 +1,14 @@
.metrics-select-container {
.metrics-source-select-container {
margin-bottom: 8px;
display: flex;
flex-direction: row;
align-items: flex-start;
gap: 8px;
width: 100%;
.source-selector {
width: 120px;
}
.ant-select-selector {
width: 100%;
@@ -42,7 +51,7 @@
}
.lightMode {
.metrics-select-container {
.metrics-source-select-container {
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100);

View File

@@ -1,21 +1,39 @@
import './MetricsSelect.styles.scss';
import { Select } from 'antd';
import {
initialQueriesMap,
initialQueryMeterWithType,
PANEL_TYPES,
} from 'constants/queryBuilder';
import { AggregatorFilter } from 'container/QueryBuilder/filters';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { memo, useCallback, useState } from 'react';
import { memo, useCallback, useMemo, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { SelectOption } from 'types/common/select';
export const SOURCE_OPTIONS: SelectOption<string, string>[] = [
{ value: 'metrics', label: 'Metrics' },
{ value: 'meter', label: 'Meter' },
];
export const MetricsSelect = memo(function MetricsSelect({
query,
index,
version,
signalSource,
onSignalSourceChange,
signalSourceChangeEnabled = false,
}: {
query: IBuilderQuery;
index: number;
version: string;
signalSource: 'meter' | '';
onSignalSourceChange: (value: string) => void;
signalSourceChangeEnabled: boolean;
}): JSX.Element {
const [attributeKeys, setAttributeKeys] = useState<BaseAutocompleteData[]>([]);
@@ -31,8 +49,67 @@ export const MetricsSelect = memo(function MetricsSelect({
},
[handleChangeAggregatorAttribute, attributeKeys],
);
const { updateAllQueriesOperators, handleSetQueryData } = useQueryBuilder();
const source = useMemo(
() => (signalSource === 'meter' ? 'meter' : 'metrics'),
[signalSource],
);
const defaultMeterQuery = useMemo(
() =>
updateAllQueriesOperators(
initialQueryMeterWithType,
PANEL_TYPES.BAR,
DataSource.METRICS,
'meter' as 'meter' | '',
),
[updateAllQueriesOperators],
);
const defaultMetricsQuery = useMemo(
() =>
updateAllQueriesOperators(
initialQueriesMap.metrics,
PANEL_TYPES.BAR,
DataSource.METRICS,
'',
),
[updateAllQueriesOperators],
);
const handleSignalSourceChange = (value: string): void => {
onSignalSourceChange(value);
handleSetQueryData(
index,
value === 'meter'
? {
...defaultMeterQuery.builder.queryData[0],
source: 'meter',
queryName: query.queryName,
}
: {
...defaultMetricsQuery.builder.queryData[0],
source: '',
queryName: query.queryName,
},
);
};
return (
<div className="metrics-select-container">
<div className="metrics-source-select-container">
{signalSourceChangeEnabled && (
<Select
className="source-selector"
placeholder="Source"
options={SOURCE_OPTIONS}
value={source}
defaultValue="metrics"
onChange={handleSignalSourceChange}
/>
)}
<AggregatorFilter
onChange={handleAggregatorAttributeChange}
query={query}

View File

@@ -33,7 +33,13 @@ export const QueryV2 = memo(function QueryV2({
showOnlyWhereClause = false,
signalSource = '',
isMultiQueryAllowed = false,
}: QueryProps & { ref: React.RefObject<HTMLDivElement> }): JSX.Element {
onSignalSourceChange,
signalSourceChangeEnabled = false,
}: QueryProps & {
ref: React.RefObject<HTMLDivElement>;
onSignalSourceChange: (value: string) => void;
signalSourceChangeEnabled: boolean;
}): JSX.Element {
const { cloneQuery, panelType } = useQueryBuilder();
const showFunctions = query?.functions?.length > 0;
@@ -207,12 +213,14 @@ export const QueryV2 = memo(function QueryV2({
<div className="qb-elements-container">
<div className="qb-search-container">
{dataSource === DataSource.METRICS && (
<div className="metrics-select-container">
<div className="metrics-container">
<MetricsSelect
query={query}
index={index}
version={ENTITY_VERSION_V5}
signalSource={signalSource as 'meter' | ''}
onSignalSourceChange={onSignalSourceChange}
signalSourceChangeEnabled={signalSourceChangeEnabled}
/>
</div>
)}
@@ -258,7 +266,7 @@ export const QueryV2 = memo(function QueryV2({
panelType={panelType}
query={query}
index={index}
key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}`}
key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}-${signalSource}`}
version="v4"
signalSource={signalSource as 'meter' | ''}
/>

View File

@@ -24,6 +24,7 @@ export const DATE_TIME_FORMATS = {
TIME_SECONDS: 'HH:mm:ss',
TIME_UTC: 'HH:mm:ss (UTC Z)',
TIME_UTC_MS: 'HH:mm:ss.SSS (UTC Z)',
TIME_SPAN_PERCENTILE: 'HH:mm:ss MMM DD',
// Short date formats
DATE_SHORT: 'MM/DD',

View File

@@ -90,4 +90,7 @@ export const REACT_QUERY_KEY = {
// Routing Policies Query Keys
GET_ROUTING_POLICIES: 'GET_ROUTING_POLICIES',
// Span Percentiles Query Keys
GET_SPAN_PERCENTILES: 'GET_SPAN_PERCENTILES',
} as const;

View File

@@ -3,4 +3,5 @@ export const USER_PREFERENCES = {
NAV_SHORTCUTS: 'nav_shortcuts',
LAST_SEEN_CHANGELOG_VERSION: 'last_seen_changelog_version',
SPAN_DETAILS_PINNED_ATTRIBUTES: 'span_details_pinned_attributes',
SPAN_PERCENTILE_RESOURCE_ATTRIBUTES: 'span_percentile_resource_attributes',
};

View File

@@ -1,8 +1,10 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Switch, Table, Tooltip, Typography } from 'antd';
import { getQueryRangeV5 } from 'api/v5/queryRange/getQueryRange';
import { MetricRangePayloadV5, ScalarData } from 'api/v5/v5';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import { withErrorBoundary } from 'components/ErrorBoundaryHOC';
import { DEFAULT_ENTITY_VERSION, ENTITY_VERSION_V4 } from 'constants/app';
import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
@@ -11,13 +13,12 @@ import {
getTopErrorsColumnsConfig,
getTopErrorsCoRelationQueryFilters,
getTopErrorsQueryPayload,
TopErrorsResponseRow,
} from 'container/ApiMonitoring/utils';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { Info } from 'lucide-react';
import { useMemo, useState } from 'react';
import { useQueries } from 'react-query';
import { SuccessResponse } from 'types/api';
import { QueryFunctionContext, useQueries, useQuery } from 'react-query';
import { SuccessResponse, SuccessResponseV2 } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -46,7 +47,7 @@ function TopErrors({
true,
);
const queryPayloads = useMemo(
const queryPayload = useMemo(
() =>
getTopErrorsQueryPayload(
domainName,
@@ -82,37 +83,34 @@ function TopErrors({
],
);
const topErrorsDataQueries = useQueries(
queryPayloads.map((payload) => ({
queryKey: [
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
payload,
DEFAULT_ENTITY_VERSION,
showStatusCodeErrors,
],
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(payload, DEFAULT_ENTITY_VERSION),
enabled: !!payload,
staleTime: 0,
cacheTime: 0,
})),
);
const topErrorsDataQuery = topErrorsDataQueries[0];
const {
data: topErrorsData,
isLoading,
isRefetching,
isError,
refetch,
} = topErrorsDataQuery;
} = useQuery({
queryKey: [
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
queryPayload,
ENTITY_VERSION_V5,
showStatusCodeErrors,
],
queryFn: ({
signal,
}: QueryFunctionContext): Promise<SuccessResponseV2<MetricRangePayloadV5>> =>
getQueryRangeV5(queryPayload, ENTITY_VERSION_V5, signal),
enabled: !!queryPayload,
staleTime: 0,
cacheTime: 0,
});
const topErrorsColumnsConfig = useMemo(() => getTopErrorsColumnsConfig(), []);
const formattedTopErrorsData = useMemo(
() =>
formatTopErrorsDataForTable(
topErrorsData?.payload?.data?.result as TopErrorsResponseRow[],
topErrorsData?.data?.data?.data?.results[0] as ScalarData,
),
[topErrorsData],
);

View File

@@ -8,7 +8,6 @@ import {
endPointStatusCodeColumns,
extractPortAndEndpoint,
formatDataForTable,
formatTopErrorsDataForTable,
getAllEndpointsWidgetData,
getCustomFiltersForBarChart,
getEndPointDetailsQueryPayload,
@@ -23,8 +22,6 @@ import {
getStatusCodeBarChartWidgetData,
getTopErrorsColumnsConfig,
getTopErrorsCoRelationQueryFilters,
getTopErrorsQueryPayload,
TopErrorsResponseRow,
} from '../utils';
import { APIMonitoringColumnsMock } from './mock';
@@ -344,49 +341,6 @@ describe('API Monitoring Utils', () => {
});
});
describe('formatTopErrorsDataForTable', () => {
it('should format top errors data correctly', () => {
// Arrange
const inputData = [
{
metric: {
[SPAN_ATTRIBUTES.URL_PATH]: '/api/test',
[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]: '500',
status_message: 'Internal Server Error',
},
values: [[1000000100, '10']],
queryName: 'A',
legend: 'Test Legend',
},
];
// Act
const result = formatTopErrorsDataForTable(
inputData as TopErrorsResponseRow[],
);
// Assert
expect(result).toBeDefined();
expect(result.length).toBe(1);
// Check first item is formatted correctly
expect(result[0].endpointName).toBe('/api/test');
expect(result[0].statusCode).toBe('500');
expect(result[0].statusMessage).toBe('Internal Server Error');
expect(result[0].count).toBe('10');
expect(result[0].key).toBeDefined();
});
it('should handle empty input', () => {
// Act
const result = formatTopErrorsDataForTable(undefined);
// Assert
expect(result).toBeDefined();
expect(result).toEqual([]);
});
});
describe('getTopErrorsColumnsConfig', () => {
it('should return column configuration with expected fields', () => {
// Act
@@ -453,72 +407,6 @@ describe('API Monitoring Utils', () => {
});
});
describe('getTopErrorsQueryPayload', () => {
it('should create correct query payload with filters', () => {
// Arrange
const domainName = 'test-domain';
const start = 1000000000;
const end = 1000010000;
const filters = {
items: [
{
id: 'test-filter',
key: {
dataType: DataTypes.String,
key: 'test-key',
type: '',
},
op: '=',
value: 'test-value',
},
],
op: 'AND',
};
// Act
const result = getTopErrorsQueryPayload(
domainName,
start,
end,
filters as IBuilderQuery['filters'],
);
// Assert
expect(result).toBeDefined();
expect(result.length).toBeGreaterThan(0);
// Verify query params
expect(result[0].start).toBe(start);
expect(result[0].end).toBe(end);
// Verify correct structure
expect(result[0].graphType).toBeDefined();
expect(result[0].query).toBeDefined();
expect(result[0].query.builder).toBeDefined();
expect(result[0].query.builder.queryData).toBeDefined();
// Verify domain filter is included
const queryData = result[0].query.builder.queryData[0];
expect(queryData.filters).toBeDefined();
// Check for domain filter
const domainFilter = queryData.filters?.items?.find(
// eslint-disable-next-line sonarjs/no-identical-functions
(item) =>
item.key &&
item.key.key === SPAN_ATTRIBUTES.SERVER_NAME &&
item.value === domainName,
);
expect(domainFilter).toBeDefined();
// Check that custom filters were included
const testFilter = queryData.filters?.items?.find(
(item) => item.id === 'test-filter',
);
expect(testFilter).toBeDefined();
});
});
// Add new tests for EndPointDetails utility functions
describe('extractPortAndEndpoint', () => {
it('should extract port and endpoint from a valid URL', () => {

View File

@@ -1,14 +1,6 @@
import { fireEvent, render, screen, within } from '@testing-library/react';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
formatTopErrorsDataForTable,
getEndPointDetailsQueryPayload,
getTopErrorsColumnsConfig,
getTopErrorsCoRelationQueryFilters,
getTopErrorsQueryPayload,
} from 'container/ApiMonitoring/utils';
import { useQueries } from 'react-query';
import { rest, server } from 'mocks-server/server';
import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
@@ -35,28 +27,15 @@ jest.mock(
}),
);
// Mock dependencies
jest.mock('react-query', () => ({
...jest.requireActual('react-query'),
useQueries: jest.fn(),
}));
jest.mock('components/CeleryTask/useNavigateToExplorer', () => ({
useNavigateToExplorer: jest.fn(),
}));
jest.mock('container/ApiMonitoring/utils', () => ({
END_POINT_DETAILS_QUERY_KEYS_ARRAY: ['key1', 'key2', 'key3', 'key4', 'key5'],
formatTopErrorsDataForTable: jest.fn(),
getEndPointDetailsQueryPayload: jest.fn(),
getTopErrorsColumnsConfig: jest.fn(),
getTopErrorsCoRelationQueryFilters: jest.fn(),
getTopErrorsQueryPayload: jest.fn(),
}));
describe('TopErrors', () => {
const TABLE_BODY_SELECTOR = '.ant-table-tbody';
const V5_QUERY_RANGE_API_PATH = '*/api/v5/query_range';
const mockProps = {
// eslint-disable-next-line sonarjs/no-duplicate-string
domainName: 'test-domain',
timeRange: {
startTime: 1000000000,
@@ -68,75 +47,72 @@ describe('TopErrors', () => {
},
};
// Setup basic mocks
// Helper function to wait for table data to load
const waitForTableDataToLoad = async (
container: HTMLElement,
): Promise<void> => {
await waitFor(() => {
const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
expect(tableBody).not.toBeNull();
if (tableBody) {
expect(
within(tableBody as HTMLElement).queryByText('/api/test'),
).toBeInTheDocument();
}
});
};
beforeEach(() => {
jest.clearAllMocks();
// Mock getTopErrorsColumnsConfig
(getTopErrorsColumnsConfig as jest.Mock).mockReturnValue([
{
title: 'Endpoint',
dataIndex: 'endpointName',
key: 'endpointName',
},
{
title: 'Status Code',
dataIndex: 'statusCode',
key: 'statusCode',
},
{
title: 'Status Message',
dataIndex: 'statusMessage',
key: 'statusMessage',
},
{
title: 'Count',
dataIndex: 'count',
key: 'count',
},
]);
// Mock useNavigateToExplorer
(useNavigateToExplorer as jest.Mock).mockReturnValue(jest.fn());
// Mock useQueries
(useQueries as jest.Mock).mockImplementation((queryConfigs) => {
// For topErrorsDataQueries
if (
queryConfigs.length === 1 &&
queryConfigs[0].queryKey &&
queryConfigs[0].queryKey[0] === REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN
) {
return [
{
// Mock V5 API endpoint for top errors
server.use(
rest.post(V5_QUERY_RANGE_API_PATH, (_req, res, ctx) =>
res(
ctx.status(200),
ctx.json({
data: {
payload: {
data: {
result: [
{
metric: {
'http.url': '/api/test',
status_code: '500',
// eslint-disable-next-line sonarjs/no-duplicate-string
status_message: 'Internal Server Error',
data: {
results: [
{
columns: [
{
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
values: [[1000000100, '10']],
queryName: 'A',
legend: 'Test Legend',
},
],
},
{
name: 'response_status_code',
fieldDataType: 'string',
fieldContext: 'span',
},
{
name: 'status_message',
fieldDataType: 'string',
fieldContext: 'span',
},
{ name: 'count()', fieldDataType: 'int64', fieldContext: '' },
],
// eslint-disable-next-line sonarjs/no-duplicate-string
data: [['/api/test', '500', 'Internal Server Error', 10]],
},
],
},
},
isLoading: false,
isRefetching: false,
isError: false,
refetch: jest.fn(),
},
];
}
}),
),
),
);
// For endPointDropDownDataQueries
return [
{
data: {
// Mock V4 API endpoint for dropdown data
server.use(
rest.post('*/api/v1/query_range', (_req, res, ctx) =>
res(
ctx.status(200),
ctx.json({
payload: {
data: {
result: [
@@ -153,62 +129,13 @@ describe('TopErrors', () => {
],
},
},
},
isLoading: false,
isRefetching: false,
isError: false,
},
];
});
// Mock formatTopErrorsDataForTable
(formatTopErrorsDataForTable as jest.Mock).mockReturnValue([
{
key: '1',
endpointName: '/api/test',
statusCode: '500',
statusMessage: 'Internal Server Error',
count: 10,
},
]);
// Mock getTopErrorsQueryPayload
(getTopErrorsQueryPayload as jest.Mock).mockReturnValue([
{
queryName: 'TopErrorsQuery',
start: mockProps.timeRange.startTime,
end: mockProps.timeRange.endTime,
step: 60,
},
]);
// Mock getEndPointDetailsQueryPayload
(getEndPointDetailsQueryPayload as jest.Mock).mockReturnValue([
{},
{},
{
queryName: 'EndpointDropdownQuery',
start: mockProps.timeRange.startTime,
end: mockProps.timeRange.endTime,
step: 60,
},
]);
// Mock useNavigateToExplorer
(useNavigateToExplorer as jest.Mock).mockReturnValue(jest.fn());
// Mock getTopErrorsCoRelationQueryFilters
(getTopErrorsCoRelationQueryFilters as jest.Mock).mockReturnValue({
items: [
{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
],
op: 'AND',
});
}),
),
),
);
});
it('renders component correctly', () => {
it('renders component correctly', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
const { container } = render(<TopErrors {...mockProps} />);
@@ -216,10 +143,11 @@ describe('TopErrors', () => {
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
expect(screen.getByText('Status Message Exists')).toBeInTheDocument();
// Find the table row and verify content
const tableBody = container.querySelector('.ant-table-tbody');
expect(tableBody).not.toBeNull();
// Wait for data to load
await waitForTableDataToLoad(container);
// Find the table row and verify content
const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
if (tableBody) {
const row = within(tableBody as HTMLElement).getByRole('row');
expect(within(row).getByText('/api/test')).toBeInTheDocument();
@@ -228,35 +156,40 @@ describe('TopErrors', () => {
}
});
it('renders error state when isError is true', () => {
// Mock useQueries to return isError: true
(useQueries as jest.Mock).mockImplementationOnce(() => [
{
isError: true,
refetch: jest.fn(),
},
]);
it('renders error state when API fails', async () => {
// Mock API to return error
server.use(
rest.post(V5_QUERY_RANGE_API_PATH, (_req, res, ctx) =>
res(ctx.status(500), ctx.json({ error: 'Internal Server Error' })),
),
);
// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);
// Error state should be shown with the actual text displayed in the UI
expect(
screen.getByText('Uh-oh :/ We ran into an error.'),
).toBeInTheDocument();
// Wait for error state
await waitFor(() => {
expect(
screen.getByText('Uh-oh :/ We ran into an error.'),
).toBeInTheDocument();
});
expect(screen.getByText('Please refresh this panel.')).toBeInTheDocument();
expect(screen.getByText('Refresh this panel')).toBeInTheDocument();
});
it('handles row click correctly', () => {
it('handles row click correctly', async () => {
const navigateMock = jest.fn();
(useNavigateToExplorer as jest.Mock).mockReturnValue(navigateMock);
// eslint-disable-next-line react/jsx-props-no-spreading
const { container } = render(<TopErrors {...mockProps} />);
// Wait for data to load
await waitForTableDataToLoad(container);
// Find and click on the table cell containing the endpoint
const tableBody = container.querySelector('.ant-table-tbody');
const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
expect(tableBody).not.toBeNull();
if (tableBody) {
@@ -267,11 +200,28 @@ describe('TopErrors', () => {
// Check if navigateToExplorer was called with correct params
expect(navigateMock).toHaveBeenCalledWith({
filters: [
{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
],
filters: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: 'http.url' }),
op: '=',
value: '/api/test',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'has_error' }),
op: '=',
value: 'true',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'net.peer.name' }),
op: '=',
value: 'test-domain',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'response_status_code' }),
op: '=',
value: '500',
}),
]),
dataSource: DataSource.TRACES,
startTime: mockProps.timeRange.startTime,
endTime: mockProps.timeRange.endTime,
@@ -279,24 +229,34 @@ describe('TopErrors', () => {
});
});
it('updates endpoint filter when dropdown value changes', () => {
it('updates endpoint filter when dropdown value changes', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);
// Wait for initial load
await waitFor(() => {
expect(screen.getByRole('combobox')).toBeInTheDocument();
});
// Find the dropdown
const dropdown = screen.getByRole('combobox');
// Mock the change
fireEvent.change(dropdown, { target: { value: '/api/new-endpoint' } });
// Check if getTopErrorsQueryPayload was called with updated parameters
expect(getTopErrorsQueryPayload).toHaveBeenCalled();
// Component should re-render with new filter
expect(dropdown).toBeInTheDocument();
});
it('handles status message toggle correctly', () => {
it('handles status message toggle correctly', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);
// Wait for initial load
await waitFor(() => {
expect(screen.getByRole('switch')).toBeInTheDocument();
});
// Find the toggle switch
const toggle = screen.getByRole('switch');
expect(toggle).toBeInTheDocument();
@@ -307,69 +267,104 @@ describe('TopErrors', () => {
// Click the toggle to turn it off
fireEvent.click(toggle);
// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=false
expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
mockProps.domainName,
mockProps.timeRange.startTime,
mockProps.timeRange.endTime,
expect.any(Object),
false,
);
// Title should change
expect(screen.getByText('All Errors')).toBeInTheDocument();
await waitFor(() => {
expect(screen.getByText('All Errors')).toBeInTheDocument();
});
// Click the toggle to turn it back on
fireEvent.click(toggle);
// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=true
expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
mockProps.domainName,
mockProps.timeRange.startTime,
mockProps.timeRange.endTime,
expect.any(Object),
true,
);
// Title should change back
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
await waitFor(() => {
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
});
});
it('includes toggle state in query key for cache busting', () => {
it('includes toggle state in query key for cache busting', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);
const toggle = screen.getByRole('switch');
// Wait for initial load
await waitFor(() => {
expect(screen.getByRole('switch')).toBeInTheDocument();
});
// Initial query should include showStatusCodeErrors=true
expect(useQueries).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({
queryKey: expect.arrayContaining([
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
expect.any(Object),
expect.any(String),
true,
]),
}),
]),
);
const toggle = screen.getByRole('switch');
// Click toggle
fireEvent.click(toggle);
// Query should be called with showStatusCodeErrors=false in key
expect(useQueries).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({
queryKey: expect.arrayContaining([
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
expect.any(Object),
expect.any(String),
false,
]),
}),
]),
// Wait for title to change, indicating query was refetched with new key
await waitFor(() => {
expect(screen.getByText('All Errors')).toBeInTheDocument();
});
// The data refetching when the toggle changes confirms that the query key includes the toggle state
expect(toggle).toBeInTheDocument();
});
it('sends query_range v5 API call with required filters including has_error', async () => {
let capturedRequest: any;
// Override the v5 API mock to capture the request
server.use(
rest.post(V5_QUERY_RANGE_API_PATH, async (req, res, ctx) => {
capturedRequest = await req.json();
return res(
ctx.status(200),
ctx.json({
data: {
data: {
results: [
{
columns: [
{
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'response_status_code',
fieldDataType: 'string',
fieldContext: 'span',
},
{
name: 'status_message',
fieldDataType: 'string',
fieldContext: 'span',
},
{ name: 'count()', fieldDataType: 'int64', fieldContext: '' },
],
data: [['/api/test', '500', 'Internal Server Error', 10]],
},
],
},
},
}),
);
}),
);
// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);
// Wait for the API call to be made
await waitFor(() => {
expect(capturedRequest).toBeDefined();
});
// Extract the filter expression from the captured request
const filterExpression =
capturedRequest.compositeQuery.queries[0].spec.filter.expression;
// Verify all required filters are present
expect(filterExpression).toContain(`kind_string = 'Client'`);
expect(filterExpression).toContain(`(http.url EXISTS OR url.full EXISTS)`);
expect(filterExpression).toContain(
`(net.peer.name = 'test-domain' OR server.address = 'test-domain')`,
);
expect(filterExpression).toContain(`has_error = true`);
expect(filterExpression).toContain(`status_message EXISTS`); // toggle is on by default
});
});

View File

@@ -2,6 +2,7 @@
import { Color } from '@signozhq/design-tokens';
import { Progress, Tag, Tooltip } from 'antd';
import { ColumnType } from 'antd/es/table';
import { convertFiltersToExpressionWithExistingQuery } from 'components/QueryBuilderV2/utils';
import {
FiltersType,
IQuickFiltersConfig,
@@ -27,6 +28,11 @@ import {
OrderByPayload,
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
import {
ColumnDescriptor,
QueryRangePayloadV5,
ScalarData,
} from 'types/api/v5/queryRange';
import { QueryData } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
@@ -40,6 +46,9 @@ import {
EndPointsResponseRow,
} from './types';
export const isEmptyFilterValue = (value: unknown): boolean =>
value === '' || value === null || value === undefined || value === 'n/a';
export const ApiMonitoringQuickFiltersConfig: IQuickFiltersConfig[] = [
{
type: FiltersType.CHECKBOX,
@@ -816,153 +825,100 @@ export const getEndPointsQueryPayload = (
];
};
// eslint-disable-next-line sonarjs/cognitive-complexity
function buildFilterExpression(
domainName: string,
filters: IBuilderQuery['filters'],
showStatusCodeErrors: boolean,
): string {
const baseFilterParts = [
`kind_string = 'Client'`,
`(http.url EXISTS OR url.full EXISTS)`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`has_error = true`,
];
if (showStatusCodeErrors) {
baseFilterParts.push('status_message EXISTS');
}
const filterExpression = baseFilterParts.join(' AND ');
if (!filters) {
return filterExpression;
}
const { filter } = convertFiltersToExpressionWithExistingQuery(
filters,
filterExpression,
);
return filter.expression;
}
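// Illustrative sketch, not part of this change: the base expression returned by
// buildFilterExpression('checkout-service', undefined, true) for a hypothetical
// domain, before convertFiltersToExpressionWithExistingQuery merges in any user filters.
const exampleBaseExpression = [
	`kind_string = 'Client'`,
	`(http.url EXISTS OR url.full EXISTS)`,
	`(net.peer.name = 'checkout-service' OR server.address = 'checkout-service')`,
	`has_error = true`,
	`status_message EXISTS`, // present only while showStatusCodeErrors is true
].join(' AND ');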
export const getTopErrorsQueryPayload = (
domainName: string,
start: number,
end: number,
filters: IBuilderQuery['filters'],
showStatusCodeErrors = true,
): GetQueryResultsProps[] => [
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TABLE,
query: {
builder: {
queryData: [
{
dataSource: DataSource.TRACES,
queryName: 'A',
aggregateOperator: 'count',
aggregateAttribute: {
id: '------false',
dataType: DataTypes.String,
key: '',
type: '',
},
timeAggregation: 'rate',
spaceAggregation: 'sum',
functions: [],
filters: {
op: 'AND',
items: [
{
id: '04da97bd',
key: {
key: 'kind_string',
dataType: DataTypes.String,
type: '',
},
op: '=',
value: 'Client',
},
{
id: 'b1af6bdb',
key: {
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'tag',
},
op: 'exists',
value: '',
},
...(showStatusCodeErrors
? [
{
id: '75d65388',
key: {
key: 'status_message',
dataType: DataTypes.String,
type: '',
},
op: 'exists',
value: '',
},
]
: []),
{
id: '4872bf91',
key: {
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: 'tag',
},
op: '=',
value: domainName,
},
{
id: 'ab4c885d',
key: {
key: 'has_error',
dataType: DataTypes.bool,
type: '',
},
op: '=',
value: true,
},
...(filters?.items || []),
],
},
expression: 'A',
disabled: false,
stepInterval: 60,
having: [],
limit: 10,
orderBy: [
{
columnName: 'timestamp',
order: 'desc',
},
],
groupBy: [
{
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'tag',
},
{
dataType: DataTypes.String,
key: 'response_status_code',
type: '',
id: 'response_status_code--string----true',
},
{
key: 'status_message',
dataType: DataTypes.String,
type: '',
},
],
legend: '',
reduceTo: 'avg',
},
],
queryFormulas: [],
queryTraceOperator: [],
},
clickhouse_sql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
],
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
promql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
): QueryRangePayloadV5 => {
const filterExpression = buildFilterExpression(
domainName,
filters,
showStatusCodeErrors,
);
return {
schemaVersion: 'v1',
start,
end,
step: 240,
},
];
requestType: 'scalar',
compositeQuery: {
queries: [
{
type: 'builder_query',
spec: {
name: 'A',
signal: 'traces',
stepInterval: 60,
disabled: false,
aggregations: [{ expression: 'count()' }],
filter: { expression: filterExpression },
groupBy: [
{
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'url.full',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'response_status_code',
fieldDataType: 'string',
fieldContext: 'span',
},
{
name: 'status_message',
fieldDataType: 'string',
fieldContext: 'span',
},
],
limit: 10,
order: [
{
key: {
name: 'count()',
},
direction: 'desc',
},
],
},
},
],
},
formatOptions: { formatTableResultForUI: true, fillGaps: false },
variables: {},
};
};
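// Illustrative only, not part of this change: a minimal call to the v5 payload builder
// above, with a hypothetical domain and time range and no extra user filters.
const exampleTopErrorsPayload = getTopErrorsQueryPayload(
	'checkout-service', // hypothetical domain name
	1700000000000,
	1700003600000,
	undefined, // no additional filters; buildFilterExpression keeps only the base parts
	true, // keep the `status_message EXISTS` clause
);
// exampleTopErrorsPayload.compositeQuery.queries[0].spec.filter.expression is the
// string assembled by buildFilterExpression above.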
export interface EndPointsTableRowData {
key: string;
@@ -1242,63 +1198,55 @@ export const formatEndPointsDataForTable = (
return formattedData;
};
export interface TopErrorsResponseRow {
metric: {
[SPAN_ATTRIBUTES.URL_PATH]: string;
[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]: string;
status_message: string;
};
values: [number, string][];
queryName: string;
legend: string;
}
export type TopErrorsResponseRow = ScalarData;
export interface TopErrorsTableRowData {
key: string;
endpointName: string;
statusCode: string;
statusMessage: string;
count: number | string;
count: string;
}
/**
* Returns '-' if value is empty, otherwise returns value as string
*/
export function getDisplayValue(value: unknown): string {
return isEmptyFilterValue(value) ? '-' : String(value);
}
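// Quick illustration, not part of this change, of the fallback defined above:
const displayValueExamples = [
	getDisplayValue('n/a'), // '-'
	getDisplayValue(undefined), // '-'
	getDisplayValue(''), // '-'
	getDisplayValue(500), // '500'
];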
export const formatTopErrorsDataForTable = (
data: TopErrorsResponseRow[] | undefined,
scalarResult: TopErrorsResponseRow | undefined,
): TopErrorsTableRowData[] => {
if (!data) return [];
if (!scalarResult?.data) return [];
return data.map((row) => ({
key: v4(),
endpointName:
row.metric[SPAN_ATTRIBUTES.URL_PATH] === 'n/a' ||
row.metric[SPAN_ATTRIBUTES.URL_PATH] === undefined
? '-'
: row.metric[SPAN_ATTRIBUTES.URL_PATH],
statusCode:
row.metric[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE] === 'n/a' ||
row.metric[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE] === undefined
? '-'
: row.metric[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE],
statusMessage:
row.metric.status_message === 'n/a' ||
row.metric.status_message === undefined
? '-'
: row.metric.status_message,
count:
row.values &&
row.values[0] &&
row.values[0][1] !== undefined &&
row.values[0][1] !== 'n/a'
? row.values[0][1]
: '-',
}));
const columns = scalarResult.columns || [];
const rows = scalarResult.data || [];
return rows.map((rowData: unknown[]) => {
const rowObj: Record<string, unknown> = {};
columns.forEach((col: ColumnDescriptor, index: number) => {
rowObj[col.name] = rowData[index];
});
return {
key: v4(),
endpointName: getDisplayValue(
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
),
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
statusMessage: getDisplayValue(rowObj.status_message),
count: getDisplayValue(rowObj.__result_0),
};
});
};
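// Illustrative sketch, not part of this change: the scalar shape the formatter above
// consumes, assuming the aggregation value is surfaced under a `__result_0` column
// (which is the key the mapping above reads for `count`).
const exampleScalarResult = {
	columns: [
		{ name: 'http.url', fieldDataType: 'string', fieldContext: 'attribute' },
		{ name: 'response_status_code', fieldDataType: 'string', fieldContext: 'span' },
		{ name: 'status_message', fieldDataType: 'string', fieldContext: 'span' },
		{ name: '__result_0', fieldDataType: 'int64', fieldContext: '' },
	],
	data: [['/api/test', '500', 'Internal Server Error', 10]],
};
// formatTopErrorsDataForTable(exampleScalarResult as TopErrorsResponseRow) would yield:
// [{ key: '<uuid>', endpointName: '/api/test', statusCode: '500',
//    statusMessage: 'Internal Server Error', count: '10' }]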
export const getTopErrorsCoRelationQueryFilters = (
domainName: string,
endPointName: string,
statusCode: string,
): IBuilderQuery['filters'] => ({
items: [
): IBuilderQuery['filters'] => {
const items: TagFilterItem[] = [
{
id: 'ea16470b',
key: {
@@ -1330,7 +1278,10 @@ export const getTopErrorsCoRelationQueryFilters = (
op: '=',
value: domainName,
},
{
];
if (statusCode !== '-') {
items.push({
id: 'f6891e27',
key: {
key: 'response_status_code',
@@ -1340,10 +1291,14 @@ export const getTopErrorsCoRelationQueryFilters = (
},
op: '=',
value: statusCode,
},
],
op: 'AND',
});
});
}
return {
items,
op: 'AND',
};
};
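// Illustrative only, not part of this change: with the statusCode guard above, the
// table's '-' placeholder drops the response_status_code filter entirely.
const correlationWithStatus = getTopErrorsCoRelationQueryFilters(
	'checkout-service', // hypothetical domain
	'/api/test',
	'500',
); // items include a response_status_code = '500' filter
const correlationWithoutStatus = getTopErrorsCoRelationQueryFilters(
	'checkout-service',
	'/api/test',
	'-',
); // items omit the response_status_code filter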
export const getTopErrorsColumnsConfig = (): ColumnType<TopErrorsTableRowData>[] => [
{

View File

@@ -11,12 +11,14 @@ import { v4 } from 'uuid';
import { useCreateAlertState } from '../context';
import {
INITIAL_EVALUATION_WINDOW_STATE,
INITIAL_INFO_THRESHOLD,
INITIAL_RANDOM_THRESHOLD,
INITIAL_WARNING_THRESHOLD,
THRESHOLD_MATCH_TYPE_OPTIONS,
THRESHOLD_OPERATOR_OPTIONS,
} from '../context/constants';
import { AlertThresholdMatchType } from '../context/types';
import EvaluationSettings from '../EvaluationSettings/EvaluationSettings';
import ThresholdItem from './ThresholdItem';
import { AnomalyAndThresholdProps, UpdateThreshold } from './types';
@@ -38,12 +40,12 @@ function AlertThreshold({
alertState,
thresholdState,
setThresholdState,
setEvaluationWindow,
notificationSettings,
setNotificationSettings,
} = useCreateAlertState();
const { currentQuery } = useQueryBuilder();
const queryNames = getQueryNames(currentQuery);
useEffect(() => {
@@ -160,6 +162,54 @@ function AlertThreshold({
}),
);
const handleSetEvaluationDetailsForMeter = (): void => {
setEvaluationWindow({
type: 'SET_INITIAL_STATE_FOR_METER',
});
setThresholdState({
type: 'SET_MATCH_TYPE',
payload: AlertThresholdMatchType.IN_TOTAL,
});
};
const handleSelectedQueryChange = (value: string): void => {
// Loop through currentQuery and find the query matching the selected query name
const query = currentQuery?.builder?.queryData.find(
(query) => query.queryName === value,
);
const currentSelectedQuery = currentQuery?.builder?.queryData.find(
(query) => query.queryName === thresholdState.selectedQuery,
);
const newSelectedQuerySource = query?.source || '';
const currentSelectedQuerySource = currentSelectedQuery?.source || '';
if (newSelectedQuerySource === currentSelectedQuerySource) {
setThresholdState({
type: 'SET_SELECTED_QUERY',
payload: value,
});
return;
}
if (newSelectedQuerySource === 'meter') {
handleSetEvaluationDetailsForMeter();
} else {
setEvaluationWindow({
type: 'SET_INITIAL_STATE',
payload: INITIAL_EVALUATION_WINDOW_STATE,
});
}
setThresholdState({
type: 'SET_SELECTED_QUERY',
payload: value,
});
};
return (
<div
className={classNames(
@@ -175,12 +225,7 @@ function AlertThreshold({
</Typography.Text>
<Select
value={thresholdState.selectedQuery}
onChange={(value): void => {
setThresholdState({
type: 'SET_SELECTED_QUERY',
payload: value,
});
}}
onChange={handleSelectedQueryChange}
style={{ width: 80 }}
options={queryNames}
data-testid="alert-threshold-query-select"

View File

@@ -10,6 +10,7 @@ import { getEvaluationWindowTypeText, getTimeframeText } from './utils';
function EvaluationSettings(): JSX.Element {
const { evaluationWindow, setEvaluationWindow } = useCreateAlertState();
const [
isEvaluationWindowPopoverOpen,
setIsEvaluationWindowPopoverOpen,

View File

@@ -24,7 +24,11 @@ import {
INITIAL_EVALUATION_WINDOW_STATE,
INITIAL_NOTIFICATION_SETTINGS_STATE,
} from './constants';
import { ICreateAlertContextProps, ICreateAlertProviderProps } from './types';
import {
AlertThresholdMatchType,
ICreateAlertContextProps,
ICreateAlertProviderProps,
} from './types';
import {
advancedOptionsReducer,
alertCreationReducer,
@@ -67,6 +71,7 @@ export function CreateAlertProvider(
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const location = useLocation();
const queryParams = new URLSearchParams(location.search);
const thresholdsFromURL = queryParams.get(QueryParams.thresholds);
const [alertType, setAlertType] = useState<AlertTypes>(() => {
if (isEditMode) {
@@ -122,7 +127,28 @@ export function CreateAlertProvider(
setThresholdState({
type: 'RESET',
});
}, [alertType]);
if (thresholdsFromURL) {
try {
const thresholds = JSON.parse(thresholdsFromURL);
setThresholdState({
type: 'SET_THRESHOLDS',
payload: thresholds,
});
} catch (error) {
console.error('Error parsing thresholds from URL:', error);
}
setEvaluationWindow({
type: 'SET_INITIAL_STATE_FOR_METER',
});
setThresholdState({
type: 'SET_MATCH_TYPE',
payload: AlertThresholdMatchType.IN_TOTAL,
});
}
}, [alertType, thresholdsFromURL]);
useEffect(() => {
if (isEditMode && initialAlertState) {

View File

@@ -237,6 +237,7 @@ export type EvaluationWindowAction =
}
| { type: 'SET_EVALUATION_CADENCE_MODE'; payload: EvaluationCadenceMode }
| { type: 'SET_INITIAL_STATE'; payload: EvaluationWindowState }
| { type: 'SET_INITIAL_STATE_FOR_METER' }
| { type: 'RESET' };
export type EvaluationCadenceMode = 'default' | 'custom' | 'rrule';

View File

@@ -1,3 +1,5 @@
import { UTC_TIMEZONE } from 'components/CustomTimePicker/timezoneUtils';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { QueryParams } from 'constants/query';
import {
alertDefaults,
@@ -11,6 +13,7 @@ import { AlertDef } from 'types/api/alerts/def';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { CumulativeWindowTimeframes } from '../EvaluationSettings/types';
import {
INITIAL_ADVANCED_OPTIONS_STATE,
INITIAL_ALERT_STATE,
@@ -210,6 +213,18 @@ export const evaluationWindowReducer = (
return INITIAL_EVALUATION_WINDOW_STATE;
case 'SET_INITIAL_STATE':
return action.payload;
case 'SET_INITIAL_STATE_FOR_METER':
return {
...state,
windowType: 'cumulative',
timeframe: CumulativeWindowTimeframes.CURRENT_DAY,
startingAt: {
time: '00:00:00',
number: '0',
timezone: UTC_TIMEZONE.value,
unit: UniversalYAxisUnit.MINUTES,
},
};
default:
return state;
}

View File

@@ -36,6 +36,7 @@ function QuerySection({
// init namespace for translations
const { t } = useTranslation('alerts');
const [currentTab, setCurrentTab] = useState(queryCategory);
const [signalSource, setSignalSource] = useState<string>('metrics');
const handleQueryCategoryChange = (queryType: string): void => {
setQueryCategory(queryType as EQueryType);
@@ -48,12 +49,17 @@ function QuerySection({
const isDarkMode = useIsDarkMode();
const handleSignalSourceChange = (value: string): void => {
setSignalSource(value);
};
const renderMetricUI = (): JSX.Element => (
<QueryBuilderV2
panelType={panelType}
config={{
queryVariant: 'static',
initialDataSource: ALERTS_DATA_SOURCE_MAP[alertType],
signalSource: signalSource === 'meter' ? 'meter' : '',
}}
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
showFunctions={
@@ -62,6 +68,8 @@ function QuerySection({
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
onSignalSourceChange={handleSignalSourceChange}
signalSourceChangeEnabled
/>
);

View File

@@ -137,8 +137,7 @@ function GeneralSettings({
if (logsCurrentTTLValues) {
setLogsTotalRetentionPeriod(logsCurrentTTLValues.default_ttl_days * 24);
setLogsS3RetentionPeriod(
logsCurrentTTLValues.cold_storage_ttl_days &&
logsCurrentTTLValues.cold_storage_ttl_days > 0
logsCurrentTTLValues.cold_storage_ttl_days
? logsCurrentTTLValues.cold_storage_ttl_days * 24
: null,
);

View File

@@ -94,6 +94,9 @@ const mockDisksWithoutS3: IDiskType[] = [
];
describe('GeneralSettings - S3 Logs Retention', () => {
const BUTTON_SELECTOR = 'button[type="button"]';
const PRIMARY_BUTTON_CLASS = 'ant-btn-primary';
beforeEach(() => {
jest.clearAllMocks();
(setRetentionApiV2 as jest.Mock).mockResolvedValue({
@@ -155,10 +158,10 @@ describe('GeneralSettings - S3 Logs Retention', () => {
await user.type(s3Input, '5');
// Find the save button in the Logs card
const buttons = logsCard?.querySelectorAll('button[type="button"]');
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
// The primary button should be the save button
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes('ant-btn-primary'),
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;
expect(saveButton).toBeInTheDocument();
@@ -262,9 +265,9 @@ describe('GeneralSettings - S3 Logs Retention', () => {
await user.type(totalInput, '60');
// Find the save button
const buttons = logsCard?.querySelectorAll('button[type="button"]');
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes('ant-btn-primary'),
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;
expect(saveButton).toBeInTheDocument();
@@ -329,4 +332,59 @@ describe('GeneralSettings - S3 Logs Retention', () => {
expect(dropdowns?.[1]).toHaveTextContent('Days');
});
});
describe('Test 4: Save Button State with S3 Disabled', () => {
it('should disable save button when cold_storage_ttl_days is -1 and no changes made', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<GeneralSettings
metricsTtlValuesPayload={mockMetricsRetention}
tracesTtlValuesPayload={mockTracesRetention}
logsTtlValuesPayload={mockLogsRetentionWithoutS3}
getAvailableDiskPayload={mockDisksWithS3}
metricsTtlValuesRefetch={jest.fn()}
tracesTtlValuesRefetch={jest.fn()}
logsTtlValuesRefetch={jest.fn()}
/>,
);
// Find the Logs card
const logsCard = screen.getByText('Logs').closest('.ant-card');
expect(logsCard).toBeInTheDocument();
// Find the save button
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;
expect(saveButton).toBeInTheDocument();
// Verify save button is disabled on initial load (no changes, S3 disabled with -1)
expect(saveButton).toBeDisabled();
// Find the total retention input
const inputs = logsCard?.querySelectorAll('input[type="text"]');
const totalInput = inputs?.[0] as HTMLInputElement;
// Change the total retention value so the save button becomes enabled
await user.clear(totalInput);
await user.type(totalInput, '60');
// Button should now be enabled after change
await waitFor(() => {
expect(saveButton).not.toBeDisabled();
});
// Revert to original value (30 days displays as 1 Month)
await user.clear(totalInput);
await user.type(totalInput, '1');
// Button should be disabled again (back to original state)
await waitFor(() => {
expect(saveButton).toBeDisabled();
});
});
});
});

View File

@@ -46,8 +46,7 @@ export const convertHoursValueToRelevantUnit = (
availableUnits?: ITimeUnit[],
): ITimeUnitConversion => {
const unitsToConsider = availableUnits?.length ? availableUnits : TimeUnits;
if (value) {
if (value >= 0) {
for (let idx = unitsToConsider.length - 1; idx >= 0; idx -= 1) {
const timeUnit = unitsToConsider[idx];
const convertedValue = timeUnit.multiplier * value;
@@ -62,7 +61,7 @@ export const convertHoursValueToRelevantUnit = (
}
// Fallback to the first available unit
return { value, timeUnitValue: unitsToConsider[0].value };
return { value: -1, timeUnitValue: unitsToConsider[0].value };
};
export const convertHoursValueToRelevantUnitString = (

View File

@@ -324,6 +324,7 @@ function FullView({
panelType={selectedPanelType}
version={selectedDashboard?.data?.version || 'v3'}
isListViewPanel={selectedPanelType === PANEL_TYPES.LIST}
signalSourceChangeEnabled
// filterConfigs={filterConfigs}
// queryComponents={queryComponents}
/>

View File

@@ -17,12 +17,6 @@ export const Card = styled(CardComponent)<CardProps>`
overflow: hidden;
border-radius: 3px;
border: 1px solid var(--bg-slate-500);
background: linear-gradient(
0deg,
rgba(171, 189, 255, 0) 0%,
rgba(171, 189, 255, 0) 100%
),
#0b0c0e;
${({ isDarkMode }): StyledCSS =>
!isDarkMode &&

View File

@@ -88,17 +88,13 @@ function GridTableComponent({
const newValue = { ...val };
Object.keys(val).forEach((k) => {
const unit = getColumnUnit(k, columnUnits);
// Apply formatting if:
// 1. Column has a unit defined, OR
// 2. decimalPrecision is specified (format all values)
const shouldFormat = unit || decimalPrecision !== undefined;
if (shouldFormat) {
if (unit) {
// the check below takes care of not adding units for rows that have n/a or null values
if (val[k] !== 'n/a' && val[k] !== null) {
newValue[k] = getYAxisFormattedValue(
String(val[k]),
unit || 'none',
unit,
decimalPrecision,
);
} else if (val[k] === null) {

View File

@@ -423,6 +423,7 @@
display: flex;
flex-direction: row;
gap: 14px;
align-items: flex-start;
.section-icon {
display: flex;
@@ -461,7 +462,6 @@
flex-direction: column;
gap: 14px;
width: 150px;
justify-content: flex-end;
.ant-btn {

View File

@@ -418,6 +418,11 @@
font-size: 12px;
font-weight: 600;
}
.set-alert-btn {
cursor: pointer;
margin-left: 24px;
}
}
}

View File

@@ -19,6 +19,7 @@ import {
TablePaginationConfig,
TableProps as AntDTableProps,
Tag,
Tooltip,
Typography,
} from 'antd';
import { NotificationInstance } from 'antd/es/notification/interface';
@@ -34,15 +35,20 @@ import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import Tags from 'components/Tags/Tags';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { QueryParams } from 'constants/query';
import { initialQueryMeterWithType } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { INITIAL_ALERT_THRESHOLD_STATE } from 'container/CreateAlertV2/context/constants';
import dayjs from 'dayjs';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useNotifications } from 'hooks/useNotifications';
import { isNil, isUndefined } from 'lodash-es';
import { cloneDeep, isNil, isUndefined } from 'lodash-es';
import {
ArrowUpRight,
BellPlus,
CalendarClock,
Check,
Copy,
@@ -60,6 +66,7 @@ import { useTimezone } from 'providers/Timezone';
import { ChangeEvent, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useMutation } from 'react-query';
import { useHistory } from 'react-router-dom';
import { useCopyToClipboard } from 'react-use';
import { ErrorResponse } from 'types/api';
import {
@@ -71,6 +78,7 @@ import {
IngestionKeyProps,
PaginationProps,
} from 'types/api/ingestionKeys/types';
import { MeterAggregateOperator } from 'types/common/queryBuilder';
import { USER_ROLES } from 'types/roles';
import { getDaysUntilExpiry } from 'utils/timeUtils';
@@ -170,6 +178,8 @@ function MultiIngestionSettings(): JSX.Element {
const { isEnterpriseSelfHostedUser } = useGetTenantLicense();
const history = useHistory();
const [
hasCreateLimitForIngestionKeyError,
setHasCreateLimitForIngestionKeyError,
@@ -694,6 +704,68 @@ function MultiIngestionSettings(): JSX.Element {
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const handleCreateAlert = (
APIKey: IngestionKeyProps,
signal: LimitProps,
): void => {
let metricName = '';
switch (signal.signal) {
case 'metrics':
metricName = 'signoz.meter.metric.datapoint.count';
break;
case 'traces':
metricName = 'signoz.meter.span.size';
break;
case 'logs':
metricName = 'signoz.meter.log.size';
break;
default:
return;
}
const threshold =
signal.signal === 'metrics'
? signal.config?.day?.count || 0
: signal.config?.day?.size || 0;
const query = {
...initialQueryMeterWithType,
builder: {
...initialQueryMeterWithType.builder,
queryData: [
{
...initialQueryMeterWithType.builder.queryData[0],
aggregations: [
{
...initialQueryMeterWithType.builder.queryData[0].aggregations?.[0],
metricName,
timeAggregation: MeterAggregateOperator.INCREASE,
spaceAggregation: MeterAggregateOperator.SUM,
},
],
filter: {
expression: `signoz.workspace.key.id='${APIKey.id}'`,
},
},
],
},
};
const stringifiedQuery = JSON.stringify(query);
const thresholds = cloneDeep(INITIAL_ALERT_THRESHOLD_STATE.thresholds);
thresholds[0].thresholdValue = threshold;
const URL = `${ROUTES.ALERTS_NEW}?showNewCreateAlertsPage=true&${
QueryParams.compositeQuery
}=${encodeURIComponent(stringifiedQuery)}&${
QueryParams.thresholds
}=${encodeURIComponent(JSON.stringify(thresholds))}`;
history.push(URL);
};
const columns: AntDTableProps<IngestionKeyProps>['columns'] = [
{
title: 'Ingestion Key',
@@ -1183,6 +1255,27 @@ function MultiIngestionSettings(): JSX.Element {
</>
))}
</div>
{((signalCfg.usesSize &&
limit?.config?.day?.size !== undefined) ||
(signalCfg.usesCount &&
limit?.config?.day?.count !== undefined)) && (
<Tooltip
title="Set alert on this limit"
placement="top"
arrow={false}
>
<Button
icon={<BellPlus size={14} color={Color.BG_CHERRY_400} />}
className="set-alert-btn periscope-btn ghost"
type="text"
data-testid={`set-alert-btn-${signalName}`}
onClick={(): void =>
handleCreateAlert(APIKey, limitsDict[signalName])
}
/>
</Tooltip>
)}
</div>
{/* SECOND limit usage/limit */}

View File

@@ -1,10 +1,60 @@
import { render, screen } from 'tests/test-utils';
import { QueryParams } from 'constants/query';
import { rest, server } from 'mocks-server/server';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { LimitProps } from 'types/api/ingestionKeys/limits/types';
import {
AllIngestionKeyProps,
IngestionKeyProps,
} from 'types/api/ingestionKeys/types';
import MultiIngestionSettings from '../MultiIngestionSettings';
// Extend the existing types to include limits with proper structure
interface TestIngestionKeyProps extends Omit<IngestionKeyProps, 'limits'> {
limits?: LimitProps[];
}
interface TestAllIngestionKeyProps extends Omit<AllIngestionKeyProps, 'data'> {
data: TestIngestionKeyProps[];
}
// Mock useHistory.push to capture navigation URL used by MultiIngestionSettings
const mockPush = jest.fn() as jest.MockedFunction<(path: string) => void>;
jest.mock('react-router-dom', () => {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const actual = jest.requireActual('react-router-dom');
return {
...actual,
useHistory: (): { push: typeof mockPush } => ({ push: mockPush }),
};
});
// Mock deployments data hook to avoid unrelated network calls in this page
jest.mock(
'hooks/CustomDomain/useGetDeploymentsData',
(): Record<string, unknown> => ({
useGetDeploymentsData: (): {
data: undefined;
isLoading: boolean;
isFetching: boolean;
isError: boolean;
} => ({
data: undefined,
isLoading: false,
isFetching: false,
isError: false,
}),
}),
);
const TEST_CREATED_UPDATED = '2024-01-01T00:00:00Z';
const TEST_EXPIRES_AT = '2030-01-01T00:00:00Z';
const TEST_WORKSPACE_ID = 'w1';
const INGESTION_SETTINGS_ROUTE = '/ingestion-settings';
describe('MultiIngestionSettings Page', () => {
beforeEach(() => {
render(<MultiIngestionSettings />);
mockPush.mockClear();
});
afterEach(() => {
@@ -12,6 +62,10 @@ describe('MultiIngestionSettings Page', () => {
});
it('renders MultiIngestionSettings page without crashing', () => {
render(<MultiIngestionSettings />, undefined, {
initialRoute: INGESTION_SETTINGS_ROUTE,
});
expect(screen.getByText('Ingestion Keys')).toBeInTheDocument();
expect(
@@ -27,4 +81,181 @@ describe('MultiIngestionSettings Page', () => {
expect(aboutKeyslink).toHaveClass('learn-more');
expect(aboutKeyslink).toHaveAttribute('rel', 'noreferrer');
});
it('navigates to create alert with metrics count threshold', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
// Arrange API response with a metrics daily count limit so the alert button is visible
const response: TestAllIngestionKeyProps = {
status: 'success',
data: [
{
name: 'Key One',
expires_at: TEST_EXPIRES_AT,
value: 'secret',
workspace_id: TEST_WORKSPACE_ID,
id: 'k1',
created_at: TEST_CREATED_UPDATED,
updated_at: TEST_CREATED_UPDATED,
tags: [],
limits: [
{
id: 'l1',
signal: 'metrics',
config: { day: { count: 1000 } },
},
],
},
],
_pagination: { page: 1, per_page: 10, pages: 1, total: 1 },
};
server.use(
rest.get('*/workspaces/me/keys*', (_req, res, ctx) =>
res(ctx.status(200), ctx.json(response)),
),
);
// Render with initial route to test navigation
render(<MultiIngestionSettings />, undefined, {
initialRoute: INGESTION_SETTINGS_ROUTE,
});
// Wait for ingestion key to load and expand the row to show limits
await screen.findByText('Key One');
const expandButton = screen.getByRole('button', { name: /right Key One/i });
await user.click(expandButton);
// Wait for limits section to render and click metrics alert button by test id
await screen.findByText('LIMITS');
const metricsAlertBtn = (await screen.findByTestId(
'set-alert-btn-metrics',
)) as HTMLButtonElement;
await user.click(metricsAlertBtn);
// Wait for navigation to occur
await waitFor(() => {
expect(mockPush).toHaveBeenCalledTimes(1);
});
// Assert: navigation occurred with correct query parameters
const navigationCall = mockPush.mock.calls[0][0] as string;
// Check URL contains alerts/new route
expect(navigationCall).toContain('/alerts/new');
expect(navigationCall).toContain('showNewCreateAlertsPage=true');
// Parse query parameters
const urlParams = new URLSearchParams(navigationCall.split('?')[1]);
const thresholds = JSON.parse(urlParams.get(QueryParams.thresholds) || '{}');
expect(thresholds).toBeDefined();
expect(thresholds[0].thresholdValue).toBe(1000);
// Verify compositeQuery parameter exists and contains correct data
const compositeQuery = JSON.parse(
urlParams.get(QueryParams.compositeQuery) || '{}',
);
expect(compositeQuery.builder).toBeDefined();
expect(compositeQuery.builder.queryData).toBeDefined();
// Check that the query contains the correct filter expression for the key
const firstQueryData = compositeQuery.builder.queryData[0];
expect(firstQueryData.filter.expression).toContain(
"signoz.workspace.key.id='k1'",
);
// Verify metric name for metrics signal
expect(firstQueryData.aggregations[0].metricName).toBe(
'signoz.meter.metric.datapoint.count',
);
});
it('navigates to create alert for logs with size threshold', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
// Arrange API response with a logs daily size limit so the alert button is visible
const response: TestAllIngestionKeyProps = {
status: 'success',
data: [
{
name: 'Key Two',
expires_at: TEST_EXPIRES_AT,
value: 'secret',
workspace_id: TEST_WORKSPACE_ID,
id: 'k2',
created_at: TEST_CREATED_UPDATED,
updated_at: TEST_CREATED_UPDATED,
tags: [],
limits: [
{
id: 'l2',
signal: 'logs',
config: { day: { size: 2048 } },
},
],
},
],
_pagination: { page: 1, per_page: 10, pages: 1, total: 1 },
};
server.use(
rest.get('*/workspaces/me/keys*', (_req, res, ctx) =>
res(ctx.status(200), ctx.json(response)),
),
);
render(<MultiIngestionSettings />, undefined, {
initialRoute: INGESTION_SETTINGS_ROUTE,
});
// Wait for ingestion key to load and expand the row to show limits
await screen.findByText('Key Two');
const expandButton = screen.getByRole('button', { name: /right Key Two/i });
await user.click(expandButton);
// Wait for limits section to render and click logs alert button by test id
await screen.findByText('LIMITS');
const logsAlertBtn = (await screen.findByTestId(
'set-alert-btn-logs',
)) as HTMLButtonElement;
await user.click(logsAlertBtn);
// Wait for navigation to occur
await waitFor(() => {
expect(mockPush).toHaveBeenCalledTimes(1);
});
// Assert: navigation occurred with correct query parameters
const navigationCall = mockPush.mock.calls[0][0] as string;
// Check URL contains alerts/new route
expect(navigationCall).toContain('/alerts/new');
expect(navigationCall).toContain('showNewCreateAlertsPage=true');
// Parse query parameters
const urlParams = new URLSearchParams(navigationCall.split('?')[1]);
// Verify thresholds parameter
const thresholds = JSON.parse(urlParams.get(QueryParams.thresholds) || '{}');
expect(thresholds).toBeDefined();
expect(thresholds[0].thresholdValue).toBe(2048);
// Verify compositeQuery parameter exists and contains correct data
const compositeQuery = JSON.parse(
urlParams.get(QueryParams.compositeQuery) || '{}',
);
expect(compositeQuery.builder).toBeDefined();
expect(compositeQuery.builder.queryData).toBeDefined();
// Check that the query contains the correct filter expression for the key
const firstQueryData = compositeQuery.builder.queryData[0];
expect(firstQueryData.filter.expression).toContain(
"signoz.workspace.key.id='k2'",
);
// Verify metric name for logs signal
expect(firstQueryData.aggregations[0].metricName).toBe(
'signoz.meter.log.size',
);
});
});

View File

@@ -1,9 +1,11 @@
import './InfraMetrics.styles.scss';
import { Empty, Radio } from 'antd';
import { Empty } from 'antd';
import { RadioChangeEvent } from 'antd/lib';
import SignozRadioGroup from 'components/SignozRadioGroup/SignozRadioGroup';
import { History, Table } from 'lucide-react';
import { useState } from 'react';
import { useMemo, useState } from 'react';
import { DataSource } from 'types/common/queryBuilder';
import { VIEW_TYPES } from './constants';
import NodeMetrics from './NodeMetrics';
@@ -14,7 +16,8 @@ interface MetricsDataProps {
nodeName: string;
hostName: string;
clusterName: string;
logLineTimestamp: string;
timestamp: string;
dataSource: DataSource.LOGS | DataSource.TRACES;
}
function InfraMetrics({
@@ -22,22 +25,56 @@ function InfraMetrics({
nodeName,
hostName,
clusterName,
logLineTimestamp,
timestamp,
dataSource = DataSource.LOGS,
}: MetricsDataProps): JSX.Element {
const [selectedView, setSelectedView] = useState<string>(() =>
podName ? VIEW_TYPES.POD : VIEW_TYPES.NODE,
);
const viewOptions = useMemo(() => {
const options = [
{
label: (
<div className="view-title">
<Table size={14} />
Node
</div>
),
value: VIEW_TYPES.NODE,
},
];
if (podName) {
options.push({
label: (
<div className="view-title">
<History size={14} />
Pod
</div>
),
value: VIEW_TYPES.POD,
});
}
return options;
}, [podName]);
const handleModeChange = (e: RadioChangeEvent): void => {
setSelectedView(e.target.value);
};
if (!podName && !nodeName && !hostName) {
const emptyStateDescription =
dataSource === DataSource.TRACES
? 'No data available. Please select a span containing pod, node, or host attributes to view metrics.'
: 'No data available. Please select a valid log line containing pod, node, or host attributes to view metrics.';
return (
<div className="empty-container">
<Empty
image={Empty.PRESENTED_IMAGE_SIMPLE}
description="No data available. Please select a valid log line containing a pod, node, or host attributes to view metrics."
description={emptyStateDescription}
/>
</div>
);
@@ -45,46 +82,26 @@ function InfraMetrics({
return (
<div className="infra-metrics-container">
<Radio.Group
className="views-tabs"
onChange={handleModeChange}
<SignozRadioGroup
value={selectedView}
>
<Radio.Button
className={selectedView === VIEW_TYPES.NODE ? 'selected_view tab' : 'tab'}
value={VIEW_TYPES.NODE}
>
<div className="view-title">
<Table size={14} />
Node
</div>
</Radio.Button>
{podName && (
<Radio.Button
className={selectedView === VIEW_TYPES.POD ? 'selected_view tab' : 'tab'}
value={VIEW_TYPES.POD}
>
<div className="view-title">
<History size={14} />
Pod
</div>
</Radio.Button>
)}
</Radio.Group>
onChange={handleModeChange}
className="views-tabs"
options={viewOptions}
/>
{/* TODO(Rahul): Make a common config driven component for this and other infra metrics components */}
{selectedView === VIEW_TYPES.NODE && (
<NodeMetrics
nodeName={nodeName}
clusterName={clusterName}
hostName={hostName}
logLineTimestamp={logLineTimestamp}
timestamp={timestamp}
/>
)}
{selectedView === VIEW_TYPES.POD && podName && (
<PodMetrics
podName={podName}
clusterName={clusterName}
logLineTimestamp={logLineTimestamp}
timestamp={timestamp}
/>
)}
</div>

View File

@@ -29,15 +29,15 @@ function NodeMetrics({
nodeName,
clusterName,
hostName,
logLineTimestamp,
timestamp,
}: {
nodeName: string;
clusterName: string;
hostName: string;
logLineTimestamp: string;
timestamp: string;
}): JSX.Element {
const { start, end, verticalLineTimestamp } = useMemo(() => {
const logTimestamp = dayjs(logLineTimestamp);
const logTimestamp = dayjs(timestamp);
const now = dayjs();
const startTime = logTimestamp.subtract(3, 'hour');
@@ -50,7 +50,7 @@ function NodeMetrics({
end: endTime.unix(),
verticalLineTimestamp: logTimestamp.unix(),
};
}, [logLineTimestamp]);
}, [timestamp]);
const { featureFlags } = useAppContext();
const dotMetricsEnabled =

View File

@@ -23,14 +23,14 @@ import { getPodQueryPayload, podWidgetInfo } from './constants';
function PodMetrics({
podName,
clusterName,
logLineTimestamp,
timestamp,
}: {
podName: string;
clusterName: string;
logLineTimestamp: string;
timestamp: string;
}): JSX.Element {
const { start, end, verticalLineTimestamp } = useMemo(() => {
const logTimestamp = dayjs(logLineTimestamp);
const logTimestamp = dayjs(timestamp);
const now = dayjs();
const startTime = logTimestamp.subtract(3, 'hour');
@@ -43,7 +43,7 @@ function PodMetrics({
end: endTime.unix(),
verticalLineTimestamp: logTimestamp.unix(),
};
}, [logLineTimestamp]);
}, [timestamp]);
const legendScrollPositionRef = useRef<{
scrollTop: number;

View File

@@ -33,6 +33,7 @@ function Explorer(): JSX.Element {
handleRunQuery,
stagedQuery,
updateAllQueriesOperators,
handleSetQueryData,
currentQuery,
} = useQueryBuilder();
const { safeNavigate } = useSafeNavigate();
@@ -50,6 +51,15 @@ function Explorer(): JSX.Element {
[updateAllQueriesOperators],
);
useEffect(() => {
handleSetQueryData(0, {
...initialQueryMeterWithType.builder.queryData[0],
source: 'meter',
});
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const exportDefaultQuery = useMemo(
() =>
updateAllQueriesOperators(

View File

@@ -290,13 +290,6 @@ function Summary(): JSX.Element {
],
);
console.log({
isMetricsListDataEmpty,
isMetricsTreeMapDataEmpty,
treeMapData,
sec: treeMapData?.payload?.data[heatmapView],
});
return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-summary-tab">

View File

@@ -1,3 +1,4 @@
/* eslint-disable sonarjs/no-identical-functions */
import { getNonIntegrationDashboardById } from 'mocks-server/__mockdata__/dashboards';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
@@ -7,6 +8,16 @@ import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
import DashboardDescription from '..';
interface MockSafeNavigateReturn {
safeNavigate: jest.MockedFunction<(url: string) => void>;
}
const DASHBOARD_TEST_ID = 'dashboard-title';
const DASHBOARD_TITLE_TEXT = 'thor';
const DASHBOARD_PATH = '/dashboard/4';
const mockSafeNavigate = jest.fn();
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: jest.fn(),
@@ -26,20 +37,24 @@ jest.mock(
);
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: jest.fn(),
useSafeNavigate: (): MockSafeNavigateReturn => ({
safeNavigate: mockSafeNavigate,
}),
}));
describe('Dashboard landing page actions header tests', () => {
beforeEach(() => {
mockSafeNavigate.mockClear();
});
it('unlock dashboard should be disabled for integrations created dashboards', async () => {
const mockLocation = {
pathname: `${process.env.FRONTEND_API_ENDPOINT}/dashboard/4`,
pathname: `${process.env.FRONTEND_API_ENDPOINT}${DASHBOARD_PATH}`,
search: '',
};
(useLocation as jest.Mock).mockReturnValue(mockLocation);
const { getByTestId } = render(
<MemoryRouter initialEntries={['/dashboard/4']}>
<MemoryRouter initialEntries={[DASHBOARD_PATH]}>
<DashboardProvider>
<DashboardDescription
handle={{
@@ -54,7 +69,9 @@ describe('Dashboard landing page actions header tests', () => {
);
await waitFor(() =>
expect(getByTestId('dashboard-title')).toHaveTextContent('thor'),
expect(getByTestId(DASHBOARD_TEST_ID)).toHaveTextContent(
DASHBOARD_TITLE_TEXT,
),
);
const dashboardSettingsTrigger = getByTestId('options');
@@ -65,9 +82,10 @@ describe('Dashboard landing page actions header tests', () => {
await waitFor(() => expect(lockUnlockButton).toBeDisabled());
});
it('unlock dashboard should not be disabled for non integration created dashboards', async () => {
const mockLocation = {
pathname: `${process.env.FRONTEND_API_ENDPOINT}/dashboard/4`,
pathname: `${process.env.FRONTEND_API_ENDPOINT}${DASHBOARD_PATH}`,
search: '',
};
(useLocation as jest.Mock).mockReturnValue(mockLocation);
@@ -77,7 +95,7 @@ describe('Dashboard landing page actions header tests', () => {
),
);
const { getByTestId } = render(
<MemoryRouter initialEntries={['/dashboard/4']}>
<MemoryRouter initialEntries={[DASHBOARD_PATH]}>
<DashboardProvider>
<DashboardDescription
handle={{
@@ -92,7 +110,9 @@ describe('Dashboard landing page actions header tests', () => {
);
await waitFor(() =>
expect(getByTestId('dashboard-title')).toHaveTextContent('thor'),
expect(getByTestId(DASHBOARD_TEST_ID)).toHaveTextContent(
DASHBOARD_TITLE_TEXT,
),
);
const dashboardSettingsTrigger = getByTestId('options');
@@ -103,4 +123,58 @@ describe('Dashboard landing page actions header tests', () => {
await waitFor(() => expect(lockUnlockButton).not.toBeDisabled());
});
it('should navigate to dashboard list with correct params and exclude variables', async () => {
const dashboardUrlWithVariables = `${DASHBOARD_PATH}?variables=%7B%22var1%22%3A%22value1%22%7D&otherParam=test`;
const mockLocation = {
pathname: DASHBOARD_PATH,
search: '?variables=%7B%22var1%22%3A%22value1%22%7D&otherParam=test',
};
(useLocation as jest.Mock).mockReturnValue(mockLocation);
const { getByText } = render(
<MemoryRouter initialEntries={[dashboardUrlWithVariables]}>
<DashboardProvider>
<DashboardDescription
handle={{
active: false,
enter: (): Promise<void> => Promise.resolve(),
exit: (): Promise<void> => Promise.resolve(),
node: { current: null },
}}
/>
</DashboardProvider>
</MemoryRouter>,
);
await waitFor(() =>
expect(screen.getByTestId(DASHBOARD_TEST_ID)).toHaveTextContent(
DASHBOARD_TITLE_TEXT,
),
);
// Click the dashboard breadcrumb to navigate back to list
const dashboardButton = getByText('Dashboard /');
fireEvent.click(dashboardButton);
// Verify navigation was called with correct URL
expect(mockSafeNavigate).toHaveBeenCalledWith(
'/dashboard?columnKey=updatedAt&order=descend&page=1&search=',
);
// Ensure the URL contains only essential dashboard list params
const calledUrl = mockSafeNavigate.mock.calls[0][0] as string;
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
// Should have essential dashboard list params
expect(urlParams.get('columnKey')).toBe('updatedAt');
expect(urlParams.get('order')).toBe('descend');
expect(urlParams.get('page')).toBe('1');
expect(urlParams.get('search')).toBe('');
// Should NOT have variables or other dashboard-specific params
expect(urlParams.has('variables')).toBeFalsy();
expect(urlParams.has('relativeTime')).toBeFalsy();
});
});

View File

@@ -13,7 +13,6 @@ import {
} from 'antd';
import logEvent from 'api/common/logEvent';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';
import { QueryParams } from 'constants/query';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteButton';
@@ -22,7 +21,6 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { isEmpty } from 'lodash-es';
import {
Check,
@@ -116,8 +114,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
const updateDashboardMutation = useUpdateDashboard();
const urlQuery = useUrlQuery();
const { user } = useAppContext();
const [editDashboard] = useComponentPermission(['edit_dashboard'], user.role);
const [isDashboardSettingsOpen, setIsDashbordSettingsOpen] = useState<boolean>(
@@ -291,13 +287,13 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
}
function goToListPage(): void {
urlQuery.set('columnKey', listSortOrder.columnKey as string);
urlQuery.set('order', listSortOrder.order as string);
urlQuery.set('page', listSortOrder.pagination as string);
urlQuery.set('search', listSortOrder.search as string);
urlQuery.delete(QueryParams.relativeTime);
const urlParams = new URLSearchParams();
urlParams.set('columnKey', listSortOrder.columnKey as string);
urlParams.set('order', listSortOrder.order as string);
urlParams.set('page', listSortOrder.pagination as string);
urlParams.set('search', listSortOrder.search as string);
const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlQuery.toString()}`;
const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
safeNavigate(generatedUrl);
}

View File

@@ -168,6 +168,7 @@ function QuerySection({
version={selectedDashboard?.data?.version || 'v3'}
isListViewPanel={selectedGraph === PANEL_TYPES.LIST}
queryComponents={queryComponents}
signalSourceChangeEnabled
/>
</div>
),

View File

@@ -347,6 +347,7 @@ function OnboardingAddDataSource(): JSX.Element {
`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.DATA_SOURCE_SEARCHED}`,
{
searchedDataSource: query,
resultCount: filteredDataSources.length,
},
);
}, 300);

View File

@@ -121,6 +121,7 @@ export const tablePanelWidgetQuery = {
stackedBarChart: false,
bucketWidth: 0,
mergeAllActiveQueries: false,
decimalPrecision: 2,
};
export const tablePanelQueryResponse = {

View File

@@ -35,6 +35,8 @@ export type QueryBuilderProps = {
showTraceOperator?: boolean;
version: string;
onChangeTraceView?: (view: TraceView) => void;
onSignalSourceChange?: (value: string) => void;
signalSourceChangeEnabled?: boolean;
};
export enum TraceView {

View File

@@ -52,6 +52,10 @@ export const AggregatorFilter = memo(function AggregatorFilter({
(query.aggregations?.[0] as MetricAggregation)?.metricName || '',
);
useEffect(() => {
setSearchText('');
}, [signalSource]);
const debouncedSearchText = useMemo(() => {
// eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-unused-vars
const [_, value] = getAutocompleteValueAndType(searchText);
@@ -67,6 +71,7 @@ export const AggregatorFilter = memo(function AggregatorFilter({
queryAggregation.timeAggregation,
query.dataSource,
index,
signalSource,
],
async () =>
getAggregateAttribute({
@@ -100,6 +105,7 @@ export const AggregatorFilter = memo(function AggregatorFilter({
setOptionsData(options);
setAttributeKeys?.(data?.payload?.attributeKeys || []);
},
keepPreviousData: false,
},
);
@@ -164,8 +170,11 @@ export const AggregatorFilter = memo(function AggregatorFilter({
queryAggregation.timeAggregation,
query.dataSource,
index,
signalSource,
])?.payload?.attributeKeys || [];
setAttributeKeys?.(attributeKeys);
return attributeKeys;
}, [
debouncedValue,
@@ -173,6 +182,7 @@ export const AggregatorFilter = memo(function AggregatorFilter({
query.dataSource,
queryClient,
index,
signalSource,
setAttributeKeys,
]);

View File

@@ -90,8 +90,9 @@ export function QueryTable({
column: any,
tableColumns: any,
): void => {
e.stopPropagation();
if (isQueryTypeBuilder && enableDrillDown) {
e.stopPropagation();
onClick({ x: e.clientX, y: e.clientY }, { record, column, tableColumns });
}
},

View File

@@ -271,7 +271,7 @@ export const defaultMoreMenuItems: SidebarItem[] = [
icon: <ChartArea size={16} />,
isNew: false,
isEnabled: true,
isBeta: true,
isBeta: false,
itemKey: 'meter-explorer',
},
{

View File

@@ -55,6 +55,353 @@
flex-direction: column;
gap: 8px;
.span-name-wrapper {
display: flex;
justify-content: space-between;
align-items: center;
.loading-spinner-container {
padding: 4px 8px;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
display: inline-flex;
}
.span-percentile-value-container {
.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
font-variant-numeric: lining-nums tabular-nums stacked-fractions
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
border-radius: 0 50px 50px 0;
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
min-width: 48px;
padding-left: 8px;
padding-right: 8px;
border-left: 1px solid var(--bg-slate-400);
cursor: pointer;
display: inline-flex;
align-items: center;
word-break: normal;
gap: 6px;
}
&.span-percentile-value-container-open {
.span-percentile-value {
border: 1px solid var(--bg-slate-400);
background: var(--bg-slate-400);
}
}
}
}
.span-percentiles-container {
display: flex;
flex-direction: column;
position: relative;
fill: linear-gradient(
139deg,
rgba(18, 19, 23, 0.32) 0%,
rgba(18, 19, 23, 0.36) 98.68%
);
stroke-width: 1px;
stroke: var(--bg-slate-500, #161922);
filter: drop-shadow(2px 4px 16px rgba(0, 0, 0, 0.2));
backdrop-filter: blur(20px);
border: 1px solid var(--bg-slate-500);
border-radius: 4px;
.span-percentiles-header {
display: flex;
align-items: center;
justify-content: space-between;
gap: 8px;
padding: 8px 12px 8px 12px;
border-bottom: 1px solid var(--bg-slate-500);
.span-percentiles-header-text {
display: flex;
align-items: center;
gap: 8px;
cursor: pointer;
}
}
.span-percentile-content {
display: flex;
flex-direction: column;
gap: 8px;
padding: 8px;
.span-percentile-content-title {
.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
font-variant-numeric: lining-nums tabular-nums stacked-fractions
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
}
.span-percentile-value-loader {
display: inline-flex;
align-items: flex-end;
justify-content: flex-end;
margin-right: 4px;
margin-left: 4px;
line-height: 18px;
}
}
.span-percentile-timerange {
width: 100%;
.span-percentile-timerange-select {
width: 100%;
margin-top: 8px;
margin-bottom: 16px;
.ant-select-selector {
border-radius: 50px;
border: 1px solid var(--bg-slate-400, #1d212d);
background: var(--bg-slate-500, #161922);
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: 0.28px;
height: 32px;
}
}
}
.span-percentile-values-table {
.span-percentile-values-table-header-row {
display: flex;
align-items: center;
justify-content: space-between;
gap: 8px;
.span-percentile-values-table-header {
color: var(--text-vanilla-400);
text-align: right;
font-family: Inter;
font-size: 11px;
font-style: normal;
font-weight: 500;
line-height: 20px; /* 181.818% */
text-transform: uppercase;
}
}
.span-percentile-values-table-data-rows {
margin-top: 8px;
display: flex;
flex-direction: column;
gap: 4px;
.span-percentile-values-table-data-rows-skeleton {
display: flex;
flex-direction: column;
gap: 4px;
.ant-skeleton-title {
width: 100% !important;
margin-top: 0px !important;
}
.ant-skeleton-paragraph {
margin-top: 8px;
& > li + li {
margin-top: 10px;
width: 100% !important;
}
}
}
}
.span-percentile-values-table-data-row {
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
padding: 0px 4px;
.span-percentile-values-table-data-row-key {
flex: 0 0 auto;
color: var(--text-vanilla-100);
text-align: right;
font-variant-numeric: lining-nums tabular-nums slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 500;
line-height: 20px; /* 166.667% */
}
.span-percentile-values-table-data-row-value {
color: var(--text-vanilla-400);
font-variant-numeric: lining-nums tabular-nums stacked-fractions
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on, 'ss02' on;
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 166.667% */
}
.dashed-line {
flex: 1;
height: 0; /* line only */
margin: 0 8px;
border-top: 1px dashed var(--bg-slate-300);
/* Use border image to control dash length & spacing */
border-top-width: 1px;
border-top-style: solid; /* temporary solid for image */
border-image: repeating-linear-gradient(
to right,
#1d212d 0,
#1d212d 10px,
transparent 10px,
transparent 20px
)
1 stretch;
}
}
.current-span-percentile-row {
border-radius: 2px;
background: rgba(78, 116, 248, 0.2);
.span-percentile-values-table-data-row-key {
color: var(--text-robin-300);
}
.dashed-line {
flex: 1;
height: 0; /* line only */
margin: 0 8px;
border-top: 1px dashed #abbdff;
/* Use border image to control dash length & spacing */
border-top-width: 1px;
border-top-style: solid; /* temporary solid for image */
border-image: repeating-linear-gradient(
to right,
#abbdff 0,
#abbdff 10px,
transparent 10px,
transparent 20px
)
1 stretch;
}
.span-percentile-values-table-data-row-value {
color: var(--text-robin-400);
}
}
}
}
.resource-attributes-select-container {
overflow: hidden;
width: calc(100% + 16px);
position: absolute;
top: 32px;
left: -8px;
z-index: 1000;
.resource-attributes-select-container-header {
.resource-attributes-select-container-input {
border-radius: 0px;
border: none !important;
box-shadow: none !important;
height: 36px;
border-bottom: 1px solid var(--bg-slate-400) !important;
}
}
border-radius: 4px;
border: 1px solid var(--bg-slate-400, #1d212d);
background: linear-gradient(
139deg,
rgba(18, 19, 23, 1) 0%,
rgba(18, 19, 23, 1) 98.68%
);
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
backdrop-filter: blur(20px);
.ant-select {
width: 100%;
}
.resource-attributes-items {
height: 200px;
overflow-y: auto;
&::-webkit-scrollbar {
width: 0.3rem;
height: 0.3rem;
}
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-thumb {
background: var(--bg-slate-300);
}
&::-webkit-scrollbar-thumb:hover {
background: var(--bg-slate-200);
}
}
.resource-attributes-select-item {
display: flex;
align-items: center;
gap: 8px;
padding: 8px 12px 8px 12px;
.resource-attributes-select-item-checkbox {
.ant-checkbox-disabled {
background-color: var(--bg-robin-500);
color: var(--bg-vanilla-100);
}
.resource-attributes-select-item-value {
color: var(--bg-vanilla-100);
font-family: Inter;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 153.846% */
letter-spacing: -0.07px;
}
}
}
}
}
.attribute-key {
color: var(--bg-vanilla-400);
font-family: Inter;
@@ -68,7 +415,6 @@
.value-wrapper {
display: flex;
padding: 2px 8px;
align-items: center;
width: fit-content;
max-width: 100%;
@@ -77,6 +423,7 @@
background: var(--bg-slate-500);
.attribute-value {
padding: 2px 8px;
color: var(--bg-vanilla-400);
font-family: 'Inter';
font-size: 14px;
@@ -200,6 +547,44 @@
}
}
.span-percentile-tooltip {
.ant-tooltip-content {
width: 300px;
max-width: 300px;
}
.span-percentile-tooltip-text {
color: var(--text-vanilla-400);
font-variant-numeric: lining-nums tabular-nums stacked-fractions ordinal
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 166.667% */
letter-spacing: -0.06px;
.span-percentile-tooltip-text-percentile {
color: var(--text-sakura-500);
font-variant-numeric: lining-nums tabular-nums stacked-fractions slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 12px;
}
.span-percentile-tooltip-text-link {
color: var(--text-vanilla-400);
text-align: right;
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 500;
line-height: 20px; /* 166.667% */
}
}
}
.span-details-drawer-docked {
width: 48px;
flex: 0 48px !important;
@@ -208,6 +593,7 @@
justify-content: center;
}
}
.resizable-handle {
box-sizing: border-box;
border: 2px solid transparent;
@@ -234,6 +620,173 @@
.description {
.item {
.span-name-wrapper {
.span-percentile-value-container {
&.span-percentile-value-container-open {
.span-percentile-value {
border: 1px solid var(--bg-vanilla-200);
background: var(--bg-vanilla-200);
}
}
}
.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
border-left: 1px solid var(--bg-slate-300);
}
}
.span-percentiles-container {
fill: linear-gradient(
139deg,
rgba(18, 19, 23, 0.32) 0%,
rgba(18, 19, 23, 0.36) 98.68%
);
stroke-width: 1px;
stroke: var(--bg-slate-500);
filter: drop-shadow(2px 4px 16px rgba(0, 0, 0, 0.2));
backdrop-filter: blur(20px);
border: 1px solid var(--bg-vanilla-300);
border-radius: 4px;
.span-percentiles-header {
border-bottom: 1px solid var(--bg-vanilla-300);
}
.span-percentile-content {
.span-percentile-content-title {
.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
}
}
.span-percentile-timerange {
.span-percentile-timerange-select {
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
color: var(--text-slate-300);
}
}
}
.span-percentile-values-table {
.span-percentile-values-table-header-row {
.span-percentile-values-table-header {
color: var(--text-vanilla-400);
}
}
.span-percentile-values-table-data-row {
.span-percentile-values-table-data-row-key {
color: var(--text-ink-100);
}
.span-percentile-values-table-data-row-value {
color: var(--text-ink-400);
}
.dashed-line {
flex: 1;
height: 0; /* line only */
margin: 0 8px;
border-top: 1px dashed var(--bg-slate-300);
/* Use border image to control dash length & spacing */
border-top-width: 1px;
border-top-style: solid; /* temporary solid for image */
border-image: repeating-linear-gradient(
to right,
var(--bg-slate-300) 0,
var(--bg-slate-300) 10px,
transparent 10px,
transparent 20px
)
1 stretch;
}
}
.current-span-percentile-row {
border-radius: 2px;
background: rgba(78, 116, 248, 0.2);
.span-percentile-values-table-data-row-key {
color: var(--text-robin-300, #95acfb);
}
.dashed-line {
border-top: 1px dashed #abbdff;
/* Use border image to control dash length & spacing */
border-top-width: 1px;
border-top-style: solid; /* temporary solid for image */
border-image: repeating-linear-gradient(
to right,
#abbdff 0,
#abbdff 10px,
transparent 10px,
transparent 20px
)
1 stretch;
}
.span-percentile-values-table-data-row-value {
color: var(--text-robin-400);
}
}
}
}
.resource-attributes-select-container {
.resource-attributes-select-container-header {
.resource-attributes-select-container-input {
border: none !important;
box-shadow: none !important;
height: 36px;
border-bottom: 1px solid var(--bg-vanilla-400) !important;
background: var(--bg-vanilla-300);
color: var(--text-ink-400);
}
}
border-radius: 4px;
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
box-shadow: 0 3px 6px -4px rgba(0, 0, 0, 0.12),
0 6px 16px 0 rgba(0, 0, 0, 0.08), 0 9px 28px 8px rgba(0, 0, 0, 0.05);
backdrop-filter: blur(20px);
.resource-attributes-items {
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-thumb {
background: var(--bg-slate-300);
}
&::-webkit-scrollbar-thumb:hover {
background: var(--bg-slate-200);
}
}
.resource-attributes-select-item {
.resource-attributes-select-item-checkbox {
.ant-checkbox-disabled {
background-color: var(--bg-robin-500);
color: var(--text-ink-100);
}
.resource-attributes-select-item-value {
color: var(--text-ink-100);
}
}
}
}
}
.attribute-key {
color: var(--bg-ink-400);
}

View File

@@ -1,14 +1,54 @@
import './SpanDetailsDrawer.styles.scss';
import { Button, Tabs, TabsProps, Tooltip, Typography } from 'antd';
import {
Button,
Checkbox,
Input,
Select,
Skeleton,
Tabs,
TabsProps,
Tooltip,
Typography,
} from 'antd';
import { RadioChangeEvent } from 'antd/lib';
import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import getUserPreference from 'api/v1/user/preferences/name/get';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import LogsIcon from 'assets/AlertHistory/LogsIcon';
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import SignozRadioGroup from 'components/SignozRadioGroup/SignozRadioGroup';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { themeColors } from 'constants/theme';
import { USER_PREFERENCES } from 'constants/userPreferences';
import dayjs from 'dayjs';
import useClickOutside from 'hooks/useClickOutside';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
import { Anvil, Bookmark, Link2, PanelRight, Search } from 'lucide-react';
import { Dispatch, SetStateAction, useCallback, useState } from 'react';
import {
Anvil,
BarChart2,
Bookmark,
Check,
ChevronDown,
ChevronUp,
Link2,
Loader2,
PanelRight,
PlusIcon,
Search,
} from 'lucide-react';
import { AnimatePresence, motion } from 'motion/react';
import {
Dispatch,
SetStateAction,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from 'react';
import { useMutation, useQuery } from 'react-query';
import { Span } from 'types/api/trace/getTraceV2';
import { formatEpochTimestamp } from 'utils/timeUtils';
@@ -17,6 +57,7 @@ import { RelatedSignalsViews } from './constants';
import Events from './Events/Events';
import LinkedSpans from './LinkedSpans/LinkedSpans';
import SpanRelatedSignals from './SpanRelatedSignals/SpanRelatedSignals';
import { hasInfraMetadata } from './utils';
interface ISpanDetailsDrawerProps {
isSpanDetailsDocked: boolean;
@@ -26,6 +67,45 @@ interface ISpanDetailsDrawerProps {
traceEndTime: number;
}
const timerangeOptions = [
{
label: '1 hour',
value: 1,
},
{
label: '2 hours',
value: 2,
},
{
label: '3 hours',
value: 3,
},
{
label: '6 hours',
value: 6,
},
{
label: '12 hours',
value: 12,
},
{
label: '24 hours',
value: 24,
},
];
interface IResourceAttribute {
key: string;
value: string;
isSelected: boolean;
}
const DEFAULT_RESOURCE_ATTRIBUTES = {
serviceName: 'service.name',
name: 'name',
};
// eslint-disable-next-line sonarjs/cognitive-complexity
function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
const {
isSpanDetailsDocked,
@@ -39,12 +119,60 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
const [shouldAutoFocusSearch, setShouldAutoFocusSearch] = useState<boolean>(
false,
);
const [isSpanPercentilesOpen, setIsSpanPercentilesOpen] = useState<boolean>(
false,
);
const [isRelatedSignalsOpen, setIsRelatedSignalsOpen] = useState<boolean>(
false,
);
const [activeDrawerView, setActiveDrawerView] = useState<RelatedSignalsViews>(
RelatedSignalsViews.LOGS,
);
const [selectedTimeRange, setSelectedTimeRange] = useState<number>(1);
const [
resourceAttributesSearchQuery,
setResourceAttributesSearchQuery,
] = useState<string>('');
const [spanPercentileData, setSpanPercentileData] = useState<{
percentile: number;
description: string;
percentiles: Record<string, number>;
} | null>(null);
const [
showResourceAttributesSelector,
setShowResourceAttributesSelector,
] = useState<boolean>(false);
const [selectedResourceAttributes, setSelectedResourceAttributes] = useState<
Record<string, string>
>({});
const [spanResourceAttributes, updateSpanResourceAttributes] = useState<
IResourceAttribute[]
>([] as IResourceAttribute[]);
const [initialWaitCompleted, setInitialWaitCompleted] = useState<boolean>(
false,
);
const [
shouldFetchSpanPercentilesData,
setShouldFetchSpanPercentilesData,
] = useState<boolean>(false);
const [
shouldUpdateUserPreference,
setShouldUpdateUserPreference,
] = useState<boolean>(false);
const handleTimeRangeChange = useCallback((value: number): void => {
setShouldFetchSpanPercentilesData(true);
setSelectedTimeRange(value);
}, []);
const color = generateColor(
selectedSpan?.serviceName || '',
themeColors.traceDetailColors,
@@ -60,6 +188,35 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
setIsRelatedSignalsOpen(false);
}, []);
const relatedSignalsOptions = useMemo(() => {
const baseOptions = [
{
label: (
<div className="view-title">
<LogsIcon width={14} height={14} />
Logs
</div>
),
value: RelatedSignalsViews.LOGS,
},
];
// Only show Infra option if span has infrastructure metadata
if (hasInfraMetadata(selectedSpan)) {
baseOptions.push({
label: (
<div className="view-title">
<BarChart2 size={14} />
Metrics
</div>
),
value: RelatedSignalsViews.INFRA,
});
}
return baseOptions;
}, [selectedSpan]);
function getItems(span: Span, startTime: number): TabsProps['items'] {
return [
{
@@ -123,6 +280,277 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
];
}
const resourceAttributesSelectorRef = useRef<HTMLDivElement | null>(null);
useClickOutside({
ref: resourceAttributesSelectorRef,
onClickOutside: () => {
if (resourceAttributesSelectorRef.current) {
setShowResourceAttributesSelector(false);
}
},
eventType: 'mousedown',
});
const spanPercentileTooltipText = useMemo(
() => (
<div className="span-percentile-tooltip-text">
<Typography.Text>
This span duration is{' '}
<span className="span-percentile-tooltip-text-percentile">
p{Math.floor(spanPercentileData?.percentile || 0)}
</span>{' '}
out of the distribution for this resource evaluated for {selectedTimeRange}{' '}
hour(s) leading up to the span start time.
</Typography.Text>
<br />
<br />
<Typography.Text className="span-percentile-tooltip-text-link">
Click to learn more
</Typography.Text>
</div>
),
[spanPercentileData?.percentile, selectedTimeRange],
);
const endTime = useMemo(
() => Math.floor(Number(selectedSpan?.timestamp) / 1000) * 1000,
[selectedSpan?.timestamp],
);
const startTime = useMemo(
() =>
dayjs(selectedSpan?.timestamp)
.subtract(Number(selectedTimeRange), 'hour')
.unix() * 1000,
[selectedSpan?.timestamp, selectedTimeRange],
);
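// Illustration only (assumption: `selectedSpan.timestamp` is an epoch in milliseconds).
// Both bounds are truncated to whole seconds, and the queried window is the
// `selectedTimeRange` hours immediately before the span. For example, for a span at
// 1640995200000 with a 1-hour range:
//   endTime   = Math.floor(1640995200000 / 1000) * 1000;                // 1640995200000
//   startTime = dayjs(1640995200000).subtract(1, 'hour').unix() * 1000; // 1640991600000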
const { mutate: updateUserPreferenceMutation } = useMutation(
updateUserPreference,
);
// TODO: Span percentile data should eventually be moved to context so that it is not refetched on every span change
const {
data: userSelectedResourceAttributes,
isError: isErrorUserSelectedResourceAttributes,
} = useQuery({
queryFn: () =>
getUserPreference({
name: USER_PREFERENCES.SPAN_PERCENTILE_RESOURCE_ATTRIBUTES,
}),
queryKey: [
'getUserPreferenceByPreferenceName',
USER_PREFERENCES.SPAN_PERCENTILE_RESOURCE_ATTRIBUTES,
selectedSpan?.spanId,
],
enabled: selectedSpan !== null && selectedSpan?.tagMap !== undefined,
});
const {
isLoading: isLoadingSpanPercentilesData,
isFetching: isFetchingSpanPercentilesData,
data,
refetch: refetchSpanPercentilesData,
isError: isErrorSpanPercentilesData,
} = useQuery({
queryFn: () =>
getSpanPercentiles({
start: startTime || 0,
end: endTime || 0,
spanDuration: selectedSpan?.durationNano || 0,
serviceName: selectedSpan?.serviceName || '',
name: selectedSpan?.name || '',
resourceAttributes: selectedResourceAttributes,
}),
queryKey: [
REACT_QUERY_KEY.GET_SPAN_PERCENTILES,
selectedSpan?.spanId,
startTime,
endTime,
],
enabled:
selectedSpan !== null &&
shouldFetchSpanPercentilesData &&
!showResourceAttributesSelector &&
initialWaitCompleted,
onSuccess: (response) => {
if (response.httpStatusCode !== 200) {
return;
}
if (shouldUpdateUserPreference) {
updateUserPreferenceMutation({
name: USER_PREFERENCES.SPAN_PERCENTILE_RESOURCE_ATTRIBUTES,
value: [...Object.keys(selectedResourceAttributes)],
});
setShouldUpdateUserPreference(false);
}
},
keepPreviousData: false,
cacheTime: 0, // no cache
});
// Product requirement: wait for 2 seconds before fetching span percentile data on initial load
useEffect(() => {
setSpanPercentileData(null);
setIsSpanPercentilesOpen(false);
setInitialWaitCompleted(false);
const timer = setTimeout(() => {
setInitialWaitCompleted(true);
}, 2000); // 2-second delay
return (): void => {
// clean the old state around span percentile data
clearTimeout(timer); // Cleanup on re-run or unmount
};
}, [selectedSpan?.spanId]);
useEffect(() => {
if (data?.httpStatusCode !== 200) {
setSpanPercentileData(null);
return;
}
if (data) {
const percentileData = {
percentile: data?.data?.position?.percentile || 0,
description: data?.data?.position?.description || '',
percentiles: data?.data?.percentiles || {},
};
setSpanPercentileData(percentileData);
}
}, [data]);
useEffect(() => {
if (userSelectedResourceAttributes) {
const userSelectedResourceAttributesList = (userSelectedResourceAttributes
?.data?.value as string[]).map((attribute: string) => attribute);
let selectedResourceAttributesMap: Record<string, string> = {};
userSelectedResourceAttributesList.forEach((attribute: string) => {
selectedResourceAttributesMap[attribute] =
selectedSpan?.tagMap?.[attribute] || '';
});
// filter out the attributes that are not in the selectedSpan?.tagMap
selectedResourceAttributesMap = Object.fromEntries(
Object.entries(selectedResourceAttributesMap).filter(
([key]) => selectedSpan?.tagMap?.[key] !== undefined,
),
);
const resourceAttributes = Object.entries(selectedSpan?.tagMap || {}).map(
([key, value]) => ({
key,
value,
isSelected:
key === DEFAULT_RESOURCE_ATTRIBUTES.serviceName ||
key === DEFAULT_RESOURCE_ATTRIBUTES.name ||
(key in selectedResourceAttributesMap &&
selectedResourceAttributesMap[key] !== '' &&
selectedResourceAttributesMap[key] !== undefined),
}),
);
// selected resources should be at the top of the list
const selectedResourceAttributes = resourceAttributes.filter(
(resourceAttribute) => resourceAttribute.isSelected,
);
const unselectedResourceAttributes = resourceAttributes.filter(
(resourceAttribute) => !resourceAttribute.isSelected,
);
const sortedResourceAttributes = [
...selectedResourceAttributes,
...unselectedResourceAttributes,
];
updateSpanResourceAttributes(sortedResourceAttributes);
setSelectedResourceAttributes(
selectedResourceAttributesMap as Record<string, string>,
);
setShouldFetchSpanPercentilesData(true);
}
if (isErrorUserSelectedResourceAttributes) {
const resourceAttributes = Object.entries(selectedSpan?.tagMap || {}).map(
([key, value]) => ({
key,
value,
isSelected:
key === DEFAULT_RESOURCE_ATTRIBUTES.serviceName ||
key === DEFAULT_RESOURCE_ATTRIBUTES.name,
}),
);
updateSpanResourceAttributes(resourceAttributes);
setShouldFetchSpanPercentilesData(true);
}
}, [
userSelectedResourceAttributes,
isErrorUserSelectedResourceAttributes,
selectedSpan?.tagMap,
]);
const handleResourceAttributeChange = useCallback(
(key: string, value: string, isSelected: boolean): void => {
updateSpanResourceAttributes((prev) =>
prev.map((resourceAttribute) =>
resourceAttribute.key === key
? { ...resourceAttribute, isSelected }
: resourceAttribute,
),
);
const newSelectedResourceAttributes = { ...selectedResourceAttributes };
if (isSelected) {
newSelectedResourceAttributes[key] = value;
} else {
delete newSelectedResourceAttributes[key];
}
setSelectedResourceAttributes(newSelectedResourceAttributes);
setShouldFetchSpanPercentilesData(true);
setShouldUpdateUserPreference(true);
},
[selectedResourceAttributes],
);
useEffect(() => {
if (
shouldFetchSpanPercentilesData &&
!showResourceAttributesSelector &&
initialWaitCompleted
) {
refetchSpanPercentilesData();
setShouldFetchSpanPercentilesData(false);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
shouldFetchSpanPercentilesData,
showResourceAttributesSelector,
initialWaitCompleted,
]);
const loadingSpanPercentilesData =
isLoadingSpanPercentilesData || isFetchingSpanPercentilesData;
const spanPercentileValue = Math.floor(spanPercentileData?.percentile || 0);
return (
<>
<section className="header">
@@ -143,13 +571,244 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
<section className="description">
<div className="item">
<Typography.Text className="attribute-key">span name</Typography.Text>
<Tooltip title={selectedSpan.name}>
<div className="value-wrapper">
<div className="value-wrapper span-name-wrapper">
<Tooltip title={selectedSpan.name}>
<Typography.Text className="attribute-value" ellipsis>
{selectedSpan.name}
</Typography.Text>
</div>
</Tooltip>
</Tooltip>
{loadingSpanPercentilesData && (
<div className="loading-spinner-container">
<Loader2 size={16} className="animate-spin" />
</div>
)}
{!loadingSpanPercentilesData && spanPercentileData && (
<Tooltip
title={isSpanPercentilesOpen ? '' : spanPercentileTooltipText}
placement="bottomRight"
overlayClassName="span-percentile-tooltip"
arrow={false}
>
<div
className={`span-percentile-value-container ${
isSpanPercentilesOpen
? 'span-percentile-value-container-open'
: 'span-percentile-value-container-closed'
}`}
>
<Typography.Text
className="span-percentile-value"
onClick={(): void => setIsSpanPercentilesOpen((prev) => !prev)}
disabled={loadingSpanPercentilesData}
>
<span className="span-percentile-value-text">
p{spanPercentileValue}
</span>
{!isSpanPercentilesOpen && (
<ChevronDown size={16} className="span-percentile-value-icon" />
)}
{isSpanPercentilesOpen && (
<ChevronUp size={16} className="span-percentile-value-icon" />
)}
</Typography.Text>
</div>
</Tooltip>
)}
</div>
<AnimatePresence initial={false}>
{isSpanPercentilesOpen && !isErrorSpanPercentilesData && (
<motion.div
initial={{ height: 0, opacity: 0 }}
animate={{ height: 'auto', opacity: 1 }}
exit={{ height: 0, opacity: 0 }}
key="box"
>
<div className="span-percentiles-container">
<div className="span-percentiles-header">
<Typography.Text
className="span-percentiles-header-text"
onClick={(): void => setIsSpanPercentilesOpen((prev) => !prev)}
>
<ChevronDown size={16} /> Span Percentile
</Typography.Text>
{showResourceAttributesSelector ? (
<Check
data-testid="check-icon"
size={16}
className="cursor-pointer span-percentiles-header-icon"
onClick={(): void => setShowResourceAttributesSelector(false)}
/>
) : (
<PlusIcon
data-testid="plus-icon"
size={16}
className="cursor-pointer span-percentiles-header-icon"
onClick={(): void => setShowResourceAttributesSelector(true)}
/>
)}
</div>
{showResourceAttributesSelector && (
<div
className="resource-attributes-select-container"
ref={resourceAttributesSelectorRef}
>
<div className="resource-attributes-select-container-header">
<Input
placeholder="Search resource attributes"
className="resource-attributes-select-container-input"
value={resourceAttributesSearchQuery}
onChange={(e): void =>
setResourceAttributesSearchQuery(e.target.value as string)
}
/>
</div>
<div className="resource-attributes-items">
{spanResourceAttributes
.filter((resourceAttribute) =>
resourceAttribute.key
.toLowerCase()
.includes(resourceAttributesSearchQuery.toLowerCase()),
)
.map((resourceAttribute) => (
<div
className="resource-attributes-select-item"
key={resourceAttribute.key}
>
<div className="resource-attributes-select-item-checkbox">
<Checkbox
checked={resourceAttribute.isSelected}
onChange={(e): void => {
handleResourceAttributeChange(
resourceAttribute.key,
resourceAttribute.value,
e.target.checked,
);
}}
disabled={
resourceAttribute.key === 'service.name' ||
resourceAttribute.key === 'name'
}
>
<div className="resource-attributes-select-item-value">
{resourceAttribute.key}
</div>
</Checkbox>
</div>
</div>
))}
</div>
</div>
)}
<div className="span-percentile-content">
<Typography.Text className="span-percentile-content-title">
This span duration is{' '}
{!isLoadingSpanPercentilesData &&
!isFetchingSpanPercentilesData &&
spanPercentileData ? (
<span className="span-percentile-value">
p{Math.floor(spanPercentileData?.percentile || 0)}
</span>
) : (
<span className="span-percentile-value-loader">
<Loader2 size={12} className="animate-spin" />
</span>
)}{' '}
out of the distribution for this resource evaluated for{' '}
{selectedTimeRange} hour(s) leading up to the span start time.
</Typography.Text>
<div className="span-percentile-timerange">
<Select
labelInValue
placeholder="Select timerange"
className="span-percentile-timerange-select"
value={{
label: `${selectedTimeRange}h : ${dayjs(selectedSpan?.timestamp)
.subtract(selectedTimeRange, 'hour')
.format(DATE_TIME_FORMATS.TIME_SPAN_PERCENTILE)} - ${dayjs(
selectedSpan?.timestamp,
).format(DATE_TIME_FORMATS.TIME_SPAN_PERCENTILE)}`,
value: selectedTimeRange,
}}
onChange={(value): void => {
handleTimeRangeChange(Number(value.value));
}}
options={timerangeOptions}
/>
</div>
<div className="span-percentile-values-table">
<div className="span-percentile-values-table-header-row">
<Typography.Text className="span-percentile-values-table-header">
Percentile
</Typography.Text>
<Typography.Text className="span-percentile-values-table-header">
Duration
</Typography.Text>
</div>
<div className="span-percentile-values-table-data-rows">
{isLoadingSpanPercentilesData || isFetchingSpanPercentilesData ? (
<Skeleton
active
paragraph={{ rows: 3 }}
className="span-percentile-values-table-data-rows-skeleton"
/>
) : (
<>
{Object.entries(spanPercentileData?.percentiles || {}).map(
([percentile, duration]) => (
<div
className="span-percentile-values-table-data-row"
key={percentile}
>
<Typography.Text className="span-percentile-values-table-data-row-key">
{percentile}
</Typography.Text>
<div className="dashed-line" />
<Typography.Text className="span-percentile-values-table-data-row-value">
{getYAxisFormattedValue(`${duration / 1000000}`, 'ms')}
</Typography.Text>
</div>
),
)}
<div className="span-percentile-values-table-data-row current-span-percentile-row">
<Typography.Text className="span-percentile-values-table-data-row-key">
p{Math.floor(spanPercentileData?.percentile || 0)}
</Typography.Text>
<div className="dashed-line" />
<Typography.Text className="span-percentile-values-table-data-row-value">
(this span){' '}
{getYAxisFormattedValue(
`${selectedSpan.durationNano / 1000000}`,
'ms',
)}
</Typography.Text>
</div>
</>
)}
</div>
</div>
</div>
</div>
</motion.div>
)}
</AnimatePresence>
</div>
<div className="item">
<Typography.Text className="attribute-key">span id</Typography.Text>
@@ -226,17 +885,7 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
<div className="related-signals-section">
<SignozRadioGroup
value=""
options={[
{
label: (
<div className="view-title">
<LogsIcon width={14} height={14} />
Logs
</div>
),
value: RelatedSignalsViews.LOGS,
},
]}
options={relatedSignalsOptions}
onChange={handleRelatedSignalsChange}
className="related-signals-radio"
/>

View File

@@ -30,6 +30,11 @@
display: flex;
flex-direction: column;
}
.view-title {
display: flex;
align-items: center;
gap: 8px;
}
.views-tabs-container {
padding: 16px 15px;
@@ -88,28 +93,10 @@
}
}
.infra-placeholder {
height: 50vh;
width: 100%;
display: flex;
justify-content: center;
align-items: center;
padding: 2rem;
box-sizing: border-box;
.infra-placeholder-content {
text-align: center;
color: var(--bg-slate-400);
svg {
margin-bottom: 1rem;
color: var(--bg-slate-400);
}
.ant-typography {
font-size: 16px;
color: var(--bg-slate-400);
}
.infra-metrics-container {
padding-inline: 16px;
.infra-metrics-card {
border: 1px solid var(--bg-slate-400);
}
}
}

View File

@@ -11,17 +11,20 @@ import {
initialQueryState,
} from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import InfraMetrics from 'container/LogDetailedView/InfraMetrics/InfraMetrics';
import { getEmptyLogsListConfig } from 'container/LogsExplorerList/utils';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { Compass, X } from 'lucide-react';
import { BarChart2, Compass, X } from 'lucide-react';
import { useCallback, useMemo, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Span } from 'types/api/trace/getTraceV2';
import { LogsAggregatorOperator } from 'types/common/queryBuilder';
import { DataSource, LogsAggregatorOperator } from 'types/common/queryBuilder';
import { RelatedSignalsViews } from '../constants';
import SpanLogs from '../SpanLogs/SpanLogs';
import { useSpanContextLogs } from '../SpanLogs/useSpanContextLogs';
import { hasInfraMetadata } from '../utils';
const FIVE_MINUTES_IN_MS = 5 * 60 * 1000;
@@ -47,6 +50,21 @@ function SpanRelatedSignals({
);
const isDarkMode = useIsDarkMode();
// Extract infrastructure metadata from span attributes
const infraMetadata = useMemo(() => {
// Only return metadata if span has infrastructure metadata
if (!hasInfraMetadata(selectedSpan)) {
return null;
}
return {
clusterName: selectedSpan.tagMap['k8s.cluster.name'] || '',
podName: selectedSpan.tagMap['k8s.pod.name'] || '',
nodeName: selectedSpan.tagMap['k8s.node.name'] || '',
hostName: selectedSpan.tagMap['host.name'] || '',
spanTimestamp: dayjs(selectedSpan.timestamp).format(),
};
}, [selectedSpan]);
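// Note: `hasInfraMetadata` is imported from '../utils' and its implementation is not part of
// this diff. Judging by the keys read above (and the test comment "any of: clusterName,
// podName, nodeName, hostName"), a minimal sketch of what it might look like — an
// assumption, not the actual implementation:
//
//   import { Span } from 'types/api/trace/getTraceV2';
//
//   export function hasInfraMetadata(span: Span | null): boolean {
//     if (!span?.tagMap) return false;
//     return Boolean(
//       span.tagMap['k8s.cluster.name'] ||
//         span.tagMap['k8s.pod.name'] ||
//         span.tagMap['k8s.node.name'] ||
//         span.tagMap['host.name'],
//     );
//   }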
const {
logs,
isLoading,
@@ -68,10 +86,34 @@ function SpanRelatedSignals({
setSelectedView(e.target.value);
}, []);
const handleClose = useCallback((): void => {
setSelectedView(RelatedSignalsViews.LOGS);
onClose();
}, [onClose]);
const tabOptions = useMemo(() => {
const baseOptions = [
{
label: (
<div className="view-title">
<LogsIcon width={14} height={14} />
Logs
</div>
),
value: RelatedSignalsViews.LOGS,
},
];
// Add Infra option if infrastructure metadata is available
if (infraMetadata) {
baseOptions.push({
label: (
<div className="view-title">
<BarChart2 size={14} />
Metrics
</div>
),
value: RelatedSignalsViews.INFRA,
});
}
return baseOptions;
}, [infraMetadata]);
const handleExplorerPageRedirect = useCallback((): void => {
const startTimeMs = traceStartTime - FIVE_MINUTES_IN_MS;
@@ -145,7 +187,7 @@ function SpanRelatedSignals({
</>
}
placement="right"
onClose={handleClose}
onClose={onClose}
open={isOpen}
style={{
overscrollBehavior: 'contain',
@@ -160,35 +202,7 @@ function SpanRelatedSignals({
<div className="views-tabs-container">
<SignozRadioGroup
value={selectedView}
options={[
{
label: (
<div className="view-title">
<LogsIcon width={14} height={14} />
Logs
</div>
),
value: RelatedSignalsViews.LOGS,
},
// {
// label: (
// <div className="view-title">
// <LogsIcon width={14} height={14} />
// Metrics
// </div>
// ),
// value: RelatedSignalsViews.METRICS,
// },
// {
// label: (
// <div className="view-title">
// <Server size={14} />
// Infra
// </div>
// ),
// value: RelatedSignalsViews.INFRA,
// },
]}
options={tabOptions}
onChange={handleTabChange}
className="related-signals-radio"
/>
@@ -197,6 +211,7 @@ function SpanRelatedSignals({
icon={<Compass size={18} />}
className="open-in-explorer"
onClick={handleExplorerPageRedirect}
data-testid="open-in-explorer-button"
>
Open in Logs Explorer
</Button>
@@ -220,6 +235,17 @@ function SpanRelatedSignals({
emptyStateConfig={!hasTraceIdLogs ? emptyStateConfig : undefined}
/>
)}
{selectedView === RelatedSignalsViews.INFRA && infraMetadata && (
<InfraMetrics
clusterName={infraMetadata.clusterName}
podName={infraMetadata.podName}
nodeName={infraMetadata.nodeName}
hostName={infraMetadata.hostName}
timestamp={infraMetadata.spanTimestamp}
dataSource={DataSource.TRACES}
/>
)}
</div>
)}
</Drawer>

View File

@@ -0,0 +1,502 @@
import ROUTES from 'constants/routes';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { server } from 'mocks-server/server';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import SpanDetailsDrawer from '../SpanDetailsDrawer';
import {
expectedHostOnlyMetadata,
expectedInfraMetadata,
expectedNodeOnlyMetadata,
expectedPodOnlyMetadata,
mockEmptyMetricsResponse,
mockNodeMetricsResponse,
mockPodMetricsResponse,
mockSpanWithHostOnly,
mockSpanWithInfraMetadata,
mockSpanWithNodeOnly,
mockSpanWithoutInfraMetadata,
mockSpanWithPodOnly,
} from './infraMetricsTestData';
// Mock external dependencies
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string } => ({
pathname: `${ROUTES.TRACE_DETAIL}`,
}),
}));
const mockSafeNavigate = jest.fn();
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: mockSafeNavigate,
}),
}));
const mockUpdateAllQueriesOperators = jest.fn().mockReturnValue({
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
aggregateOperator: 'noop',
filters: { items: [], op: 'AND' },
expression: 'A',
disabled: false,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
groupBy: [],
limit: null,
having: [],
},
],
queryFormulas: [],
},
queryType: 'builder',
});
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: (): any => ({
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
currentQuery: {
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
filters: { items: [], op: 'AND' },
},
],
},
},
}),
}));
const mockWindowOpen = jest.fn();
Object.defineProperty(window, 'open', {
writable: true,
value: mockWindowOpen,
});
// Mock uplot to avoid rendering issues
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
// Mock GetMetricQueryRange to track API calls
jest.mock('lib/dashboard/getQueryResults', () => ({
GetMetricQueryRange: jest.fn(),
}));
// Mock generateColor
jest.mock('lib/uPlotLib/utils/generateColor', () => ({
generateColor: jest.fn().mockReturnValue('#1f77b4'),
}));
// Mock OverlayScrollbar
jest.mock(
'components/OverlayScrollbar/OverlayScrollbar',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function ({ children }: any) {
return <div data-testid="overlay-scrollbar">{children}</div>;
},
);
// Mock Virtuoso
jest.mock('react-virtuoso', () => ({
Virtuoso: jest.fn(({ data, itemContent }) => (
<div data-testid="virtuoso">
{data?.map((item: any, index: number) => (
<div key={item.id || index} data-testid={`log-item-${item.id}`}>
{itemContent(index, item)}
</div>
))}
</div>
)),
}));
// Mock InfraMetrics component for focused testing
jest.mock(
'container/LogDetailedView/InfraMetrics/InfraMetrics',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function MockInfraMetrics({
podName,
nodeName,
hostName,
clusterName,
timestamp,
dataSource,
}: any) {
return (
<div data-testid="infra-metrics">
<div data-testid="infra-pod-name">{podName}</div>
<div data-testid="infra-node-name">{nodeName}</div>
<div data-testid="infra-host-name">{hostName}</div>
<div data-testid="infra-cluster-name">{clusterName}</div>
<div data-testid="infra-timestamp">{timestamp}</div>
<div data-testid="infra-data-source">{dataSource}</div>
</div>
);
},
);
// Mock PreferenceContextProvider
jest.mock('providers/preferences/context/PreferenceContextProvider', () => ({
PreferenceContextProvider: ({ children }: any): JSX.Element => (
<div>{children}</div>
),
}));
describe('SpanDetailsDrawer - Infra Metrics', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any, sonarjs/no-unused-collection
let apiCallHistory: any[] = [];
beforeEach(() => {
jest.clearAllMocks();
apiCallHistory = [];
mockSafeNavigate.mockClear();
mockWindowOpen.mockClear();
mockUpdateAllQueriesOperators.mockClear();
// Setup API call tracking for infra metrics
(GetMetricQueryRange as jest.Mock).mockImplementation((query) => {
apiCallHistory.push(query);
// Return mock responses for different query types
if (
query?.query?.builder?.queryData?.[0]?.filters?.items?.some(
(item: any) => item.key?.key === 'k8s_pod_name',
)
) {
return Promise.resolve(mockPodMetricsResponse);
}
if (
query?.query?.builder?.queryData?.[0]?.filters?.items?.some(
(item: any) => item.key?.key === 'k8s_node_name',
)
) {
return Promise.resolve(mockNodeMetricsResponse);
}
return Promise.resolve(mockEmptyMetricsResponse);
});
});
afterEach(() => {
server.resetHandlers();
});
// Mock QueryBuilder context value
const mockQueryBuilderContextValue = {
currentQuery: {
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
filters: { items: [], op: 'AND' },
},
],
},
},
stagedQuery: {
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
filters: { items: [], op: 'AND' },
},
],
},
},
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
panelType: 'list',
redirectWithQuery: jest.fn(),
handleRunQuery: jest.fn(),
handleStageQuery: jest.fn(),
resetQuery: jest.fn(),
};
const renderSpanDetailsDrawer = (props = {}): void => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithInfraMetadata}
traceStartTime={1640995200000} // 2022-01-01 00:00:00
traceEndTime={1640995260000} // 2022-01-01 00:01:00
// eslint-disable-next-line react/jsx-props-no-spreading
{...props}
/>
</QueryBuilderContext.Provider>,
);
};
it('should detect infra metadata from span attributes', async () => {
renderSpanDetailsDrawer();
// Click on metrics tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
expect(infraMetricsButton).toBeInTheDocument();
fireEvent.click(infraMetricsButton);
// Wait for infra metrics to load
await waitFor(() => {
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});
// Verify metadata extraction
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
expectedInfraMetadata.podName,
);
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
expectedInfraMetadata.nodeName,
);
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
expectedInfraMetadata.hostName,
);
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
expectedInfraMetadata.clusterName,
);
expect(screen.getByTestId('infra-data-source')).toHaveTextContent(
DataSource.TRACES,
);
});
it('should not show infra tab when span lacks infra metadata', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithoutInfraMetadata}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);
// Should NOT show infra tab, only logs tab
expect(
screen.queryByRole('radio', { name: /metrics/i }),
).not.toBeInTheDocument();
expect(screen.getByRole('radio', { name: /logs/i })).toBeInTheDocument();
});
it('should show infra tab when span has infra metadata', async () => {
renderSpanDetailsDrawer();
// Should show both logs and infra tabs
expect(screen.getByRole('radio', { name: /metrics/i })).toBeInTheDocument();
expect(screen.getByRole('radio', { name: /logs/i })).toBeInTheDocument();
});
it('should handle pod-only metadata correctly', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithPodOnly}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);
// Click on infra tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
fireEvent.click(infraMetricsButton);
await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});
// Verify pod-only metadata
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
expectedPodOnlyMetadata.podName,
);
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
expectedPodOnlyMetadata.clusterName,
);
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
expectedPodOnlyMetadata.nodeName,
);
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
expectedPodOnlyMetadata.hostName,
);
});
it('should handle node-only metadata correctly', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithNodeOnly}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);
// Click on infra tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
fireEvent.click(infraMetricsButton);
await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});
// Verify node-only metadata
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
expectedNodeOnlyMetadata.nodeName,
);
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
expectedNodeOnlyMetadata.podName,
);
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
expectedNodeOnlyMetadata.clusterName,
);
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
expectedNodeOnlyMetadata.hostName,
);
});
it('should handle host-only metadata correctly', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithHostOnly}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);
// Click on infra tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
fireEvent.click(infraMetricsButton);
await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});
// Verify host-only metadata
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
expectedHostOnlyMetadata.hostName,
);
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
expectedHostOnlyMetadata.podName,
);
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
expectedHostOnlyMetadata.nodeName,
);
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
expectedHostOnlyMetadata.clusterName,
);
});
it('should switch between logs and infra tabs correctly', async () => {
renderSpanDetailsDrawer();
// Initially should show logs tab content
const logsButton = screen.getByRole('radio', { name: /logs/i });
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
expect(logsButton).toBeInTheDocument();
expect(infraMetricsButton).toBeInTheDocument();
// Ensure logs tab is active and wait for content to load
fireEvent.click(logsButton);
await waitFor(() => {
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('open-in-explorer-button')).toBeInTheDocument();
});
// Click on infra tab
fireEvent.click(infraMetricsButton);
await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});
// Should not show logs content anymore
expect(
screen.queryByTestId('open-in-explorer-button'),
).not.toBeInTheDocument();
// Switch back to logs tab
fireEvent.click(logsButton);
// Should not show infra metrics anymore
await waitFor(() => {
expect(screen.queryByTestId('infra-metrics')).not.toBeInTheDocument();
});
// Verify logs content is shown again
await waitFor(() => {
expect(screen.getByTestId('open-in-explorer-button')).toBeInTheDocument();
});
});
it('should pass correct data source and handle multiple infra identifiers', async () => {
renderSpanDetailsDrawer();
// Should show infra tab when span has any of: clusterName, podName, nodeName, hostName
expect(screen.getByRole('radio', { name: /metrics/i })).toBeInTheDocument();
// Click on infra tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
fireEvent.click(infraMetricsButton);
await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});
// Verify TRACES data source is passed
expect(screen.getByTestId('infra-data-source')).toHaveTextContent(
DataSource.TRACES,
);
// All infra identifiers should be passed through
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
'test-pod-abc123',
);
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
'test-node-456',
);
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
'test-host.example.com',
);
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
'test-cluster',
);
});
});

View File

@@ -1,3 +1,8 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable sonarjs/no-identical-functions */
import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import getUserPreference from 'api/v1/user/preferences/name/get';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
@@ -10,6 +15,8 @@ import {
userEvent,
waitFor,
} from 'tests/test-utils';
import { SuccessResponseV2 } from 'types/api';
import { GetSpanPercentilesResponseDataProps } from 'types/api/trace/getSpanPercentiles';
import SpanDetailsDrawer from '../SpanDetailsDrawer';
import {
@@ -24,11 +31,17 @@ import {
mockSpanLogsResponse,
} from './mockData';
// Get typed mocks
const mockGetSpanPercentiles = jest.mocked(getSpanPercentiles);
const mockGetUserPreference = jest.mocked(getUserPreference);
const mockSafeNavigate = jest.fn();
// Mock external dependencies
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string } => ({
useLocation: (): { pathname: string; search: string } => ({
pathname: `${ROUTES.TRACE_DETAIL}`,
search: 'trace_id=test-trace-id',
}),
}));
@@ -38,9 +51,8 @@ jest.mock('@signozhq/button', () => ({
),
}));
const mockSafeNavigate = jest.fn();
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
useSafeNavigate: (): { safeNavigate: jest.MockedFunction<() => void> } => ({
safeNavigate: mockSafeNavigate,
}),
}));
@@ -68,7 +80,10 @@ const mockUpdateAllQueriesOperators = jest.fn().mockReturnValue({
});
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: (): any => ({
useQueryBuilder: (): {
updateAllQueriesOperators: jest.MockedFunction<() => any>;
currentQuery: any;
} => ({
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
currentQuery: {
builder: {
@@ -113,26 +128,46 @@ jest.mock('lib/uPlotLib/utils/generateColor', () => ({
generateColor: jest.fn().mockReturnValue('#1f77b4'),
}));
// Mock getSpanPercentiles API
jest.mock('api/trace/getSpanPercentiles', () => ({
__esModule: true,
default: jest.fn(),
}));
// Mock getUserPreference API
jest.mock('api/v1/user/preferences/name/get', () => ({
__esModule: true,
default: jest.fn(),
}));
jest.mock(
'components/OverlayScrollbar/OverlayScrollbar',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function ({ children }: any) {
function ({ children }: { children: React.ReactNode }) {
return <div data-testid="overlay-scrollbar">{children}</div>;
},
);
// Mock Virtuoso to avoid complex virtualization
jest.mock('react-virtuoso', () => ({
Virtuoso: jest.fn(({ data, itemContent }) => (
<div data-testid="virtuoso">
{data?.map((item: any, index: number) => (
<div key={item.id || index} data-testid={`log-item-${item.id}`}>
{itemContent(index, item)}
</div>
))}
</div>
)),
Virtuoso: jest.fn(
({
data,
itemContent,
}: {
data: any[];
itemContent: (index: number, item: any) => React.ReactNode;
}) => (
<div data-testid="virtuoso">
{data?.map((item: any, index: number) => (
<div key={item.id || index} data-testid={`log-item-${item.id}`}>
{itemContent(index, item)}
</div>
))}
</div>
),
),
}));
// Mock RawLogView component
@@ -145,7 +180,12 @@ jest.mock(
onLogClick,
isHighlighted,
helpTooltip,
}: any) {
}: {
data: any;
onLogClick: (data: any, event: React.MouseEvent) => void;
isHighlighted: boolean;
helpTooltip: string;
}) {
return (
// eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions
<div
@@ -164,9 +204,11 @@ jest.mock(
// Mock PreferenceContextProvider
jest.mock('providers/preferences/context/PreferenceContextProvider', () => ({
PreferenceContextProvider: ({ children }: any): JSX.Element => (
<div>{children}</div>
),
PreferenceContextProvider: ({
children,
}: {
children: React.ReactNode;
}): JSX.Element => <div>{children}</div>,
}));
// Mock QueryBuilder context value
@@ -217,6 +259,51 @@ const renderSpanDetailsDrawer = (props = {}): void => {
);
};
// Constants for repeated strings
const SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER = 'Search resource attributes';
const P75_TEXT = 'p75';
const SPAN_PERCENTILE_TEXT = 'Span Percentile';
// Mock data for span percentiles
const mockSpanPercentileResponse = {
httpStatusCode: 200 as const,
data: {
percentiles: {
p50: 500000000, // 500ms in nanoseconds
p90: 1000000000, // 1s in nanoseconds
p95: 1500000000, // 1.5s in nanoseconds
p99: 2000000000, // 2s in nanoseconds
},
position: {
percentile: 75.5,
description: 'This span is in the 75th percentile',
},
},
};
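// For reference: the drawer divides these nanosecond durations by 1e6 before formatting,
// e.g. the p50 row is rendered via getYAxisFormattedValue(`${500000000 / 1000000}`, 'ms'),
// i.e. 500 (ms), matching the inline comments above.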
const mockUserPreferenceResponse = {
statusCode: 200,
httpStatusCode: 200,
error: null,
message: 'Success',
data: {
name: 'span_percentile_resource_attributes',
description: 'Resource attributes for span percentile calculation',
valueType: 'array',
defaultValue: [],
value: ['service.name', 'name', 'http.method'],
allowedValues: [],
allowedScopes: [],
createdAt: '2023-01-01T00:00:00Z',
updatedAt: '2023-01-01T00:00:00Z',
},
};
const mockSpanPercentileErrorResponse = ({
httpStatusCode: 500,
data: null,
} as unknown) as SuccessResponseV2<GetSpanPercentilesResponseDataProps>;
describe('SpanDetailsDrawer', () => {
let apiCallHistory: any = {};
@@ -231,12 +318,14 @@ describe('SpanDetailsDrawer', () => {
mockSafeNavigate.mockClear();
mockWindowOpen.mockClear();
mockUpdateAllQueriesOperators.mockClear();
mockGetSpanPercentiles.mockClear();
mockGetUserPreference.mockClear();
// Setup API call tracking
(GetMetricQueryRange as jest.Mock).mockImplementation((query) => {
// Determine response based on v5 filter expressions
const filterExpression =
query.query?.builder?.queryData?.[0]?.filter?.expression;
const filterExpression = (query as any)?.query?.builder?.queryData?.[0]
?.filter?.expression;
if (!filterExpression) return Promise.resolve(mockEmptyLogsResponse);
@@ -321,17 +410,17 @@ describe('SpanDetailsDrawer', () => {
} = apiCallHistory;
// 1. Span logs query (trace_id + span_id)
expect(spanQuery.query.builder.queryData[0].filter.expression).toBe(
expect((spanQuery as any).query.builder.queryData[0].filter.expression).toBe(
expectedSpanFilterExpression,
);
// 2. Before logs query (trace_id + id < first_span_log_id)
expect(beforeQuery.query.builder.queryData[0].filter.expression).toBe(
expectedBeforeFilterExpression,
);
expect(
(beforeQuery as any).query.builder.queryData[0].filter.expression,
).toBe(expectedBeforeFilterExpression);
// 3. After logs query (trace_id + id > last_span_log_id)
expect(afterQuery.query.builder.queryData[0].filter.expression).toBe(
expect((afterQuery as any).query.builder.queryData[0].filter.expression).toBe(
expectedAfterFilterExpression,
);
@@ -360,13 +449,19 @@ describe('SpanDetailsDrawer', () => {
} = apiCallHistory;
// Verify ordering: span query should use 'desc' (default)
expect(spanQuery.query.builder.queryData[0].orderBy[0].order).toBe('desc');
expect((spanQuery as any).query.builder.queryData[0].orderBy[0].order).toBe(
'desc',
);
// Before query should use 'desc' (default)
expect(beforeQuery.query.builder.queryData[0].orderBy[0].order).toBe('desc');
expect((beforeQuery as any).query.builder.queryData[0].orderBy[0].order).toBe(
'desc',
);
// After query should use 'asc' for chronological order
expect(afterQuery.query.builder.queryData[0].orderBy[0].order).toBe('asc');
expect((afterQuery as any).query.builder.queryData[0].orderBy[0].order).toBe(
'asc',
);
});
it('should navigate to logs explorer with span filters when span log is clicked', async () => {
@@ -527,6 +622,435 @@ describe('SpanDetailsDrawer', () => {
expect(contextLogAfter).toHaveClass('log-context');
expect(contextLogBefore).not.toHaveAttribute('title');
});
// Span Percentile Tests
describe('Span Percentile Functionality', () => {
beforeEach(() => {
// Setup default mocks for percentile tests
mockGetUserPreference.mockResolvedValue(mockUserPreferenceResponse);
mockGetSpanPercentiles.mockResolvedValue(mockSpanPercentileResponse);
});
it('should display span percentile value after successful API call', async () => {
renderSpanDetailsDrawer();
// Wait for the 2-second delay and API call to complete
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
});
it('should show loading spinner while fetching percentile data', async () => {
// Mock a delayed response
mockGetSpanPercentiles.mockImplementation(
() =>
new Promise((resolve) => {
setTimeout(() => resolve(mockSpanPercentileResponse), 1000);
}),
);
renderSpanDetailsDrawer();
// Wait for loading spinner to appear (it's visible as a div with class loading-spinner-container)
await waitFor(
() => {
const spinnerContainer = document.querySelector(
'.loading-spinner-container',
);
expect(spinnerContainer).toBeInTheDocument();
},
{ timeout: 3000 },
);
});
it('should expand percentile details when percentile value is clicked', async () => {
renderSpanDetailsDrawer();
// Wait for percentile data to load
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
// Click on the percentile value to expand details
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);
// Verify percentile details are expanded
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
// Look for the text that's actually rendered
expect(screen.getByText(/This span duration is/)).toBeInTheDocument();
expect(
screen.getByText(/out of the distribution for this resource/),
).toBeInTheDocument();
});
});
it('should display percentile table with correct values', async () => {
renderSpanDetailsDrawer();
// Wait for percentile data to load
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);
// Wait for the percentile details to expand
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});
// Wait for the table to be visible (it might take a moment to render)
await waitFor(
() => {
expect(screen.getByText('Percentile')).toBeInTheDocument();
expect(screen.getByText('Duration')).toBeInTheDocument();
},
{ timeout: 5000 },
);
// Verify percentile values are displayed
expect(screen.getByText('p50')).toBeInTheDocument();
expect(screen.getByText('p90')).toBeInTheDocument();
expect(screen.getByText('p95')).toBeInTheDocument();
expect(screen.getByText('p99')).toBeInTheDocument();
// Verify current span row - use getAllByText since there are multiple p75 elements
expect(screen.getAllByText(P75_TEXT)).toHaveLength(3); // Should appear in value, expanded details, and table
// Verify the table has the current span indicator (there are multiple occurrences)
expect(screen.getAllByText(/this span/i).length).toBeGreaterThan(0);
});
it('should allow time range selection and trigger API call', async () => {
renderSpanDetailsDrawer();
// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);
// Wait for percentile details to expand
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});
// Find the time range selector and verify it exists
const timeRangeSelector = screen.getByRole('combobox');
expect(timeRangeSelector).toBeInTheDocument();
// Verify the default time range is displayed
expect(screen.getByText(/1.*hour/i)).toBeInTheDocument();
// Verify API was called with default parameters
await waitFor(() => {
expect(mockGetSpanPercentiles).toHaveBeenCalledWith(
expect.objectContaining({
start: expect.any(Number),
end: expect.any(Number),
spanDuration: mockSpan.durationNano,
serviceName: mockSpan.serviceName,
name: mockSpan.name,
resourceAttributes: expect.any(Object),
}),
);
});
});
it('should show resource attributes selector when plus icon is clicked', async () => {
renderSpanDetailsDrawer();
// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);
// Wait for percentile details to expand
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});
// Click the plus icon using test ID
const plusIcon = screen.getByTestId('plus-icon');
fireEvent.click(plusIcon);
// Verify resource attributes selector is shown
await waitFor(() => {
expect(
screen.getByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).toBeInTheDocument();
});
});
it('should filter resource attributes based on search query', async () => {
renderSpanDetailsDrawer();
// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);
// Wait for percentile details to expand and show resource attributes
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});
const plusIcon = screen.getByTestId('plus-icon');
fireEvent.click(plusIcon);
await waitFor(() => {
expect(
screen.getByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).toBeInTheDocument();
});
// Type in search query
const searchInput = screen.getByPlaceholderText(
SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER,
);
fireEvent.change(searchInput, { target: { value: 'http' } });
// Verify only matching attributes are shown (use getAllByText for all since they appear in multiple places)
expect(screen.getAllByText('http.method').length).toBeGreaterThan(0);
expect(screen.getAllByText('http.url').length).toBeGreaterThan(0);
expect(screen.getAllByText('http.status_code').length).toBeGreaterThan(0);
});
it('should handle resource attribute selection and trigger API call', async () => {
renderSpanDetailsDrawer();
// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);
// Wait for percentile details to expand and show resource attributes
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});
const plusIcon = screen.getByTestId('plus-icon');
fireEvent.click(plusIcon);
await waitFor(() => {
expect(
screen.getByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).toBeInTheDocument();
});
// Find and click a checkbox for a resource attribute
const httpMethodCheckbox = screen.getByRole('checkbox', {
name: /http\.method/i,
});
fireEvent.click(httpMethodCheckbox);
// Verify API was called with updated resource attributes
await waitFor(() => {
expect(mockGetSpanPercentiles).toHaveBeenCalledWith(
expect.objectContaining({
resourceAttributes: expect.objectContaining({
'http.method': 'GET',
}),
}),
);
});
});
it('should handle API error gracefully', async () => {
// Mock API error
mockGetSpanPercentiles.mockResolvedValue(mockSpanPercentileErrorResponse);
renderSpanDetailsDrawer();
// Wait for the 2-second delay
await waitFor(
() => {
// Verify no percentile value is displayed on error
expect(screen.queryByText(/p\d+/)).not.toBeInTheDocument();
},
{ timeout: 3000 },
);
});
it('should not display percentile value when API returns non-200 status', async () => {
// Mock API response with non-200 status
mockGetSpanPercentiles.mockResolvedValue(({
httpStatusCode: 500 as const,
data: null,
} as unknown) as Awaited<ReturnType<typeof getSpanPercentiles>>);
renderSpanDetailsDrawer();
// Wait for the 2-second delay
await waitFor(
() => {
// Verify no percentile value is displayed
expect(screen.queryByText(/p\d+/)).not.toBeInTheDocument();
},
{ timeout: 3000 },
);
});
it('should display tooltip with correct content', async () => {
renderSpanDetailsDrawer();
// Wait for percentile data to load
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
// Hover over the percentile value to show tooltip
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.mouseEnter(percentileValue);
// Verify tooltip content - use more flexible text matching
await waitFor(() => {
expect(screen.getByText(/This span duration is/)).toBeInTheDocument();
expect(screen.getByText(/out of the distribution/)).toBeInTheDocument();
expect(
screen.getByText(/evaluated for 1 hour\(s\) since the span start time/),
).toBeInTheDocument();
expect(screen.getByText('Click to learn more')).toBeInTheDocument();
});
});
it('should handle empty percentile data gracefully', async () => {
// Mock empty percentile response
mockGetSpanPercentiles.mockResolvedValue({
httpStatusCode: 200,
data: {
percentiles: {},
position: {
percentile: 0,
description: '',
},
},
});
renderSpanDetailsDrawer();
// Wait for the 2-second delay
await waitFor(
() => {
// Verify p0 is displayed for empty data
expect(screen.getByText('p0')).toBeInTheDocument();
},
{ timeout: 3000 },
);
});
it('should call API with correct parameters', async () => {
renderSpanDetailsDrawer();
// Wait for API call to be made
await waitFor(
() => {
expect(mockGetSpanPercentiles).toHaveBeenCalled();
},
{ timeout: 3000 },
);
// Verify API was called with correct parameters
expect(mockGetSpanPercentiles).toHaveBeenCalledWith({
start: expect.any(Number),
end: expect.any(Number),
spanDuration: mockSpan.durationNano,
serviceName: mockSpan.serviceName,
name: mockSpan.name,
resourceAttributes: expect.any(Object),
});
});
it('should handle user preference loading', async () => {
renderSpanDetailsDrawer();
// Verify getUserPreference was called
await waitFor(() => {
expect(mockGetUserPreference).toHaveBeenCalledWith({
name: 'span_percentile_resource_attributes',
});
});
});
it('should close resource attributes selector when check icon is clicked', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderSpanDetailsDrawer();
// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
const percentileValue = screen.getByText(P75_TEXT);
await user.click(percentileValue);
// Wait for percentile details to expand and show resource attributes
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});
const plusIcon = screen.getByTestId('plus-icon');
await user.click(plusIcon);
await waitFor(() => {
expect(
screen.getByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).toBeInTheDocument();
});
// Click the check icon to close the selector
const checkIcon = screen.getByTestId('check-icon');
await user.click(checkIcon);
// Verify resource attributes selector is hidden
await waitFor(() => {
expect(
screen.queryByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).not.toBeInTheDocument();
});
});
});
});
describe('SpanDetailsDrawer - Search Visibility User Flows', () => {

View File

@@ -0,0 +1,169 @@
import { Span } from 'types/api/trace/getTraceV2';
// Constants
const TEST_TRACE_ID = 'test-trace-id';
const TEST_CLUSTER_NAME = 'test-cluster';
const TEST_POD_NAME = 'test-pod-abc123';
const TEST_NODE_NAME = 'test-node-456';
const TEST_HOST_NAME = 'test-host.example.com';
// Mock span with infrastructure metadata (pod + node + host)
export const mockSpanWithInfraMetadata: Span = {
spanId: 'infra-span-id',
traceId: TEST_TRACE_ID,
// eslint-disable-next-line sonarjs/no-duplicate-string
name: 'api-service',
serviceName: 'api-service',
timestamp: 1640995200000000, // 2022-01-01 00:00:00 in microseconds
durationNano: 2000000000, // 2 seconds in nanoseconds
spanKind: 'server',
statusCodeString: 'STATUS_CODE_OK',
statusMessage: '',
parentSpanId: '',
references: [],
event: [],
tagMap: {
'k8s.cluster.name': TEST_CLUSTER_NAME,
'k8s.pod.name': TEST_POD_NAME,
'k8s.node.name': TEST_NODE_NAME,
'host.name': TEST_HOST_NAME,
'service.name': 'api-service',
'http.method': 'GET',
},
hasError: false,
rootSpanId: '',
kind: 0,
rootName: '',
hasChildren: false,
hasSibling: false,
subTreeNodeCount: 0,
level: 0,
};
// Mock span with only pod metadata
export const mockSpanWithPodOnly: Span = {
...mockSpanWithInfraMetadata,
spanId: 'pod-only-span-id',
tagMap: {
'k8s.cluster.name': TEST_CLUSTER_NAME,
'k8s.pod.name': TEST_POD_NAME,
'service.name': 'api-service',
},
};
// Mock span with only node metadata
export const mockSpanWithNodeOnly: Span = {
...mockSpanWithInfraMetadata,
spanId: 'node-only-span-id',
tagMap: {
'k8s.node.name': TEST_NODE_NAME,
'service.name': 'api-service',
},
};
// Mock span with only host metadata
export const mockSpanWithHostOnly: Span = {
...mockSpanWithInfraMetadata,
spanId: 'host-only-span-id',
tagMap: {
'host.name': TEST_HOST_NAME,
'service.name': 'api-service',
},
};
// Mock span without any infrastructure metadata
export const mockSpanWithoutInfraMetadata: Span = {
...mockSpanWithInfraMetadata,
spanId: 'no-infra-span-id',
tagMap: {
'service.name': 'api-service',
'http.method': 'GET',
'http.status_code': '200',
},
};
// Mock infrastructure metrics API responses
export const mockPodMetricsResponse = {
payload: {
data: {
newResult: {
data: {
result: [
{
metric: { pod_name: TEST_POD_NAME },
values: [
[1640995200, '0.5'], // CPU usage
[1640995260, '0.6'],
],
},
],
},
},
},
},
};
export const mockNodeMetricsResponse = {
payload: {
data: {
newResult: {
data: {
result: [
{
metric: { node_name: TEST_NODE_NAME },
values: [
[1640995200, '2.1'], // Memory usage
[1640995260, '2.3'],
],
},
],
},
},
},
},
};
export const mockEmptyMetricsResponse = {
payload: {
data: {
newResult: {
data: {
result: [],
},
},
},
},
};
// Expected infrastructure metadata extractions
export const expectedInfraMetadata = {
clusterName: TEST_CLUSTER_NAME,
podName: TEST_POD_NAME,
nodeName: TEST_NODE_NAME,
hostName: TEST_HOST_NAME,
};
export const expectedPodOnlyMetadata = {
clusterName: TEST_CLUSTER_NAME,
podName: TEST_POD_NAME,
nodeName: '',
hostName: '',
// eslint-disable-next-line sonarjs/no-duplicate-string
spanTimestamp: '2022-01-01T00:00:00.000Z',
};
export const expectedNodeOnlyMetadata = {
clusterName: '',
podName: '',
nodeName: TEST_NODE_NAME,
hostName: '',
spanTimestamp: '2022-01-01T00:00:00.000Z',
};
export const expectedHostOnlyMetadata = {
clusterName: '',
podName: '',
nodeName: '',
hostName: TEST_HOST_NAME,
spanTimestamp: '2022-01-01T00:00:00.000Z',
};

View File

@@ -1,11 +1,11 @@
export enum RelatedSignalsViews {
LOGS = 'logs',
// METRICS = 'metrics',
// INFRA = 'infra',
INFRA = 'infra',
}
export const RELATED_SIGNALS_VIEW_TYPES = {
LOGS: RelatedSignalsViews.LOGS,
// METRICS: RelatedSignalsViews.METRICS,
// INFRA: RelatedSignalsViews.INFRA,
INFRA: RelatedSignalsViews.INFRA,
};

View File

@@ -0,0 +1,22 @@
import { Span } from 'types/api/trace/getTraceV2';
/**
* Infrastructure metadata keys that indicate infra signals are available
*/
export const INFRA_METADATA_KEYS = [
'k8s.cluster.name',
'k8s.pod.name',
'k8s.node.name',
'host.name',
] as const;
/**
* Checks if a span has any infrastructure metadata attributes
* @param span - The span to check for infrastructure metadata
* @returns true if the span has at least one infrastructure metadata key, false otherwise
*/
export function hasInfraMetadata(span: Span | undefined): boolean {
if (!span?.tagMap) return false;
return INFRA_METADATA_KEYS.some((key) => span.tagMap?.[key]);
}
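A minimal usage sketch of this helper (the function name, import paths, and wiring below are illustrative, not part of this change):
import { Span } from 'types/api/trace/getTraceV2';
// Hypothetical import paths for illustration only.
import { RelatedSignalsViews } from './constants';
import { hasInfraMetadata } from './hasInfraMetadata';
// Offer the INFRA related-signals view only when the span carries at least one infra attribute.
function getRelatedSignalsTabs(span: Span | undefined): RelatedSignalsViews[] {
	return hasInfraMetadata(span)
		? [RelatedSignalsViews.LOGS, RelatedSignalsViews.INFRA]
		: [RelatedSignalsViews.LOGS];
}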

View File

@@ -35,21 +35,21 @@ function TraceMetadata(props: ITraceMetadataProps): JSX.Element {
totalSpans,
notFound,
} = props;
const handlePreviousBtnClick = (): void => {
if (window.history.length > 1) {
history.goBack();
} else {
history.push(ROUTES.TRACES_EXPLORER);
}
};
return (
<div className="trace-metadata">
<section className="metadata-info">
<div className="first-row">
<Button className="previous-btn">
<ArrowLeft
size={14}
onClick={(): void => {
if (window.history.length > 1) {
history.goBack();
} else {
history.push(ROUTES.TRACES_EXPLORER);
}
}}
/>
<Button className="previous-btn" onClick={handlePreviousBtnClick}>
<ArrowLeft size={14} />
</Button>
<div className="trace-name">
<DraftingCompass size={14} className="drafting" />

View File

@@ -18,7 +18,6 @@ interface UseVariablesFromUrlReturn {
name: string,
selectedValue: IDashboardVariable['selectedValue'],
) => void;
clearUrlVariables: () => void;
}
const useVariablesFromUrl = (): UseVariablesFromUrlReturn => {
@@ -68,15 +67,6 @@ const useVariablesFromUrl = (): UseVariablesFromUrlReturn => {
[history, urlQuery],
);
const clearUrlVariables = useCallback((): void => {
const params = new URLSearchParams(urlQuery.toString());
params.delete(QueryParams.variables);
history.replace({
search: params.toString(),
});
}, [history, urlQuery]);
const updateUrlVariable = useCallback(
(name: string, selectedValue: IDashboardVariable['selectedValue']): void => {
const currentVariables = getUrlVariables();
@@ -95,7 +85,6 @@ const useVariablesFromUrl = (): UseVariablesFromUrlReturn => {
getUrlVariables,
setUrlVariables,
updateUrlVariable,
clearUrlVariables,
};
};

View File

@@ -662,21 +662,23 @@ const generateTableColumns = (
*
* @param columnKey - The column identifier (could be queryName.expression or queryName)
* @param columnUnits - The column units mapping
* @returns The unit string or undefined if not found
* @returns The unit string ('none' if the unit is set to an empty string), or undefined if not found
*/
export const getColumnUnit = (
columnKey: string,
columnUnits: Record<string, string>,
): string | undefined => {
// First try the exact match (new syntax: queryName.expression)
if (columnUnits[columnKey]) {
return columnUnits[columnKey];
if (columnUnits[columnKey] !== undefined) {
return columnUnits[columnKey] || 'none';
}
// Fallback to old syntax: extract queryName from queryName.expression
if (columnKey.includes('.')) {
const queryName = columnKey.split('.')[0];
return columnUnits[queryName];
if (columnUnits[queryName] !== undefined) {
return columnUnits[queryName] || 'none';
}
}
return undefined;
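As a quick illustration of the exact-match, queryName-fallback, and empty-string paths (a sketch; the mapping values are made up):
// Assuming getColumnUnit as defined above; the column units mapping is illustrative.
const columnUnits: Record<string, string> = { 'A.count()': 'ms', B: '' };
getColumnUnit('A.count()', columnUnits); // 'ms' (exact queryName.expression match)
getColumnUnit('B.avg(duration)', columnUnits); // 'none' (empty unit found via queryName fallback)
getColumnUnit('C.sum(bytes)', columnUnits); // undefined (no mapping for the column or query)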

View File

@@ -285,10 +285,11 @@ export const getUPlotChartOptions = ({
cursor: {
lock: false,
focus: {
prox: 1e6,
prox: 25,
bias: 1,
},
points: {
one: true,
size: (u, seriesIdx): number => u.series[seriesIdx].points.size * 3,
width: (u, seriesIdx, size): number => size / 4,
stroke: (u, seriesIdx): string =>
@@ -394,14 +395,25 @@ export const getUPlotChartOptions = ({
hooks: {
draw: [
(u): void => {
if (isAnomalyRule) {
if (isAnomalyRule || !thresholds?.length) {
return;
}
thresholds?.forEach((threshold) => {
const { ctx } = u;
const { left: plotLeft, width: plotWidth } = u.bbox;
const plotRight = plotLeft + plotWidth;
const canvasHeight = ctx.canvas.height;
const threshold90Percent = canvasHeight * 0.9;
// Single save/restore for all thresholds
ctx.save();
ctx.lineWidth = 2;
ctx.setLineDash([10, 5]);
for (let i = 0; i < thresholds.length; i++) {
const threshold = thresholds[i];
if (threshold.thresholdValue !== undefined) {
const { ctx } = u;
ctx.save();
const color = threshold.thresholdColor || 'red';
const yPos = u.valToPos(
convertValue(
threshold.thresholdValue,
@@ -411,35 +423,28 @@ export const getUPlotChartOptions = ({
'y',
true,
);
ctx.strokeStyle = threshold.thresholdColor || 'red';
ctx.lineWidth = 2;
ctx.setLineDash([10, 5]);
// Draw threshold line
ctx.strokeStyle = color;
ctx.beginPath();
const plotLeft = u.bbox.left; // left edge of the plot area
const plotRight = plotLeft + u.bbox.width; // right edge of the plot area
ctx.moveTo(plotLeft, yPos);
ctx.lineTo(plotRight, yPos);
ctx.stroke();
// Text configuration
// Draw threshold label if present
if (threshold.thresholdLabel) {
const text = threshold.thresholdLabel;
const textX = plotRight - ctx.measureText(text).width - 20;
const canvasHeight = ctx.canvas.height;
const textWidth = ctx.measureText(threshold.thresholdLabel).width;
const textX = plotRight - textWidth - 20;
const yposHeight = canvasHeight - yPos;
const isHeightGreaterThan90Percent = canvasHeight * 0.9 < yposHeight;
// Adjust textY based on the condition
let textY;
if (isHeightGreaterThan90Percent) {
textY = yPos + 15; // Below the threshold line
} else {
textY = yPos - 15; // Above the threshold line
}
ctx.fillStyle = threshold.thresholdColor || 'red';
ctx.fillText(text, textX, textY);
const textY = yposHeight > threshold90Percent ? yPos + 15 : yPos - 15;
ctx.fillStyle = color;
ctx.fillText(threshold.thresholdLabel, textX, textY);
}
ctx.restore();
}
});
}
ctx.restore();
},
],
setSelect: [
@@ -555,19 +560,22 @@ export const getUPlotChartOptions = ({
// Get the current text content
const legendText = seriesLabels[index];
// Clear the th content and rebuild it
thElement.innerHTML = '';
// Use DocumentFragment to batch DOM operations
const fragment = document.createDocumentFragment();
// Add back the marker
if (markerClone) {
thElement.appendChild(markerClone);
fragment.appendChild(markerClone);
}
// Create text wrapper
const textSpan = document.createElement('span');
textSpan.className = 'legend-text';
textSpan.textContent = legendText;
thElement.appendChild(textSpan);
fragment.appendChild(textSpan);
// Replace the children in a single operation
thElement.replaceChildren(fragment);
// Setup tooltip functionality - check truncation on hover
let tooltipElement: HTMLElement | null = null;

View File

@@ -16,8 +16,20 @@
// https://tobyzerner.github.io/placement.js/dist/index.js
/**
* Positions an element (tooltip/popover) relative to a reference element.
* Automatically flips to the opposite side if there's insufficient space.
*
* @param element - The HTMLElement to position
* @param reference - Reference element/Range or bounding rect
* @param side - Preferred side: 'top', 'bottom', 'left', 'right' (default: 'bottom')
* @param align - Alignment: 'start', 'center', 'end' (default: 'center')
* @param options - Optional bounds for constraining the element
* - bound: Custom boundary rect/element
* - followCursor: { x, y } - If provided, tooltip follows cursor with smart positioning
*/
export const placement = (function () {
const e = {
const AXIS_PROPS = {
size: ['height', 'width'],
clientSize: ['clientHeight', 'clientWidth'],
offsetSize: ['offsetHeight', 'offsetWidth'],
@@ -28,87 +40,241 @@ export const placement = (function () {
marginAfter: ['marginBottom', 'marginRight'],
scrollOffset: ['pageYOffset', 'pageXOffset'],
};
function t(e) {
return { top: e.top, bottom: e.bottom, left: e.left, right: e.right };
}
return function (o, r, f, a, i) {
void 0 === f && (f = 'bottom'),
void 0 === a && (a = 'center'),
void 0 === i && (i = {}),
(r instanceof Element || r instanceof Range) &&
(r = t(r.getBoundingClientRect()));
const n = {
top: r.bottom,
bottom: r.top,
left: r.right,
right: r.left,
...r,
function extractRect(source) {
return {
top: source.top,
bottom: source.bottom,
left: source.left,
right: source.right,
};
const s = {
}
return function (element, reference, side, align, options) {
// Default parameters
void 0 === side && (side = 'bottom');
void 0 === align && (align = 'center');
void 0 === options && (options = {});
// Handle cursor following mode
if (options.followCursor) {
const cursorX = options.followCursor.x;
const cursorY = options.followCursor.y;
const offset = options.followCursor.offset || 10; // Default 10px offset from cursor
element.style.position = 'absolute';
element.style.maxWidth = '';
element.style.maxHeight = '';
const elementWidth = element.offsetWidth;
const elementHeight = element.offsetHeight;
// Use viewport bounds for cursor following (not chart bounds)
const viewportBounds = {
top: 0,
left: 0,
bottom: window.innerHeight,
right: window.innerWidth,
};
// Vertical positioning: follow cursor Y with offset, clamped to viewport
const topPosition = cursorY + offset;
const clampedTop = Math.max(
viewportBounds.top,
Math.min(topPosition, viewportBounds.bottom - elementHeight),
);
element.style.top = `${clampedTop}px`;
element.style.bottom = 'auto';
// Horizontal positioning: auto-detect left or right based on available space
const spaceOnRight = viewportBounds.right - cursorX;
const spaceOnLeft = cursorX - viewportBounds.left;
if (spaceOnRight >= elementWidth + offset) {
// Enough space on the right
element.style.left = `${cursorX + offset}px`;
element.style.right = 'auto';
element.dataset.side = 'right';
} else if (spaceOnLeft >= elementWidth + offset) {
// Not enough space on right, use left
element.style.left = `${cursorX - elementWidth - offset}px`;
element.style.right = 'auto';
element.dataset.side = 'left';
} else if (spaceOnRight > spaceOnLeft) {
// Not enough space on either side, pick the side with more space
const leftPos = cursorX + offset;
const clampedLeft = Math.max(
viewportBounds.left,
Math.min(leftPos, viewportBounds.right - elementWidth),
);
element.style.left = `${clampedLeft}px`;
element.style.right = 'auto';
element.dataset.side = 'right';
} else {
const leftPos = cursorX - elementWidth - offset;
const clampedLeft = Math.max(
viewportBounds.left,
Math.min(leftPos, viewportBounds.right - elementWidth),
);
element.style.left = `${clampedLeft}px`;
element.style.right = 'auto';
element.dataset.side = 'left';
}
element.dataset.align = 'cursor';
return; // Exit early, don't run normal positioning logic
}
// Normalize reference to rect object
(reference instanceof Element || reference instanceof Range) &&
(reference = extractRect(reference.getBoundingClientRect()));
// Create anchor rect with swapped opposite edges for positioning
const anchorRect = {
top: reference.bottom,
bottom: reference.top,
left: reference.right,
right: reference.left,
...reference,
};
// Viewport bounds (can be overridden via options.bound)
const bounds = {
top: 0,
left: 0,
bottom: window.innerHeight,
right: window.innerWidth,
};
i.bound &&
((i.bound instanceof Element || i.bound instanceof Range) &&
(i.bound = t(i.bound.getBoundingClientRect())),
Object.assign(s, i.bound));
const l = getComputedStyle(o);
const m = {};
const b = {};
for (const g in e)
(m[g] = e[g][f === 'top' || f === 'bottom' ? 0 : 1]),
(b[g] = e[g][f === 'top' || f === 'bottom' ? 1 : 0]);
(o.style.position = 'absolute'),
(o.style.maxWidth = ''),
(o.style.maxHeight = '');
const d = parseInt(l[b.marginBefore]);
const c = parseInt(l[b.marginAfter]);
const u = d + c;
const p = s[b.after] - s[b.before] - u;
const h = parseInt(l[b.maxSize]);
(!h || p < h) && (o.style[b.maxSize] = `${p}px`);
const x = parseInt(l[m.marginBefore]) + parseInt(l[m.marginAfter]);
const y = n[m.before] - s[m.before] - x;
const z = s[m.after] - n[m.after] - x;
((f === m.before && o[m.offsetSize] > y) ||
(f === m.after && o[m.offsetSize] > z)) &&
(f = y > z ? m.before : m.after);
const S = f === m.before ? y : z;
const v = parseInt(l[m.maxSize]);
(!v || S < v) && (o.style[m.maxSize] = `${S}px`);
const w = window[m.scrollOffset];
const O = function (e) {
return Math.max(s[m.before], Math.min(e, s[m.after] - o[m.offsetSize] - x));
options.bound &&
((options.bound instanceof Element || options.bound instanceof Range) &&
(options.bound = extractRect(options.bound.getBoundingClientRect())),
Object.assign(bounds, options.bound));
const styles = getComputedStyle(element);
const isVertical = side === 'top' || side === 'bottom';
// Build axis property maps based on orientation
const mainAxis = {}; // Properties for the main positioning axis
const crossAxis = {}; // Properties for the perpendicular axis
for (const prop in AXIS_PROPS) {
mainAxis[prop] = AXIS_PROPS[prop][isVertical ? 0 : 1];
crossAxis[prop] = AXIS_PROPS[prop][isVertical ? 1 : 0];
}
// Reset element positioning
element.style.position = 'absolute';
element.style.maxWidth = '';
element.style.maxHeight = '';
// Cross-axis: calculate and apply max size constraint
const crossMarginBefore = parseInt(styles[crossAxis.marginBefore]);
const crossMarginAfter = parseInt(styles[crossAxis.marginAfter]);
const crossMarginTotal = crossMarginBefore + crossMarginAfter;
const crossAvailableSpace =
bounds[crossAxis.after] - bounds[crossAxis.before] - crossMarginTotal;
const crossMaxSize = parseInt(styles[crossAxis.maxSize]);
(!crossMaxSize || crossAvailableSpace < crossMaxSize) &&
(element.style[crossAxis.maxSize] = `${crossAvailableSpace}px`);
// Main-axis: calculate space on both sides
const mainMarginTotal =
parseInt(styles[mainAxis.marginBefore]) +
parseInt(styles[mainAxis.marginAfter]);
const spaceBefore =
anchorRect[mainAxis.before] - bounds[mainAxis.before] - mainMarginTotal;
const spaceAfter =
bounds[mainAxis.after] - anchorRect[mainAxis.after] - mainMarginTotal;
// Auto-flip to the side with more space if needed
((side === mainAxis.before && element[mainAxis.offsetSize] > spaceBefore) ||
(side === mainAxis.after && element[mainAxis.offsetSize] > spaceAfter)) &&
(side = spaceBefore > spaceAfter ? mainAxis.before : mainAxis.after);
// Apply main-axis max size constraint
const mainAvailableSpace =
side === mainAxis.before ? spaceBefore : spaceAfter;
const mainMaxSize = parseInt(styles[mainAxis.maxSize]);
(!mainMaxSize || mainAvailableSpace < mainMaxSize) &&
(element.style[mainAxis.maxSize] = `${mainAvailableSpace}px`);
// Position on main axis
const mainScrollOffset = window[mainAxis.scrollOffset];
const clampMainPosition = function (pos) {
return Math.max(
bounds[mainAxis.before],
Math.min(
pos,
bounds[mainAxis.after] - element[mainAxis.offsetSize] - mainMarginTotal,
),
);
};
f === m.before
? ((o.style[m.before] = `${w + O(n[m.before] - o[m.offsetSize] - x)}px`),
(o.style[m.after] = 'auto'))
: ((o.style[m.before] = `${w + O(n[m.after])}px`),
(o.style[m.after] = 'auto'));
const B = window[b.scrollOffset];
const I = function (e) {
return Math.max(s[b.before], Math.min(e, s[b.after] - o[b.offsetSize] - u));
side === mainAxis.before
? ((element.style[mainAxis.before] = `${
mainScrollOffset +
clampMainPosition(
anchorRect[mainAxis.before] -
element[mainAxis.offsetSize] -
mainMarginTotal,
)
}px`),
(element.style[mainAxis.after] = 'auto'))
: ((element.style[mainAxis.before] = `${
mainScrollOffset + clampMainPosition(anchorRect[mainAxis.after])
}px`),
(element.style[mainAxis.after] = 'auto'));
// Position on cross axis based on alignment
const crossScrollOffset = window[crossAxis.scrollOffset];
const clampCrossPosition = function (pos) {
return Math.max(
bounds[crossAxis.before],
Math.min(
pos,
bounds[crossAxis.after] - element[crossAxis.offsetSize] - crossMarginTotal,
),
);
};
switch (a) {
switch (align) {
case 'start':
(o.style[b.before] = `${B + I(n[b.before] - d)}px`),
(o.style[b.after] = 'auto');
(element.style[crossAxis.before] = `${
crossScrollOffset +
clampCrossPosition(anchorRect[crossAxis.before] - crossMarginBefore)
}px`),
(element.style[crossAxis.after] = 'auto');
break;
case 'end':
(o.style[b.before] = 'auto'),
(o.style[b.after] = `${
B + I(document.documentElement[b.clientSize] - n[b.after] - c)
(element.style[crossAxis.before] = 'auto'),
(element.style[crossAxis.after] = `${
crossScrollOffset +
clampCrossPosition(
document.documentElement[crossAxis.clientSize] -
anchorRect[crossAxis.after] -
crossMarginAfter,
)
}px`);
break;
default:
var H = n[b.after] - n[b.before];
(o.style[b.before] = `${
B + I(n[b.before] + H / 2 - o[b.offsetSize] / 2 - d)
// 'center'
var crossSize = anchorRect[crossAxis.after] - anchorRect[crossAxis.before];
(element.style[crossAxis.before] = `${
crossScrollOffset +
clampCrossPosition(
anchorRect[crossAxis.before] +
crossSize / 2 -
element[crossAxis.offsetSize] / 2 -
crossMarginBefore,
)
}px`),
(o.style[b.after] = 'auto');
(element.style[crossAxis.after] = 'auto');
}
(o.dataset.side = f), (o.dataset.align = a);
// Store final placement as data attributes
(element.dataset.side = side), (element.dataset.align = align);
};
})();
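A minimal sketch of calling the helper in the new cursor-following mode; the import path, element id, and offset are illustrative, not taken from this change:
import { placement } from 'lib/placement'; // hypothetical import path
const tooltip = document.getElementById('overlay');
document.addEventListener('mousemove', (e: MouseEvent): void => {
	if (!tooltip) return;
	// The reference rect is ignored in followCursor mode; the tooltip tracks the cursor,
	// flips left/right based on available space, and is clamped to the viewport.
	placement(
		tooltip,
		{ top: e.clientY, bottom: e.clientY, left: e.clientX, right: e.clientX },
		'right',
		'start',
		{ followCursor: { x: e.clientX, y: e.clientY, offset: 10 } },
	);
});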

View File

@@ -3,7 +3,71 @@ import { themeColors } from 'constants/theme';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
function isSeriesValueValid(seriesValue: number | undefined | null): boolean {
return (
seriesValue !== undefined &&
seriesValue !== null &&
!Number.isNaN(seriesValue)
);
}
// Helper function to get the focused/highlighted series at a specific position
function resolveSeriesColor(series: uPlot.Series, index: number): string {
let color = '#000000';
if (typeof series.stroke === 'string') {
color = series.stroke;
} else if (typeof series.fill === 'string') {
color = series.fill;
} else {
const seriesLabel = series.label || `Series ${index}`;
const isDarkMode = !document.body.classList.contains('lightMode');
color = generateColor(
seriesLabel,
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
);
}
return color;
}
function getPreferredSeriesIndex(
u: uPlot,
timestampIndex: number,
e: MouseEvent,
): number {
const bbox = u.over.getBoundingClientRect();
const top = e.clientY - bbox.top;
// Prefer series explicitly marked as focused
for (let i = 1; i < u.series.length; i++) {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const isSeriesFocused = u.series[i]?._focus === true;
const isSeriesShown = u.series[i].show !== false;
const seriesValue = u.data[i]?.[timestampIndex];
if (isSeriesFocused && isSeriesShown && isSeriesValueValid(seriesValue)) {
return i;
}
}
// Fallback: choose series with Y closest to mouse position
let focusedSeriesIndex = -1;
let closestPixelDiff = Infinity;
for (let i = 1; i < u.series.length; i++) {
const series = u.data[i];
const seriesValue = series?.[timestampIndex];
if (isSeriesValueValid(seriesValue) && u.series[i].show !== false) {
const yPx = u.valToPos(seriesValue as number, 'y');
const diff = Math.abs(yPx - top);
if (diff < closestPixelDiff) {
closestPixelDiff = diff;
focusedSeriesIndex = i;
}
}
}
return focusedSeriesIndex;
}
export const getFocusedSeriesAtPosition = (
e: MouseEvent,
u: uPlot,
@@ -17,74 +81,28 @@ export const getFocusedSeriesAtPosition = (
} | null => {
const bbox = u.over.getBoundingClientRect();
const left = e.clientX - bbox.left;
const top = e.clientY - bbox.top;
const timestampIndex = u.posToIdx(left);
let focusedSeriesIndex = -1;
let closestPixelDiff = Infinity;
// Check all series (skip index 0 which is the x-axis)
for (let i = 1; i < u.data.length; i++) {
const series = u.data[i];
const seriesValue = series[timestampIndex];
if (
seriesValue !== undefined &&
seriesValue !== null &&
!Number.isNaN(seriesValue)
) {
const seriesYPx = u.valToPos(seriesValue, 'y');
const pixelDiff = Math.abs(seriesYPx - top);
if (pixelDiff < closestPixelDiff) {
closestPixelDiff = pixelDiff;
focusedSeriesIndex = i;
}
}
}
// If we found a focused series, return its data
if (focusedSeriesIndex > 0) {
const series = u.series[focusedSeriesIndex];
const seriesValue = u.data[focusedSeriesIndex][timestampIndex];
// Ensure we have a valid value
if (
seriesValue !== undefined &&
seriesValue !== null &&
!Number.isNaN(seriesValue)
) {
// Get color - try series stroke first, then generate based on label
let color = '#000000';
if (typeof series.stroke === 'string') {
color = series.stroke;
} else if (typeof series.fill === 'string') {
color = series.fill;
} else {
// Generate color based on series label (like the tooltip plugin does)
const seriesLabel = series.label || `Series ${focusedSeriesIndex}`;
// Detect theme mode by checking body class
const isDarkMode = !document.body.classList.contains('lightMode');
color = generateColor(
seriesLabel,
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
);
}
const preferredIndex = getPreferredSeriesIndex(u, timestampIndex, e);
if (preferredIndex > 0) {
const series = u.series[preferredIndex];
const seriesValue = u.data[preferredIndex][timestampIndex];
if (isSeriesValueValid(seriesValue)) {
const color = resolveSeriesColor(series, preferredIndex);
return {
seriesIndex: focusedSeriesIndex,
seriesName: series.label || `Series ${focusedSeriesIndex}`,
seriesIndex: preferredIndex,
seriesName: series.label || `Series ${preferredIndex}`,
value: seriesValue as number,
color,
show: series.show !== false,
isFocused: true, // This indicates it's the highlighted/bold one
isFocused: true,
};
}
}
return null;
};
export interface OnClickPluginOpts {
onClick: (
xValue: number,
@@ -137,50 +155,31 @@ function onClickPlugin(opts: OnClickPluginOpts): uPlot.Plugin {
const yValue = u.posToVal(event.offsetY, 'y');
// Get the focused/highlighted series (the one that would be bold in hover)
const focusedSeries = getFocusedSeriesAtPosition(event, u);
const focusedSeriesData = getFocusedSeriesAtPosition(event, u);
let metric = {};
const { series } = u;
const apiResult = opts.apiResponse?.data?.result || [];
const outputMetric = {
queryName: '',
inFocusOrNot: false,
};
// this is to get the metric value of the focused series
if (Array.isArray(series) && series.length > 0) {
series.forEach((item, index) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
if (item?.show && item?._focus) {
const { metric: focusedMetric, queryName } = apiResult[index - 1] || [];
metric = focusedMetric;
outputMetric.queryName = queryName;
outputMetric.inFocusOrNot = true;
}
});
}
if (!outputMetric.queryName) {
// Get the focused series data
const focusedSeriesData = getFocusedSeriesAtPosition(event, u);
// If we found a valid focused series, get its data
if (
focusedSeriesData &&
focusedSeriesData.seriesIndex <= apiResult.length
) {
const { metric: focusedMetric, queryName } =
apiResult[focusedSeriesData.seriesIndex - 1] || [];
metric = focusedMetric;
outputMetric.queryName = queryName;
outputMetric.inFocusOrNot = true;
}
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
if (
focusedSeriesData &&
focusedSeriesData.seriesIndex <= apiResult.length
) {
const { metric: focusedMetric, queryName } =
apiResult[focusedSeriesData.seriesIndex - 1] || {};
metric = focusedMetric;
outputMetric.queryName = queryName;
outputMetric.inFocusOrNot = true;
}
// Get the actual data point timestamp from the focused series
let actualDataTimestamp = xValue; // fallback to click position timestamp
if (focusedSeries) {
if (focusedSeriesData) {
// Get the data index from the focused series
const dataIndex = u.posToIdx(event.offsetX);
// Get the actual timestamp from the x-axis data (u.data[0])
@@ -209,7 +208,7 @@ function onClickPlugin(opts: OnClickPluginOpts): uPlot.Plugin {
absoluteMouseX,
absoluteMouseY,
axesData,
focusedSeries,
focusedSeriesData,
);
};
u.over.addEventListener('click', handleClick);
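A hedged sketch of using the exported helper directly, assuming `chart` is an existing uPlot instance (the wiring is illustrative):
chart.over.addEventListener('click', (e: MouseEvent): void => {
	const focused = getFocusedSeriesAtPosition(e, chart);
	if (focused) {
		// seriesName/value/color describe the series that would be highlighted at the cursor.
		console.log(focused.seriesName, focused.value, focused.color);
	}
});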

View File

@@ -38,6 +38,31 @@ function getTooltipBaseValue(
return data[index][idx];
}
function sortTooltipContentBasedOnValue(
tooltipDataObj: Record<string, UplotTooltipDataProps>,
): Record<string, UplotTooltipDataProps> {
const entries = Object.entries(tooltipDataObj);
// Separate focused and non-focused entries in a single pass
const focusedEntries: [string, UplotTooltipDataProps][] = [];
const nonFocusedEntries: [string, UplotTooltipDataProps][] = [];
for (let i = 0; i < entries.length; i++) {
const entry = entries[i];
if (entry[1].focus) {
focusedEntries.push(entry);
} else {
nonFocusedEntries.push(entry);
}
}
// Sort non-focused entries by value (descending)
nonFocusedEntries.sort((a, b) => b[1].value - a[1].value);
// Combine with focused entries on top
return Object.fromEntries(focusedEntries.concat(nonFocusedEntries));
}
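For illustration, the ordering this produces on made-up input (focused entries first, the rest sorted by value descending):
// Made-up input; only the fields used by the sort are shown, cast loosely for brevity.
const sample = ({
	'api-service': { value: 5, focus: false },
	'db-service': { value: 9, focus: false },
	'cache-service': { value: 1, focus: true },
} as unknown) as Record<string, UplotTooltipDataProps>;
// Resulting key order: 'cache-service' (focused), then 'db-service' (9), then 'api-service' (5).
Object.keys(sortTooltipContentBasedOnValue(sample));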
const generateTooltipContent = (
seriesList: any[],
data: any[],
@@ -57,23 +82,31 @@ const generateTooltipContent = (
): HTMLElement => {
const container = document.createElement('div');
container.classList.add('tooltip-container');
const overlay = document.getElementById('overlay');
let tooltipCount = 0;
let tooltipTitle = '';
const formattedData: Record<string, UplotTooltipDataProps> = {};
const duplicatedLegendLabels: Record<string, true> = {};
function sortTooltipContentBasedOnValue(
tooltipDataObj: Record<string, UplotTooltipDataProps>,
): Record<string, UplotTooltipDataProps> {
const entries = Object.entries(tooltipDataObj);
entries.sort((a, b) => b[1].value - a[1].value);
return Object.fromEntries(entries);
// Pre-build a label-to-series map for O(1) lookup instead of O(n) search
let seriesColorMap: Map<string, string> | null = null;
if (isBillingUsageGraphs && series) {
seriesColorMap = new Map();
for (let i = 0; i < series.length; i++) {
const item = series[i];
if (item.label) {
const fillColor = get(item, '_fill');
if (fillColor) {
seriesColorMap.set(item.label, fillColor);
}
}
}
}
if (Array.isArray(series) && series.length > 0) {
series.forEach((item, index) => {
for (let index = 0; index < series.length; index++) {
const item = series[index];
if (index === 0) {
if (isBillingUsageGraphs) {
tooltipTitle = dayjs(data[0][idx] * 1000)
@@ -114,15 +147,12 @@ const generateTooltipContent = (
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
);
// in case of billing graph pick colors from the series options
if (isBillingUsageGraphs) {
let clr;
series.forEach((item) => {
if (item.label === label) {
clr = get(item, '_fill');
}
});
color = clr ?? color;
// O(1) lookup instead of O(n) search for billing graph colors
if (isBillingUsageGraphs && seriesColorMap) {
const billingColor = seriesColorMap.get(label);
if (billingColor) {
color = billingColor;
}
}
let tooltipItemLabel = label;
@@ -130,10 +160,7 @@ const generateTooltipContent = (
if (Number.isFinite(value)) {
const tooltipValue = getToolTipValue(value, yAxisUnit, decimalPrecision);
const dataIngestedFormated = getToolTipValue(dataIngested);
if (
duplicatedLegendLabels[label] ||
Object.prototype.hasOwnProperty.call(formattedData, label)
) {
if (duplicatedLegendLabels[label] || label in formattedData) {
duplicatedLegendLabels[label] = true;
const tempDataObj = formattedData[label];
@@ -170,15 +197,11 @@ const generateTooltipContent = (
formattedData[tooltipItemLabel] = dataObj;
}
}
});
}
}
// Show tooltip only if at least one series has a value at the hovered timestamp
// Early return if no valid data points - avoids unnecessary DOM manipulation
if (tooltipCount <= 0) {
if (overlay && overlay.style.display === 'block') {
overlay.style.display = 'none';
}
return container;
}
@@ -187,48 +210,42 @@ const generateTooltipContent = (
UplotTooltipDataProps
> = sortTooltipContentBasedOnValue(formattedData);
const div = document.createElement('div');
div.classList.add('tooltip-content-row');
div.textContent = isHistogramGraphs ? '' : tooltipTitle;
div.classList.add('tooltip-content-header');
container.appendChild(div);
const headerDiv = document.createElement('div');
headerDiv.classList.add('tooltip-content-row', 'tooltip-content-header');
headerDiv.textContent = isHistogramGraphs ? '' : tooltipTitle;
container.appendChild(headerDiv);
const sortedKeys = Object.keys(sortedData);
// Use DocumentFragment for better performance when adding multiple elements
const fragment = document.createDocumentFragment();
if (Array.isArray(sortedKeys) && sortedKeys.length > 0) {
sortedKeys.forEach((key) => {
if (sortedData[key]) {
const { textContent, color, focus } = sortedData[key];
const div = document.createElement('div');
div.classList.add('tooltip-content-row');
div.classList.add('tooltip-content');
const squareBox = document.createElement('div');
squareBox.classList.add('pointSquare');
const sortedValues = Object.values(sortedData);
squareBox.style.borderColor = color;
for (let i = 0; i < sortedValues.length; i++) {
const { textContent, color, focus } = sortedValues[i];
const text = document.createElement('div');
text.classList.add('tooltip-data-point');
const div = document.createElement('div');
div.classList.add('tooltip-content-row', 'tooltip-content');
text.textContent = textContent;
text.style.color = color;
const squareBox = document.createElement('div');
squareBox.classList.add('pointSquare');
squareBox.style.borderColor = color;
if (focus) {
text.classList.add('focus');
} else {
text.classList.remove('focus');
}
const text = document.createElement('div');
text.classList.add('tooltip-data-point');
text.textContent = textContent;
text.style.color = color;
div.appendChild(squareBox);
div.appendChild(text);
if (focus) {
text.classList.add('focus');
}
container.appendChild(div);
}
});
div.appendChild(squareBox);
div.appendChild(text);
fragment.appendChild(div);
}
if (overlay && overlay.style.display === 'none') {
overlay.style.display = 'block';
if (fragment.hasChildNodes()) {
container.appendChild(fragment);
}
return container;
@@ -266,80 +283,152 @@ const tooltipPlugin = ({
ToolTipPluginProps): any => {
let over: HTMLElement;
let bound: HTMLElement;
let bLeft: any;
let bTop: any;
// Cache bounding box to avoid recalculating on every cursor move
let cachedBBox: DOMRect | null = null;
let isActive = false;
let overlay: HTMLElement | null = null;
// Pre-compute apiResult once
const apiResult = apiResponse?.data?.result || [];
// Sync bounds and cache the result
const syncBounds = (): void => {
const bbox = over.getBoundingClientRect();
bLeft = bbox.left;
bTop = bbox.top;
if (over) {
cachedBBox = over.getBoundingClientRect();
}
};
let overlay = document.getElementById('overlay');
// Create overlay once and reuse it
const initOverlay = (): void => {
if (!overlay) {
overlay = document.getElementById('overlay');
if (!overlay) {
overlay = document.createElement('div');
overlay.id = 'overlay';
overlay.style.cssText = 'display: none; position: absolute;';
document.body.appendChild(overlay);
}
}
};
if (!overlay) {
overlay = document.createElement('div');
overlay.id = 'overlay';
overlay.style.display = 'none';
overlay.style.position = 'absolute';
document.body.appendChild(overlay);
}
const showOverlay = (): void => {
if (overlay && overlay.style.display === 'none') {
overlay.style.display = 'block';
}
};
const apiResult = apiResponse?.data?.result || [];
const hideOverlay = (): void => {
if (overlay && overlay.style.display === 'block') {
overlay.style.display = 'none';
}
};
const plotEnter = (): void => {
isActive = true;
showOverlay();
};
const plotLeave = (): void => {
isActive = false;
hideOverlay();
};
// Cleanup function to remove event listeners
const cleanup = (): void => {
if (over) {
over.removeEventListener('mouseenter', plotEnter);
over.removeEventListener('mouseleave', plotLeave);
}
};
return {
hooks: {
init: (u: any): void => {
over = u?.over;
bound = over;
over.onmouseenter = (): void => {
if (overlay) {
overlay.style.display = 'block';
}
};
over.onmouseleave = (): void => {
if (overlay) {
overlay.style.display = 'none';
}
};
// Initialize overlay once during init
initOverlay();
// Initial bounds sync
syncBounds();
over.addEventListener('mouseenter', plotEnter);
over.addEventListener('mouseleave', plotLeave);
},
setSize: (): void => {
// Re-sync bounds when size changes
syncBounds();
},
// Cache bounding box on syncRect for better performance
syncRect: (u: any, rect: DOMRect): void => {
cachedBBox = rect;
},
setCursor: (u: {
cursor: { left: any; top: any; idx: any };
data: any[];
series: uPlot.Options['series'];
}): void => {
if (overlay) {
overlay.textContent = '';
const { left, top, idx } = u.cursor;
if (Number.isInteger(idx)) {
const anchor = { left: left + bLeft, top: top + bTop };
const content = generateTooltipContent(
apiResult,
u.data,
idx,
isDarkMode,
yAxisUnit,
decimalPrecision,
u.series,
isBillingUsageGraphs,
isHistogramGraphs,
isMergedSeries,
stackBarChart,
timezone,
colorMapping,
query,
);
if (customTooltipElement) {
content.appendChild(customTooltipElement);
}
overlay.appendChild(content);
placement(overlay, anchor, 'right', 'start', { bound });
}
if (!overlay) {
return;
}
const { left, top, idx } = u.cursor;
// Early return if not active or no valid index
if (!isActive || !Number.isInteger(idx)) {
if (isActive) {
// Clear tooltip content efficiently using replaceChildren
overlay.replaceChildren();
}
return;
}
// Use cached bounding box if available
const bbox = cachedBBox || over.getBoundingClientRect();
const anchor = {
left: left + bbox.left,
top: top + bbox.top,
};
const content = generateTooltipContent(
apiResult,
u.data,
idx,
isDarkMode,
yAxisUnit,
decimalPrecision,
u.series,
isBillingUsageGraphs,
isHistogramGraphs,
isMergedSeries,
stackBarChart,
timezone,
colorMapping,
query,
);
// Only show tooltip if there's actual content
if (content.children.length > 1) {
if (customTooltipElement) {
content.appendChild(customTooltipElement);
}
// Clear and set new content in one operation
overlay.replaceChildren(content);
placement(overlay, anchor, 'right', 'start', {
bound,
followCursor: { x: anchor.left, y: anchor.top, offset: 4 },
});
showOverlay();
} else {
hideOverlay();
}
},
destroy: (): void => {
// Cleanup on destroy
cleanup();
hideOverlay();
},
},
};

View File

@@ -16,6 +16,6 @@ export const topTracesTableColumns = [
title: 'STEP TRANSITION DURATION',
dataIndex: 'duration_ms',
key: 'duration_ms',
render: (value: string): string => getYAxisFormattedValue(value, 'ms'),
render: (value: string): string => getYAxisFormattedValue(`${value}`, 'ms'),
},
];

View File

@@ -202,11 +202,7 @@ export function DashboardProvider({
updateLocalStorageDashboardVariables,
} = useDashboardVariablesFromLocalStorage(dashboardId);
const {
getUrlVariables,
updateUrlVariable,
clearUrlVariables,
} = useVariablesFromUrl();
const { getUrlVariables, updateUrlVariable } = useVariablesFromUrl();
const updatedTimeRef = useRef<Dayjs | null>(null); // Using ref to store the updated time
const modalRef = useRef<any>(null);
@@ -218,14 +214,6 @@ export function DashboardProvider({
const [isDashboardFetching, setIsDashboardFetching] = useState<boolean>(false);
// Clear variable configs when not on dashboard pages
useEffect(() => {
const isOnDashboardPage = !!isDashboardPage || !!isDashboardWidgetPage;
if (!isOnDashboardPage) {
clearUrlVariables();
}
}, [isDashboardPage, isDashboardWidgetPage, clearUrlVariables]);
const mergeDBWithLocalStorage = (
data: Dashboard,
localStorageVariables: any,

View File

@@ -121,7 +121,6 @@ function renderWithDashboardProvider(
// Mock URL variables hook
const mockGetUrlVariables = jest.fn();
const mockUpdateUrlVariable = jest.fn();
const mockClearUrlVariables = jest.fn();
const mockSetUrlVariables = jest.fn();
jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
@@ -129,7 +128,6 @@ jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
default: jest.fn(() => ({
getUrlVariables: mockGetUrlVariables,
updateUrlVariable: mockUpdateUrlVariable,
clearUrlVariables: mockClearUrlVariables,
setUrlVariables: mockSetUrlVariables,
})),
}));
@@ -524,16 +522,6 @@ describe('Dashboard Provider - URL Variables Integration', () => {
expect(parsedVariables.environment.allSelected).toBe(false);
});
});
it('should clear URL variables when not on dashboard page', async () => {
mockUseRouteMatch.mockReturnValue(null); // Not on dashboard page
renderWithDashboardProvider('/other-page', null);
await waitFor(() => {
expect(mockClearUrlVariables).toHaveBeenCalled();
});
});
});
describe('Variable Value Normalization', () => {

View File

@@ -401,14 +401,14 @@ body {
font-size: 12px;
position: absolute;
margin: 0.5rem;
background: rgba(0, 0, 0);
background: var(--bg-ink-300);
-webkit-font-smoothing: antialiased;
color: #fff;
color: var(--bg-vanilla-100);
z-index: 10000;
// pointer-events: none;
overflow: auto;
max-height: 480px !important;
max-width: 240px !important;
max-width: 300px !important;
border-radius: 5px;
border: 1px solid rgba(255, 255, 255, 0.1);
@@ -571,6 +571,12 @@ body {
}
.lightMode {
#overlay {
color: var(--bg-ink-500);
background: var(--bg-vanilla-100);
border: 1px solid var(--bg-vanilla-300);
}
.ant-dropdown-menu {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);

View File

@@ -0,0 +1,21 @@
export interface GetSpanPercentilesProps {
start: number;
end: number;
spanDuration: number;
serviceName: string;
name: string;
resourceAttributes: Record<string, string>;
}
export interface GetSpanPercentilesResponseDataProps {
percentiles: Record<string, number>;
position: {
percentile: number;
description: string;
};
}
export interface GetSpanPercentilesResponsePayloadProps {
status: string;
data: GetSpanPercentilesResponseDataProps;
}
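For reference, a request object matching these types (all values are illustrative):
const request: GetSpanPercentilesProps = {
	start: 1640995200000,
	end: 1640998800000,
	spanDuration: 2000000000, // 2 seconds in nanoseconds
	serviceName: 'api-service',
	name: 'GET /api/orders',
	resourceAttributes: { 'k8s.cluster.name': 'test-cluster' },
};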

go.mod
View File

@@ -4,7 +4,7 @@ go 1.24.0
require (
dario.cat/mergo v1.0.1
github.com/AfterShip/clickhouse-sql-parser v0.4.11
github.com/AfterShip/clickhouse-sql-parser v0.4.16
github.com/ClickHouse/clickhouse-go/v2 v2.40.1
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd

go.sum
View File

@@ -68,6 +68,8 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/AfterShip/clickhouse-sql-parser v0.4.11 h1:fZMKAjRmgzW44+hEhF6ywi4VjFZQjJ8QrFBbgBsjmF4=
github.com/AfterShip/clickhouse-sql-parser v0.4.11/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
github.com/AfterShip/clickhouse-sql-parser v0.4.16 h1:gpl+wXclYUKT0p4+gBq22XeRYWwEoZ9f35vogqMvkLQ=
github.com/AfterShip/clickhouse-sql-parser v0.4.16/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=

View File

@@ -99,9 +99,9 @@ func transformToSpanPercentileResponse(queryResult *qbtypes.QueryRangeResponse)
return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, "no spans found matching the specified criteria")
}
description := fmt.Sprintf("faster than %.1f%% of spans", position)
description := fmt.Sprintf("slower than %.1f%% of spans", position)
if position < 50 {
description = fmt.Sprintf("slower than %.1f%% of spans", 100-position)
description = fmt.Sprintf("faster than %.1f%% of spans", 100-position)
}
return &spanpercentiletypes.SpanPercentileResponse{

View File

@@ -374,18 +374,12 @@ func (module *Module) GetOrCreateUser(ctx context.Context, user *types.User, opt
return existingUser, nil
}
newUser, err := types.NewUser(user.DisplayName, user.Email, user.Role, user.OrgID)
err = module.CreateUser(ctx, user, opts...)
if err != nil {
return nil, err
}
err = module.CreateUser(ctx, newUser, opts...)
if err != nil {
return nil, err
}
return newUser, nil
return user, nil
}
func (m *Module) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error {

View File

@@ -0,0 +1,695 @@
package queryfilterextractor
import (
"fmt"
"strings"
clickhouse "github.com/AfterShip/clickhouse-sql-parser/parser"
)
const (
// MetricNameColumn is the column name used for filtering metrics
MetricNameColumn = "metric_name"
)
// ClickHouseFilterExtractor extracts metric names and grouping keys from ClickHouse SQL queries
type ClickHouseFilterExtractor struct{}
// NewClickHouseFilterExtractor creates a new ClickHouse filter extractor
func NewClickHouseFilterExtractor() *ClickHouseFilterExtractor {
return &ClickHouseFilterExtractor{}
}
// Extract parses a ClickHouse query and extracts metric names and grouping keys
func (e *ClickHouseFilterExtractor) Extract(query string) (*FilterResult, error) {
p := clickhouse.NewParser(query)
stmts, err := p.ParseStmts()
if err != nil {
return nil, err
}
result := &FilterResult{MetricNames: []string{}, GroupByColumns: []ColumnInfo{}}
metricNames := make(map[string]bool)
// Track top-level queries for GROUP BY extraction
topLevelQueries := make(map[*clickhouse.SelectQuery]bool)
// Process all statements
for _, stmt := range stmts {
selectQuery, ok := stmt.(*clickhouse.SelectQuery)
if !ok {
continue
}
// Mark as top-level
topLevelQueries[selectQuery] = true
// Walk the AST to extract metrics
clickhouse.Walk(selectQuery, func(node clickhouse.Expr) bool {
e.fillMetricNamesFromExpr(node, metricNames)
return true // Continue traversal
})
}
// Extract GROUP BY from the top-level queries by first building a map of CTEs and
// then recursively extracting the GROUP BY from the CTEs and subqueries.
// Build CTE map for all top-level queries
cteMap := make(map[string]*clickhouse.SelectQuery)
for query := range topLevelQueries {
e.buildCTEMap(query, cteMap)
}
// Extract GROUP BY with aliases and origins from the CTEs and subqueries using recursive approach
// Use a map to handle duplicates (last ColumnInfo wins across queries)
groupByColumnsMap := make(map[string]ColumnInfo) // column name -> ColumnInfo
visited := make(map[*clickhouse.SelectQuery]bool)
for query := range topLevelQueries {
columns, err := e.extractGroupByColumns(query, cteMap, visited)
if err != nil {
return nil, err
}
for _, col := range columns {
// Last column info wins for duplicate columns across multiple queries
groupByColumnsMap[col.Name] = col
}
}
// Convert sets to slices
for metric := range metricNames {
result.MetricNames = append(result.MetricNames, metric)
}
// Build GroupByColumns from the map
for _, colInfo := range groupByColumnsMap {
result.GroupByColumns = append(result.GroupByColumns, colInfo)
}
return result, nil
}
// ========================================
// Metric Name Extraction
// ========================================
// fillMetricNamesFromExpr extracts metric names from various node types
func (e *ClickHouseFilterExtractor) fillMetricNamesFromExpr(node clickhouse.Expr, metricNames map[string]bool) {
if node == nil {
return
}
switch n := node.(type) {
case *clickhouse.BinaryOperation:
e.fillMetricFromBinaryOp(n, metricNames)
}
}
// fillMetricFromBinaryOp extracts metrics from binary operations
func (e *ClickHouseFilterExtractor) fillMetricFromBinaryOp(op *clickhouse.BinaryOperation, metricNames map[string]bool) {
// Check if left side is metric_name column
leftCol := e.getColumnName(op.LeftExpr)
rightCol := e.getColumnName(op.RightExpr)
// Handle metric_name on left side: metric_name = 'value'
if leftCol == MetricNameColumn {
e.fillMetricWithBinaryOpConditions(op, op.RightExpr, metricNames)
return
}
// Handle metric_name on right side: 'value' = metric_name
if rightCol == MetricNameColumn {
e.fillMetricWithBinaryOpConditions(op, op.LeftExpr, metricNames)
return
}
}
// fillMetricWithBinaryOpConditions extracts metric names from the value side of a binary operation
//
// Supported operators:
// - "=", "==": Extracts literal string values or values from any() function
// - "IN", "GLOBAL IN": Extracts all literal string values from the list
//
// Unsupported operators (can be added later if needed):
// - "!=", "<>", "NOT IN": Negative filters. (e.g., metric_name != 'a')
// - "LIKE", "ILIKE": Pattern matching filters
// - "NOT LIKE", "NOT ILIKE": Negative pattern matching filters
// - "OR", "AND": Boolean operators as the Walk function will automatically traverse both sides
// of OR/AND operations and extract metrics from each branch. (e.g., metric_name='a' OR metric_name='b')
func (e *ClickHouseFilterExtractor) fillMetricWithBinaryOpConditions(op *clickhouse.BinaryOperation, valueExpr clickhouse.Expr, metricNames map[string]bool) {
switch op.Operation {
case "=", "==":
// metric_name = 'value' or metric_name = any(['a', 'b'])
// Skip if value side is a function call (per spec - function-wrapped literals are ignored, CH59)
if fn, ok := valueExpr.(*clickhouse.FunctionExpr); ok {
// Only handle any() function, skip others like lowercase('cpu')
if fn.Name != nil && fn.Name.Name == "any" {
e.extractInValues(valueExpr, metricNames)
}
// Otherwise skip function-wrapped literals per spec
} else if val := e.extractStringLiteral(valueExpr); val != "" {
metricNames[val] = true
}
case "IN", "GLOBAL IN":
// metric_name IN ('a', 'b', 'c')
// GLOBAL IN behaves the same as IN for metric extraction purposes
// Skip if value side is a function call (per spec - function-wrapped literals are ignored, CH59)
if _, ok := valueExpr.(*clickhouse.FunctionExpr); !ok {
e.extractInValues(valueExpr, metricNames)
}
}
}
// extractStringLiteral extracts a string literal value from an expression
func (e *ClickHouseFilterExtractor) extractStringLiteral(expr clickhouse.Expr) string {
switch ex := expr.(type) {
case *clickhouse.StringLiteral:
return ex.Literal
}
return ""
}
// extractInValues extracts values from IN expressions
func (e *ClickHouseFilterExtractor) extractInValues(expr clickhouse.Expr, metricNames map[string]bool) {
// Find all string literals in the expression
strLits := clickhouse.FindAll(expr, func(node clickhouse.Expr) bool {
// metric_name values passed in an `in` condition will be string literals.
_, ok := node.(*clickhouse.StringLiteral)
return ok
})
for _, strLitNode := range strLits {
if strLit, ok := strLitNode.(*clickhouse.StringLiteral); ok {
// Unquote the string literal
val := e.extractStringLiteral(strLit)
if val != "" {
metricNames[val] = true
}
}
}
}
// ========================================
// GROUP BY Column Extraction
// ========================================
// extractGroupByColumns extracts the GROUP BY columns from a query
// It follows the top-down approach where outer GROUP BY overrides inner GROUP BY in subqueries and CTEs.
// Returns a slice of ColumnInfo with column names, aliases, and origins
func (e *ClickHouseFilterExtractor) extractGroupByColumns(query *clickhouse.SelectQuery, cteMap map[string]*clickhouse.SelectQuery, visited map[*clickhouse.SelectQuery]bool) ([]ColumnInfo, error) {
if visited[query] {
return nil, nil
}
// Mark this query as visited to prevent cycles
visited[query] = true
// First, check if this query has its own GROUP BY using direct field access
hasGroupBy := query.GroupBy != nil
// If this query has GROUP BY, use it (outer overrides inner)
if hasGroupBy {
// Extract GROUP BY columns
tempGroupBy := make(map[string]bool)
e.fillGroupsFromGroupByClause(query.GroupBy, tempGroupBy)
// Extract SELECT columns and their aliases from the same query level
selectAliases := e.extractSelectColumns(query)
// Build ColumnInfo array by matching GROUP BY with SELECT aliases and origins
result := []ColumnInfo{}
originVisited := make(map[*clickhouse.SelectQuery]bool)
for groupByCol := range tempGroupBy {
alias := selectAliases[groupByCol] // Will be "" if not in SELECT
// Extract originExpr by tracing back through queries
originExpr := e.extractColumnOrigin(groupByCol, query, cteMap, originVisited)
originField, err := extractCHOriginFieldFromQuery(fmt.Sprintf("SELECT %s", originExpr))
if err != nil {
return nil, err
}
result = append(result, ColumnInfo{
Name: groupByCol,
Alias: alias,
OriginExpr: originExpr,
OriginField: originField,
})
}
return result, nil
}
// If no GROUP BY in this query, follow CTE/subquery references
// It might have grouping inside the CTE/subquery
sourceQuery := e.extractSourceQuery(query, cteMap)
if sourceQuery != nil {
return e.extractGroupByColumns(sourceQuery, cteMap, visited)
}
return nil, nil
}
// fillGroupsFromGroupByClause extracts GROUP BY columns from a specific GroupByClause and fills the map with the column names
func (e *ClickHouseFilterExtractor) fillGroupsFromGroupByClause(groupByClause *clickhouse.GroupByClause, groupBy map[string]bool) {
// Extract GROUP BY expressions properly
// Find only the direct child ColumnExprList, not nested ones
// We use Find instead of FindAll to get only the first (direct child) ColumnExprList
exprListNode, foundList := clickhouse.Find(groupByClause, func(node clickhouse.Expr) bool {
_, ok := node.(*clickhouse.ColumnExprList)
return ok
})
if !foundList {
return
}
// Note: We only extract from the top-level ColumnExprList.Items to avoid extracting nested parts
// This prevents extracting 'timestamp' from 'toDate(timestamp)' - we only get 'toDate(timestamp)'
if exprList, ok := exprListNode.(*clickhouse.ColumnExprList); ok {
// Extract each expression from the list - these are top-level only
if exprList.Items != nil {
for _, item := range exprList.Items {
groupKey := e.extractColumnStrByExpr(item)
if groupKey != "" {
// Strip table alias if present (e.g., "m.region" -> "region")
groupKey = e.stripTableAlias(groupKey)
groupBy[groupKey] = true
}
}
}
}
}
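// Illustrative example for fillGroupsFromGroupByClause (hypothetical clause): given
//
//	GROUP BY m.region, toDate(timestamp)
//
// the map is filled with {"region": true, "toDate(timestamp)": true}: the table alias is
// stripped from the plain column, while the function expression is kept whole.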
// extractColumnStrByExpr extracts the complete string representation of different expression types
// Supports:
// - Ident: Simple identifier like "region" or "timestamp"
// - FunctionExpr: Function call like "toDate(timestamp)"
// - ColumnExpr: Column expression like "m.region", "toDate(timestamp)"
// - Other expression types: Return the string representation of the expression
//
// For example:
// - "region" -> "region"
// - "toDate(timestamp)" -> "toDate(timestamp)"
// - "`m.region`" -> "`m.region`"
func (e *ClickHouseFilterExtractor) extractColumnStrByExpr(expr clickhouse.Expr) string {
if expr == nil {
return ""
}
switch ex := expr.(type) {
// Ident is a simple identifier like "region" or "timestamp"
case *clickhouse.Ident:
// Handling for backticks which are native to ClickHouse and used for literal names.
// CH Parser removes the backticks from the identifier, so we need to add them back.
if ex.QuoteType == clickhouse.BackTicks {
return "`" + ex.Name + "`"
}
return ex.Name
// FunctionExpr is a function call like "toDate(timestamp)"
case *clickhouse.FunctionExpr:
// For function expressions, return the complete function call string
return ex.String()
// ColumnExpr is a column expression like "m.region", "toDate(timestamp)"
case *clickhouse.ColumnExpr:
// ColumnExpr wraps another expression - extract the underlying expression
if ex.Expr != nil {
return e.extractColumnStrByExpr(ex.Expr)
}
return ex.String()
default:
// For other expression types, return the string representation
return expr.String()
}
}
// stripTableAlias removes the table alias prefix from a column name (e.g., "m.region" -> "region"),
// but for backtick-quoted literals we preserve the full dotted name and only strip the backticks (e.g., `os.type` -> "os.type")
func (e *ClickHouseFilterExtractor) stripTableAlias(name string) string {
// Handling for backticks which are native to ClickHouse and used for literal names.
if strings.HasPrefix(name, "`") && strings.HasSuffix(name, "`") {
return strings.Trim(name, "`")
}
// split the name by dot and return the last part
parts := strings.Split(name, ".")
if len(parts) > 1 {
return parts[len(parts)-1]
}
return name
}
// getColumnName extracts column name from an expression
func (e *ClickHouseFilterExtractor) getColumnName(expr clickhouse.Expr) string {
switch ex := expr.(type) {
case *clickhouse.Ident:
return ex.Name
case *clickhouse.Path:
// Handle Path type for qualified column names like "m.metric_name"
// Extract the last field which is the column name
if len(ex.Fields) > 0 {
return ex.Fields[len(ex.Fields)-1].Name
}
return ""
}
return ""
}
// extractSourceQuery extracts the SelectQuery from FROM expressions
// Handles CTE references, subqueries, and table expressions
// For example, from the query below we try to extract the name of the source query,
// which in this case is "aggregated". Once found, we return the corresponding SelectQuery node
// from the cteMap, which acts as the source for GROUP BY extraction.
//
// WITH aggregated AS (
// SELECT region as region_alias, sum(value) AS total
// FROM metrics
// WHERE metric_name = 'cpu_usage'
// GROUP BY region
// )
// SELECT * FROM aggregated
func (e *ClickHouseFilterExtractor) extractSourceQuery(query *clickhouse.SelectQuery, cteMap map[string]*clickhouse.SelectQuery) *clickhouse.SelectQuery {
if query.From == nil {
return nil
}
// Find the FROM clause and extract the source
fromExprs := clickhouse.FindAll(query.From, func(node clickhouse.Expr) bool {
switch node.(type) {
case *clickhouse.Ident, *clickhouse.SelectQuery:
return true
}
return false
})
for _, fromExpr := range fromExprs {
switch expr := fromExpr.(type) {
case *clickhouse.Ident:
// CTE reference by simple name
if cteQuery, exists := cteMap[expr.Name]; exists {
return cteQuery
}
case *clickhouse.SelectQuery:
// Direct subquery
return expr
}
}
return nil
}
// ========================================
// Column Origin Tracing
// ========================================
// extractColumnOrigin recursively traces a column back to its original expression
// Returns the original expression string (e.g., "JSONExtractString(labels, 'service.name')")
// or the column name itself if it's a direct column reference
func (e *ClickHouseFilterExtractor) extractColumnOrigin(
columnName string,
query *clickhouse.SelectQuery,
cteMap map[string]*clickhouse.SelectQuery,
visited map[*clickhouse.SelectQuery]bool,
) string {
if query == nil {
return columnName
}
// Prevent infinite recursion and redundant work
// Once a query is visited, we don't need to check it again
if visited[query] {
return columnName
}
visited[query] = true
// The visited entry prevents infinite recursion while searching the current query,
// but it must not affect searches through other queries,
// so we remove it once the search for the current query is done
defer delete(visited, query)
// Step 1: Search in CTEs and joins; this takes us to the very end of the subqueries and CTEs
sourceQuery := e.extractSourceQuery(query, cteMap)
if sourceQuery != nil {
returningOrigin := e.extractColumnOrigin(columnName, sourceQuery, cteMap, visited)
if returningOrigin != columnName {
return returningOrigin
}
}
// Step 2: Once we're sure there are no subqueries or CTEs left, we find all the SelectItems
// and extract their column origin values
selectItems := clickhouse.FindAll(query, func(node clickhouse.Expr) bool {
_, ok := node.(*clickhouse.SelectItem)
return ok
})
// extractOriginFromSelectItem extracts the origin from a SelectItem
extractOriginFromSelectItem := func(selectItem *clickhouse.SelectItem) *string {
// Check if this SelectItem matches our column (by alias or by name)
alias := e.extractSelectItemAlias(selectItem)
exprStr := e.extractSelectItemName(selectItem)
normalizedExpr := e.stripTableAlias(exprStr)
// Case 1: Column matches an alias in SELECT
if alias == columnName {
// This is an alias - get the expression it's aliasing
if selectItem.Expr != nil {
originExpr := e.extractFullExpression(selectItem.Expr)
// If the expression is just a column name, trace it back further
if normalizedExpr == columnName || e.isSimpleColumnReference(selectItem.Expr) {
// It's referencing another column - trace back through source query
sourceQuery := e.extractSourceQuery(query, cteMap)
if sourceQuery != nil {
originExpr := e.extractColumnOrigin(normalizedExpr, sourceQuery, cteMap, visited)
return &originExpr
}
}
return &originExpr
}
}
// Case 2: Column matches the expression itself (no alias)
if normalizedExpr == columnName {
// Check if this is a simple column reference or a complex expression
if e.isSimpleColumnReference(selectItem.Expr) {
// Simple column - trace back through source query
sourceQuery := e.extractSourceQuery(query, cteMap)
if sourceQuery != nil {
originExpr := e.extractColumnOrigin(columnName, sourceQuery, cteMap, visited)
return &originExpr
}
return &columnName
} else {
// Complex expression - return it as origin
originExpr := e.extractFullExpression(selectItem.Expr)
return &originExpr
}
}
return nil
}
var finalColumnOrigin string
for _, itemNode := range selectItems {
if selectItem, ok := itemNode.(*clickhouse.SelectItem); ok {
// We call extractOriginFromSelectItem for each SelectItem and, if the origin is not nil,
// overwrite finalColumnOrigin with it. Overwriting ensures we end up with the origin
// from the most deeply nested SelectItem that matches the column.
origin := extractOriginFromSelectItem(selectItem)
if origin != nil {
finalColumnOrigin = *origin
}
}
}
if finalColumnOrigin != "" {
return finalColumnOrigin
}
return columnName
}
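// Illustrative example for extractColumnOrigin (hypothetical query): given
//
//	WITH base AS (SELECT JSONExtractString(labels, 'service.name') AS service FROM samples)
//	SELECT service FROM base GROUP BY service
//
// tracing the column "service" follows the CTE reference and returns the origin expression
// "JSONExtractString(labels, 'service.name')" rather than the alias itself.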
// extractFullExpression extracts the complete string representation of an expression
func (e *ClickHouseFilterExtractor) extractFullExpression(expr clickhouse.Expr) string {
if expr == nil {
return ""
}
return expr.String()
}
// isSimpleColumnReference checks if an expression is just a simple column reference
// (not a function call or complex expression)
func (e *ClickHouseFilterExtractor) isSimpleColumnReference(expr clickhouse.Expr) bool {
if expr == nil {
return false
}
switch ex := expr.(type) {
case *clickhouse.Ident:
// Backtick-quoted identifiers are treated as non-simple column references
// so that the origin expression is returned with its backticks;
// the origin parser will handle the backticks and extract the column name from it
if ex.QuoteType == clickhouse.BackTicks {
return false
}
return true
case *clickhouse.Path:
return true
case *clickhouse.ColumnExpr:
// Check if it wraps a simple reference
if ex.Expr != nil {
return e.isSimpleColumnReference(ex.Expr)
}
}
return false
}
// ========================================
// SELECT Column Alias Extraction
// ========================================
// extractSelectColumns extracts column names and their aliases from SELECT clause of a specific query
// Returns a map where key is normalized column name and value is the alias
// For duplicate columns with different aliases, the last alias wins
// This follows the same pattern as fillGroupsFromGroupByClause: finding direct children only
func (e *ClickHouseFilterExtractor) extractSelectColumns(query *clickhouse.SelectQuery) map[string]string {
aliasMap := make(map[string]string)
if query == nil {
return aliasMap
}
// Find SelectItem nodes which represent columns in the SELECT clause
// SelectItem has an Expr field (the column/expression) and an Alias field
selectItems := clickhouse.FindAll(query, func(node clickhouse.Expr) bool {
_, ok := node.(*clickhouse.SelectItem)
return ok
})
// Process each SelectItem and extract column name and alias
for _, itemNode := range selectItems {
if selectItem, ok := itemNode.(*clickhouse.SelectItem); ok {
// Extract the column name/expression from SelectItem.Expr
columnName := e.extractSelectItemName(selectItem)
if columnName == "" {
continue
}
// Normalize column name (strip table alias)
normalizedName := e.stripTableAlias(columnName)
// Extract alias from SelectItem.Alias
alias := e.extractSelectItemAlias(selectItem)
// Store in map - last alias wins for duplicates
aliasMap[normalizedName] = alias
}
}
return aliasMap
}
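// Illustrative example for extractSelectColumns (hypothetical query): for
//
//	SELECT m.region AS r, toDate(timestamp) AS day FROM metrics
//
// the returned map would be {"region": "r", "toDate(timestamp)": "day"}, with the table
// alias stripped from the key and the SELECT alias stored as the value.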
// extractSelectItemName extracts the column name or expression from a SelectItem
func (e *ClickHouseFilterExtractor) extractSelectItemName(selectItem *clickhouse.SelectItem) string {
if selectItem == nil || selectItem.Expr == nil {
return ""
}
return e.extractColumnStrByExpr(selectItem.Expr)
}
// extractSelectItemAlias extracts the alias from a SelectItem
// Returns empty string if no alias is present
func (e *ClickHouseFilterExtractor) extractSelectItemAlias(selectItem *clickhouse.SelectItem) string {
if selectItem == nil || selectItem.Alias == nil {
return ""
}
// The Alias field is an *Ident (pointer type)
if selectItem.Alias.Name != "" {
return selectItem.Alias.Name
}
return ""
}
// ========================================
// CTE and Subquery Extraction
// ========================================
// buildCTEMap builds a map of CTE names to their SelectQuery nodes by recursively
// traversing all queries and their nested expressions
func (e *ClickHouseFilterExtractor) buildCTEMap(query *clickhouse.SelectQuery, cteMap map[string]*clickhouse.SelectQuery) {
if query == nil {
return
}
// Access CTEs directly from WithClause if it exists
if query.With != nil && query.With.CTEs != nil {
for _, cte := range query.With.CTEs {
cteName := e.extractCTEName(cte)
cteQuery := e.extractCTEQuery(cte)
if cteName != "" && cteQuery != nil {
cteMap[cteName] = cteQuery
// Recursively build CTE map for nested CTEs
e.buildCTEMap(cteQuery, cteMap)
}
}
}
// Also check for CTEs in subqueries and other expressions
e.buildCTEMapFromExpr(query, cteMap)
}
// extractCTEName extracts the CTE name from a CTEStmt; the Expr field holds the name of the CTE
func (e *ClickHouseFilterExtractor) extractCTEName(cte *clickhouse.CTEStmt) string {
if cte == nil || cte.Expr == nil {
return ""
}
switch name := cte.Expr.(type) {
case *clickhouse.Ident:
return name.Name
default:
return cte.Expr.String()
}
}
// extractCTEQuery extracts the SelectQuery from a CTEStmt; the Alias field holds the SelectQuery
func (e *ClickHouseFilterExtractor) extractCTEQuery(cte *clickhouse.CTEStmt) *clickhouse.SelectQuery {
if cte == nil || cte.Alias == nil {
return nil
}
// The Alias field should contain a SelectQuery
if selectQuery, ok := cte.Alias.(*clickhouse.SelectQuery); ok {
return selectQuery
}
return nil
}
// buildCTEMapFromExpr recursively extracts CTEs from various expression types
func (e *ClickHouseFilterExtractor) buildCTEMapFromExpr(expr clickhouse.Expr, cteMap map[string]*clickhouse.SelectQuery) {
if expr == nil {
return
}
// Walk through all nodes to find SelectQuery nodes that might contain CTEs
clickhouse.Walk(expr, func(node clickhouse.Expr) bool {
switch n := node.(type) {
case *clickhouse.SelectQuery:
// Don't process the same query we started with to avoid infinite recursion
if n != expr {
e.buildCTEMap(n, cteMap)
}
case *clickhouse.TableExpr:
if n.Expr != nil {
e.buildCTEMapFromExpr(n.Expr, cteMap)
}
case *clickhouse.JoinTableExpr:
if n.Table != nil {
e.buildCTEMapFromExpr(n.Table, cteMap)
}
}
return true // Continue traversal
})
}
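// Illustrative example for buildCTEMap (hypothetical query): for the WITH query shown in the
// extractSourceQuery comment above, the map ends up as {"aggregated": <SelectQuery of the CTE
// body>}, which extractSourceQuery and extractColumnOrigin then use to resolve CTE references.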


@@ -0,0 +1,316 @@
package queryfilterextractor
import (
"fmt"
"strings"
"github.com/AfterShip/clickhouse-sql-parser/parser"
)
// excludedFunctions contains functions that should cause origin field extraction to return an empty string.
// Map key is the function name in lowercase, value is the original function name.
var excludedFunctions = map[string]string{
// Time functions
"now": "now",
"today": "today",
"yesterday": "yesterday",
"todatetime": "toDateTime",
"todatetime64": "toDateTime64",
"todate": "toDate",
"todate32": "toDate32",
"tostartofinterval": "toStartOfInterval",
"tostartofday": "toStartOfDay",
"tostartofweek": "toStartOfWeek",
"tostartofmonth": "toStartOfMonth",
"tostartofquarter": "toStartOfQuarter",
"tostartofyear": "toStartOfYear",
"tostartofhour": "toStartOfHour",
"tostartofminute": "toStartOfMinute",
"tostartofsecond": "toStartOfSecond",
"tostartoffiveminutes": "toStartOfFiveMinutes",
"tostartoftenminutes": "toStartOfTenMinutes",
"tostartoffifteenminutes": "toStartOfFifteenMinutes",
"tointervalsecond": "toIntervalSecond",
"tointervalminute": "toIntervalMinute",
"tointervalhour": "toIntervalHour",
"tointervalday": "toIntervalDay",
"tointervalweek": "toIntervalWeek",
"tointervalmonth": "toIntervalMonth",
"tointervalquarter": "toIntervalQuarter",
"tointervalyear": "toIntervalYear",
"parsedatetime": "parseDateTime",
"parsedatetimebesteffort": "parseDateTimeBestEffort",
// Aggregate functions
"count": "count",
"sum": "sum",
"avg": "avg",
"min": "min",
"max": "max",
"any": "any",
"stddevpop": "stddevPop",
"stddevsamp": "stddevSamp",
"varpop": "varPop",
"varsamp": "varSamp",
"grouparray": "groupArray",
"groupuniqarray": "groupUniqArray",
"quantile": "quantile",
"quantiles": "quantiles",
"quantileexact": "quantileExact",
"quantiletiming": "quantileTiming",
"median": "median",
"uniq": "uniq",
"uniqexact": "uniqExact",
"uniqcombined": "uniqCombined",
"uniqhll12": "uniqHLL12",
"topk": "topK",
"first": "first",
"last": "last",
}
// jsonExtractFunctions contains functions that extract from JSON columns.
// Map key is the function name in lowercase, value is the original function name.
var jsonExtractFunctions = map[string]string{
"jsonextractstring": "JSONExtractString",
"jsonextractint": "JSONExtractInt",
"jsonextractuint": "JSONExtractUInt",
"jsonextractfloat": "JSONExtractFloat",
"jsonextractbool": "JSONExtractBool",
"jsonextract": "JSONExtract",
"jsonextractraw": "JSONExtractRaw",
"jsonextractarrayraw": "JSONExtractArrayRaw",
"jsonextractkeysandvalues": "JSONExtractKeysAndValues",
}
// isFunctionPresentInStore checks if a function name exists in the function store map
func isFunctionPresentInStore(funcName string, funcStore map[string]string) bool {
_, exists := funcStore[strings.ToLower(funcName)]
return exists
}
// isReservedSelectKeyword checks if a keyword is a reserved keyword for the SELECT statement
// We're only including those which can appear in the SELECT statement without being quoted
func isReservedSelectKeyword(keyword string) bool {
return strings.ToUpper(keyword) == parser.KeywordSelect || strings.ToUpper(keyword) == parser.KeywordFrom
}
// extractCHOriginFieldFromQuery extracts the origin field (column name) from a query string,
// or the field being extracted in the case of JSON extraction functions.
func extractCHOriginFieldFromQuery(query string) (string, error) {
// Parse the query string
p := parser.NewParser(query)
stmts, err := p.ParseStmts()
if err != nil {
return "", err
}
if len(stmts) == 0 {
return "", fmt.Errorf("no statements found in query")
}
// Get the first statement which should be a SELECT
selectStmt, ok := stmts[0].(*parser.SelectQuery)
if !ok {
return "", fmt.Errorf("first statement is not a SELECT query")
}
// If the query has multiple select items, return an empty string; we only expect a single select item
if len(selectStmt.SelectItems) > 1 {
return "", nil
}
if len(selectStmt.SelectItems) == 0 {
return "", fmt.Errorf("SELECT query has no select items")
}
// Extract origin field from the first (and only) select item's expression
return extractOriginFieldFromExpr(selectStmt.SelectItems[0].Expr)
}
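// Illustrative examples for extractCHOriginFieldFromQuery (hypothetical inputs, mirroring the
// behaviour exercised in the tests):
//
//	extractCHOriginFieldFromQuery("SELECT JSONExtractString(labels, 'service.name')") // -> "service.name"
//	extractCHOriginFieldFromQuery("SELECT intDiv(unix_milli, 1000)")                  // -> "unix_milli"
//	extractCHOriginFieldFromQuery("SELECT cpu_usage + mem_usage")                     // -> "" (multiple columns)
//	extractCHOriginFieldFromQuery("SELECT sum(amount)")                               // -> "" (excluded aggregate)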
// extractOriginFieldFromExpr extracts the origin field (column name) from an expression.
// This is the internal helper that implements the extraction logic.
func extractOriginFieldFromExpr(expr parser.Expr) (string, error) {
if expr == nil {
return "", fmt.Errorf("expression is nil")
}
// Check if expression contains excluded functions or IF/CASE
hasExcludedExpressions := false
hasReservedKeyword := false
parser.Walk(expr, func(node parser.Expr) bool {
// Exclude reserved keywords because the parser would otherwise treat them as valid identifiers.
// Example: in `SELECT FROM table`, "FROM" is a reserved keyword,
// but the parser would still parse the statement as valid SQL
if ident, ok := node.(*parser.Ident); ok {
if ident.QuoteType == parser.Unquoted && isReservedSelectKeyword(ident.Name) {
hasReservedKeyword = true
return false
}
}
if funcExpr, ok := node.(*parser.FunctionExpr); ok {
if isFunctionPresentInStore(funcExpr.Name.Name, excludedFunctions) {
hasExcludedExpressions = true
return false
}
// Check for nested JSON extraction functions
if isFunctionPresentInStore(funcExpr.Name.Name, jsonExtractFunctions) {
// Check if any argument contains another JSON extraction function
if funcExpr.Params != nil && funcExpr.Params.Items != nil {
for _, arg := range funcExpr.Params.Items.Items {
if containsJSONExtractFunction(arg) {
hasExcludedExpressions = true
return false
}
}
}
}
}
if _, ok := node.(*parser.CaseExpr); ok {
hasExcludedExpressions = true
return false
}
return true
})
// If the expression contains reserved keywords, return error
if hasReservedKeyword {
return "", fmt.Errorf("reserved keyword found in query")
}
// If the expression contains excluded expressions, return empty string
if hasExcludedExpressions {
return "", nil
}
// Extract all column names from the expression
columns := extractColumns(expr)
// If we found exactly one unique column, return it
if len(columns) == 1 {
return columns[0], nil
}
// Multiple columns or no columns - return empty string
return "", nil
}
// containsJSONExtractFunction checks if an expression contains a JSON extraction function
func containsJSONExtractFunction(expr parser.Expr) bool {
if expr == nil {
return false
}
found := false
parser.Walk(expr, func(node parser.Expr) bool {
if funcExpr, ok := node.(*parser.FunctionExpr); ok {
if isFunctionPresentInStore(funcExpr.Name.Name, jsonExtractFunctions) {
found = true
return false
}
}
return true
})
return found
}
// extractColumns recursively extracts all unique column names from an expression.
// Note: String literals are also considered as origin fields and will be included in the result.
func extractColumns(expr parser.Expr) []string {
if expr == nil {
return nil
}
columnMap := make(map[string]bool)
extractColumnsHelper(expr, columnMap)
// Convert map to slice
columns := make([]string, 0, len(columnMap))
for col := range columnMap {
columns = append(columns, col)
}
return columns
}
// extractColumnsHelper is a recursive helper that finds all column references.
// Note: String literals are also considered as origin fields and will be added to the columnMap.
func extractColumnsHelper(expr parser.Expr, columnMap map[string]bool) {
if expr == nil {
return
}
switch n := expr.(type) {
case *parser.Ident:
// Add identifiers as column references
columnMap[n.Name] = true
case *parser.FunctionExpr:
// Special handling for JSON extraction functions
// In case of nested JSON extraction, we return blank values (handled at top level)
if isFunctionPresentInStore(n.Name.Name, jsonExtractFunctions) {
// For JSON functions, the first argument is the JSON column and the second argument is the
// path/key being extracted; the second argument is treated as the origin field
if n.Params != nil && n.Params.Items != nil && len(n.Params.Items.Items) >= 2 {
secondArg := n.Params.Items.Items[1]
// If the second argument is a string literal, use its value as the origin field
// String literals are considered as origin fields
if strLit, ok := secondArg.(*parser.StringLiteral); ok {
columnMap[strLit.Literal] = true
} else {
// Otherwise, try to extract columns from it
extractColumnsHelper(secondArg, columnMap)
}
}
return
}
// For regular functions, recursively process all arguments
// Don't mark the function name itself as a column
if n.Params != nil && n.Params.Items != nil {
for _, item := range n.Params.Items.Items {
extractColumnsHelper(item, columnMap)
}
}
case *parser.BinaryOperation:
extractColumnsHelper(n.LeftExpr, columnMap)
extractColumnsHelper(n.RightExpr, columnMap)
case *parser.ColumnExpr:
extractColumnsHelper(n.Expr, columnMap)
case *parser.CastExpr:
extractColumnsHelper(n.Expr, columnMap)
case *parser.ParamExprList:
if n.Items != nil {
extractColumnsHelper(n.Items, columnMap)
}
case *parser.ColumnExprList:
for _, item := range n.Items {
extractColumnsHelper(item, columnMap)
}
case *parser.StringLiteral:
// String literals are considered as origin fields
columnMap[n.Literal] = true
return
// Support for columns like table.column_name
case *parser.Path:
if len(n.Fields) > 0 {
extractColumnsHelper(n.Fields[len(n.Fields)-1], columnMap)
}
return
// Add more cases as needed for other expression types
default:
// For unknown types, return empty (don't extract columns)
return
}
}


@@ -0,0 +1,252 @@
package queryfilterextractor
import (
"testing"
)
func TestExtractOriginField(t *testing.T) {
tests := []struct {
name string
query string
expected string
expectError bool
}{
// JSON extraction functions - should return the second argument (JSON path/key) as origin field
{
name: "JSONExtractString simple",
query: `SELECT JSONExtractString(labels, 'service.name')`,
expected: "service.name",
},
{
name: "JSONExtractInt",
query: `SELECT JSONExtractInt(labels, 'status.code')`,
expected: "status.code",
},
{
name: "JSONExtractFloat",
query: `SELECT JSONExtractFloat(labels, 'cpu.usage')`,
expected: "cpu.usage",
},
{
name: "JSONExtractBool",
query: `SELECT JSONExtractBool(labels, 'feature.enabled')`,
expected: "feature.enabled",
},
{
name: "JSONExtractString with function wrapper",
query: `SELECT lower(JSONExtractString(labels, 'user.email'))`,
expected: "user.email",
},
{
name: "Nested JSON extraction",
query: `SELECT JSONExtractInt(JSONExtractRaw(labels, 'meta'), 'status.code')`,
expected: "", // Nested JSON extraction should return blank
},
// Nested functions - should return the deepest column
{
name: "Nested time functions with column",
query: `SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(60))`,
expected: "", // Contains toStartOfInterval and toDateTime which are excluded
},
{
name: "Division with column",
query: `SELECT unix_milli / 1000`,
expected: "unix_milli",
},
{
name: "Function with single column",
query: `SELECT lower(unix_milli)`,
expected: "unix_milli",
},
{
name: "CAST with single column",
query: `SELECT CAST(unix_milli AS String)`,
expected: "unix_milli",
},
{
name: "intDiv with single column",
query: `SELECT intDiv(unix_milli, 1000)`,
expected: "unix_milli",
},
// Multiple columns - should return blank
{
name: "Multiple columns in coalesce",
query: `SELECT (coalesce(cpu_usage, 0) + coalesce(mem_usage, 0)) / 2`,
expected: "",
},
{
name: "Multiple columns in arithmetic",
query: `SELECT cpu_usage + mem_usage`,
expected: "",
},
{
name: "Multiple columns in function",
query: `SELECT concat(first_name, last_name)`,
expected: "",
},
// IF/CASE conditions - should return blank
{
name: "IF with single column in condition",
query: `SELECT IF(error_count > 0, service, 'healthy')`,
expected: "", // Multiple columns: error_count and service
},
{
name: "IF with JSON and multiple columns",
query: `SELECT if(JSONExtractInt(metadata, 'retry.count') > 3, toLower(JSONExtractString(metadata, 'user.id')), hostname)`,
expected: "", // Multiple columns: metadata and hostname
},
{
name: "String literal should return string",
query: `SELECT 'constant'`,
expected: "constant",
},
// No columns - should return blank
{
name: "Number literal",
query: `SELECT 42`,
expected: "",
},
{
name: "Multiple literals",
query: `SELECT 'constant', 42`,
expected: "",
},
{
name: "Multiple string literals",
query: `SELECT 'constant', '42'`,
expected: "",
},
// Excluded functions - should return blank
{
name: "now() function",
query: `SELECT now()`,
expected: "",
},
{
name: "today() function",
query: `SELECT today()`,
expected: "",
},
{
name: "count aggregate",
query: `SELECT count(user_id)`,
expected: "",
},
{
name: "sum aggregate",
query: `SELECT sum(amount)`,
expected: "",
},
// Single column simple cases
{
name: "Simple column reference",
query: `SELECT user_id`,
expected: "user_id",
},
{
name: "Column with alias",
query: `SELECT user_id AS id`,
expected: "user_id",
},
{
name: "Column in arithmetic with literals (multiplication)",
query: `SELECT unix_milli * 1000`,
expected: "unix_milli",
},
// Edge cases
{
name: "Nested functions with single column deep",
query: `SELECT upper(lower(trim(column_name)))`,
expected: "column_name",
},
// Qualified column names (Path)
{
name: "Column with table prefix",
query: `SELECT table.column_name`,
expected: "column_name", // IndexOperation: extracts column name from Index field
},
{
name: "Qualified column in function",
query: `SELECT lower(table.column_name)`,
expected: "column_name",
},
{
name: "Qualified column in arithmetic",
query: `SELECT table.column_name * 100`,
expected: "column_name",
},
{
name: "Nested qualified column (schema.table.column)",
query: `SELECT schema.table.column_name`,
expected: "column_name", // Should extract the final column name
},
{
name: "Multiple qualified columns",
query: `SELECT table1.column1 + table2.column2`,
expected: "", // Multiple columns: column1 and column2
},
{
name: "Qualified column with CAST",
query: `SELECT CAST(table.column_name AS String)`,
expected: "column_name",
},
{
name: "Multiple select items - return blank",
query: `SELECT JSONExtractString(labels, 'service.name'), unix_milli / 1000, cpu_usage + mem_usage`,
expected: "",
},
// Error cases
{
name: "Empty query",
query: ``,
expectError: true,
},
{
name: "Invalid SQL syntax",
query: `SELECT FROM table`,
expectError: true,
},
{
name: "Non-SELECT statement (CREATE TABLE)",
query: `CREATE TABLE test (id Int32)`,
expectError: true,
},
{
name: "Non-SELECT statement (INSERT)",
query: `INSERT INTO test VALUES (1)`,
expectError: true,
},
{
name: "Malformed query",
query: `SELECT * FROM`,
expectError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := extractCHOriginFieldFromQuery(tt.query)
if tt.expectError {
if err == nil {
t.Errorf("ExtractOriginField() expected error but got nil, result = %q", result)
}
} else {
if err != nil {
t.Errorf("ExtractOriginField() unexpected error: %v", err)
}
if result != tt.expected {
t.Errorf("ExtractOriginField() = %q, want %q", result, tt.expected)
}
}
})
}
}

File diff suppressed because it is too large


@@ -0,0 +1,129 @@
package queryfilterextractor
import (
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql/parser"
)
// PromQLFilterExtractor extracts metric names and grouping keys from PromQL queries
type PromQLFilterExtractor struct{}
// NewPromQLFilterExtractor creates a new PromQL filter extractor
func NewPromQLFilterExtractor() *PromQLFilterExtractor {
return &PromQLFilterExtractor{}
}
// Extract parses a PromQL query and extracts metric names and grouping keys
func (e *PromQLFilterExtractor) Extract(query string) (*FilterResult, error) {
expr, err := parser.ParseExpr(query)
if err != nil {
return nil, err
}
result := &FilterResult{
MetricNames: []string{},
GroupByColumns: []ColumnInfo{},
}
// Use a visitor to traverse the AST
visitor := &promQLVisitor{
metricNames: make(map[string]bool),
groupBy: make(map[string]bool),
}
// Walk the AST
if err := parser.Walk(visitor, expr, nil); err != nil {
return result, err
}
// Convert sets to slices
for metric := range visitor.metricNames {
result.MetricNames = append(result.MetricNames, metric)
}
for groupKey := range visitor.groupBy {
result.GroupByColumns = append(result.GroupByColumns, ColumnInfo{Name: groupKey})
}
return result, nil
}
// promQLVisitor implements the parser.Visitor interface
type promQLVisitor struct {
metricNames map[string]bool
groupBy map[string]bool
// Track if we've already captured grouping from an outermost aggregation
hasOutermostGrouping bool
}
func (v *promQLVisitor) Visit(node parser.Node, path []parser.Node) (parser.Visitor, error) {
if node == nil {
return nil, nil
}
switch n := node.(type) {
case *parser.VectorSelector:
v.visitVectorSelector(n)
case *parser.AggregateExpr:
v.visitAggregateExpr(n, path)
case *parser.Call:
// Function calls may contain VectorSelectors, continue traversal
return v, nil
case *parser.BinaryExpr:
// Binary expressions may contain VectorSelectors on both sides
return v, nil
case *parser.SubqueryExpr:
// Subqueries may contain VectorSelectors
return v, nil
case *parser.ParenExpr:
// Parentheses don't change semantics, continue traversal
return v, nil
case *parser.MatrixSelector:
// Matrix selectors wrap VectorSelectors
return v, nil
}
return v, nil
}
func (v *promQLVisitor) visitVectorSelector(vs *parser.VectorSelector) {
// Check if metric name is specified directly
if vs.Name != "" {
v.metricNames[vs.Name] = true
}
// Check for __name__ label matcher
for _, matcher := range vs.LabelMatchers {
if matcher.Name == labels.MetricName {
switch matcher.Type {
case labels.MatchEqual:
v.metricNames[matcher.Value] = true
// Skip for negative filters - negative filters don't extract metric names
// case labels.MatchNotEqual, labels.MatchRegexp, labels.MatchNotRegexp:
}
}
}
}
func (v *promQLVisitor) visitAggregateExpr(ae *parser.AggregateExpr, path []parser.Node) {
// Count how many AggregateExpr nodes are in the path (excluding current node)
// This tells us the nesting level
nestingLevel := 0
for _, p := range path {
if _, ok := p.(*parser.AggregateExpr); ok {
nestingLevel++
}
}
// Only capture grouping from the outermost aggregation (nesting level 0)
if nestingLevel == 0 && !v.hasOutermostGrouping {
// If Without is true, we skip grouping per spec
if !ae.Without && len(ae.Grouping) > 0 {
v.hasOutermostGrouping = true
for _, label := range ae.Grouping {
v.groupBy[label] = true
}
}
}
// Continue traversal to find metrics in the expression
}


@@ -0,0 +1,175 @@
package queryfilterextractor
import (
"reflect"
"testing"
)
func TestPromQLFilterExtractor_Extract(t *testing.T) {
extractor := NewPromQLFilterExtractor()
tests := []struct {
name string
query string
wantMetrics []string
wantGroupByColumns []ColumnInfo
wantError bool
}{
{
name: "P1 - Simple vector selector",
query: `http_requests_total{job="api"}`,
wantMetrics: []string{"http_requests_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P2 - Function call",
query: `rate(cpu_usage_seconds_total[5m])`,
wantMetrics: []string{"cpu_usage_seconds_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P3 - Aggregation with by()",
query: `sum by (pod,region) (rate(http_requests_total[5m]))`,
wantMetrics: []string{"http_requests_total"},
wantGroupByColumns: []ColumnInfo{{Name: "pod"}, {Name: "region"}},
},
{
name: "P4 - Aggregation with without()",
query: `sum without (instance) (rate(cpu_usage_total[1m]))`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{}, // without() means no grouping keys per spec
},
{
name: "P5 - Invalid: metric name set twice",
query: `sum(rate(http_requests_total{__name__!="http_requests_error_total"}[5m]))`,
wantMetrics: []string{},
wantGroupByColumns: []ColumnInfo{},
wantError: true,
},
{
name: "P6 - Regex negative label",
query: `sum(rate(http_requests_total{status!~"5.."}[5m]))`,
wantMetrics: []string{"http_requests_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P7 - Nested aggregations",
query: `sum by (region) (max by (pod, region) (cpu_usage_total{env="prod"}))`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{{Name: "region"}}, // Only outermost grouping
},
{
name: "P7a - Nested aggregation: inner grouping ignored",
query: `sum(max by (pod) (cpu_usage_total{env="prod"}))`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{}, // Inner grouping is ignored when outer has no grouping (nestingLevel != 0 case)
},
{
name: "P8 - Arithmetic expression",
query: `(http_requests_total{job="api"} + http_errors_total{job="api"})`,
wantMetrics: []string{"http_requests_total", "http_errors_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P9 - Mix of positive metric & exclusion label",
query: `sum by (region)(rate(foo{job!="db"}[5m]))`,
wantMetrics: []string{"foo"},
wantGroupByColumns: []ColumnInfo{{Name: "region"}},
},
{
name: "P10 - Function + aggregation",
query: `histogram_quantile(0.9, sum(rate(http_request_duration_seconds_bucket[5m])) by (le))`,
wantMetrics: []string{"http_request_duration_seconds_bucket"},
wantGroupByColumns: []ColumnInfo{{Name: "le"}},
},
{
name: "P11 - Subquery",
query: `sum_over_time(cpu_usage_total[1h:5m])`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P12 - Nested aggregation inside subquery",
query: `max_over_time(sum(rate(cpu_usage_total[5m]))[1h:5m])`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P13 - Subquery with multiple metrics",
query: `avg_over_time((foo + bar)[10m:1m])`,
wantMetrics: []string{"foo", "bar"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P14 - Simple meta-metric",
query: `sum by (pod) (up)`,
wantMetrics: []string{"up"},
wantGroupByColumns: []ColumnInfo{{Name: "pod"}},
},
{
name: "P15 - Binary operator unless",
query: `sum(rate(http_requests_total[5m])) unless avg(rate(http_errors_total[5m]))`,
wantMetrics: []string{"http_requests_total", "http_errors_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P16 - Vector matching",
query: `sum(rate(foo[5m])) / ignoring(instance) group_left(job) sum(rate(bar[5m]))`,
wantMetrics: []string{"foo", "bar"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P17 - Offset modifier with aggregation",
query: `sum by (env)(rate(cpu_usage_seconds_total{job="api"}[5m] offset 1h))`,
wantMetrics: []string{"cpu_usage_seconds_total"},
wantGroupByColumns: []ColumnInfo{{Name: "env"}},
},
{
name: "P18 - Invalid syntax",
query: `sum by ((foo)(bar))(http_requests_total)`,
wantMetrics: []string{},
wantGroupByColumns: []ColumnInfo{},
wantError: true,
},
{
name: "P19 - Literal expression",
query: `2 + 3`,
wantMetrics: []string{},
wantGroupByColumns: []ColumnInfo{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := extractor.Extract(tt.query)
// Check error expectation
if tt.wantError {
if err == nil {
t.Errorf("Extract() expected error but got none, query: %s", tt.query)
}
return
}
if err != nil {
t.Errorf("Extract() unexpected error = %v, query: %s", err, tt.query)
return
}
// Sort for comparison
gotMetrics := sortStrings(result.MetricNames)
wantMetrics := sortStrings(tt.wantMetrics)
if !reflect.DeepEqual(gotMetrics, wantMetrics) {
t.Errorf("Extract() MetricNames = %v, want %v", gotMetrics, wantMetrics)
}
// Test GroupByColumns - need to normalize for comparison (order may vary)
gotGroupByColumns := sortColumnInfo(result.GroupByColumns)
wantGroupByColumns := sortColumnInfo(tt.wantGroupByColumns)
if !reflect.DeepEqual(gotGroupByColumns, wantGroupByColumns) {
t.Errorf("Extract() GroupByColumns = %v, want %v", gotGroupByColumns, wantGroupByColumns)
}
})
}
}


@@ -0,0 +1,42 @@
// Package queryfilterextractor provides utilities for extracting metric names
// and grouping keys.
//
// This is useful for metrics discovery and query analysis.
package queryfilterextractor
import "fmt"
const (
ExtractorCH = "qfe_ch"
ExtractorPromQL = "qfe_promql"
)
// ColumnInfo represents a column in the query
type ColumnInfo struct {
Name string
Alias string
OriginExpr string
OriginField string
}
type FilterResult struct {
// MetricNames are the metrics that are being filtered on
MetricNames []string
// GroupByColumns are the columns that are being grouped by
GroupByColumns []ColumnInfo
}
type FilterExtractor interface {
Extract(query string) (*FilterResult, error)
}
func NewExtractor(extractorType string) (FilterExtractor, error) {
switch extractorType {
case ExtractorCH:
return NewClickHouseFilterExtractor(), nil
case ExtractorPromQL:
return NewPromQLFilterExtractor(), nil
default:
return nil, fmt.Errorf("invalid extractor type: %s", extractorType)
}
}
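// Illustrative usage sketch (hypothetical query; error handling elided). Assuming a ClickHouse
// query that filters on metric_name and groups by region:
//
//	extractor, _ := NewExtractor(ExtractorCH)
//	result, _ := extractor.Extract("SELECT region, sum(value) FROM metrics WHERE metric_name = 'cpu_usage' GROUP BY region")
//	// result.MetricNames    -> ["cpu_usage"]
//	// result.GroupByColumns -> [{Name: "region", ...}]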


@@ -1860,6 +1860,7 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
response.DefaultTTLDays = 15
response.TTLConditions = []model.CustomRetentionRule{}
response.Status = constants.StatusFailed
response.ColdStorageTTLDays = -1
return response, nil
}
@@ -1894,6 +1895,7 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
response.ExpectedLogsTime = ttlResult.ExpectedLogsTime
response.ExpectedLogsMoveTime = ttlResult.ExpectedLogsMoveTime
response.Status = ttlResult.Status
response.ColdStorageTTLDays = -1
if ttlResult.LogsTime > 0 {
response.DefaultTTLDays = ttlResult.LogsTime / 24
}


@@ -816,10 +816,6 @@ func (aH *APIHandler) createDowntimeSchedule(w http.ResponseWriter, r *http.Requ
return
}
if len(schedule.RuleIDs) == 0 {
schedule.SilenceAll = true
}
_, err = aH.ruleManager.MaintenanceStore().CreatePlannedMaintenance(r.Context(), schedule)
if err != nil {
render.Error(w, err)
@@ -847,10 +843,6 @@ func (aH *APIHandler) editDowntimeSchedule(w http.ResponseWriter, r *http.Reques
return
}
if len(schedule.RuleIDs) == 0 {
schedule.SilenceAll = true
}
err = aH.ruleManager.MaintenanceStore().EditPlannedMaintenance(r.Context(), schedule, id)
if err != nil {
render.Error(w, err)


@@ -190,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, err := PrepareWhereClause(
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
@@ -200,7 +200,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
FullTextColumn: v.fullTextColumn,
JsonBodyPrefix: v.jsonBodyPrefix,
JsonKeyToKey: v.jsonKeyToKey,
},
}, 0, 0,
)
if err != nil {
return err


@@ -45,7 +45,7 @@ func CollisionHandledFinalExpr(
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
if err != nil {
return err
}


@@ -48,6 +48,8 @@ func (b *defaultConditionBuilder) ConditionFor(
op qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {
if key.FieldContext != telemetrytypes.FieldContextResource {


@@ -206,7 +206,7 @@ func TestConditionBuilder(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedErr != nil {

Some files were not shown because too many files have changed in this diff