Compare commits

..

25 Commits

Author SHA1 Message Date
aniketio-ctrl
2a7b18e4d4 Merge branch 'main' into fix/delete-rule 2025-10-31 13:07:09 +05:30
Yunus M
5ef0a18867 Update CODEOWNERS for frontend code (#9456) 2025-10-31 12:52:37 +05:30
SagarRajput-7
c8266d1aec fix: upgraded the axios resolution to fix vulnerability (#9454) 2025-10-31 11:53:10 +05:30
SagarRajput-7
adfd16ce1b fix: adapt the scroll reset fix in alert and histogram panels (#9322) 2025-10-30 13:31:17 +00:00
SagarRajput-7
6db74a5585 feat: allow custom precision in dashboard panels (#9054) 2025-10-30 18:50:40 +05:30
aniketio-ctrl
a3af7bf2c6 Merge branch 'main' into fix/delete-rule 2025-10-30 16:56:52 +05:30
Pandey
f8e0db0085 chore: bump golangci-lint to the latest version (#9445) 2025-10-30 11:21:35 +00:00
Shaheer Kochai
01e0b36d62 fix: overall improvements to span logs drawer empty state (i.e. trace logs empty state vs. span logs empty state + UI improvements) (#9252)
* chore: remove the applied filters in related signals drawer

* chore: make the span logs highlight color more prominent

* fix: add label to open trace logs in logs explorer button

* feat: improve the span logs empty state i.e. add support for no logs for trace_id

* refactor: refactor the span logs content and make it readable

* test: add tests for span logs

* chore: improve tests

* refactor: simplify condition

* chore: remove redundant test

* fix: make trace_id logs request only if drawer is open

* chore: fix failing tests + overall improvements

* Update frontend/src/container/SpanDetailsDrawer/__tests__/SpanDetailsDrawer.test.tsx

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

* chore: fix the failing test

* fix: fix the light mode styles for empty logs component

* chore: update the empty state copy

* chore: fix the failing tests by updating the assertions with correct empty state copy

---------

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
2025-10-29 16:20:52 +00:00
Ekansh Gupta
e90bb016f7 feat: add span percentile for traces (#8955)
* feat: add span percentile for traces

* feat: fixed merge conflicts

* feat: fixed merge conflicts

* feat: fixed merge conflicts

* feat: added span percentile

* feat: added span percentile

* feat: added test for span percentiles

* feat: added test for span percentiles

* feat: added test for span percentiles

* feat: added test for span percentiles

* feat: removed comments

* feat: moved everything to module

* feat: refactored span percentile

* feat: refactored span percentile

* feat: refactored module package

* feat: fixed tests for span percentile

* feat: refactored span percentile and changed query

* feat: refactored span percentile and changed query

* feat: refactored span percentile and changed query

* feat: refactored span percentile and changed query

* feat: added better error handling

* feat: added better error handling

* feat: addressed pr comments

* feat: addressed pr comments

* feat: renamed translator.go

* feat: added query settings

* feat: added full query test

* feat: added fingerprinting

* feat: refactored tests

* feat: refactored to use fingerprinting and changed tests

* feat: refactored to use fingerprinting and changed tests

* feat: refactored to use fingerprinting and changed tests

* feat: changed errors

* feat: removed redundant tests

* feat: removed redundant tests

* feat: moved everything to trace aggregation and updated tests

* feat: addressed comments regarding metadatastore

* feat: addressed comments regarding metadatastore

* feat: addressed comments regarding metadatastore

* feat: addressed comments for float64

* feat: cleaned up code

* feat: cleaned up code
2025-10-29 21:35:59 +05:30
Amlan Kumar Nandy
bdecbfb7f5 chore: add missing unit tests for getLegend (#9374) 2025-10-29 16:27:20 +05:30
Nageshbansal
3dced2b082 chore(costmeter): enable costmeter by default in docker installations (#9432)
* chore(costmeter): enable costmeter by default in docker installations

* chore(costmeter): enable costmeter by default in docker installations
2025-10-29 15:24:54 +05:30
aniket
66c7dc9bdc Merge branch 'fix/delete-rule' of github.com:SigNoz/signoz into fix/delete-rule 2025-10-23 01:47:03 +05:30
aniket
e0a654182e fix: added silence_all column 2025-10-23 01:33:45 +05:30
aniketio-ctrl
7df6881a52 Merge branch 'main' into fix/delete-rule 2025-10-23 01:21:56 +05:30
aniket
55d4ba3ab7 fix: added silence_all column 2025-10-23 01:20:48 +05:30
aniket
6aa9601fe4 fix: added silence_all column 2025-10-23 01:00:12 +05:30
aniket
3f11ba9409 Merge branch 'main' of github.com:SigNoz/signoz into fix/delete-rule 2025-10-21 19:51:18 +05:30
aniketio-ctrl
88ff32d0bf Merge branch 'main' into fix/delete-rule 2025-10-08 17:32:15 +05:30
aniket
0634a88d80 fix: corrected test cases 2025-10-06 14:48:08 +05:30
aniket
cebc4df68c Merge branch 'fix/delete-rule' of github.com:SigNoz/signoz into fix/delete-rule 2025-10-06 14:32:27 +05:30
aniket
1a680579a6 fix: delte rule from planned maintainance 2025-10-06 14:31:58 +05:30
aniketio-ctrl
485f032155 Merge branch 'main' into fix/delete-rule 2025-10-06 14:24:05 +05:30
aniket
150efdecf1 fix: delte rule from planned maintainance 2025-10-06 14:23:25 +05:30
aniket
f73929ee00 fix: delte rule from planned maintainance 2025-10-06 14:22:40 +05:30
aniket
13884cc753 fix: delete rule in planned maiantainance 2025-10-03 18:45:21 +05:30
77 changed files with 2810 additions and 349 deletions

2
.github/CODEOWNERS vendored
View File

@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.
/frontend/ @SigNoz/frontend @YounixM
/frontend/ @YounixM @aks07
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv

View File

@@ -1,43 +1,63 @@
version: "2"
linters:
default: standard
default: none
enable:
- bodyclose
- depguard
- errcheck
- forbidigo
- govet
- iface
- ineffassign
- misspell
- nilnil
- sloglint
- depguard
- iface
- unparam
- forbidigo
linters-settings:
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
msg-style: lowercased
key-naming-case: snake
depguard:
rules:
nozap:
deny:
- pkg: "go.uber.org/zap"
desc: "Do not use zap logger. Use slog instead."
noerrors:
deny:
- pkg: "errors"
desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead."
iface:
enable:
- identical
forbidigo:
forbid:
- fmt.Errorf
- ^(fmt\.Print.*|print|println)$
issues:
exclude-dirs:
- "pkg/query-service"
- "ee/query-service"
- "scripts/"
- unused
settings:
depguard:
rules:
noerrors:
deny:
- pkg: errors
desc: Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead.
nozap:
deny:
- pkg: go.uber.org/zap
desc: Do not use zap logger. Use slog instead.
forbidigo:
forbid:
- pattern: fmt.Errorf
- pattern: ^(fmt\.Print.*|print|println)$
iface:
enable:
- identical
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
key-naming-case: snake
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- pkg/query-service
- ee/query-service
- scripts/
- tmp/
- third_party$
- builtin$
- examples$
formatters:
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$

View File

@@ -1,3 +1,10 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -21,6 +28,10 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -66,6 +77,11 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -77,16 +93,20 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
exporters: [clickhousetraces, signozmeter]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]

View File

@@ -1,3 +1,10 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -21,6 +28,10 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -66,6 +77,11 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -77,16 +93,20 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
exporters: [clickhousetraces, signozmeter]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]

View File

@@ -69,7 +69,7 @@
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"antlr4": "4.13.2",
"axios": "1.8.2",
"axios": "1.12.0",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",

View File

@@ -0,0 +1,371 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { getYAxisFormattedValue, PrecisionOptionsEnum } from '../yAxisConfig';
const testFullPrecisionGetYAxisFormattedValue = (
value: string,
format: string,
): string => getYAxisFormattedValue(value, format, PrecisionOptionsEnum.FULL);
describe('getYAxisFormattedValue - none (full precision legacy assertions)', () => {
test('large integers and decimals', () => {
expect(testFullPrecisionGetYAxisFormattedValue('250034', 'none')).toBe(
'250034',
);
expect(
testFullPrecisionGetYAxisFormattedValue('250034897.12345', 'none'),
).toBe('250034897.12345');
expect(
testFullPrecisionGetYAxisFormattedValue('250034897.02354', 'none'),
).toBe('250034897.02354');
expect(testFullPrecisionGetYAxisFormattedValue('9999999.9999', 'none')).toBe(
'9999999.9999',
);
});
test('preserves leading zeros after decimal until first non-zero', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1.0000234', 'none')).toBe(
'1.0000234',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.00003', 'none')).toBe(
'0.00003',
);
});
test('trims to three significant decimals and removes trailing zeros', () => {
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'none'),
).toBe('0.000000250034');
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'none')).toBe(
'0.00000025',
);
// Big precision, limiting the javascript precision (~16 digits)
expect(
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'none'),
).toBe('1');
expect(
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'none'),
).toBe('1.005555555595958');
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
'0.000000001',
);
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250000', 'none'),
).toBe('0.00000025');
});
test('whole numbers normalize', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'none')).toBe('1000');
expect(testFullPrecisionGetYAxisFormattedValue('99.5458', 'none')).toBe(
'99.5458',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.234567', 'none')).toBe(
'1.234567',
);
expect(testFullPrecisionGetYAxisFormattedValue('99.998', 'none')).toBe(
'99.998',
);
});
test('strip redundant decimal zeros', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000.000', 'none')).toBe(
'1000',
);
expect(testFullPrecisionGetYAxisFormattedValue('99.500', 'none')).toBe(
'99.5',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.000', 'none')).toBe('1');
});
test('edge values', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'none')).toBe('0');
expect(testFullPrecisionGetYAxisFormattedValue('-0', 'none')).toBe('0');
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'none')).toBe('∞');
expect(testFullPrecisionGetYAxisFormattedValue('-Infinity', 'none')).toBe(
'-∞',
);
expect(testFullPrecisionGetYAxisFormattedValue('invalid', 'none')).toBe(
'NaN',
);
expect(testFullPrecisionGetYAxisFormattedValue('', 'none')).toBe('NaN');
expect(testFullPrecisionGetYAxisFormattedValue('abc123', 'none')).toBe('NaN');
});
test('small decimals keep precision as-is', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'none')).toBe(
'0.0001',
);
expect(testFullPrecisionGetYAxisFormattedValue('-0.0001', 'none')).toBe(
'-0.0001',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
'0.000000001',
);
});
test('simple decimals preserved', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.1', 'none')).toBe('0.1');
expect(testFullPrecisionGetYAxisFormattedValue('0.2', 'none')).toBe('0.2');
expect(testFullPrecisionGetYAxisFormattedValue('0.3', 'none')).toBe('0.3');
expect(testFullPrecisionGetYAxisFormattedValue('1.0000000001', 'none')).toBe(
'1.0000000001',
);
});
});
describe('getYAxisFormattedValue - units (full precision legacy assertions)', () => {
test('ms', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'ms')).toBe('1.5 s');
expect(testFullPrecisionGetYAxisFormattedValue('500', 'ms')).toBe('500 ms');
expect(testFullPrecisionGetYAxisFormattedValue('60000', 'ms')).toBe('1 min');
expect(testFullPrecisionGetYAxisFormattedValue('295.429', 'ms')).toBe(
'295.429 ms',
);
expect(testFullPrecisionGetYAxisFormattedValue('4353.81', 'ms')).toBe(
'4.35381 s',
);
});
test('s', () => {
expect(testFullPrecisionGetYAxisFormattedValue('90', 's')).toBe('1.5 mins');
expect(testFullPrecisionGetYAxisFormattedValue('30', 's')).toBe('30 s');
expect(testFullPrecisionGetYAxisFormattedValue('3600', 's')).toBe('1 hour');
});
test('m', () => {
expect(testFullPrecisionGetYAxisFormattedValue('90', 'm')).toBe('1.5 hours');
expect(testFullPrecisionGetYAxisFormattedValue('30', 'm')).toBe('30 min');
expect(testFullPrecisionGetYAxisFormattedValue('1440', 'm')).toBe('1 day');
});
test('bytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'bytes')).toBe(
'1 KiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'bytes')).toBe('512 B');
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'bytes')).toBe(
'1.5 KiB',
);
});
test('mbytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'mbytes')).toBe(
'1 GiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'mbytes')).toBe(
'512 MiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'mbytes')).toBe(
'1.5 GiB',
);
});
test('kbytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'kbytes')).toBe(
'1 MiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'kbytes')).toBe(
'512 KiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'kbytes')).toBe(
'1.5 MiB',
);
});
test('short', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'short')).toBe('1 K');
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'short')).toBe(
'1.5 K',
);
expect(testFullPrecisionGetYAxisFormattedValue('999', 'short')).toBe('999');
expect(testFullPrecisionGetYAxisFormattedValue('1000000', 'short')).toBe(
'1 Mil',
);
expect(testFullPrecisionGetYAxisFormattedValue('1555600', 'short')).toBe(
'1.5556 Mil',
);
expect(testFullPrecisionGetYAxisFormattedValue('999999', 'short')).toBe(
'999.999 K',
);
expect(testFullPrecisionGetYAxisFormattedValue('1000000000', 'short')).toBe(
'1 Bil',
);
expect(testFullPrecisionGetYAxisFormattedValue('1500000000', 'short')).toBe(
'1.5 Bil',
);
expect(testFullPrecisionGetYAxisFormattedValue('999999999', 'short')).toBe(
'999.999999 Mil',
);
});
test('percent', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.15', 'percent')).toBe(
'0.15%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.1234', 'percent')).toBe(
'0.1234%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.123499', 'percent')).toBe(
'0.123499%',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.5', 'percent')).toBe(
'1.5%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'percent')).toBe(
'0.0001%',
);
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000001', 'percent'),
).toBe('1e-9%');
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'percent'),
).toBe('0.000000250034%');
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'percent')).toBe(
'0.00000025%',
);
// Big precision, limiting the javascript precision (~16 digits)
expect(
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'percent'),
).toBe('1%');
expect(
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'percent'),
).toBe('1.005555555595958%');
});
test('ratio', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.5', 'ratio')).toBe(
'0.5 ratio',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.25', 'ratio')).toBe(
'1.25 ratio',
);
expect(testFullPrecisionGetYAxisFormattedValue('2.0', 'ratio')).toBe(
'2 ratio',
);
});
test('temperature units', () => {
expect(testFullPrecisionGetYAxisFormattedValue('25', 'celsius')).toBe(
'25 °C',
);
expect(testFullPrecisionGetYAxisFormattedValue('0', 'celsius')).toBe('0 °C');
expect(testFullPrecisionGetYAxisFormattedValue('-10', 'celsius')).toBe(
'-10 °C',
);
expect(testFullPrecisionGetYAxisFormattedValue('77', 'fahrenheit')).toBe(
'77 °F',
);
expect(testFullPrecisionGetYAxisFormattedValue('32', 'fahrenheit')).toBe(
'32 °F',
);
expect(testFullPrecisionGetYAxisFormattedValue('14', 'fahrenheit')).toBe(
'14 °F',
);
});
test('ms edge cases', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'ms')).toBe('0 ms');
expect(testFullPrecisionGetYAxisFormattedValue('-1500', 'ms')).toBe('-1.5 s');
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'ms')).toBe('∞');
});
test('bytes edge cases', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'bytes')).toBe('0 B');
expect(testFullPrecisionGetYAxisFormattedValue('-1024', 'bytes')).toBe(
'-1 KiB',
);
});
});
describe('getYAxisFormattedValue - precision option tests', () => {
test('precision 0 drops decimal part', () => {
expect(getYAxisFormattedValue('1.2345', 'none', 0)).toBe('1');
expect(getYAxisFormattedValue('0.9999', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('12345.6789', 'none', 0)).toBe('12345');
expect(getYAxisFormattedValue('0.0000123456', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('1000.000', 'none', 0)).toBe('1000');
expect(getYAxisFormattedValue('0.000000250034', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 0)).toBe('1');
// with unit
expect(getYAxisFormattedValue('4353.81', 'ms', 0)).toBe('4 s');
});
test('precision 1,2,3,4 decimals', () => {
expect(getYAxisFormattedValue('1.2345', 'none', 1)).toBe('1.2');
expect(getYAxisFormattedValue('1.2345', 'none', 2)).toBe('1.23');
expect(getYAxisFormattedValue('1.2345', 'none', 3)).toBe('1.234');
expect(getYAxisFormattedValue('1.2345', 'none', 4)).toBe('1.2345');
expect(getYAxisFormattedValue('0.0000123456', 'none', 1)).toBe('0.00001');
expect(getYAxisFormattedValue('0.0000123456', 'none', 2)).toBe('0.000012');
expect(getYAxisFormattedValue('0.0000123456', 'none', 3)).toBe('0.0000123');
expect(getYAxisFormattedValue('0.0000123456', 'none', 4)).toBe('0.00001234');
expect(getYAxisFormattedValue('1000.000', 'none', 1)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 2)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 3)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 4)).toBe('1000');
expect(getYAxisFormattedValue('0.000000250034', 'none', 1)).toBe('0.0000002');
expect(getYAxisFormattedValue('0.000000250034', 'none', 2)).toBe(
'0.00000025',
); // leading zeros + 2 significant => same trimmed
expect(getYAxisFormattedValue('0.000000250034', 'none', 3)).toBe(
'0.00000025',
);
expect(getYAxisFormattedValue('0.000000250304', 'none', 4)).toBe(
'0.0000002503',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 1)).toBe(
'1.005',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 2)).toBe(
'1.0055',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 3)).toBe(
'1.00555',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 4)).toBe(
'1.005555',
);
// with unit
expect(getYAxisFormattedValue('4353.81', 'ms', 1)).toBe('4.4 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 2)).toBe('4.35 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 3)).toBe('4.354 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 4)).toBe('4.3538 s');
// Percentages
expect(getYAxisFormattedValue('0.123456', 'percent', 2)).toBe('0.12%');
expect(getYAxisFormattedValue('0.123456', 'percent', 4)).toBe('0.1235%'); // approximation
});
test('precision full uses up to DEFAULT_SIGNIFICANT_DIGITS significant digits', () => {
expect(
getYAxisFormattedValue(
'0.00002625429914148441',
'none',
PrecisionOptionsEnum.FULL,
),
).toBe('0.000026254299141');
expect(
getYAxisFormattedValue(
'0.000026254299141484417',
's',
PrecisionOptionsEnum.FULL,
),
).toBe('26254299141484417000000 µs');
expect(
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL),
).toBe('4.35381 s');
expect(getYAxisFormattedValue('500', 'ms', PrecisionOptionsEnum.FULL)).toBe(
'500 ms',
);
});
});

View File

@@ -1,58 +1,158 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { formattedValueToString, getValueFormat } from '@grafana/data';
import * as Sentry from '@sentry/react';
import { isNaN } from 'lodash-es';
const DEFAULT_SIGNIFICANT_DIGITS = 15;
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
const MAX_DECIMALS = 15;
export enum PrecisionOptionsEnum {
ZERO = 0,
ONE = 1,
TWO = 2,
THREE = 3,
FOUR = 4,
FULL = 'full',
}
export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;
/**
* Formats a number for display, preserving leading zeros after the decimal point
* and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
* It avoids scientific notation and removes unnecessary trailing zeros.
*
* @example
* formatDecimalWithLeadingZeros(1.2345); // "1.2345"
* formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
* formatDecimalWithLeadingZeros(5.0); // "5"
*
* @param value The number to format.
* @returns The formatted string.
*/
const formatDecimalWithLeadingZeros = (
value: number,
precision: PrecisionOption,
): string => {
if (value === 0) {
return '0';
}
// Use toLocaleString to get a full decimal representation without scientific notation.
const numStr = value.toLocaleString('en-US', {
useGrouping: false,
maximumFractionDigits: 20,
});
const [integerPart, decimalPart = ''] = numStr.split('.');
// If there's no decimal part, the integer part is the result.
if (!decimalPart) {
return integerPart;
}
// Find the index of the first non-zero digit in the decimal part.
const firstNonZeroIndex = decimalPart.search(/[^0]/);
// If the decimal part consists only of zeros, return just the integer part.
if (firstNonZeroIndex === -1) {
return integerPart;
}
// Determine the number of decimals to keep: leading zeros + up to N significant digits.
const significantDigits =
precision === PrecisionOptionsEnum.FULL
? DEFAULT_SIGNIFICANT_DIGITS
: precision;
const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
// If precision is 0, we drop the decimal part entirely.
if (precision === 0) {
return integerPart;
}
// Remove any trailing zeros from the result to keep it clean.
const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
// Return the integer part, or the integer and decimal parts combined.
return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
};
/**
* Formats a Y-axis value based on a given format string.
*
* @param value The string value from the axis.
* @param format The format identifier (e.g. 'none', 'ms', 'bytes', 'short').
* @returns A formatted string ready for display.
*/
export const getYAxisFormattedValue = (
value: string,
format: string,
precision: PrecisionOption = 2, // default precision requested
): string => {
let decimalPrecision: number | undefined;
const parsedValue = getValueFormat(format)(
parseFloat(value),
undefined,
undefined,
undefined,
);
try {
const decimalSplitted = parsedValue.text.split('.');
if (decimalSplitted.length === 1) {
decimalPrecision = 0;
} else {
const decimalDigits = decimalSplitted[1].split('');
decimalPrecision = decimalDigits.length;
let nonZeroCtr = 0;
for (let idx = 0; idx < decimalDigits.length; idx += 1) {
if (decimalDigits[idx] !== '0') {
nonZeroCtr += 1;
if (nonZeroCtr >= 2) {
decimalPrecision = idx + 1;
}
} else if (nonZeroCtr) {
decimalPrecision = idx;
break;
}
}
const numValue = parseFloat(value);
// Handle non-numeric or special values first.
if (isNaN(numValue)) return 'NaN';
if (numValue === Infinity) return '∞';
if (numValue === -Infinity) return '-∞';
const decimalPlaces = value.split('.')[1]?.length || undefined;
// Use custom formatter for the 'none' format honoring precision
if (format === 'none') {
return formatDecimalWithLeadingZeros(numValue, precision);
}
// For all other standard formats, delegate to grafana/data's built-in formatter.
const computeDecimals = (): number | undefined => {
if (precision === PrecisionOptionsEnum.FULL) {
return decimalPlaces && decimalPlaces >= DEFAULT_SIGNIFICANT_DIGITS
? decimalPlaces
: DEFAULT_SIGNIFICANT_DIGITS;
}
return precision;
};
return formattedValueToString(
getValueFormat(format)(
parseFloat(value),
decimalPrecision,
undefined,
undefined,
),
);
} catch (error) {
console.error(error);
}
return `${parseFloat(value)}`;
};
const fallbackFormat = (): string => {
if (precision === PrecisionOptionsEnum.FULL) return numValue.toString();
if (precision === 0) return Math.round(numValue).toString();
return precision !== undefined
? numValue
.toFixed(precision)
.replace(/(\.[0-9]*[1-9])0+$/, '$1') // trimming zeros
.replace(/\.$/, '')
: numValue.toString();
};
export const getToolTipValue = (value: string, format?: string): string => {
try {
return formattedValueToString(
getValueFormat(format)(parseFloat(value), undefined, undefined, undefined),
);
const formatter = getValueFormat(format);
const formattedValue = formatter(numValue, computeDecimals(), undefined);
if (formattedValue.text && formattedValue.text.includes('.')) {
formattedValue.text = formatDecimalWithLeadingZeros(
parseFloat(formattedValue.text),
precision,
);
}
return formattedValueToString(formattedValue);
} catch (error) {
console.error(error);
Sentry.captureEvent({
message: `Error applying formatter: ${
error instanceof Error ? error.message : 'Unknown error'
}`,
level: 'error',
});
return fallbackFormat();
}
return `${value}`;
};
export const getToolTipValue = (
value: string | number,
format?: string,
precision?: PrecisionOption,
): string =>
getYAxisFormattedValue(value?.toString(), format || 'none', precision);

View File

@@ -60,6 +60,14 @@ function Metrics({
setElement,
} = useMultiIntersectionObserver(hostWidgetInfo.length, { threshold: 0.1 });
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const queryPayloads = useMemo(
() =>
getHostQueryPayload(
@@ -147,6 +155,13 @@ function Metrics({
maxTimeScale: graphTimeIntervals[idx].end,
onDragSelect: (start, end) => onDragSelect(start, end, idx),
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
),
[

View File

@@ -57,8 +57,8 @@ export const RawLogViewContainer = styled(Row)<{
transition: background-color 2s ease-in;`
: ''}
${({ $isCustomHighlighted, $isDarkMode, $logType }): string =>
getCustomHighlightBackground($isCustomHighlighted, $isDarkMode, $logType)}
${({ $isCustomHighlighted }): string =>
getCustomHighlightBackground($isCustomHighlighted)}
`;
export const InfoIconWrapper = styled(Info)`

View File

@@ -86,6 +86,7 @@ export const REACT_QUERY_KEY = {
SPAN_LOGS: 'SPAN_LOGS',
SPAN_BEFORE_LOGS: 'SPAN_BEFORE_LOGS',
SPAN_AFTER_LOGS: 'SPAN_AFTER_LOGS',
TRACE_ONLY_LOGS: 'TRACE_ONLY_LOGS',
// Routing Policies Query Keys
GET_ROUTING_POLICIES: 'GET_ROUTING_POLICIES',

View File

@@ -69,6 +69,13 @@ function StatusCodeBarCharts({
} = endPointStatusCodeLatencyBarChartsDataQuery;
const { startTime: minTime, endTime: maxTime } = timeRange;
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const graphRef = useRef<HTMLDivElement>(null);
const dimensions = useResizeObserver(graphRef);
@@ -207,6 +214,13 @@ function StatusCodeBarCharts({
onDragSelect,
colorMapping,
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
[
minTime,

View File

@@ -171,3 +171,30 @@
}
}
}
.lightMode {
.empty-logs-search {
&__resources-card {
background: var(--bg-vanilla-100);
border: 1px solid var(--bg-vanilla-300);
}
&__resources-title {
color: var(--bg-ink-400);
}
&__resources-description,
&__description-list,
&__subtitle {
color: var(--bg-ink-300);
}
&__title {
color: var(--bg-ink-500);
}
&__clear-filters-btn {
border: 1px dashed var(--bg-vanilla-300);
color: var(--bg-ink-400);
}
}
}

View File

@@ -108,6 +108,13 @@ function ChartPreview({
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const [graphVisibility, setGraphVisibility] = useState<boolean[]>([]);
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const { currentQuery } = useQueryBuilder();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
@@ -296,6 +303,13 @@ function ChartPreview({
setGraphsVisibilityStates: setGraphVisibility,
enhancedLegend: true,
legendPosition,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
[
yAxisUnit,

View File

@@ -48,6 +48,7 @@ function GridTableComponent({
widgetId,
panelType,
queryRangeRequest,
decimalPrecision,
...props
}: GridTableComponentProps): JSX.Element {
const { t } = useTranslation(['valueGraph']);
@@ -87,10 +88,19 @@ function GridTableComponent({
const newValue = { ...val };
Object.keys(val).forEach((k) => {
const unit = getColumnUnit(k, columnUnits);
if (unit) {
// Apply formatting if:
// 1. Column has a unit defined, OR
// 2. decimalPrecision is specified (format all values)
const shouldFormat = unit || decimalPrecision !== undefined;
if (shouldFormat) {
// the check below takes care of not adding units for rows that have n/a or null values
if (val[k] !== 'n/a' && val[k] !== null) {
newValue[k] = getYAxisFormattedValue(String(val[k]), unit);
newValue[k] = getYAxisFormattedValue(
String(val[k]),
unit || 'none',
decimalPrecision,
);
} else if (val[k] === null) {
newValue[k] = 'n/a';
}
@@ -103,7 +113,7 @@ function GridTableComponent({
return mutateDataSource;
},
[columnUnits],
[columnUnits, decimalPrecision],
);
const dataSource = useMemo(() => applyColumnUnits(originalDataSource), [

View File

@@ -1,4 +1,5 @@
import { TableProps } from 'antd';
import { PrecisionOption } from 'components/Graph/yAxisConfig';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { LogsExplorerTableProps } from 'container/LogsExplorerTable/LogsExplorerTable.interfaces';
import {
@@ -15,6 +16,7 @@ export type GridTableComponentProps = {
query: Query;
thresholds?: ThresholdProps[];
columnUnits?: ColumnUnit;
decimalPrecision?: PrecisionOption;
tableProcessedDataRef?: React.MutableRefObject<RowData[]>;
sticky?: TableProps<RowData>['sticky'];
searchTerm?: string;

View File

@@ -99,7 +99,11 @@ function GridValueComponent({
rawValue={value}
value={
yAxisUnit
? getYAxisFormattedValue(String(value), yAxisUnit)
? getYAxisFormattedValue(
String(value),
yAxisUnit,
widget?.decimalPrecision,
)
: value.toString()
}
/>

View File

@@ -115,6 +115,13 @@ function EntityMetrics<T>({
const graphRef = useRef<HTMLDivElement>(null);
const dimensions = useResizeObserver(graphRef);
const { currentQuery } = useQueryBuilder();
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const chartData = useMemo(
() =>
@@ -184,6 +191,13 @@ function EntityMetrics<T>({
maxTimeScale: graphTimeIntervals[idx].end,
onDragSelect: (start, end) => onDragSelect(start, end, idx),
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
});
}),
[

View File

@@ -83,6 +83,13 @@ function NodeMetrics({
const isDarkMode = useIsDarkMode();
const graphRef = useRef<HTMLDivElement>(null);
const dimensions = useResizeObserver(graphRef);
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const chartData = useMemo(
() => queries.map(({ data }) => getUPlotChartData(data?.payload)),
@@ -109,6 +116,13 @@ function NodeMetrics({
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value),
timezone: timezone.value,
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
),
[

View File

@@ -45,6 +45,14 @@ function PodMetrics({
};
}, [logLineTimestamp]);
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const { featureFlags } = useAppContext();
const dotMetricsEnabled =
featureFlags?.find((flag) => flag.name === FeatureKeys.DOT_METRICS_ENABLED)
@@ -91,6 +99,13 @@ function PodMetrics({
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value),
timezone: timezone.value,
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
),
[

View File

@@ -158,7 +158,8 @@
}
}
.log-scale {
.log-scale,
.decimal-precision-selector {
margin-top: 16px;
display: flex;
justify-content: space-between;

View File

@@ -192,3 +192,17 @@ export const panelTypeVsContextLinks: {
[PANEL_TYPES.TRACE]: false,
[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;
// Per-panel-type switch for showing the "Decimal Precision" selector in the
// widget right container. Enabled only for panels that render formatted
// numeric values; list/histogram/trace/empty panels do not support it.
export const panelTypeVsDecimalPrecision: {
	[key in PANEL_TYPES]: boolean;
} = {
	[PANEL_TYPES.TIME_SERIES]: true,
	[PANEL_TYPES.VALUE]: true,
	[PANEL_TYPES.TABLE]: true,
	[PANEL_TYPES.LIST]: false,
	[PANEL_TYPES.PIE]: true,
	[PANEL_TYPES.BAR]: true,
	[PANEL_TYPES.HISTOGRAM]: false,
	[PANEL_TYPES.TRACE]: false,
	[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;

View File

@@ -12,6 +12,10 @@ import {
Switch,
Typography,
} from 'antd';
import {
PrecisionOption,
PrecisionOptionsEnum,
} from 'components/Graph/yAxisConfig';
import TimePreference from 'components/TimePreferenceDropDown';
import { PANEL_TYPES, PanelDisplay } from 'constants/queryBuilder';
import GraphTypes, {
@@ -48,6 +52,7 @@ import {
panelTypeVsColumnUnitPreferences,
panelTypeVsContextLinks,
panelTypeVsCreateAlert,
panelTypeVsDecimalPrecision,
panelTypeVsFillSpan,
panelTypeVsLegendColors,
panelTypeVsLegendPosition,
@@ -95,6 +100,8 @@ function RightContainer({
selectedTime,
yAxisUnit,
setYAxisUnit,
decimalPrecision,
setDecimalPrecision,
setGraphHandler,
thresholds,
combineHistogram,
@@ -160,6 +167,7 @@ function RightContainer({
panelTypeVsColumnUnitPreferences[selectedGraph];
const allowContextLinks =
panelTypeVsContextLinks[selectedGraph] && enableDrillDown;
const allowDecimalPrecision = panelTypeVsDecimalPrecision[selectedGraph];
const { currentQuery } = useQueryBuilder();
@@ -356,6 +364,30 @@ function RightContainer({
}
/>
)}
{allowDecimalPrecision && (
<section className="decimal-precision-selector">
<Typography.Text className="typography">
Decimal Precision
</Typography.Text>
<Select
options={[
{ label: '0 decimals', value: PrecisionOptionsEnum.ZERO },
{ label: '1 decimal', value: PrecisionOptionsEnum.ONE },
{ label: '2 decimals', value: PrecisionOptionsEnum.TWO },
{ label: '3 decimals', value: PrecisionOptionsEnum.THREE },
{ label: '4 decimals', value: PrecisionOptionsEnum.FOUR },
{ label: 'Full Precision', value: PrecisionOptionsEnum.FULL },
]}
value={decimalPrecision}
style={{ width: '100%' }}
className="panel-type-select"
defaultValue={PrecisionOptionsEnum.TWO}
onChange={(val: PrecisionOption): void => setDecimalPrecision(val)}
/>
</section>
)}
{allowSoftMinMax && (
<section className="soft-min-max">
<section className="container">
@@ -553,6 +585,8 @@ interface RightContainerProps {
setBucketWidth: Dispatch<SetStateAction<number>>;
setBucketCount: Dispatch<SetStateAction<number>>;
setYAxisUnit: Dispatch<SetStateAction<string>>;
decimalPrecision: PrecisionOption;
setDecimalPrecision: Dispatch<SetStateAction<PrecisionOption>>;
setGraphHandler: (type: PANEL_TYPES) => void;
thresholds: ThresholdProps[];
setThresholds: Dispatch<SetStateAction<ThresholdProps[]>>;

View File

@@ -4,6 +4,10 @@ import './NewWidget.styles.scss';
import { WarningOutlined } from '@ant-design/icons';
import { Button, Flex, Modal, Space, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import {
PrecisionOption,
PrecisionOptionsEnum,
} from 'components/Graph/yAxisConfig';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { adjustQueryForV5 } from 'components/QueryBuilderV2/utils';
import { QueryParams } from 'constants/query';
@@ -178,6 +182,10 @@ function NewWidget({
selectedWidget?.yAxisUnit || 'none',
);
const [decimalPrecision, setDecimalPrecision] = useState<PrecisionOption>(
selectedWidget?.decimalPrecision ?? PrecisionOptionsEnum.TWO,
);
const [stackedBarChart, setStackedBarChart] = useState<boolean>(
selectedWidget?.stackedBarChart || false,
);
@@ -257,6 +265,7 @@ function NewWidget({
opacity,
nullZeroValues: selectedNullZeroValue,
yAxisUnit,
decimalPrecision,
thresholds,
softMin,
softMax,
@@ -290,6 +299,7 @@ function NewWidget({
thresholds,
title,
yAxisUnit,
decimalPrecision,
bucketWidth,
bucketCount,
combineHistogram,
@@ -493,6 +503,8 @@ function NewWidget({
title: selectedWidget?.title,
stackedBarChart: selectedWidget?.stackedBarChart || false,
yAxisUnit: selectedWidget?.yAxisUnit,
decimalPrecision:
selectedWidget?.decimalPrecision || PrecisionOptionsEnum.TWO,
panelTypes: graphType,
query: adjustedQueryForV5,
thresholds: selectedWidget?.thresholds,
@@ -522,6 +534,8 @@ function NewWidget({
title: selectedWidget?.title,
stackedBarChart: selectedWidget?.stackedBarChart || false,
yAxisUnit: selectedWidget?.yAxisUnit,
decimalPrecision:
selectedWidget?.decimalPrecision || PrecisionOptionsEnum.TWO,
panelTypes: graphType,
query: adjustedQueryForV5,
thresholds: selectedWidget?.thresholds,
@@ -836,6 +850,8 @@ function NewWidget({
setSelectedTime={setSelectedTime}
selectedTime={selectedTime}
setYAxisUnit={setYAxisUnit}
decimalPrecision={decimalPrecision}
setDecimalPrecision={setDecimalPrecision}
thresholds={thresholds}
setThresholds={setThresholds}
selectedWidget={selectedWidget}

View File

@@ -1,5 +1,6 @@
import { DefaultOptionType } from 'antd/es/select';
import { omitIdFromQuery } from 'components/ExplorerCard/utils';
import { PrecisionOptionsEnum } from 'components/Graph/yAxisConfig';
import {
initialQueryBuilderFormValuesMap,
PANEL_TYPES,
@@ -554,6 +555,7 @@ export const getDefaultWidgetData = (
softMax: null,
softMin: null,
stackedBarChart: name === PANEL_TYPES.BAR,
decimalPrecision: PrecisionOptionsEnum.TWO, // default decimal precision
selectedLogFields: defaultLogsSelectedColumns.map((field) => ({
...field,
type: field.fieldContext ?? '',

View File

@@ -26,6 +26,7 @@ function HistogramPanelWrapper({
enableDrillDown = false,
}: PanelWrapperProps): JSX.Element {
const graphRef = useRef<HTMLDivElement>(null);
const legendScrollPositionRef = useRef<number>(0);
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const isDarkMode = useIsDarkMode();
const containerDimensions = useResizeObserver(graphRef);
@@ -129,6 +130,10 @@ function HistogramPanelWrapper({
onClickHandler: enableDrillDown
? clickHandlerWithContextMenu
: onClickHandler ?? _noop,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: number) => {
legendScrollPositionRef.current = position;
},
}),
[
containerDimensions,

View File

@@ -104,6 +104,7 @@ function PiePanelWrapper({
const formattedTotal = getYAxisFormattedValue(
totalValue.toString(),
widget?.yAxisUnit || 'none',
widget?.decimalPrecision,
);
// Extract numeric part and unit separately for styling
@@ -219,6 +220,7 @@ function PiePanelWrapper({
const displayValue = getYAxisFormattedValue(
arc.data.value,
widget?.yAxisUnit || 'none',
widget?.decimalPrecision,
);
// Determine text anchor based on position in the circle

View File

@@ -40,6 +40,7 @@ function TablePanelWrapper({
enableDrillDown={enableDrillDown}
panelType={widget.panelTypes}
queryRangeRequest={queryRangeRequest}
decimalPrecision={widget.decimalPrecision}
// eslint-disable-next-line react/jsx-props-no-spreading
{...GRID_TABLE_CONFIG}
/>

View File

@@ -249,6 +249,7 @@ function UplotPanelWrapper({
}) => {
legendScrollPositionRef.current = position;
},
decimalPrecision: widget.decimalPrecision,
}),
[
queryResponse.data?.payload,

View File

@@ -27,7 +27,7 @@ describe('Value panel wrappper tests', () => {
);
// selected y axis unit as miliseconds (ms)
expect(getByText('295')).toBeInTheDocument();
expect(getByText('295.43')).toBeInTheDocument();
expect(getByText('ms')).toBeInTheDocument();
});

View File

@@ -330,7 +330,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
<div
class="line-clamped-wrapper__text"
>
431 ms
431.25 ms
</div>
</div>
</div>
@@ -368,7 +368,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
<div
class="line-clamped-wrapper__text"
>
431 ms
431.25 ms
</div>
</div>
</div>
@@ -406,7 +406,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
<div
class="line-clamped-wrapper__text"
>
287 ms
287.11 ms
</div>
</div>
</div>
@@ -444,7 +444,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
<div
class="line-clamped-wrapper__text"
>
230 ms
230.02 ms
</div>
</div>
</div>
@@ -482,7 +482,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
<div
class="line-clamped-wrapper__text"
>
66.4 ms
66.37 ms
</div>
</div>
</div>

View File

@@ -51,7 +51,7 @@ exports[`Value panel wrappper tests should render tooltip when there are conflic
class="ant-typography value-graph-text css-dev-only-do-not-override-2i2tap"
style="color: Blue; font-size: 16px;"
>
295
295.43
</span>
<span
class="ant-typography value-graph-unit css-dev-only-do-not-override-2i2tap"

View File

@@ -0,0 +1,289 @@
import { getLegend } from 'lib/dashboard/getQueryResults';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { QueryData } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { getMockQuery, getMockQueryData } from './testUtils';
const mockQueryData = getMockQueryData();
const mockQuery = getMockQuery();
const MOCK_LABEL_NAME = 'mock-label-name';
// Tests for getLegend: resolving the legend string for a series from the
// query type, aggregation aliases, explicit legends, and group-by state.
describe('getLegend', () => {
	// Clickhouse queries bypass legend resolution — label passes through.
	it('should directly return the label name for clickhouse query', () => {
		const legendsData = getLegend(
			mockQueryData,
			getMockQuery({
				queryType: EQueryType.CLICKHOUSE,
			}),
			MOCK_LABEL_NAME,
		);
		expect(legendsData).toBeDefined();
		expect(legendsData).toBe(MOCK_LABEL_NAME);
	});

	// PromQL queries also pass the label through unchanged.
	it('should directly return the label name for promql query', () => {
		const legendsData = getLegend(
			mockQueryData,
			getMockQuery({
				queryType: EQueryType.PROM,
			}),
			MOCK_LABEL_NAME,
		);
		expect(legendsData).toBeDefined();
		expect(legendsData).toBe(MOCK_LABEL_NAME);
	});

	// Single builder query + single aggregation with an alias → alias wins.
	it('should return alias when single builder query with single aggregation and alias (logs)', () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.LOGS,
						aggregations: [{ expression: "sum(bytes) as 'alias_sum'" }],
					},
				],
			},
		});
		const legendsData = getLegend(mockQueryData, payloadQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe('alias_sum');
	});

	// No alias but an explicit legend set on the query → legend wins.
	it('should return legend when single builder query with no alias but legend set (builder)', () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.LOGS,
						aggregations: [{ expression: 'count()' }],
						legend: 'custom-legend',
					},
				],
			},
		});
		const legendsData = getLegend(mockQueryData, payloadQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe('custom-legend');
	});

	// Group-by with a single aggregation → the series label is used as-is.
	it('should return label when grouped by with single aggregation (builder)', () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.LOGS,
						aggregations: [{ expression: 'count()' }],
						groupBy: [
							{ key: 'serviceName', dataType: DataTypes.String, type: 'resource' },
						],
					},
				],
			},
		});
		const legendsData = getLegend(mockQueryData, payloadQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe(MOCK_LABEL_NAME);
	});

	// Group-by with multiple aggregations → alias and label are combined.
	it("should return '<alias>-<label>' when grouped by with multiple aggregations (builder)", () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.LOGS,
						aggregations: [
							{ expression: "sum(bytes) as 'sum_b'" },
							{ expression: 'count()' },
						],
						groupBy: [
							{ key: 'serviceName', dataType: DataTypes.String, type: 'resource' },
						],
					},
				],
			},
		});
		const legendsData = getLegend(mockQueryData, payloadQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe(`sum_b-${MOCK_LABEL_NAME}`);
	});

	// Default fixture has no alias/expression → fall back to the label.
	it('should fallback to label or query name when no alias/expression', () => {
		const legendsData = getLegend(mockQueryData, mockQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe(MOCK_LABEL_NAME);
	});

	// Multiple aggregations without group-by → first matching alias only.
	it('should return alias when single query with multiple aggregations and no group by', () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.LOGS,
						aggregations: [
							{ expression: "sum(bytes) as 'total'" },
							{ expression: 'count()' },
						],
						groupBy: [],
					},
				],
			},
		});
		const legendsData = getLegend(mockQueryData, payloadQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe('total');
	});

	// With several queries, the matching query's alias is still combined
	// with the label when it has a group-by.
	it("should return '<alias>-<label>' when multiple queries with group by", () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.LOGS,
						aggregations: [
							{ expression: "sum(bytes) as 'sum_b'" },
							{ expression: 'count()' },
						],
						groupBy: [
							{ key: 'serviceName', dataType: DataTypes.String, type: 'resource' },
						],
					},
					{
						...mockQuery.builder.queryData[0],
						queryName: 'B',
						dataSource: DataSource.LOGS,
						aggregations: [{ expression: 'count()' }],
					},
				],
			},
		});
		const legendsData = getLegend(mockQueryData, payloadQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe(`sum_b-${MOCK_LABEL_NAME}`);
	});

	// metaData.index selects which aggregation of the query feeds the legend:
	// index 1 picks the second aggregation (`count()`).
	it('should return label according to the index of the query', () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.LOGS,
						aggregations: [
							{ expression: "sum(bytes) as 'sum_a'" },
							{ expression: 'count()' },
						],
						groupBy: [
							{ key: 'serviceName', dataType: DataTypes.String, type: 'resource' },
						],
					},
					{
						...mockQuery.builder.queryData[0],
						queryName: 'B',
						dataSource: DataSource.LOGS,
						aggregations: [{ expression: 'count()' }],
					},
				],
			},
		});
		const legendsData = getLegend(
			{
				...mockQueryData,
				metaData: {
					...mockQueryData.metaData,
					index: 1,
				},
			} as QueryData,
			payloadQuery,
			MOCK_LABEL_NAME,
		);
		expect(legendsData).toBe(`count()-${MOCK_LABEL_NAME}`);
	});

	// Trace-operator queries are resolved like builder queries: the alias
	// from the operator's aggregation expression combines with the label.
	it('should handle trace operator with multiple queries and group by', () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [
					{
						...mockQuery.builder.queryData[0],
						queryName: 'A',
						dataSource: DataSource.TRACES,
						aggregations: [{ expression: 'count()' }],
					},
				],
				queryTraceOperator: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.TRACES,
						aggregations: [
							{ expression: "count() as 'total_count' avg(duration_nano)" },
						],
						groupBy: [
							{ key: 'service.name', dataType: DataTypes.String, type: 'resource' },
						],
						expression: 'A',
					},
				],
			},
		});
		const legendsData = getLegend(mockQueryData, payloadQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe(`total_count-${MOCK_LABEL_NAME}`);
	});

	// A lone trace-operator query (no builder queryData) still resolves.
	it('should handle single trace operator query with group by', () => {
		const payloadQuery = getMockQuery({
			...mockQuery,
			builder: {
				...mockQuery.builder,
				queryData: [],
				queryTraceOperator: [
					{
						...mockQuery.builder.queryData[0],
						queryName: mockQueryData.queryName,
						dataSource: DataSource.TRACES,
						aggregations: [{ expression: "count() as 'total' avg(duration_nano)" }],
						groupBy: [
							{ key: 'service.name', dataType: DataTypes.String, type: 'resource' },
						],
						expression: 'A && B',
					},
				],
			},
		});
		const legendsData = getLegend(mockQueryData, payloadQuery, MOCK_LABEL_NAME);
		expect(legendsData).toBe(`total-${MOCK_LABEL_NAME}`);
	});
});

View File

@@ -0,0 +1,118 @@
import { getUplotHistogramChartOptions } from 'lib/uPlotLib/getUplotHistogramChartOptions';
import uPlot from 'uplot';
// Mock dependencies: the uPlot plugins are irrelevant to scroll-position
// behavior, so each is stubbed to return an empty plugin object.
jest.mock('lib/uPlotLib/plugins/tooltipPlugin', () => jest.fn(() => ({})));
jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => jest.fn(() => ({})));

// Minimal query-range API response: one series with a single point,
// shaped to satisfy getUplotHistogramChartOptions.
const mockApiResponse = {
	data: {
		result: [
			{
				metric: { __name__: 'test_metric' },
				queryName: 'test_query',
				values: [[1640995200, '10'] as [number, string]],
			},
		],
		resultType: 'time_series',
		newResult: {
			data: {
				result: [],
				resultType: 'time_series',
			},
		},
	},
};

// Fixed chart dimensions and a single-bucket aligned data set.
const mockDimensions = { width: 800, height: 400 };
const mockHistogramData: uPlot.AlignedData = [[1640995200], [10]];
const TEST_HISTOGRAM_ID = 'test-histogram';
// Verifies that the histogram chart options wire up legend scroll-position
// persistence: a scroll listener is attached in the `ready` hook, and a
// previously saved position is restored via requestAnimationFrame.
describe('Histogram Chart Options Legend Scroll Position', () => {
	// requestAnimationFrame is patched per-test; keep the original to restore.
	let originalRequestAnimationFrame: typeof global.requestAnimationFrame;

	beforeEach(() => {
		jest.clearAllMocks();
		originalRequestAnimationFrame = global.requestAnimationFrame;
	});

	afterEach(() => {
		global.requestAnimationFrame = originalRequestAnimationFrame;
	});

	it('should set up scroll position tracking in histogram chart ready hook', () => {
		const mockSetScrollPosition = jest.fn();
		const options = getUplotHistogramChartOptions({
			id: TEST_HISTOGRAM_ID,
			dimensions: mockDimensions,
			isDarkMode: false,
			apiResponse: mockApiResponse,
			histogramData: mockHistogramData,
			legendScrollPosition: 0,
			setLegendScrollPosition: mockSetScrollPosition,
		});

		// Create mock chart with legend element
		const mockChart = ({
			root: document.createElement('div'),
		} as unknown) as uPlot;
		const legend = document.createElement('div');
		legend.className = 'u-legend';
		mockChart.root.appendChild(legend);

		const addEventListenerSpy = jest.spyOn(legend, 'addEventListener');

		// Execute ready hook
		if (options.hooks?.ready) {
			options.hooks.ready.forEach((hook) => hook?.(mockChart));
		}

		// Verify that scroll event listener was added and cleanup function was stored
		expect(addEventListenerSpy).toHaveBeenCalledWith(
			'scroll',
			expect.any(Function),
		);
		expect(
			(mockChart as uPlot & { _legendScrollCleanup?: () => void })
				._legendScrollCleanup,
		).toBeDefined();
	});

	it('should restore histogram chart scroll position when provided', () => {
		const mockScrollPosition = 50;
		const mockSetScrollPosition = jest.fn();
		const options = getUplotHistogramChartOptions({
			id: TEST_HISTOGRAM_ID,
			dimensions: mockDimensions,
			isDarkMode: false,
			apiResponse: mockApiResponse,
			histogramData: mockHistogramData,
			legendScrollPosition: mockScrollPosition,
			setLegendScrollPosition: mockSetScrollPosition,
		});

		// Create mock chart with legend element
		const mockChart = ({
			root: document.createElement('div'),
		} as unknown) as uPlot;
		const legend = document.createElement('div');
		legend.className = 'u-legend';
		legend.scrollTop = 0;
		mockChart.root.appendChild(legend);

		// Mock requestAnimationFrame so restoration runs synchronously.
		const mockRequestAnimationFrame = jest.fn((callback) => callback());
		global.requestAnimationFrame = mockRequestAnimationFrame;

		// Execute ready hook
		if (options.hooks?.ready) {
			options.hooks.ready.forEach((hook) => hook?.(mockChart));
		}

		// Verify that requestAnimationFrame was called and scroll position was restored
		expect(mockRequestAnimationFrame).toHaveBeenCalledWith(expect.any(Function));
		expect(legend.scrollTop).toBe(mockScrollPosition);
	});
});

View File

@@ -0,0 +1,36 @@
import { initialQueryState } from 'constants/queryBuilder';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { QueryData } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';
/**
 * Builds a fully-populated QueryData fixture for legend tests.
 *
 * All series/value arrays are empty; string fields use recognizable
 * `test-*` placeholders. `metaData.index` is 0 so the fixture maps to the
 * first builder query/aggregation by default.
 */
export function getMockQueryData(): QueryData {
	return {
		lowerBoundSeries: [],
		upperBoundSeries: [],
		predictedSeries: [],
		anomalyScores: [],
		metric: {},
		queryName: 'test-query-name',
		legend: 'test-legend',
		values: [],
		quantity: [],
		unit: 'test-unit',
		table: {
			rows: [],
			columns: [],
		},
		metaData: {
			alias: 'test-alias',
			index: 0,
			queryName: 'test-query-name',
		},
	};
}
/**
 * Returns a Query fixture rooted at `initialQueryState`, using the query
 * builder query type; any `overrides` are shallow-merged on top.
 */
export function getMockQuery(overrides?: Partial<Query>): Query {
	const base: Query = {
		...initialQueryState,
		queryType: EQueryType.QUERY_BUILDER,
	};
	return { ...base, ...overrides };
}

View File

@@ -12,7 +12,9 @@ import {
PANEL_TYPES,
} from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch';
import LogsError from 'container/LogsError/LogsError';
import { EmptyLogsListConfig } from 'container/LogsExplorerList/utils';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { getOperatorValue } from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
@@ -30,8 +32,6 @@ import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { v4 as uuid } from 'uuid';
import { useSpanContextLogs } from './useSpanContextLogs';
interface SpanLogsProps {
traceId: string;
spanId: string;
@@ -39,29 +39,29 @@ interface SpanLogsProps {
startTime: number;
endTime: number;
};
logs: ILog[];
isLoading: boolean;
isError: boolean;
isFetching: boolean;
isLogSpanRelated: (logId: string) => boolean;
handleExplorerPageRedirect: () => void;
emptyStateConfig?: EmptyLogsListConfig;
}
function SpanLogs({
traceId,
spanId,
timeRange,
logs,
isLoading,
isError,
isFetching,
isLogSpanRelated,
handleExplorerPageRedirect,
emptyStateConfig,
}: SpanLogsProps): JSX.Element {
const { updateAllQueriesOperators } = useQueryBuilder();
const {
logs,
isLoading,
isError,
isFetching,
isLogSpanRelated,
} = useSpanContextLogs({
traceId,
spanId,
timeRange,
});
// Create trace_id and span_id filters for logs explorer navigation
const createLogsFilter = useCallback(
(targetSpanId: string): TagFilter => {
@@ -236,9 +236,7 @@ function SpanLogs({
<img src="/Icons/no-data.svg" alt="no-data" className="no-data-img" />
<Typography.Text className="no-data-text-1">
No logs found for selected span.
<span className="no-data-text-2">
Try viewing logs for the current trace.
</span>
<span className="no-data-text-2">View logs for the current trace.</span>
</Typography.Text>
</section>
<section className="action-section">
@@ -249,24 +247,45 @@ function SpanLogs({
onClick={handleExplorerPageRedirect}
size="md"
>
Log Explorer
View Logs
</Button>
</section>
</div>
);
const renderSpanLogsContent = (): JSX.Element | null => {
if (isLoading || isFetching) {
return <LogsLoading />;
}
if (isError) {
return <LogsError />;
}
if (logs.length === 0) {
if (emptyStateConfig) {
return (
<EmptyLogsSearch
dataSource={DataSource.LOGS}
panelType="LIST"
customMessage={emptyStateConfig}
/>
);
}
return renderNoLogsFound();
}
return renderContent;
};
return (
<div className={cx('span-logs', { 'span-logs-empty': logs.length === 0 })}>
{(isLoading || isFetching) && <LogsLoading />}
{!isLoading &&
!isFetching &&
!isError &&
logs.length === 0 &&
renderNoLogsFound()}
{isError && !isLoading && !isFetching && <LogsError />}
{!isLoading && !isFetching && !isError && logs.length > 0 && renderContent}
{renderSpanLogsContent()}
</div>
);
}
SpanLogs.defaultProps = {
emptyStateConfig: undefined,
};
export default SpanLogs;

View File

@@ -0,0 +1,214 @@
import { getEmptyLogsListConfig } from 'container/LogsExplorerList/utils';
import { server } from 'mocks-server/server';
import { render, screen, userEvent } from 'tests/test-utils';
import SpanLogs from '../SpanLogs';
// Mock external dependencies
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: (): any => ({
updateAllQueriesOperators: jest.fn().mockReturnValue({
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
aggregateOperator: 'noop',
filter: { expression: "trace_id = 'test-trace-id'" },
expression: 'A',
disabled: false,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
groupBy: [],
limit: null,
having: [],
},
],
queryFormulas: [],
},
queryType: 'builder',
}),
}),
}));
// Mock window.open
const mockWindowOpen = jest.fn();
Object.defineProperty(window, 'open', {
writable: true,
value: mockWindowOpen,
});
// Mock Virtuoso to avoid complex virtualization
jest.mock('react-virtuoso', () => ({
Virtuoso: jest.fn(({ data, itemContent }: any) => (
<div data-testid="virtuoso">
{data?.map((item: any, index: number) => (
<div key={item.id || index} data-testid={`log-item-${item.id}`}>
{itemContent(index, item)}
</div>
))}
</div>
)),
}));
// Mock RawLogView component
jest.mock(
'components/Logs/RawLogView',
() =>
function MockRawLogView({
data,
onLogClick,
isHighlighted,
helpTooltip,
}: any): JSX.Element {
return (
<button
type="button"
data-testid={`raw-log-${data.id}`}
className={isHighlighted ? 'log-highlighted' : 'log-context'}
title={helpTooltip}
onClick={(e): void => onLogClick?.(data, e)}
>
<div>{data.body}</div>
<div>{data.timestamp}</div>
</button>
);
},
);
// Mock PreferenceContextProvider
jest.mock('providers/preferences/context/PreferenceContextProvider', () => ({
PreferenceContextProvider: ({ children }: any): JSX.Element => (
<div>{children}</div>
),
}));
// Mock OverlayScrollbar
jest.mock('components/OverlayScrollbar/OverlayScrollbar', () => ({
default: ({ children }: any): JSX.Element => (
<div data-testid="overlay-scrollbar">{children}</div>
),
}));
// Mock LogsLoading component
jest.mock('container/LogsLoading/LogsLoading', () => ({
LogsLoading: function MockLogsLoading(): JSX.Element {
return <div data-testid="logs-loading">Loading logs...</div>;
},
}));
// Mock LogsError component
jest.mock(
'container/LogsError/LogsError',
() =>
function MockLogsError(): JSX.Element {
return <div data-testid="logs-error">Error loading logs</div>;
},
);
// Don't mock EmptyLogsSearch - test the actual component behavior
const TEST_TRACE_ID = 'test-trace-id';
const TEST_SPAN_ID = 'test-span-id';
const defaultProps = {
traceId: TEST_TRACE_ID,
spanId: TEST_SPAN_ID,
timeRange: {
startTime: 1640995200000,
endTime: 1640995260000,
},
logs: [],
isLoading: false,
isError: false,
isFetching: false,
isLogSpanRelated: jest.fn().mockReturnValue(false),
handleExplorerPageRedirect: jest.fn(),
};
// Covers the SpanLogs empty states: the simple "no logs for span" state,
// the enhanced trace-level empty state driven by emptyStateConfig, and the
// explorer-redirect button wiring.
describe('SpanLogs', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		mockWindowOpen.mockClear();
	});

	afterEach(() => {
		server.resetHandlers();
	});

	it('should show simple empty state when emptyStateConfig is not provided', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<SpanLogs {...defaultProps} />);

		// Should show simple empty state (no emptyStateConfig provided)
		expect(
			screen.getByText('No logs found for selected span.'),
		).toBeInTheDocument();
		expect(
			screen.getByText('View logs for the current trace.'),
		).toBeInTheDocument();
		expect(
			screen.getByRole('button', {
				name: /view logs/i,
			}),
		).toBeInTheDocument();

		// Should NOT show enhanced empty state
		expect(screen.queryByTestId('empty-logs-search')).not.toBeInTheDocument();
		expect(screen.queryByTestId('documentation-links')).not.toBeInTheDocument();
	});

	it('should show enhanced empty state when entire trace has no logs', () => {
		render(
			<SpanLogs
				// eslint-disable-next-line react/jsx-props-no-spreading
				{...defaultProps}
				emptyStateConfig={getEmptyLogsListConfig(jest.fn())}
			/>,
		);

		// Should show enhanced empty state with custom message
		expect(screen.getByText('No logs found for this trace.')).toBeInTheDocument();
		expect(screen.getByText('This could be because :')).toBeInTheDocument();

		// Should show description list
		expect(
			screen.getByText('Logs are not linked to Traces.'),
		).toBeInTheDocument();
		expect(
			screen.getByText('Logs are not being sent to SigNoz.'),
		).toBeInTheDocument();
		expect(
			screen.getByText('No logs are associated with this particular trace/span.'),
		).toBeInTheDocument();

		// Should show documentation links
		expect(screen.getByText('RESOURCES')).toBeInTheDocument();
		expect(screen.getByText('Sending logs to SigNoz')).toBeInTheDocument();
		expect(screen.getByText('Correlate traces and logs')).toBeInTheDocument();

		// Should NOT show simple empty state
		expect(
			screen.queryByText('No logs found for selected span.'),
		).not.toBeInTheDocument();
	});

	it('should call handleExplorerPageRedirect when Log Explorer button is clicked', async () => {
		// pointerEventsCheck disabled: antd buttons can report pointer-events:none in jsdom.
		const user = userEvent.setup({ pointerEventsCheck: 0 });
		const mockHandleExplorerPageRedirect = jest.fn();

		render(
			<SpanLogs
				// eslint-disable-next-line react/jsx-props-no-spreading
				{...defaultProps}
				handleExplorerPageRedirect={mockHandleExplorerPageRedirect}
			/>,
		);

		const logExplorerButton = screen.getByRole('button', {
			name: /view logs/i,
		});
		await user.click(logExplorerButton);

		expect(mockHandleExplorerPageRedirect).toHaveBeenCalledTimes(1);
	});
});

View File

@@ -85,7 +85,7 @@ export const getTraceOnlyFilters = (traceId: string): TagFilter => ({
type: '',
key: 'trace_id',
},
op: 'in',
op: '=',
value: traceId,
},
],

View File

@@ -11,7 +11,7 @@ import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Filter } from 'types/api/v5/queryRange';
import { v4 as uuid } from 'uuid';
import { getSpanLogsQueryPayload } from './constants';
import { getSpanLogsQueryPayload, getTraceOnlyFilters } from './constants';
interface UseSpanContextLogsProps {
traceId: string;
@@ -20,6 +20,7 @@ interface UseSpanContextLogsProps {
startTime: number;
endTime: number;
};
isDrawerOpen?: boolean;
}
interface UseSpanContextLogsReturn {
@@ -29,6 +30,7 @@ interface UseSpanContextLogsReturn {
isFetching: boolean;
spanLogIds: Set<string>;
isLogSpanRelated: (logId: string) => boolean;
hasTraceIdLogs: boolean;
}
const traceIdKey = {
@@ -110,6 +112,7 @@ export const useSpanContextLogs = ({
traceId,
spanId,
timeRange,
isDrawerOpen = true,
}: UseSpanContextLogsProps): UseSpanContextLogsReturn => {
const [allLogs, setAllLogs] = useState<ILog[]>([]);
const [spanLogIds, setSpanLogIds] = useState<Set<string>>(new Set());
@@ -264,6 +267,43 @@ export const useSpanContextLogs = ({
setAllLogs(combined);
}, [beforeLogs, spanLogs, afterLogs]);
// Phase 4: Check for trace_id-only logs when span has no logs
// This helps differentiate between "no logs for span" vs "no logs for trace"
const traceOnlyFilter = useMemo(() => {
if (spanLogs.length > 0) return null;
const filters = getTraceOnlyFilters(traceId);
return convertFiltersToExpression(filters);
}, [traceId, spanLogs.length]);
const traceOnlyQueryPayload = useMemo(() => {
if (!traceOnlyFilter) return null;
return getSpanLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
traceOnlyFilter,
);
}, [timeRange.startTime, timeRange.endTime, traceOnlyFilter]);
const { data: traceOnlyData } = useQuery({
queryKey: [
REACT_QUERY_KEY.TRACE_ONLY_LOGS,
traceId,
timeRange.startTime,
timeRange.endTime,
],
queryFn: () =>
GetMetricQueryRange(traceOnlyQueryPayload as any, ENTITY_VERSION_V5),
enabled: isDrawerOpen && !!traceOnlyQueryPayload && spanLogs.length === 0,
staleTime: FIVE_MINUTES_IN_MS,
});
const hasTraceIdLogs = useMemo(() => {
if (spanLogs.length > 0) return true;
return !!(
traceOnlyData?.payload?.data?.newResult?.data?.result?.[0]?.list?.length || 0
);
}, [spanLogs.length, traceOnlyData]);
// Helper function to check if a log belongs to the span
const isLogSpanRelated = useCallback(
(logId: string): boolean => spanLogIds.has(logId),
@@ -277,5 +317,6 @@ export const useSpanContextLogs = ({
isFetching: isSpanFetching || isBeforeFetching || isAfterFetching,
spanLogIds,
isLogSpanRelated,
hasTraceIdLogs,
};
};

View File

@@ -37,7 +37,8 @@
align-items: center;
justify-content: space-between;
.open-in-explorer {
width: 30px;
display: flex;
align-items: center;
height: 30px;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);

View File

@@ -11,39 +11,20 @@ import {
initialQueryState,
} from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { getEmptyLogsListConfig } from 'container/LogsExplorerList/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { Compass, X } from 'lucide-react';
import { useCallback, useMemo, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import { Span } from 'types/api/trace/getTraceV2';
import { LogsAggregatorOperator } from 'types/common/queryBuilder';
import { RelatedSignalsViews } from '../constants';
import SpanLogs from '../SpanLogs/SpanLogs';
import { useSpanContextLogs } from '../SpanLogs/useSpanContextLogs';
const FIVE_MINUTES_IN_MS = 5 * 60 * 1000;
interface AppliedFiltersProps {
filters: TagFilterItem[];
}
function AppliedFilters({ filters }: AppliedFiltersProps): JSX.Element {
return (
<div className="span-related-signals-drawer__applied-filters">
<div className="span-related-signals-drawer__filters-list">
{filters.map((filter) => (
<div key={filter.id} className="span-related-signals-drawer__filter-tag">
<Typography.Text>
{filter.key?.key}={filter.value}
</Typography.Text>
</div>
))}
</div>
</div>
);
}
interface SpanRelatedSignalsProps {
selectedSpan: Span;
traceStartTime: number;
@@ -66,6 +47,23 @@ function SpanRelatedSignals({
);
const isDarkMode = useIsDarkMode();
const {
logs,
isLoading,
isError,
isFetching,
isLogSpanRelated,
hasTraceIdLogs,
} = useSpanContextLogs({
traceId: selectedSpan.traceId,
spanId: selectedSpan.spanId,
timeRange: {
startTime: traceStartTime - FIVE_MINUTES_IN_MS,
endTime: traceEndTime + FIVE_MINUTES_IN_MS,
},
isDrawerOpen: isOpen,
});
const handleTabChange = useCallback((e: RadioChangeEvent): void => {
setSelectedView(e.target.value);
}, []);
@@ -75,25 +73,6 @@ function SpanRelatedSignals({
onClose();
}, [onClose]);
const appliedFilters = useMemo(
(): TagFilterItem[] => [
{
id: 'trace-id-filter',
key: {
key: 'trace_id',
id: 'trace-id-key',
dataType: 'string' as const,
isColumn: true,
type: '',
isJSON: false,
} as BaseAutocompleteData,
op: '=',
value: selectedSpan.traceId,
},
],
[selectedSpan.traceId],
);
const handleExplorerPageRedirect = useCallback((): void => {
const startTimeMs = traceStartTime - FIVE_MINUTES_IN_MS;
const endTimeMs = traceEndTime + FIVE_MINUTES_IN_MS;
@@ -146,6 +125,14 @@ function SpanRelatedSignals({
);
}, [selectedSpan.traceId, traceStartTime, traceEndTime]);
const emptyStateConfig = useMemo(
() => ({
...getEmptyLogsListConfig(() => {}),
showClearFiltersButton: false,
}),
[],
);
return (
<Drawer
width="50%"
@@ -210,23 +197,28 @@ function SpanRelatedSignals({
icon={<Compass size={18} />}
className="open-in-explorer"
onClick={handleExplorerPageRedirect}
/>
>
Open in Logs Explorer
</Button>
)}
</div>
{selectedView === RelatedSignalsViews.LOGS && (
<>
<AppliedFilters filters={appliedFilters} />
<SpanLogs
traceId={selectedSpan.traceId}
spanId={selectedSpan.spanId}
timeRange={{
startTime: traceStartTime - FIVE_MINUTES_IN_MS,
endTime: traceEndTime + FIVE_MINUTES_IN_MS,
}}
handleExplorerPageRedirect={handleExplorerPageRedirect}
/>
</>
<SpanLogs
traceId={selectedSpan.traceId}
spanId={selectedSpan.spanId}
timeRange={{
startTime: traceStartTime - FIVE_MINUTES_IN_MS,
endTime: traceEndTime + FIVE_MINUTES_IN_MS,
}}
logs={logs}
isLoading={isLoading}
isError={isError}
isFetching={isFetching}
isLogSpanRelated={isLogSpanRelated}
handleExplorerPageRedirect={handleExplorerPageRedirect}
emptyStateConfig={!hasTraceIdLogs ? emptyStateConfig : undefined}
/>
)}
</div>
)}

View File

@@ -16,6 +16,7 @@ import {
expectedAfterFilterExpression,
expectedBeforeFilterExpression,
expectedSpanFilterExpression,
expectedTraceOnlyFilterExpression,
mockAfterLogsResponse,
mockBeforeLogsResponse,
mockEmptyLogsResponse,
@@ -217,19 +218,22 @@ const renderSpanDetailsDrawer = (props = {}): void => {
};
describe('SpanDetailsDrawer', () => {
let apiCallHistory: any[] = [];
let apiCallHistory: any = {};
beforeEach(() => {
jest.clearAllMocks();
apiCallHistory = [];
apiCallHistory = {
span_logs: null,
before_logs: null,
after_logs: null,
trace_only_logs: null,
};
mockSafeNavigate.mockClear();
mockWindowOpen.mockClear();
mockUpdateAllQueriesOperators.mockClear();
// Setup API call tracking
(GetMetricQueryRange as jest.Mock).mockImplementation((query) => {
apiCallHistory.push(query);
// Determine response based on v5 filter expressions
const filterExpression =
query.query?.builder?.queryData?.[0]?.filter?.expression;
@@ -238,14 +242,23 @@ describe('SpanDetailsDrawer', () => {
// Check for span logs query (contains both trace_id and span_id)
if (filterExpression.includes('span_id')) {
apiCallHistory.span_logs = query;
return Promise.resolve(mockSpanLogsResponse);
}
// Check for before logs query (contains trace_id and id <)
if (filterExpression.includes('id <')) {
apiCallHistory.before_logs = query;
return Promise.resolve(mockBeforeLogsResponse);
}
// Check for after logs query (contains trace_id and id >)
if (filterExpression.includes('id >')) {
apiCallHistory.after_logs = query;
return Promise.resolve(mockAfterLogsResponse);
}
// Check for trace only logs query (contains trace_id)
if (filterExpression.includes('trace_id =')) {
apiCallHistory.trace_only_logs = query;
return Promise.resolve(mockAfterLogsResponse);
}
@@ -287,7 +300,7 @@ describe('SpanDetailsDrawer', () => {
});
});
it('should make three API queries when logs tab is opened', async () => {
it('should make 4 API queries when logs tab is opened', async () => {
renderSpanDetailsDrawer();
// Click on logs tab to trigger API calls
@@ -296,11 +309,16 @@ describe('SpanDetailsDrawer', () => {
// Wait for all API calls to complete
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
expect(GetMetricQueryRange).toHaveBeenCalledTimes(4);
});
// Verify the three distinct queries were made
const [spanQuery, beforeQuery, afterQuery] = apiCallHistory;
// Verify the four distinct queries were made
const {
span_logs: spanQuery,
before_logs: beforeQuery,
after_logs: afterQuery,
trace_only_logs: traceOnlyQuery,
} = apiCallHistory;
// 1. Span logs query (trace_id + span_id)
expect(spanQuery.query.builder.queryData[0].filter.expression).toBe(
@@ -316,6 +334,11 @@ describe('SpanDetailsDrawer', () => {
expect(afterQuery.query.builder.queryData[0].filter.expression).toBe(
expectedAfterFilterExpression,
);
// 4. Trace only logs query (trace_id)
expect(traceOnlyQuery.query.builder.queryData[0].filter.expression).toBe(
expectedTraceOnlyFilterExpression,
);
});
it('should use correct timestamp ordering for different query types', async () => {
@@ -327,10 +350,14 @@ describe('SpanDetailsDrawer', () => {
// Wait for all API calls to complete
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
expect(GetMetricQueryRange).toHaveBeenCalledTimes(4);
});
const [spanQuery, beforeQuery, afterQuery] = apiCallHistory;
const {
span_logs: spanQuery,
before_logs: beforeQuery,
after_logs: afterQuery,
} = apiCallHistory;
// Verify ordering: span query should use 'desc' (default)
expect(spanQuery.query.builder.queryData[0].orderBy[0].order).toBe('desc');
@@ -463,24 +490,6 @@ describe('SpanDetailsDrawer', () => {
expect(mockSafeNavigate).not.toHaveBeenCalled();
});
it('should handle empty logs state', async () => {
// Mock empty response for all queries
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockEmptyLogsResponse);
renderSpanDetailsDrawer();
// Open logs view
const logsButton = screen.getByRole('radio', { name: /logs/i });
fireEvent.click(logsButton);
// Wait and verify empty state is shown
await waitFor(() => {
expect(
screen.getByText(/No logs found for selected span/),
).toBeInTheDocument();
});
});
it('should display span logs as highlighted and context logs as regular', async () => {
renderSpanDetailsDrawer();
@@ -490,7 +499,7 @@ describe('SpanDetailsDrawer', () => {
// Wait for all API calls to complete first
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
expect(GetMetricQueryRange).toHaveBeenCalledTimes(4);
});
// Wait for all logs to be rendered - both span logs and context logs

View File

@@ -12,7 +12,7 @@ export const mockSpan: Span = {
traceId: TEST_TRACE_ID,
name: TEST_SERVICE,
serviceName: TEST_SERVICE,
timestamp: 1640995200000000, // 2022-01-01 00:00:00 in microseconds
timestamp: 1640995200000, // 2022-01-01 00:00:00 in milliseconds
durationNano: 1000000000, // 1 second in nanoseconds
spanKind: 'server',
statusCodeString: 'STATUS_CODE_OK',
@@ -207,3 +207,4 @@ export const mockEmptyLogsResponse = {
export const expectedSpanFilterExpression = `trace_id = '${TEST_TRACE_ID}' AND span_id = '${TEST_SPAN_ID}'`;
export const expectedBeforeFilterExpression = `trace_id = '${TEST_TRACE_ID}' AND id < 'span-log-1'`;
export const expectedAfterFilterExpression = `trace_id = '${TEST_TRACE_ID}' AND id > 'span-log-2'`;
export const expectedTraceOnlyFilterExpression = `trace_id = '${TEST_TRACE_ID}'`;

View File

@@ -81,6 +81,13 @@ function TimeSeriesView({
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const [graphVisibility, setGraphVisibility] = useState<boolean[]>([]);
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
@@ -203,6 +210,13 @@ function TimeSeriesView({
setGraphsVisibilityStates: setGraphVisibility,
enhancedLegend: true,
legendPosition: LegendPosition.BOTTOM,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
});
return (

View File

@@ -21,7 +21,7 @@ import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { SuccessResponse, SuccessResponseV2, Warning } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { prepareQueryRangePayload } from './prepareQueryRangePayload';
@@ -76,14 +76,13 @@ const getQueryDataSource = (
const getLegendForSingleAggregation = (
queryData: QueryData,
payloadQuery: Query,
allQueries: IBuilderQuery[],
aggregationAlias: string,
aggregationExpression: string,
labelName: string,
singleAggregation: boolean,
) => {
// Find the corresponding query in payloadQuery
const queryItem = payloadQuery.builder?.queryData.find(
const queryItem = allQueries.find(
(query) => query.queryName === queryData.queryName,
);
@@ -108,14 +107,13 @@ const getLegendForSingleAggregation = (
const getLegendForMultipleAggregations = (
queryData: QueryData,
payloadQuery: Query,
allQueries: IBuilderQuery[],
aggregationAlias: string,
aggregationExpression: string,
labelName: string,
singleAggregation: boolean,
) => {
// Find the corresponding query in payloadQuery
const queryItem = payloadQuery.builder?.queryData.find(
const queryItem = allQueries.find(
(query) => query.queryName === queryData.queryName,
);
@@ -148,15 +146,18 @@ export const getLegend = (
return labelName;
}
const aggregationPerQuery = payloadQuery?.builder?.queryData.reduce(
(acc, query) => {
if (query.queryName === queryData.queryName) {
acc[query.queryName] = createAggregation(query);
}
return acc;
},
{},
);
// Combine queryData and queryTraceOperator
const allQueries = [
...(payloadQuery?.builder?.queryData || []),
...(payloadQuery?.builder?.queryTraceOperator || []),
];
const aggregationPerQuery = allQueries.reduce((acc, query) => {
if (query.queryName === queryData.queryName) {
acc[query.queryName] = createAggregation(query);
}
return acc;
}, {});
const metaData = queryData?.metaData;
const aggregation =
@@ -165,8 +166,8 @@ export const getLegend = (
const aggregationAlias = aggregation?.alias || '';
const aggregationExpression = aggregation?.expression || '';
// Check if there's only one total query (queryData)
const singleQuery = payloadQuery?.builder?.queryData?.length === 1;
// Check if there's only one total query
const singleQuery = allQueries.length === 1;
const singleAggregation =
aggregationPerQuery?.[metaData?.queryName]?.length === 1;
@@ -174,7 +175,7 @@ export const getLegend = (
return singleQuery
? getLegendForSingleAggregation(
queryData,
payloadQuery,
allQueries,
aggregationAlias,
aggregationExpression,
labelName,
@@ -182,7 +183,7 @@ export const getLegend = (
)
: getLegendForMultipleAggregations(
queryData,
payloadQuery,
allQueries,
aggregationAlias,
aggregationExpression,
labelName,

View File

@@ -47,6 +47,7 @@ export interface GetUPlotChartOptions {
panelType?: PANEL_TYPES;
onDragSelect?: (startTime: number, endTime: number) => void;
yAxisUnit?: string;
decimalPrecision?: PrecisionOption;
onClickHandler?: OnClickPluginOpts['onClick'];
graphsVisibilityStates?: boolean[];
setGraphsVisibilityStates?: FullViewProps['setGraphsVisibilityStates'];
@@ -192,6 +193,7 @@ export const getUPlotChartOptions = ({
apiResponse,
onDragSelect,
yAxisUnit,
decimalPrecision,
minTimeScale,
maxTimeScale,
onClickHandler = _noop,
@@ -359,6 +361,7 @@ export const getUPlotChartOptions = ({
colorMapping,
customTooltipElement,
query: query || currentQuery,
decimalPrecision,
}),
onClickPlugin({
onClick: onClickHandler,

View File

@@ -17,6 +17,11 @@ import { drawStyles } from './utils/constants';
import { generateColor } from './utils/generateColor';
import getAxes from './utils/getAxes';
// Extended uPlot interface with custom properties
interface ExtendedUPlot extends uPlot {
_legendScrollCleanup?: () => void;
}
type GetUplotHistogramChartOptionsProps = {
id?: string;
apiResponse?: MetricRangePayloadProps;
@@ -30,6 +35,8 @@ type GetUplotHistogramChartOptionsProps = {
setGraphsVisibilityStates?: Dispatch<SetStateAction<boolean[]>>;
mergeAllQueries?: boolean;
onClickHandler?: OnClickPluginOpts['onClick'];
legendScrollPosition?: number;
setLegendScrollPosition?: (position: number) => void;
};
type GetHistogramSeriesProps = {
@@ -124,6 +131,8 @@ export const getUplotHistogramChartOptions = ({
mergeAllQueries,
onClickHandler = _noop,
panelType,
legendScrollPosition,
setLegendScrollPosition,
}: GetUplotHistogramChartOptionsProps): uPlot.Options =>
({
id,
@@ -179,33 +188,94 @@ export const getUplotHistogramChartOptions = ({
(self): void => {
const legend = self.root.querySelector('.u-legend');
if (legend) {
const legendElement = legend as HTMLElement;
// Enhanced legend scroll position preservation
if (setLegendScrollPosition && typeof legendScrollPosition === 'number') {
const handleScroll = (): void => {
setLegendScrollPosition(legendElement.scrollTop);
};
// Add scroll event listener to save position
legendElement.addEventListener('scroll', handleScroll);
// Restore scroll position
requestAnimationFrame(() => {
legendElement.scrollTop = legendScrollPosition;
});
// Store cleanup function
const extSelf = self as ExtendedUPlot;
extSelf._legendScrollCleanup = (): void => {
legendElement.removeEventListener('scroll', handleScroll);
};
}
const seriesEls = legend.querySelectorAll('.u-series');
const seriesArray = Array.from(seriesEls);
seriesArray.forEach((seriesEl, index) => {
seriesEl.addEventListener('click', () => {
if (graphsVisibilityStates) {
setGraphsVisibilityStates?.((prev) => {
const newGraphVisibilityStates = [...prev];
if (
newGraphVisibilityStates[index + 1] &&
newGraphVisibilityStates.every((value, i) =>
i === index + 1 ? value : !value,
)
) {
newGraphVisibilityStates.fill(true);
} else {
newGraphVisibilityStates.fill(false);
newGraphVisibilityStates[index + 1] = true;
// Add click handlers for marker and text separately
const thElement = seriesEl.querySelector('th');
if (thElement) {
const currentMarker = thElement.querySelector('.u-marker');
const textElement =
thElement.querySelector('.legend-text') || thElement;
// Marker click handler - checkbox behavior (toggle individual series)
if (currentMarker) {
currentMarker.addEventListener('click', (e) => {
e.stopPropagation?.(); // Prevent event bubbling to text handler
if (graphsVisibilityStates) {
setGraphsVisibilityStates?.((prev) => {
const newGraphVisibilityStates = [...prev];
// Toggle the specific series visibility (checkbox behavior)
newGraphVisibilityStates[index + 1] = !newGraphVisibilityStates[
index + 1
];
saveLegendEntriesToLocalStorage({
options: self,
graphVisibilityState: newGraphVisibilityStates,
name: id || '',
});
return newGraphVisibilityStates;
});
}
saveLegendEntriesToLocalStorage({
options: self,
graphVisibilityState: newGraphVisibilityStates,
name: id || '',
});
return newGraphVisibilityStates;
});
}
});
// Text click handler - show only/show all behavior (existing behavior)
textElement.addEventListener('click', (e) => {
e.stopPropagation?.(); // Prevent event bubbling
if (graphsVisibilityStates) {
setGraphsVisibilityStates?.((prev) => {
const newGraphVisibilityStates = [...prev];
// Show only this series / show all behavior
if (
newGraphVisibilityStates[index + 1] &&
newGraphVisibilityStates.every((value, i) =>
i === index + 1 ? value : !value,
)
) {
// If only this series is visible, show all
newGraphVisibilityStates.fill(true);
} else {
// Otherwise, show only this series
newGraphVisibilityStates.fill(false);
newGraphVisibilityStates[index + 1] = true;
}
saveLegendEntriesToLocalStorage({
options: self,
graphVisibilityState: newGraphVisibilityStates,
name: id || '',
});
return newGraphVisibilityStates;
});
}
});
}
});
}
},

View File

@@ -1,4 +1,4 @@
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import { getToolTipValue, PrecisionOption } from 'components/Graph/yAxisConfig';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { themeColors } from 'constants/theme';
import dayjs from 'dayjs';
@@ -44,6 +44,7 @@ const generateTooltipContent = (
idx: number,
isDarkMode: boolean,
yAxisUnit?: string,
decimalPrecision?: PrecisionOption,
series?: uPlot.Options['series'],
isBillingUsageGraphs?: boolean,
isHistogramGraphs?: boolean,
@@ -127,7 +128,7 @@ const generateTooltipContent = (
let tooltipItemLabel = label;
if (Number.isFinite(value)) {
const tooltipValue = getToolTipValue(value, yAxisUnit);
const tooltipValue = getToolTipValue(value, yAxisUnit, decimalPrecision);
const dataIngestedFormated = getToolTipValue(dataIngested);
if (
duplicatedLegendLabels[label] ||
@@ -239,6 +240,7 @@ type ToolTipPluginProps = {
isBillingUsageGraphs?: boolean;
isHistogramGraphs?: boolean;
isMergedSeries?: boolean;
decimalPrecision?: PrecisionOption;
stackBarChart?: boolean;
isDarkMode: boolean;
customTooltipElement?: HTMLDivElement;
@@ -259,6 +261,7 @@ const tooltipPlugin = ({
timezone,
colorMapping,
query,
decimalPrecision,
}: // eslint-disable-next-line sonarjs/cognitive-complexity
ToolTipPluginProps): any => {
let over: HTMLElement;
@@ -320,6 +323,7 @@ ToolTipPluginProps): any => {
idx,
isDarkMode,
yAxisUnit,
decimalPrecision,
u.series,
isBillingUsageGraphs,
isHistogramGraphs,

View File

@@ -1,6 +1,6 @@
/* eslint-disable @typescript-eslint/ban-ts-comment */
// @ts-nocheck
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import { getToolTipValue, PrecisionOption } from 'components/Graph/yAxisConfig';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { uPlotXAxisValuesFormat } from './constants';
@@ -18,11 +18,13 @@ const getAxes = ({
yAxisUnit,
panelType,
isLogScale,
decimalPrecision,
}: {
isDarkMode: boolean;
yAxisUnit?: string;
panelType?: PANEL_TYPES;
isLogScale?: boolean;
decimalPrecision?: PrecisionOption;
// eslint-disable-next-line sonarjs/cognitive-complexity
}): any => [
{
@@ -61,7 +63,7 @@ const getAxes = ({
if (v === null || v === undefined || Number.isNaN(v)) {
return '';
}
const value = getToolTipValue(v.toString(), yAxisUnit);
const value = getToolTipValue(v.toString(), yAxisUnit, decimalPrecision);
return `${value}`;
}),
gap: 5,

View File

@@ -1,3 +1,4 @@
import { PrecisionOption } from 'components/Graph/yAxisConfig';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
@@ -113,6 +114,7 @@ export interface IBaseWidget {
timePreferance: timePreferenceType;
stepSize?: number;
yAxisUnit?: string;
decimalPrecision?: PrecisionOption; // number of decimals or 'full precision'
stackedBarChart?: boolean;
bucketCount?: number;
bucketWidth?: number;

View File

@@ -49,12 +49,8 @@ export const getHightLightedLogBackground = (
return `background-color: ${orange[3]};`;
};
export const getCustomHighlightBackground = (
isHighlighted = false,
isDarkMode = true,
$logType: string,
): string => {
export const getCustomHighlightBackground = (isHighlighted = false): string => {
if (!isHighlighted) return '';
return getActiveLogBackground(true, isDarkMode, $logType);
return `background-color: ${Color.BG_ROBIN_500}20;`;
};

View File

@@ -6369,13 +6369,13 @@ axe-core@^4.6.2:
resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.7.0.tgz"
integrity sha512-M0JtH+hlOL5pLQwHOLNYZaXuhqmvS8oExsqB1SBYgA4Dk7u/xx+YdGHXaK5pyUfed5mYXdlYiphWq3G8cRi5JQ==
axios@1.8.2:
version "1.8.2"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.8.2.tgz#fabe06e241dfe83071d4edfbcaa7b1c3a40f7979"
integrity sha512-ls4GYBm5aig9vWx8AWDSGLpnpDQRtWAfrjU+EuytuODrFBkqesN2RkOQCBzrA1RQNHw1SmRMSDDDSwzNAYQ6Rg==
axios@1.12.0:
version "1.12.0"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.12.0.tgz#11248459be05a5ee493485628fa0e4323d0abfc3"
integrity sha512-oXTDccv8PcfjZmPGlWsPSwtOJCZ/b6W5jAMCNcfwJbCzDckwG0jrYJFaWH1yvivfCXjVzV/SPDEhMB3Q+DSurg==
dependencies:
follow-redirects "^1.15.6"
form-data "^4.0.0"
form-data "^4.0.4"
proxy-from-env "^1.1.0"
axobject-query@^3.1.1:
@@ -9677,7 +9677,7 @@ force-graph@1:
kapsule "^1.14"
lodash-es "4"
form-data@4.0.4, form-data@^3.0.0, form-data@^4.0.0:
form-data@4.0.4, form-data@^3.0.0, form-data@^4.0.4:
version "4.0.4"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.4.tgz#784cdcce0669a9d68e94d11ac4eea98088edd2c4"
integrity sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==

View File

@@ -2,7 +2,6 @@ package alertmanagerbatcher
import (
"context"
"io"
"log/slog"
"testing"
@@ -11,7 +10,7 @@ import (
)
func TestBatcherWithOneAlertAndDefaultConfigs(t *testing.T) {
batcher := New(slog.New(slog.NewTextHandler(io.Discard, nil)), NewConfig())
batcher := New(slog.New(slog.DiscardHandler), NewConfig())
_ = batcher.Start(context.Background())
batcher.Add(context.Background(), &alertmanagertypes.PostableAlert{Alert: alertmanagertypes.AlertModel{
@@ -25,7 +24,7 @@ func TestBatcherWithOneAlertAndDefaultConfigs(t *testing.T) {
}
func TestBatcherWithBatchSize(t *testing.T) {
batcher := New(slog.New(slog.NewTextHandler(io.Discard, nil)), Config{Size: 2, Capacity: 4})
batcher := New(slog.New(slog.DiscardHandler), Config{Size: 2, Capacity: 4})
_ = batcher.Start(context.Background())
var alerts alertmanagertypes.PostableAlerts
@@ -45,7 +44,7 @@ func TestBatcherWithBatchSize(t *testing.T) {
}
func TestBatcherWithCClosed(t *testing.T) {
batcher := New(slog.New(slog.NewTextHandler(io.Discard, nil)), Config{Size: 2, Capacity: 4})
batcher := New(slog.New(slog.DiscardHandler), Config{Size: 2, Capacity: 4})
_ = batcher.Start(context.Background())
var alerts alertmanagertypes.PostableAlerts

View File

@@ -2,14 +2,14 @@ package alertmanagerserver
import (
"context"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes/alertmanagertypestest"
"github.com/prometheus/alertmanager/dispatch"
"io"
"log/slog"
"net/http"
"testing"
"time"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes/alertmanagertypestest"
"github.com/prometheus/alertmanager/dispatch"
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager"
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfroutingstore/nfroutingstoretest"
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/rulebasednotification"
@@ -89,7 +89,7 @@ func TestEndToEndAlertManagerFlow(t *testing.T) {
srvCfg := NewConfig()
stateStore := alertmanagertypestest.NewStateStore()
registry := prometheus.NewRegistry()
logger := slog.New(slog.NewTextHandler(io.Discard, nil))
logger := slog.New(slog.DiscardHandler)
server, err := New(context.Background(), logger, registry, srvCfg, orgID, stateStore, notificationManager)
require.NoError(t, err)
amConfig, err := alertmanagertypes.NewDefaultConfig(srvCfg.Global, srvCfg.Route, orgID)

View File

@@ -3,7 +3,6 @@ package alertmanagerserver
import (
"bytes"
"context"
"io"
"log/slog"
"net"
"net/http"
@@ -26,7 +25,7 @@ import (
func TestServerSetConfigAndStop(t *testing.T) {
notificationManager := nfmanagertest.NewMock()
server, err := New(context.Background(), slog.New(slog.NewTextHandler(io.Discard, nil)), prometheus.NewRegistry(), NewConfig(), "1", alertmanagertypestest.NewStateStore(), notificationManager)
server, err := New(context.Background(), slog.New(slog.DiscardHandler), prometheus.NewRegistry(), NewConfig(), "1", alertmanagertypestest.NewStateStore(), notificationManager)
require.NoError(t, err)
amConfig, err := alertmanagertypes.NewDefaultConfig(alertmanagertypes.GlobalConfig{}, alertmanagertypes.RouteConfig{GroupInterval: 1 * time.Minute, RepeatInterval: 1 * time.Minute, GroupWait: 1 * time.Minute}, "1")
@@ -38,7 +37,7 @@ func TestServerSetConfigAndStop(t *testing.T) {
func TestServerTestReceiverTypeWebhook(t *testing.T) {
notificationManager := nfmanagertest.NewMock()
server, err := New(context.Background(), slog.New(slog.NewTextHandler(io.Discard, nil)), prometheus.NewRegistry(), NewConfig(), "1", alertmanagertypestest.NewStateStore(), notificationManager)
server, err := New(context.Background(), slog.New(slog.DiscardHandler), prometheus.NewRegistry(), NewConfig(), "1", alertmanagertypestest.NewStateStore(), notificationManager)
require.NoError(t, err)
amConfig, err := alertmanagertypes.NewDefaultConfig(alertmanagertypes.GlobalConfig{}, alertmanagertypes.RouteConfig{GroupInterval: 1 * time.Minute, RepeatInterval: 1 * time.Minute, GroupWait: 1 * time.Minute}, "1")
@@ -86,7 +85,7 @@ func TestServerPutAlerts(t *testing.T) {
srvCfg := NewConfig()
srvCfg.Route.GroupInterval = 1 * time.Second
notificationManager := nfmanagertest.NewMock()
server, err := New(context.Background(), slog.New(slog.NewTextHandler(io.Discard, nil)), prometheus.NewRegistry(), srvCfg, "1", stateStore, notificationManager)
server, err := New(context.Background(), slog.New(slog.DiscardHandler), prometheus.NewRegistry(), srvCfg, "1", stateStore, notificationManager)
require.NoError(t, err)
amConfig, err := alertmanagertypes.NewDefaultConfig(srvCfg.Global, srvCfg.Route, "1")
@@ -134,7 +133,7 @@ func TestServerTestAlert(t *testing.T) {
srvCfg := NewConfig()
srvCfg.Route.GroupInterval = 1 * time.Second
notificationManager := nfmanagertest.NewMock()
server, err := New(context.Background(), slog.New(slog.NewTextHandler(io.Discard, nil)), prometheus.NewRegistry(), srvCfg, "1", stateStore, notificationManager)
server, err := New(context.Background(), slog.New(slog.DiscardHandler), prometheus.NewRegistry(), srvCfg, "1", stateStore, notificationManager)
require.NoError(t, err)
amConfig, err := alertmanagertypes.NewDefaultConfig(srvCfg.Global, srvCfg.Route, "1")
@@ -239,7 +238,7 @@ func TestServerTestAlertContinuesOnFailure(t *testing.T) {
srvCfg := NewConfig()
srvCfg.Route.GroupInterval = 1 * time.Second
notificationManager := nfmanagertest.NewMock()
server, err := New(context.Background(), slog.New(slog.NewTextHandler(io.Discard, nil)), prometheus.NewRegistry(), srvCfg, "1", stateStore, notificationManager)
server, err := New(context.Background(), slog.New(slog.DiscardHandler), prometheus.NewRegistry(), srvCfg, "1", stateStore, notificationManager)
require.NoError(t, err)
amConfig, err := alertmanagertypes.NewDefaultConfig(srvCfg.Global, srvCfg.Route, "1")

View File

@@ -2,7 +2,6 @@ package factory
import (
"context"
"io"
"log/slog"
"sync"
"testing"
@@ -33,7 +32,7 @@ func TestRegistryWith2Services(t *testing.T) {
s1 := newTestService(t)
s2 := newTestService(t)
registry, err := NewRegistry(slog.New(slog.NewTextHandler(io.Discard, nil)), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
registry, err := NewRegistry(slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
require.NoError(t, err)
ctx, cancel := context.WithCancel(context.Background())
@@ -54,7 +53,7 @@ func TestRegistryWith2ServicesWithoutWait(t *testing.T) {
s1 := newTestService(t)
s2 := newTestService(t)
registry, err := NewRegistry(slog.New(slog.NewTextHandler(io.Discard, nil)), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
registry, err := NewRegistry(slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
require.NoError(t, err)
ctx := context.Background()

View File

@@ -1,7 +1,6 @@
package middleware
import (
"io"
"log/slog"
"net"
"net/http"
@@ -17,7 +16,7 @@ func TestTimeout(t *testing.T) {
writeTimeout := 6 * time.Second
defaultTimeout := 2 * time.Second
maxTimeout := 4 * time.Second
m := NewTimeout(slog.New(slog.NewTextHandler(io.Discard, nil)), []string{"/excluded"}, defaultTimeout, maxTimeout)
m := NewTimeout(slog.New(slog.DiscardHandler), []string{"/excluded"}, defaultTimeout, maxTimeout)
listener, err := net.Listen("tcp", "localhost:0")
require.NoError(t, err)

View File

@@ -1,7 +1,6 @@
package instrumentationtest
import (
"io"
"log/slog"
"github.com/SigNoz/signoz/pkg/factory"
@@ -21,7 +20,7 @@ type noopInstrumentation struct {
func New() instrumentation.Instrumentation {
return &noopInstrumentation{
logger: slog.New(slog.NewTextHandler(io.Discard, nil)),
logger: slog.New(slog.DiscardHandler),
meterProvider: noopmetric.NewMeterProvider(),
tracerProvider: nooptrace.NewTracerProvider(),
}

View File

@@ -0,0 +1,58 @@
package implspanpercentile
import (
"encoding/json"
"net/http"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/spanpercentiletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// handler exposes the span-percentile module over HTTP.
type handler struct {
	module spanpercentile.Module
}

// NewHandler wraps the given module in a spanpercentile.Handler.
func NewHandler(module spanpercentile.Module) spanpercentile.Handler {
	h := &handler{module: module}
	return h
}
// GetSpanPercentileDetails serves the span-percentile endpoint: it
// authenticates the caller, decodes the request body, delegates to the
// module, and renders either the result or the first error encountered.
func (h *handler) GetSpanPercentileDetails(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(w, err)
		return
	}

	req, err := parseSpanPercentileRequestBody(r)
	if err != nil {
		render.Error(w, err)
		return
	}

	result, err := h.module.GetSpanPercentile(ctx, valuer.MustNewUUID(claims.OrgID), valuer.MustNewUUID(claims.UserID), req)
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusOK, result)
}
// parseSpanPercentileRequestBody decodes the JSON request body into a
// SpanPercentileRequest and validates it, returning an invalid-input error
// when decoding fails.
func parseSpanPercentileRequestBody(r *http.Request) (*spanpercentiletypes.SpanPercentileRequest, error) {
	var req spanpercentiletypes.SpanPercentileRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		return nil, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "cannot parse the request body: %v", err)
	}
	if err := req.Validate(); err != nil {
		return nil, err
	}
	return &req, nil
}

View File

@@ -0,0 +1,126 @@
package implspanpercentile
import (
"context"
"fmt"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile"
"github.com/SigNoz/signoz/pkg/querier"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/spanpercentiletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// module implements spanpercentile.Module on top of the shared querier.
type module struct {
	querier querier.Querier
}

// NewModule builds a span-percentile module backed by the given querier.
// Provider settings are accepted for interface symmetry but currently unused.
func NewModule(
	querier querier.Querier,
	_ factory.ProviderSettings,
) spanpercentile.Module {
	m := &module{querier: querier}
	return m
}
// GetSpanPercentile builds a scalar query-range request from req, validates
// it, executes it via the querier, and reshapes the scalar result into a
// SpanPercentileResponse. The userID is part of the module contract but is
// not consulted here.
func (m *module) GetSpanPercentile(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, req *spanpercentiletypes.SpanPercentileRequest) (*spanpercentiletypes.SpanPercentileResponse, error) {
	rangeReq, err := buildSpanPercentileQuery(ctx, req)
	if err != nil {
		return nil, err
	}
	if err := rangeReq.Validate(); err != nil {
		return nil, err
	}

	queryResult, err := m.querier.QueryRange(ctx, orgID, rangeReq)
	if err != nil {
		return nil, err
	}

	return transformToSpanPercentileResponse(queryResult)
}
// transformToSpanPercentileResponse converts the scalar query result into the
// API response shape. The query is expected to produce four result columns,
// in order: p50, p90, p99 (duration percentiles in nanoseconds) and the
// percentile position of the requested span's duration.
//
// Returns an internal error when the result shape is wrong, and a not-found
// error when a result cell is not numeric (which is how an empty aggregation
// surfaces).
func transformToSpanPercentileResponse(queryResult *qbtypes.QueryRangeResponse) (*spanpercentiletypes.SpanPercentileResponse, error) {
	if len(queryResult.Data.Results) == 0 {
		return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "no data returned from query")
	}
	scalarData, ok := queryResult.Data.Results[0].(*qbtypes.ScalarData)
	if !ok {
		return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "unexpected result type")
	}
	if len(scalarData.Data) == 0 {
		return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "no rows returned from query")
	}
	row := scalarData.Data[0]

	columnMap := make(map[string]int, len(scalarData.Columns))
	for i, col := range scalarData.Columns {
		columnMap[col.Name] = i
	}

	// Resolve every expected column index first so that a missing column is
	// reported before any value-conversion error (matching the original
	// error precedence), then convert the four values in order.
	resultColumns := []string{"__result_0", "__result_1", "__result_2", "__result_3"}
	indices := make([]int, 0, len(resultColumns))
	for _, name := range resultColumns {
		idx, ok := columnMap[name]
		if !ok {
			return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "missing %s column", name)
		}
		indices = append(indices, idx)
	}

	values := make([]float64, 0, len(indices))
	for _, idx := range indices {
		v, err := toFloat64(row[idx])
		if err != nil {
			// A non-numeric cell means the aggregations had no spans to
			// work with; surface that as a not-found condition.
			return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, "no spans found matching the specified criteria")
		}
		values = append(values, v)
	}
	p50, p90, p99, position := values[0], values[1], values[2], values[3]

	// Phrase the position relative to the faster half of spans.
	description := fmt.Sprintf("faster than %.1f%% of spans", position)
	if position < 50 {
		description = fmt.Sprintf("slower than %.1f%% of spans", 100-position)
	}

	return &spanpercentiletypes.SpanPercentileResponse{
		Percentiles: spanpercentiletypes.PercentileStats{
			P50: p50,
			P90: p90,
			P99: p99,
		},
		Position: spanpercentiletypes.PercentilePosition{
			Percentile:  position,
			Description: description,
		},
	}, nil
}
// toFloat64 returns val as a float64, or an invalid-input error when the
// underlying dynamic type is anything else.
func toFloat64(val any) (float64, error) {
	if f, ok := val.(float64); ok {
		return f, nil
	}
	return 0, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot convert %T to float64", val)
}

View File

@@ -0,0 +1,118 @@
package implspanpercentile
import (
"context"
"fmt"
"sort"
"strings"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/spanpercentiletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// escapeFilterValue makes a raw string safe for embedding inside a
// single-quoted filter-expression literal. Backslashes are doubled before
// quotes are escaped: otherwise a value ending in `\` would turn the emitted
// `\'` into an escaped backslash followed by a live quote, corrupting (or
// injecting into) the expression. For inputs containing neither character the
// output is unchanged.
func escapeFilterValue(s string) string {
	s = strings.ReplaceAll(s, `\`, `\\`)
	return strings.ReplaceAll(s, "'", `\'`)
}

// buildSpanPercentileQuery translates a span-percentile request into a v5
// scalar query-range request: p50/p90/p99 of duration_nano plus the
// percentage of spans at or below the requested duration, filtered and
// grouped by service.name, span name, and the requested resource attributes.
func buildSpanPercentileQuery(
	_ context.Context,
	req *spanpercentiletypes.SpanPercentileRequest,
) (*qbtypes.QueryRangeRequest, error) {
	if err := req.Validate(); err != nil {
		return nil, err
	}

	// Sort attribute keys so the generated filter and group-by are
	// deterministic regardless of map iteration order.
	attrKeys := make([]string, 0, len(req.ResourceAttributes))
	for key := range req.ResourceAttributes {
		attrKeys = append(attrKeys, key)
	}
	sort.Strings(attrKeys)

	filterConditions := []string{
		fmt.Sprintf("service.name = '%s'", escapeFilterValue(req.ServiceName)),
		fmt.Sprintf("name = '%s'", escapeFilterValue(req.Name)),
	}
	for _, key := range attrKeys {
		// NOTE(review): attribute keys are interpolated unescaped —
		// presumably restricted upstream, but a key containing quotes or
		// operators would corrupt the expression. Confirm validation.
		value := req.ResourceAttributes[key]
		filterConditions = append(filterConditions,
			fmt.Sprintf("%s = '%s'", key, escapeFilterValue(value)))
	}
	filterExpr := strings.Join(filterConditions, " AND ")

	// Group by the mandatory identity keys first, then the extra resource
	// attributes in sorted order.
	groupByKeys := []qbtypes.GroupByKey{
		{
			TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
				Name:          "service.name",
				Signal:        telemetrytypes.SignalTraces,
				FieldContext:  telemetrytypes.FieldContextResource,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		},
		{
			TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
				Name:          "name",
				Signal:        telemetrytypes.SignalTraces,
				FieldContext:  telemetrytypes.FieldContextSpan,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		},
	}
	for _, key := range attrKeys {
		groupByKeys = append(groupByKeys, qbtypes.GroupByKey{
			TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
				Name:          key,
				Signal:        telemetrytypes.SignalTraces,
				FieldContext:  telemetrytypes.FieldContextResource,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		})
	}

	query := qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
		Name:   "span_percentile",
		Signal: telemetrytypes.SignalTraces,
		Aggregations: []qbtypes.TraceAggregation{
			{
				Expression: "p50(duration_nano)",
				Alias:      "p50_duration_nano",
			},
			{
				Expression: "p90(duration_nano)",
				Alias:      "p90_duration_nano",
			},
			{
				Expression: "p99(duration_nano)",
				Alias:      "p99_duration_nano",
			},
			{
				// Percentage of matching spans at or below the requested
				// duration, i.e. the span's percentile position.
				Expression: fmt.Sprintf(
					"(100.0 * countIf(duration_nano <= %d)) / count()",
					req.DurationNano,
				),
				Alias: "percentile_position",
			},
		},
		GroupBy: groupByKeys,
		Filter: &qbtypes.Filter{
			Expression: filterExpr,
		},
	}

	queryEnvelope := qbtypes.QueryEnvelope{
		Type: qbtypes.QueryTypeBuilder,
		Spec: query,
	}

	return &qbtypes.QueryRangeRequest{
		SchemaVersion: "v5",
		Start:         req.Start,
		End:           req.End,
		RequestType:   qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: []qbtypes.QueryEnvelope{queryEnvelope},
		},
		FormatOptions: &qbtypes.FormatOptions{
			FormatTableResultForUI: true,
		},
	}, nil
}

View File

@@ -0,0 +1,149 @@
package implspanpercentile
import (
"context"
"fmt"
"sort"
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/spanpercentiletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/require"
)
// TestBuildSpanPercentileQuery checks the full shape of the generated query
// for a request with no extra resource attributes.
func TestBuildSpanPercentileQuery(t *testing.T) {
	req := &spanpercentiletypes.SpanPercentileRequest{
		DurationNano:       100000,
		Name:               "test",
		ServiceName:        "test-service",
		ResourceAttributes: map[string]string{},
		Start:              1640995200000,
		End:                1640995800000,
	}

	result, err := buildSpanPercentileQuery(context.Background(), req)
	require.NoError(t, err)
	require.NotNil(t, result)
	require.Equal(t, qbtypes.RequestTypeScalar, result.RequestType)

	require.Len(t, result.CompositeQuery.Queries, 1)
	envelope := result.CompositeQuery.Queries[0]
	require.Equal(t, qbtypes.QueryTypeBuilder, envelope.Type)

	query, ok := envelope.Spec.(qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation])
	require.True(t, ok, "Spec should be QueryBuilderQuery type")
	require.Equal(t, "span_percentile", query.Name)
	require.Equal(t, telemetrytypes.SignalTraces, query.Signal)

	// Four aggregations: the three percentiles plus the position expression.
	require.Len(t, query.Aggregations, 4)
	require.Equal(t, "p50(duration_nano)", query.Aggregations[0].Expression)
	require.Equal(t, "p50_duration_nano", query.Aggregations[0].Alias)
	require.Equal(t, "p90(duration_nano)", query.Aggregations[1].Expression)
	require.Equal(t, "p90_duration_nano", query.Aggregations[1].Alias)
	require.Equal(t, "p99(duration_nano)", query.Aggregations[2].Expression)
	require.Equal(t, "p99_duration_nano", query.Aggregations[2].Alias)
	require.Equal(t, "(100.0 * countIf(duration_nano <= 100000)) / count()", query.Aggregations[3].Expression)
	require.Equal(t, "percentile_position", query.Aggregations[3].Alias)

	require.NotNil(t, query.Filter)
	require.Equal(t, "service.name = 'test-service' AND name = 'test'", query.Filter.Expression)

	// Only the two mandatory group-by keys when no attributes are requested.
	require.Len(t, query.GroupBy, 2)
	require.Equal(t, "service.name", query.GroupBy[0].TelemetryFieldKey.Name)
	require.Equal(t, telemetrytypes.FieldContextResource, query.GroupBy[0].TelemetryFieldKey.FieldContext)
	require.Equal(t, "name", query.GroupBy[1].TelemetryFieldKey.Name)
	require.Equal(t, telemetrytypes.FieldContextSpan, query.GroupBy[1].TelemetryFieldKey.FieldContext)
}
// TestBuildSpanPercentileQueryWithResourceAttributes verifies that the
// generated filter expression and group-by keys include the requested
// resource attributes in sorted key order, after the mandatory service.name
// and span name conditions.
func TestBuildSpanPercentileQueryWithResourceAttributes(t *testing.T) {
	testCases := []struct {
		name               string
		request            *spanpercentiletypes.SpanPercentileRequest
		expectedFilterExpr string
	}{
		{
			name: "query with service.name only (no additional resource attributes)",
			request: &spanpercentiletypes.SpanPercentileRequest{
				DurationNano:       100000,
				Name:               "GET /api/users",
				ServiceName:        "user-service",
				ResourceAttributes: map[string]string{},
				Start:              1640995200000,
				End:                1640995800000,
			},
			expectedFilterExpr: "service.name = 'user-service' AND name = 'GET /api/users'",
		},
		{
			name: "query with service.name and deployment.environment",
			request: &spanpercentiletypes.SpanPercentileRequest{
				DurationNano: 250000,
				Name:         "POST /api/orders",
				ServiceName:  "order-service",
				ResourceAttributes: map[string]string{
					"deployment.environment": "production",
				},
				Start: 1640995200000,
				End:   1640995800000,
			},
			expectedFilterExpr: "service.name = 'order-service' AND name = 'POST /api/orders' AND deployment.environment = 'production'",
		},
		{
			// Multiple attributes: the expected expression lists them in
			// sorted key order regardless of map literal order.
			name: "query with multiple resource attributes",
			request: &spanpercentiletypes.SpanPercentileRequest{
				DurationNano: 500000,
				Name:         "DELETE /api/items",
				ServiceName:  "inventory-service",
				ResourceAttributes: map[string]string{
					"cloud.platform":         "aws",
					"deployment.environment": "staging",
					"k8s.cluster.name":       "staging-cluster",
				},
				Start: 1640995200000,
				End:   1640995800000,
			},
			expectedFilterExpr: "service.name = 'inventory-service' AND name = 'DELETE /api/items' AND cloud.platform = 'aws' AND deployment.environment = 'staging' AND k8s.cluster.name = 'staging-cluster'",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			ctx := context.Background()
			result, err := buildSpanPercentileQuery(ctx, tc.request)
			require.NoError(t, err)
			require.NotNil(t, result)

			query, ok := result.CompositeQuery.Queries[0].Spec.(qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation])
			require.True(t, ok, "Spec should be QueryBuilderQuery type")

			require.Equal(t, tc.expectedFilterExpr, query.Filter.Expression)

			require.Equal(t, 4, len(query.Aggregations))
			require.Equal(t, "p50(duration_nano)", query.Aggregations[0].Expression)
			require.Equal(t, "p90(duration_nano)", query.Aggregations[1].Expression)
			require.Equal(t, "p99(duration_nano)", query.Aggregations[2].Expression)
			require.Contains(t, query.Aggregations[3].Expression, fmt.Sprintf("countIf(duration_nano <= %d)", tc.request.DurationNano))

			// GroupBy is service.name, name, then the resource attributes in
			// sorted key order.
			expectedGroupByCount := 2 + len(tc.request.ResourceAttributes)
			require.Equal(t, expectedGroupByCount, len(query.GroupBy))
			require.Equal(t, "service.name", query.GroupBy[0].TelemetryFieldKey.Name)
			require.Equal(t, "name", query.GroupBy[1].TelemetryFieldKey.Name)
			for i, key := range getSortedKeys(tc.request.ResourceAttributes) {
				require.Equal(t, key, query.GroupBy[2+i].TelemetryFieldKey.Name)
				require.Equal(t, telemetrytypes.FieldContextResource, query.GroupBy[2+i].TelemetryFieldKey.FieldContext)
			}
		})
	}
}
// getSortedKeys returns the map's keys in ascending lexical order.
func getSortedKeys(m map[string]string) []string {
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	sort.Slice(keys, func(i, j int) bool { return keys[i] < keys[j] })
	return keys
}

View File

@@ -0,0 +1,17 @@
package spanpercentile
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/spanpercentiletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// Module computes span-percentile statistics scoped to an organization and
// user.
type Module interface {
	// GetSpanPercentile returns the p50/p90/p99 duration percentiles of
	// matching spans and the relative position of the requested span's
	// duration among them.
	GetSpanPercentile(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, req *spanpercentiletypes.SpanPercentileRequest) (*spanpercentiletypes.SpanPercentileResponse, error)
}

// Handler exposes the span-percentile module over HTTP.
type Handler interface {
	// GetSpanPercentileDetails serves the span-percentile endpoint.
	GetSpanPercentileDetails(http.ResponseWriter, *http.Request)
}

View File

@@ -625,6 +625,8 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// Export
router.HandleFunc("/api/v1/export_raw_data", am.ViewAccess(aH.Signoz.Handlers.RawDataExport.ExportRawData)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost)
}
func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.AuthZ) {
@@ -814,6 +816,10 @@ func (aH *APIHandler) createDowntimeSchedule(w http.ResponseWriter, r *http.Requ
return
}
if len(schedule.RuleIDs) == 0 {
schedule.SilenceAll = true
}
_, err = aH.ruleManager.MaintenanceStore().CreatePlannedMaintenance(r.Context(), schedule)
if err != nil {
render.Error(w, err)
@@ -841,6 +847,10 @@ func (aH *APIHandler) editDowntimeSchedule(w http.ResponseWriter, r *http.Reques
return
}
if len(schedule.RuleIDs) == 0 {
schedule.SilenceAll = true
}
err = aH.ruleManager.MaintenanceStore().EditPlannedMaintenance(r.Context(), schedule, id)
if err != nil {
render.Error(w, err)

View File

@@ -79,6 +79,10 @@ func (m *MockSQLRuleStore) ExpectEditRule(rule *ruletypes.Rule) {
// ExpectDeleteRule sets up SQL expectations for DeleteRule operation
func (m *MockSQLRuleStore) ExpectDeleteRule(ruleID valuer.UUID) {
plannedMaintenancePattern := `DELETE FROM "planned_maintenance_rule".+WHERE \(rule_id = '` + ruleID.StringValue() + `'\)`
m.mock.ExpectExec(plannedMaintenancePattern).
WillReturnResult(sqlmock.NewResult(0, 1))
expectedPattern := `DELETE FROM "rule".+WHERE \(id = '` + ruleID.StringValue() + `'\)`
m.mock.ExpectExec(expectedPattern).
WillReturnResult(sqlmock.NewResult(1, 1))

View File

@@ -78,6 +78,7 @@ func (r *maintenance) CreatePlannedMaintenance(ctx context.Context, maintenance
Description: maintenance.Description,
Schedule: maintenance.Schedule,
OrgID: claims.OrgID,
SilenceAll: maintenance.SilenceAll,
}
maintenanceRules := make([]*ruletypes.StorablePlannedMaintenanceRule, 0)
@@ -163,6 +164,7 @@ func (r *maintenance) EditPlannedMaintenance(ctx context.Context, maintenance ru
Description: maintenance.Description,
Schedule: maintenance.Schedule,
OrgID: claims.OrgID,
SilenceAll: maintenance.SilenceAll,
}
storablePlannedMaintenanceRules := make([]*ruletypes.StorablePlannedMaintenanceRule, 0)

View File

@@ -44,6 +44,7 @@ func (r *rule) EditRule(ctx context.Context, storedRule *ruletypes.Rule, cb func
NewUpdate().
Model(storedRule).
Where("id = ?", storedRule.ID.StringValue()).
Where("deleted = ?", false).
Exec(ctx)
if err != nil {
return err
@@ -58,8 +59,20 @@ func (r *rule) DeleteRule(ctx context.Context, id valuer.UUID, cb func(context.C
_, err := r.sqlstore.
BunDBCtx(ctx).
NewDelete().
Model(new(ruletypes.Rule)).
Model(new(ruletypes.StorablePlannedMaintenanceRule)).
Where("rule_id = ?", id.StringValue()).
Exec(ctx)
if err != nil {
return err
}
_, err = r.sqlstore.
BunDBCtx(ctx).
NewUpdate().
Model((*ruletypes.Rule)(nil)).
Set("deleted = ?", true).
Where("id = ?", id.StringValue()).
Where("deleted = ?", false).
Exec(ctx)
if err != nil {
return err
@@ -79,7 +92,7 @@ func (r *rule) GetStoredRules(ctx context.Context, orgID string) ([]*ruletypes.R
BunDB().
NewSelect().
Model(&rules).
Where("org_id = ?", orgID).
Where("org_id = ?", orgID).Where("deleted = ?", false).
Scan(ctx)
if err != nil {
return rules, err
@@ -94,7 +107,7 @@ func (r *rule) GetStoredRule(ctx context.Context, id valuer.UUID) (*ruletypes.Ru
BunDB().
NewSelect().
Model(rule).
Where("id = ?", id.StringValue()).
Where("id = ?", id.StringValue()).Where("deleted = ?", false).
Scan(ctx)
if err != nil {
return nil, err

View File

@@ -2,7 +2,6 @@ package signoz
import (
"context"
"io"
"log/slog"
"testing"
@@ -13,7 +12,7 @@ import (
// This is a test to ensure that all fields of config implement the factory.Config interface and are valid with
// their default values.
func TestValidateConfig(t *testing.T) {
logger := slog.New(slog.NewTextHandler(io.Discard, nil))
logger := slog.New(slog.DiscardHandler)
_, err := NewConfig(context.Background(), logger, configtest.NewResolverConfig(), DeprecatedFlags{})
assert.NoError(t, err)
}

View File

@@ -20,6 +20,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/session/implsession"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile/implspanpercentile"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -27,31 +29,33 @@ import (
)
type Handlers struct {
Organization organization.Handler
Preference preference.Handler
User user.Handler
SavedView savedview.Handler
Apdex apdex.Handler
Dashboard dashboard.Handler
QuickFilter quickfilter.Handler
TraceFunnel tracefunnel.Handler
RawDataExport rawdataexport.Handler
AuthDomain authdomain.Handler
Session session.Handler
Organization organization.Handler
Preference preference.Handler
User user.Handler
SavedView savedview.Handler
Apdex apdex.Handler
Dashboard dashboard.Handler
QuickFilter quickfilter.Handler
TraceFunnel tracefunnel.Handler
RawDataExport rawdataexport.Handler
AuthDomain authdomain.Handler
Session session.Handler
SpanPercentile spanpercentile.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings) Handlers {
return Handlers{
Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter),
Preference: implpreference.NewHandler(modules.Preference),
User: impluser.NewHandler(modules.User, modules.UserGetter),
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
AuthDomain: implauthdomain.NewHandler(modules.AuthDomain),
Session: implsession.NewHandler(modules.Session),
Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter),
Preference: implpreference.NewHandler(modules.Preference),
User: impluser.NewHandler(modules.User, modules.UserGetter),
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
AuthDomain: implauthdomain.NewHandler(modules.AuthDomain),
Session: implsession.NewHandler(modules.Session),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
}
}

View File

@@ -24,6 +24,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/session/implsession"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile/implspanpercentile"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -36,19 +38,20 @@ import (
)
type Modules struct {
OrgGetter organization.Getter
OrgSetter organization.Setter
Preference preference.Module
User user.Module
UserGetter user.Getter
SavedView savedview.Module
Apdex apdex.Module
Dashboard dashboard.Module
QuickFilter quickfilter.Module
TraceFunnel tracefunnel.Module
RawDataExport rawdataexport.Module
AuthDomain authdomain.Module
Session session.Module
OrgGetter organization.Getter
OrgSetter organization.Setter
Preference preference.Module
User user.Module
UserGetter user.Getter
SavedView savedview.Module
Apdex apdex.Module
Dashboard dashboard.Module
QuickFilter quickfilter.Module
TraceFunnel tracefunnel.Module
RawDataExport rawdataexport.Module
AuthDomain authdomain.Module
Session session.Module
SpanPercentile spanpercentile.Module
}
func NewModules(
@@ -66,19 +69,21 @@ func NewModules(
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, analytics)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
return Modules{
OrgGetter: orgGetter,
OrgSetter: orgSetter,
Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()),
SavedView: implsavedview.NewModule(sqlstore),
Apdex: implapdex.NewModule(sqlstore),
Dashboard: impldashboard.NewModule(sqlstore, providerSettings, analytics),
User: user,
UserGetter: userGetter,
QuickFilter: quickfilter,
TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
RawDataExport: implrawdataexport.NewModule(querier),
AuthDomain: implauthdomain.NewModule(implauthdomain.NewStore(sqlstore)),
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore)), tokenizer, orgGetter),
OrgGetter: orgGetter,
OrgSetter: orgSetter,
Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()),
SavedView: implsavedview.NewModule(sqlstore),
Apdex: implapdex.NewModule(sqlstore),
Dashboard: impldashboard.NewModule(sqlstore, providerSettings, analytics),
User: user,
UserGetter: userGetter,
QuickFilter: quickfilter,
TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
RawDataExport: implrawdataexport.NewModule(querier),
AuthDomain: implauthdomain.NewModule(implauthdomain.NewStore(sqlstore)),
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore)), tokenizer, orgGetter),
SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
}
}

View File

@@ -138,6 +138,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewUpdateTTLSettingForCustomRetentionFactory(sqlstore, sqlschema),
sqlmigration.NewAddRoutePolicyFactory(sqlstore, sqlschema),
sqlmigration.NewAddAuthTokenFactory(sqlstore, sqlschema),
sqlmigration.NewAddSilenceAllColumnFactory(sqlstore, sqlschema),
)
}

View File

@@ -0,0 +1,155 @@
package sqlmigration
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
"log/slog"
)
// addSilenceAllColumn is the migration that adds the silence_all boolean
// column to the planned_maintenance table and backfills it for existing rows.
type addSilenceAllColumn struct {
	sqlstore  sqlstore.SQLStore
	sqlschema sqlschema.SQLSchema
	logger    *slog.Logger
}

// plannedMaintenance is a minimal projection of the planned_maintenance
// table used only by this migration.
type plannedMaintenance struct {
	bun.BaseModel `bun:"table:planned_maintenance"`

	types.Identifiable
	// SilenceAll marks a maintenance window that silences every rule.
	SilenceAll bool `bun:"silence_all,type:boolean"`
}

// plannedMaintenanceRule mirrors the join table linking maintenance windows
// to specific rules; used here to back up and restore its rows across the
// column addition.
type plannedMaintenanceRule struct {
	bun.BaseModel `bun:"table:planned_maintenance_rule"`

	types.Identifiable
	PlannedMaintenanceID valuer.UUID `bun:"planned_maintenance_id,type:text"`
	RuleID               valuer.UUID `bun:"rule_id,type:text"`
}
// NewAddSilenceAllColumnFactory registers the add_silence_all_column SQL
// migration provider.
func NewAddSilenceAllColumnFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
	build := func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
		return newAddSilenceAllColumn(ctx, providerSettings, config, sqlstore, sqlschema)
	}
	return factory.NewProviderFactory(factory.MustNewName("add_silence_all_column"), build)
}
// newAddSilenceAllColumn wires the migration with its store, schema, and
// logger taken from the provider settings.
func newAddSilenceAllColumn(_ context.Context, settings factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) (SQLMigration, error) {
	migration := &addSilenceAllColumn{
		sqlstore:  sqlstore,
		sqlschema: sqlschema,
		logger:    settings.Logger,
	}
	return migration, nil
}
// Register adds this migration's Up and Down steps to the migration set.
// The result is returned directly: the original check-then-return-nil wrapper
// around the single call was redundant.
func (migration *addSilenceAllColumn) Register(migrations *migrate.Migrations) error {
	return migrations.Register(migration.Up, migration.Down)
}
// Up adds the silence_all column to planned_maintenance and backfills it for
// existing rows inside a single transaction.
//
// The column addition goes through the sqlschema operator, which may rebuild
// the table (ALTER-emulation style); rows of the dependent
// planned_maintenance_rule join table are therefore backed up before the
// change and re-inserted after it. NOTE(review): if the operator adds the
// column without recreating/clearing the join table, the unconditional
// re-insert below would duplicate its rows — confirm the operator's behavior
// for each supported dialect.
func (migration *addSilenceAllColumn) Up(ctx context.Context, db *bun.DB) error {
	table, _, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("planned_maintenance"))
	if err != nil {
		return err
	}

	// Idempotency: a previous run already added the column.
	for _, column := range table.Columns {
		if column.Name == "silence_all" {
			return nil
		}
	}

	// Snapshot the join table before touching the schema.
	var joinTableBackup []plannedMaintenanceRule
	err = db.NewSelect().
		Model(&joinTableBackup).
		Scan(ctx)
	if err != nil {
		return errors.NewInternalf(errors.CodeInternal, "failed to backup planned_maintenance_rule data: %v", err)
	}

	// Record which maintenance windows have explicit rules, so the backfill
	// can mark only the rule-less windows as silence-all.
	maintenanceIDsMap := make(map[string]bool)
	for _, record := range joinTableBackup {
		maintenanceIDsMap[record.PlannedMaintenanceID.StringValue()] = true
	}
	var maintenanceIDsWithRules []string
	for id := range maintenanceIDsMap {
		maintenanceIDsWithRules = append(maintenanceIDsWithRules, id)
	}

	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	// Rollback is a no-op after a successful Commit.
	defer func() {
		_ = tx.Rollback()
	}()

	// Add the silence_all column (NOT NULL, default false) via the schema
	// operator's generated statements.
	sqls := [][]byte{}
	column := &sqlschema.Column{
		Name:     "silence_all",
		DataType: sqlschema.DataTypeBoolean,
		Nullable: false,
		Default:  "false",
	}
	columnSQLs := migration.sqlschema.Operator().AddColumn(table, nil, column, nil)
	sqls = append(sqls, columnSQLs...)
	for _, sqlStmt := range sqls {
		if _, err := tx.ExecContext(ctx, string(sqlStmt)); err != nil {
			return err
		}
	}

	// Restore the join-table rows captured before the schema change.
	if len(joinTableBackup) > 0 {
		_, err = tx.NewInsert().
			Model(&joinTableBackup).
			Exec(ctx)
		if err != nil {
			return errors.NewInternalf(errors.CodeInternal, "failed to restore planned_maintenance_rule data: %v", err)
		}
	}

	err = migration.backfillSilenceAll(ctx, tx, maintenanceIDsWithRules)
	if err != nil {
		return err
	}

	if err := tx.Commit(); err != nil {
		return err
	}

	return nil
}
// backfillSilenceAll marks existing maintenance windows that have no
// associated rules as silence-all, preserving the previous "no rule ids
// means silence everything" behavior once the column exists.
func (migration *addSilenceAllColumn) backfillSilenceAll(ctx context.Context, tx bun.Tx, maintenanceIDsWithRules []string) error {
	// No window has explicit rules: every existing window silenced all.
	if len(maintenanceIDsWithRules) == 0 {
		_, err := tx.NewUpdate().
			Model((*plannedMaintenance)(nil)).
			Set("silence_all = ?", true).
			// Always-true predicate keeps this an explicit full-table
			// update; presumably needed because the query builder requires
			// a WHERE clause on updates — confirm against bun's behavior.
			Where("1 = 1").
			Exec(ctx)
		return err
	}

	// Otherwise, only windows without any rule association silenced all.
	_, err := tx.NewUpdate().
		Model((*plannedMaintenance)(nil)).
		Set("silence_all = ?", true).
		Where("id NOT IN (?)", bun.In(maintenanceIDsWithRules)).
		Exec(ctx)
	return err
}
// Down is intentionally a no-op: the silence_all column is left in place and
// the migration is treated as irreversible.
func (migration *addSilenceAllColumn) Down(ctx context.Context, db *bun.DB) error {
	return nil
}

View File

@@ -20,6 +20,7 @@ var (
NameNavShortcuts = Name{valuer.NewString("nav_shortcuts")}
NameLastSeenChangelogVersion = Name{valuer.NewString("last_seen_changelog_version")}
NameSpanDetailsPinnedAttributes = Name{valuer.NewString("span_details_pinned_attributes")}
NameSpanPercentileResourceAttributes = Name{valuer.NewString("span_percentile_resource_attributes")}
)
type Name struct{ valuer.String }
@@ -39,6 +40,7 @@ func NewName(name string) (Name, error) {
NameNavShortcuts.StringValue(),
NameLastSeenChangelogVersion.StringValue(),
NameSpanDetailsPinnedAttributes.StringValue(),
NameSpanPercentileResourceAttributes.StringValue(),
},
name,
)

View File

@@ -163,6 +163,15 @@ func NewAvailablePreference() map[Name]Preference {
AllowedValues: []string{},
Value: MustNewValue([]any{}, ValueTypeArray),
},
NameSpanPercentileResourceAttributes: {
Name: NameSpanPercentileResourceAttributes,
Description: "Additional resource attributes for span percentile filtering (beyond mandatory name and service.name).",
ValueType: ValueTypeArray,
DefaultValue: MustNewValue([]any{"deployment.environment"}, ValueTypeArray),
AllowedScopes: []Scope{ScopeUser},
AllowedValues: []string{},
Value: MustNewValue([]any{"deployment.environment"}, ValueTypeArray),
},
}
}

View File

@@ -24,6 +24,7 @@ type StorablePlannedMaintenance struct {
Description string `bun:"description,type:text"`
Schedule *Schedule `bun:"schedule,type:text,notnull"`
OrgID string `bun:"org_id,type:text"`
SilenceAll bool `bun:"silence_all,type:boolean"`
}
type GettablePlannedMaintenance struct {
@@ -38,6 +39,7 @@ type GettablePlannedMaintenance struct {
UpdatedBy string `json:"updatedBy"`
Status string `json:"status"`
Kind string `json:"kind"`
SilenceAll bool `json:"silenceAll"`
}
type StorablePlannedMaintenanceRule struct {
@@ -64,7 +66,7 @@ func (m *GettablePlannedMaintenance) ShouldSkip(ruleID string, now time.Time) bo
}
}
// If no alert ids, then skip all alerts
if len(m.RuleIDs) == 0 {
if m.SilenceAll {
found = true
}
@@ -295,6 +297,7 @@ func (m GettablePlannedMaintenance) MarshalJSON() ([]byte, error) {
UpdatedBy string `json:"updatedBy" db:"updated_by"`
Status string `json:"status"`
Kind string `json:"kind"`
SilenceAll bool `json:"silenceAll" db:"silence_all"`
}{
Id: m.Id,
Name: m.Name,
@@ -307,6 +310,7 @@ func (m GettablePlannedMaintenance) MarshalJSON() ([]byte, error) {
UpdatedBy: m.UpdatedBy,
Status: status,
Kind: kind,
SilenceAll: m.SilenceAll,
})
}
@@ -328,6 +332,7 @@ func (m *GettablePlannedMaintenanceRule) ConvertGettableMaintenanceRuleToGettabl
UpdatedAt: m.UpdatedAt,
CreatedBy: m.CreatedBy,
UpdatedBy: m.UpdatedBy,
SilenceAll: m.SilenceAll,
}
}

View File

@@ -21,6 +21,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "only-on-saturday",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "Europe/London",
Recurrence: &Recurrence{
@@ -38,6 +39,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "weekly-across-midnight-previous-day",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -55,6 +57,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "weekly-across-midnight-previous-day",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -72,6 +75,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "weekly-across-midnight-previous-day",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -89,6 +93,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "weekly-across-midnight-previous-day-not-in-repeaton",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -106,6 +111,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "daily-maintenance-across-midnight",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -122,6 +128,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "at-start-time-boundary",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -138,6 +145,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "at-end-time-boundary",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -154,6 +162,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "monthly-multi-day-duration",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -170,6 +179,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "weekly-multi-day-duration",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -187,6 +197,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "monthly-crosses-to-next-month",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -203,6 +214,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "timezone-offset-test",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "America/New_York", // UTC-5 or UTC-4 depending on DST
Recurrence: &Recurrence{
@@ -219,6 +231,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "daily-maintenance-time-outside-window",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -235,6 +248,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring-maintenance-with-past-end-date",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -252,6 +266,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "monthly-maintenance-spans-month-end",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -268,6 +283,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "weekly-empty-repeaton",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -285,6 +301,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "monthly-maintenance-february-fewer-days",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -300,6 +317,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "daily-maintenance-crosses-midnight",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -315,6 +333,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "monthly-maintenance-crosses-month-end",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -330,6 +349,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "monthly-maintenance-crosses-month-end-and-duration-is-2-days",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -345,6 +365,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "weekly-maintenance-crosses-midnight",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -361,6 +382,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "monthly-maintenance-crosses-month-end-and-duration-is-2-days",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -376,6 +398,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "daily-maintenance-crosses-midnight",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -391,6 +414,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "monthly-maintenance-crosses-month-end-and-duration-is-2-hours",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -406,6 +430,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "fixed planned maintenance start <= ts <= end",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
StartTime: time.Now().UTC().Add(-time.Hour),
@@ -418,6 +443,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "fixed planned maintenance start >= ts",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
StartTime: time.Now().UTC().Add(time.Hour),
@@ -430,6 +456,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "fixed planned maintenance ts < start",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
StartTime: time.Now().UTC().Add(time.Hour),
@@ -442,6 +469,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat sunday, saturday, weekly for 24 hours, in Us/Eastern timezone",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "US/Eastern",
Recurrence: &Recurrence{
@@ -458,6 +486,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat daily from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -473,6 +502,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat daily from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -488,6 +518,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat daily from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -503,6 +534,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -519,6 +551,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -535,6 +568,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -551,6 +585,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -567,6 +602,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -583,6 +619,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat monthly on 4th from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -598,6 +635,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat monthly on 4th from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -613,6 +651,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
{
name: "recurring maintenance, repeat monthly on 4th from 12:00 to 14:00",
maintenance: &GettablePlannedMaintenance{
SilenceAll: true,
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
@@ -634,3 +673,144 @@ func TestShouldSkipMaintenance(t *testing.T) {
}
}
}
// TestSilenceAllFunctionality verifies how the SilenceAll flag interacts with
// RuleIDs in GettablePlannedMaintenance.ShouldSkip:
//   - SilenceAll=true silences every rule inside the maintenance window,
//     regardless of whether RuleIDs is empty or the rule is listed in it.
//   - SilenceAll=false silences only rules explicitly listed in RuleIDs.
//   - Outside the maintenance window nothing is silenced, even with
//     SilenceAll=true.
func TestSilenceAllFunctionality(t *testing.T) {
	cases := []struct {
		name        string
		maintenance *GettablePlannedMaintenance
		ruleID      string
		ts          time.Time
		skip        bool
	}{
		{
			name: "SilenceAll=true with specific ruleIDs - should silence rule in list during maintenance window",
			maintenance: &GettablePlannedMaintenance{
				RuleIDs:    []string{"rule-1", "rule-2"},
				SilenceAll: true,
				Schedule: &Schedule{
					Timezone: "UTC",
					Recurrence: &Recurrence{
						StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
						Duration:   Duration(time.Hour * 2),
						RepeatType: RepeatTypeDaily,
					},
				},
			},
			ruleID: "rule-1",
			ts:     time.Date(2024, 1, 1, 12, 30, 0, 0, time.UTC),
			skip:   true,
		},
		{
			name: "SilenceAll=true with specific ruleIDs - should silence rule NOT in list during maintenance window",
			maintenance: &GettablePlannedMaintenance{
				RuleIDs:    []string{"rule-1", "rule-2"},
				SilenceAll: true,
				Schedule: &Schedule{
					Timezone: "UTC",
					Recurrence: &Recurrence{
						StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
						Duration:   Duration(time.Hour * 2),
						RepeatType: RepeatTypeDaily,
					},
				},
			},
			ruleID: "rule-3",
			ts:     time.Date(2024, 1, 1, 12, 30, 0, 0, time.UTC),
			skip:   true,
		},
		{
			name: "SilenceAll=false with specific ruleIDs - should silence rule in list during maintenance window",
			maintenance: &GettablePlannedMaintenance{
				RuleIDs:    []string{"rule-1", "rule-2"},
				SilenceAll: false,
				Schedule: &Schedule{
					Timezone: "UTC",
					Recurrence: &Recurrence{
						StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
						Duration:   Duration(time.Hour * 2),
						RepeatType: RepeatTypeDaily,
					},
				},
			},
			ruleID: "rule-1",
			ts:     time.Date(2024, 1, 1, 12, 30, 0, 0, time.UTC),
			skip:   true,
		},
		{
			name: "SilenceAll=true with empty ruleIDs - should silence all rules during maintenance window",
			maintenance: &GettablePlannedMaintenance{
				RuleIDs:    []string{},
				SilenceAll: true,
				Schedule: &Schedule{
					Timezone: "UTC",
					Recurrence: &Recurrence{
						StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
						Duration:   Duration(time.Hour * 2),
						RepeatType: RepeatTypeDaily,
					},
				},
			},
			ruleID: "any-rule",
			ts:     time.Date(2024, 1, 1, 12, 30, 0, 0, time.UTC),
			skip:   true,
		},
		{
			name: "SilenceAll=false with empty ruleIDs - should not silence any rules",
			maintenance: &GettablePlannedMaintenance{
				RuleIDs:    []string{},
				SilenceAll: false,
				Schedule: &Schedule{
					Timezone: "UTC",
					Recurrence: &Recurrence{
						StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
						Duration:   Duration(time.Hour * 2),
						RepeatType: RepeatTypeDaily,
					},
				},
			},
			ruleID: "any-rule",
			ts:     time.Date(2024, 1, 1, 12, 30, 0, 0, time.UTC),
			skip:   false,
		},
		{
			name: "SilenceAll=true with fixed maintenance window - should not skip outside window",
			maintenance: &GettablePlannedMaintenance{
				RuleIDs:    []string{},
				SilenceAll: true,
				Schedule: &Schedule{
					Timezone:  "UTC",
					StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
					EndTime:   time.Date(2024, 1, 1, 14, 0, 0, 0, time.UTC),
				},
			},
			ruleID: "any-rule",
			ts:     time.Date(2024, 1, 1, 15, 0, 0, 0, time.UTC),
			skip:   false,
		},
		{
			name: "SilenceAll=false with nil ruleIDs - should not silence any rules",
			maintenance: &GettablePlannedMaintenance{
				RuleIDs:    nil,
				SilenceAll: false,
				Schedule: &Schedule{
					Timezone: "UTC",
					Recurrence: &Recurrence{
						StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
						Duration:   Duration(time.Hour * 2),
						RepeatType: RepeatTypeDaily,
					},
				},
			},
			ruleID: "any-rule",
			ts:     time.Date(2024, 1, 1, 12, 30, 0, 0, time.UTC),
			skip:   false,
		},
	}

	// Run each case as a named subtest so failures identify the scenario
	// directly and cases can be targeted with `go test -run`.
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			if got := c.maintenance.ShouldSkip(c.ruleID, c.ts); got != c.skip {
				t.Errorf("ShouldSkip(%q, %v) = %v, want %v", c.ruleID, c.ts, got, c.skip)
			}
		})
	}
}

View File

@@ -0,0 +1,17 @@
package spanpercentiletypes
// SpanPercentileResponse is the API payload returned for a span percentile
// query, pairing the percentile statistics with the queried span's position
// within them.
type SpanPercentileResponse struct {
	// Percentiles holds the p50/p90/p99 duration statistics.
	Percentiles PercentileStats `json:"percentiles"`
	// Position describes where the queried span falls relative to Percentiles.
	Position PercentilePosition `json:"position"`
}
// PercentileStats carries the 50th, 90th and 99th percentile values.
// Units are not evident from this file — presumably span duration in
// nanoseconds, matching SpanPercentileRequest.DurationNano; confirm with the
// producer.
type PercentileStats struct {
	P50 float64 `json:"p50"`
	P90 float64 `json:"p90"`
	P99 float64 `json:"p99"`
}
// PercentilePosition reports the percentile at which a value falls, together
// with a textual description intended for display.
type PercentilePosition struct {
	// Percentile is the computed percentile rank for the queried span.
	Percentile float64 `json:"percentile"`
	// Description is a human-readable summary of the position.
	Description string `json:"description"`
}

View File

@@ -0,0 +1,43 @@
package spanpercentiletypes
import (
"github.com/SigNoz/signoz/pkg/errors"
)
// SpanPercentileRequest identifies a span (by name, service, and optional
// resource attributes) and a time window over which percentile statistics
// are requested.
type SpanPercentileRequest struct {
	// DurationNano is the span's duration in nanoseconds. Note the JSON
	// field is named "spanDuration", not "durationNano".
	DurationNano int64 `json:"spanDuration"`
	// Name is the span name; required (see Validate).
	Name string `json:"name"`
	// ServiceName is the owning service; required (see Validate).
	ServiceName string `json:"serviceName"`
	// ResourceAttributes optionally narrows the comparison set; keys and
	// values must be non-empty when present (see Validate).
	ResourceAttributes map[string]string `json:"resourceAttributes"`
	// Start and End bound the query window; Start must be strictly less
	// than End. Epoch units (ms vs ns) are not evident from this file —
	// confirm against the caller.
	Start uint64 `json:"start"`
	End   uint64 `json:"end"`
}
// Validate checks that all mandatory fields are set and mutually consistent.
// It returns a TypeInvalidInput error naming the offending JSON field, or nil
// when the request is valid.
//
// Fix: the original messages used snake_case names ("service_name",
// "duration_nano") that do not exist in the JSON API — the struct tags are
// "serviceName" and "spanDuration" — so clients were told about fields they
// never sent. Messages now use the actual JSON field names.
func (req *SpanPercentileRequest) Validate() error {
	if req.Name == "" {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "name is required")
	}
	if req.ServiceName == "" {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "serviceName is required")
	}
	if req.DurationNano <= 0 {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "spanDuration must be greater than 0")
	}
	// Start must be strictly before End; equal bounds describe an empty window.
	if req.Start >= req.End {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "start time must be before end time")
	}
	// Reject blank keys/values so filters never silently match nothing.
	for key, val := range req.ResourceAttributes {
		if key == "" {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "resource attribute key cannot be empty")
		}
		if val == "" {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "resource attribute value cannot be empty")
		}
	}
	return nil
}