Compare commits

16 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | d52b54aeb3 |  |
|  | c8608c18ae |  |
|  | cde99ba1a0 |  |
|  | a7e9d442b7 |  |
|  | 0b0d622f6b |  |
|  | 127e760b00 |  |
|  | 63e333de0d |  |
|  | af57d11b6a |  |
|  | 8d61ee338b |  |
|  | 5d9dc17645 |  |
|  | 5288022ffd |  |
|  | cdc18af4a2 |  |
|  | 918a90e3c1 |  |
|  | e8ce7b22f5 |  |
|  | b752fdd30a |  |
|  | d73b7fadab |  |
@@ -1,6 +1,6 @@
 services:
   clickhouse:
-    image: clickhouse/clickhouse-server:25.10.1
+    image: clickhouse/clickhouse-server:25.5.6
     container_name: clickhouse
     volumes:
      - ${PWD}/fs/etc/clickhouse-server/config.d/config.xml:/etc/clickhouse-server/config.d/config.xml

1  .gitignore  (vendored)
@@ -49,7 +49,6 @@ ee/query-service/tests/test-deploy/data/
# local data
*.backup
*.db
**/db
/deploy/docker/clickhouse-setup/data/
/deploy/docker-swarm/clickhouse-setup/data/
bin/

6  Makefile
@@ -72,12 +72,6 @@ devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhou
 	@echo " - ClickHouse: http://localhost:8123"
 	@echo " - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"

-.PHONY: devenv-clickhouse-clean
-devenv-clickhouse-clean: ## Clean all ClickHouse data from filesystem
-	@echo "Removing ClickHouse data..."
-	@rm -rf .devenv/docker/clickhouse/fs/tmp/*
-	@echo "ClickHouse data cleaned!"
-
 ##############################################################
 # go commands
 ##############################################################

@@ -11,7 +11,7 @@ x-common: &common
     max-file: "3"
 x-clickhouse-defaults: &clickhouse-defaults
   !!merge <<: *common
-  image: clickhouse/clickhouse-server:25.10.1
+  image: clickhouse/clickhouse-server:25.5.6
   tty: true
   deploy:
     labels:
@@ -65,7 +65,7 @@ x-db-depend: &db-depend
 services:
   init-clickhouse:
     !!merge <<: *common
-    image: clickhouse/clickhouse-server:25.10.1
+    image: clickhouse/clickhouse-server:25.5.6
     command:
       - bash
       - -c
@@ -176,7 +176,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.103.0
+    image: signoz/signoz:v0.103.1
     command:
       - --config=/root/config/prometheus.yml
     ports:

@@ -11,7 +11,7 @@ x-common: &common
     max-file: "3"
 x-clickhouse-defaults: &clickhouse-defaults
   !!merge <<: *common
-  image: clickhouse/clickhouse-server:25.10.1
+  image: clickhouse/clickhouse-server:25.5.6
   tty: true
   deploy:
     labels:
@@ -62,7 +62,7 @@ x-db-depend: &db-depend
 services:
   init-clickhouse:
     !!merge <<: *common
-    image: clickhouse/clickhouse-server:25.10.1
+    image: clickhouse/clickhouse-server:25.5.6
     command:
       - bash
       - -c
@@ -117,7 +117,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.103.0
+    image: signoz/signoz:v0.103.1
     command:
       - --config=/root/config/prometheus.yml
     ports:

@@ -10,7 +10,7 @@ x-common: &common
 x-clickhouse-defaults: &clickhouse-defaults
   !!merge <<: *common
   # addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
-  image: clickhouse/clickhouse-server:25.10.1
+  image: clickhouse/clickhouse-server:25.5.6
   tty: true
   labels:
     signoz.io/scrape: "true"
@@ -67,7 +67,7 @@ x-db-depend: &db-depend
 services:
   init-clickhouse:
     !!merge <<: *common
-    image: clickhouse/clickhouse-server:25.10.1
+    image: clickhouse/clickhouse-server:25.5.6
     container_name: signoz-init-clickhouse
     command:
       - bash
@@ -179,7 +179,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.103.0}
+    image: signoz/signoz:${VERSION:-v0.103.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml

@@ -9,7 +9,7 @@ x-common: &common
     max-file: "3"
 x-clickhouse-defaults: &clickhouse-defaults
   !!merge <<: *common
-  image: clickhouse/clickhouse-server:25.10.1
+  image: clickhouse/clickhouse-server:25.5.6
   tty: true
   labels:
     signoz.io/scrape: "true"
@@ -62,7 +62,7 @@ x-db-depend: &db-depend
 services:
   init-clickhouse:
     !!merge <<: *common
-    image: clickhouse/clickhouse-server:25.10.1
+    image: clickhouse/clickhouse-server:25.5.6
     container_name: signoz-init-clickhouse
     command:
       - bash
@@ -111,7 +111,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.103.0}
+    image: signoz/signoz:${VERSION:-v0.103.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml

@@ -129,6 +129,12 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
 	return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
 }

+func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
+	return &authtypes.AuthNProviderInfo{
+		RelayStatePath: nil,
+	}
+}
+
 func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) {
 	if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" {
 		ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias)

@@ -99,6 +99,14 @@ func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*aut
 	return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
 }

+func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
+	state := authtypes.NewState(&url.URL{Path: "login"}, authDomain.StorableAuthDomain().ID).URL.String()
+
+	return &authtypes.AuthNProviderInfo{
+		RelayStatePath: &state,
+	}
+}
+
 func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) {
 	certStore, err := a.getCertificateStore(authDomain)
 	if err != nil {

@@ -9,7 +9,6 @@ var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
var BodyJSONQueryEnabled = GetOrDefaultEnv("BODY_JSON_QUERY_ENABLED", "false") == "true"

func GetOrDefaultEnv(key string, fallback string) string {
	v := os.Getenv(key)

@@ -1,5 +1,5 @@
 module.exports = {
-  ignorePatterns: ['src/parser/*.ts'],
+  ignorePatterns: ['src/parser/*.ts', 'scripts/update-registry.js'],
   env: {
     browser: true,
     es2021: true,

@@ -14,7 +14,7 @@
   "jest": "jest",
   "jest:coverage": "jest --coverage",
   "jest:watch": "jest --watch",
-  "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
+  "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure) && node scripts/update-registry.js",
   "husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
   "commitlint": "commitlint --edit $1",
   "test": "jest",

1  frontend/public/Logos/amazon-bedrock.svg  (Normal file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" fill="currentColor" fill-rule="evenodd" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>AWS</title><path d="M6.763 11.212q.002.446.088.71c.064.176.144.368.256.576.04.063.056.127.056.183q.002.12-.152.24l-.503.335a.4.4 0 0 1-.208.072q-.12-.002-.239-.112a2.5 2.5 0 0 1-.287-.375 6 6 0 0 1-.248-.471q-.934 1.101-2.347 1.101c-.67 0-1.205-.191-1.596-.574-.39-.384-.59-.894-.59-1.533 0-.678.24-1.23.726-1.644.487-.415 1.133-.623 1.955-.623.272 0 .551.024.846.064.296.04.6.104.918.176v-.583q-.001-.908-.375-1.277c-.255-.248-.686-.367-1.3-.367-.28 0-.568.031-.863.103s-.583.16-.862.272a2 2 0 0 1-.28.104.5.5 0 0 1-.127.023q-.168.002-.168-.247v-.391c0-.128.016-.224.056-.28a.6.6 0 0 1 .224-.167 4.6 4.6 0 0 1 1.005-.36 4.8 4.8 0 0 1 1.246-.151c.95 0 1.644.216 2.091.647q.661.646.662 1.963v2.586zm-3.24 1.214c.263 0 .534-.048.822-.144a1.8 1.8 0 0 0 .758-.51 1.3 1.3 0 0 0 .272-.512c.047-.191.08-.423.08-.694v-.335a7 7 0 0 0-.735-.136 6 6 0 0 0-.75-.048c-.535 0-.926.104-1.19.32-.263.215-.39.518-.39.917 0 .375.095.655.295.846.191.2.47.296.838.296m6.41.862c-.144 0-.24-.024-.304-.08-.064-.048-.12-.16-.168-.311L7.586 6.726a1.4 1.4 0 0 1-.072-.32c0-.128.064-.2.191-.2h.783q.227-.001.31.08c.065.048.113.16.16.312l1.342 5.284 1.245-5.284q.058-.24.151-.312a.55.55 0 0 1 .32-.08h.638c.152 0 .256.025.32.08.063.048.12.16.151.312l1.261 5.348 1.381-5.348q.074-.24.16-.312a.52.52 0 0 1 .311-.08h.743c.127 0 .2.065.2.2 0 .04-.009.08-.017.128a1 1 0 0 1-.056.2l-1.923 6.17q-.072.24-.168.311a.5.5 0 0 1-.303.08h-.687c-.15 0-.255-.024-.32-.08-.063-.056-.119-.16-.15-.32L12.32 7.747l-1.23 5.14c-.04.16-.087.264-.15.32-.065.056-.177.08-.32.08zm10.256.215c-.415 0-.83-.048-1.229-.143-.399-.096-.71-.2-.918-.32-.128-.071-.215-.151-.247-.223a.6.6 0 0 1-.048-.224v-.407c0-.167.064-.247.183-.247q.072 0 .144.024c.048.016.12.048.2.08q.408.181.878.279c.32.064.63.096.95.096.502 0 .894-.088 1.165-.264a.86.86 0 0 0 .415-.758.78.78 0 0 0-.215-.559c-.144-.151-.416-.287-.807-.415l-1.157-.36c-.583-.183-1.014-.454-1.277-.813a1.9 1.9 0 0 1-.4-1.158q0-.502.216-.886c.144-.255.335-.479.575-.654.24-.184.51-.32.83-.415.32-.096.655-.136 1.006-.136.175 0 .36.008.535.032.183.024.35.056.518.088q.24.058.455.127.216.072.336.144a.7.7 0 0 1 .24.2.43.43 0 0 1 .071.263v.375q-.002.254-.184.256a.8.8 0 0 1-.303-.096 3.65 3.65 0 0 0-1.532-.311c-.455 0-.815.071-1.062.223s-.375.383-.375.71c0 .224.08.416.24.567.16.152.454.304.877.44l1.134.358c.574.184.99.44 1.237.767s.367.702.367 1.117c0 .343-.072.655-.207.926a2.2 2.2 0 0 1-.583.703c-.248.2-.543.343-.886.447-.36.111-.734.167-1.142.167"/><path fill="#f90" d="M.378 15.475c3.384 1.963 7.56 3.153 11.877 3.153 2.914 0 6.114-.607 9.06-1.852.44-.2.814.287.383.607-2.626 1.94-6.442 2.969-9.722 2.969-4.598 0-8.74-1.7-11.87-4.526-.247-.223-.024-.527.272-.351m23.531-.2c.287.36-.08 2.826-1.485 4.007-.215.184-.423.088-.327-.151l.175-.439c.343-.88.802-2.198.52-2.555-.336-.43-2.22-.207-3.074-.103-.255.032-.295-.192-.063-.36 1.5-1.053 3.967-.75 4.254-.399"/></svg>

1  frontend/public/Logos/autogen.svg  (Normal file)

File diff suppressed because one or more lines are too long

1  frontend/public/Logos/azure-openai.svg  (Normal file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>Azure</title><path fill="url(#a)" d="M7.242 1.613A1.11 1.11 0 0 1 8.295.857h6.977L8.03 22.316a1.11 1.11 0 0 1-1.052.755h-5.43a1.11 1.11 0 0 1-1.053-1.466z"/><path fill="#0078d4" d="M18.397 15.296H7.4a.51.51 0 0 0-.347.882l7.066 6.595c.206.192.477.298.758.298h6.226z"/><path fill="url(#b)" d="M15.272.857H7.497L0 23.071h7.775l1.596-4.73 5.068 4.73h6.665l-2.707-7.775h-7.998z"/><path fill="url(#c)" d="M17.193 1.613a1.11 1.11 0 0 0-1.052-.756h-7.81.035c.477 0 .9.304 1.052.756l6.748 19.992a1.11 1.11 0 0 1-1.052 1.466h-.12 7.895a1.11 1.11 0 0 0 1.052-1.466z"/><defs><linearGradient id="a" x1="8.247" x2="1.002" y1="1.626" y2="23.03" gradientUnits="userSpaceOnUse"><stop stop-color="#114a8b"/><stop offset="1" stop-color="#0669bc"/></linearGradient><linearGradient id="b" x1="14.042" x2="12.324" y1="15.302" y2="15.888" gradientUnits="userSpaceOnUse"><stop stop-opacity=".3"/><stop offset=".071" stop-opacity=".2"/><stop offset=".321" stop-opacity=".1"/><stop offset=".623" stop-opacity=".05"/><stop offset="1" stop-opacity="0"/></linearGradient><linearGradient id="c" x1="12.841" x2="20.793" y1="1.626" y2="22.814" gradientUnits="userSpaceOnUse"><stop stop-color="#3ccbf4"/><stop offset="1" stop-color="#2892df"/></linearGradient></defs></svg>

1  frontend/public/Logos/crew-ai.svg  (Normal file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>CrewAI</title><path fill="#461816" d="M19.41 10.783a2.75 2.75 0 0 1 2.471 1.355c.483.806.622 1.772.385 2.68l-.136.522a10 10 0 0 1-3.156 5.058c-.605.517-1.283 1.062-2.083 1.524l-.028.017c-.402.232-.884.511-1.398.756-1.19.602-2.475.997-3.798 1.167-.854.111-1.716.155-2.577.132h-.018a8.6 8.6 0 0 1-5.046-1.87l-.012-.01-.012-.01A8.02 8.02 0 0 1 1.22 17.42a10.9 10.9 0 0 1-.102-3.779A15.6 15.6 0 0 1 2.88 8.4a21.8 21.8 0 0 1 2.432-3.678 15.4 15.4 0 0 1 3.56-3.182A10 10 0 0 1 12.44.104h.004l.003-.002c2.057-.384 3.743.374 5.024 1.26a8.3 8.3 0 0 1 2.395 2.513l.024.04.023.042a5.47 5.47 0 0 1 .508 4.012c-.239.97-.577 1.914-1.01 2.814z"/><path fill="#fff" d="M18.861 13.165a.748.748 0 0 1 1.256.031c.199.332.256.73.159 1.103l-.137.522a7.94 7.94 0 0 1-2.504 4.014c-.572.49-1.138.939-1.774 1.306-.427.247-.857.496-1.303.707a9.6 9.6 0 0 1-3.155.973 14.3 14.3 0 0 1-2.257.116 6.53 6.53 0 0 1-3.837-1.422 5.97 5.97 0 0 1-2.071-3.494 8.9 8.9 0 0 1-.085-3.08 13.6 13.6 0 0 1 1.54-4.568 19.7 19.7 0 0 1 2.212-3.348 13.4 13.4 0 0 1 3.088-2.76 7.9 7.9 0 0 1 2.832-1.14c1.307-.245 2.434.207 3.481.933a6.2 6.2 0 0 1 1.806 1.892c.423.767.536 1.668.314 2.515a12.4 12.4 0 0 1-.99 2.67l-.223.497q-.48 1.07-.97 2.137a.76.76 0 0 1-.97.467 3.39 3.39 0 0 1-2.283-2.49c-.095-.83.04-1.669.39-2.426.288-.746.61-1.477.933-2.208l.248-.563a.53.53 0 0 0-.204-.742 2.35 2.35 0 0 0-1.2.702 25 25 0 0 0-1.614 1.767 21.6 21.6 0 0 0-2.619 4.184 7.6 7.6 0 0 0-.816 2.753 7 7 0 0 0 .07 2.219 2.055 2.055 0 0 0 1.934 1.715c1.801.1 3.59-.363 5.116-1.328a19 19 0 0 0 1.675-1.294c.752-.71 1.376-1.519 1.958-2.36"/></svg>

1  frontend/public/Logos/litellm.svg  (Normal file)

File diff suppressed because one or more lines are too long

1  frontend/public/Logos/pydantic-ai.svg  (Normal file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>PydanticAI</title><path fill="#e72564" d="M13.223 22.86c-.605.83-1.844.83-2.448 0L5.74 15.944a1.514 1.514 0 0 1 .73-2.322l5.035-1.738c.32-.11.668-.11.988 0l5.035 1.738c.962.332 1.329 1.5.73 2.322zm-1.224-1.259 4.688-6.439-4.688-1.618-4.688 1.618L12 21.602z"/><path fill="#e723a0" d="M23.71 13.463c.604.832.221 2.01-.756 2.328l-8.133 2.652a1.514 1.514 0 0 1-1.983-1.412l-.097-5.326c-.006-.338.101-.67.305-.94l3.209-4.25a1.514 1.514 0 0 1 2.434.022l5.022 6.926zm-1.574.775L17.46 7.79l-2.988 3.958.09 4.959z"/><path fill="#e520e9" d="M18.016.591a1.514 1.514 0 0 1 1.98 1.44l.009 8.554a1.514 1.514 0 0 1-1.956 1.45l-5.095-1.554a1.5 1.5 0 0 1-.8-.58l-3.05-4.366a1.514 1.514 0 0 1 .774-2.308zm.25 1.738L10.69 4.783l2.841 4.065 4.744 1.446-.008-7.965z"/><path fill="#e520e9" d="M5.99.595a1.514 1.514 0 0 0-1.98 1.44L4 10.588a1.514 1.514 0 0 0 1.956 1.45l5.095-1.554c.323-.098.605-.303.799-.58l3.052-4.366a1.514 1.514 0 0 0-.775-2.308zm-.25 1.738 7.577 2.454-2.842 4.065-4.743 1.446.007-7.965z"/><path fill="#e723a0" d="M.29 13.461a1.514 1.514 0 0 0 .756 2.329l8.133 2.651a1.514 1.514 0 0 0 1.983-1.412l.097-5.325a1.5 1.5 0 0 0-.305-.94L7.745 6.513a1.514 1.514 0 0 0-2.434.023L.289 13.461zm1.574.776L6.54 7.788l2.988 3.959-.09 4.958z"/><path fill="#ff96d1" d="m16.942 17.751 1.316-1.806q.178-.248.245-.523l-2.63.858-1.627 2.235a1.5 1.5 0 0 0 .575-.072zm-4.196-5.78.033 1.842 1.742.602-.034-1.843-1.741-.6zm7.257-3.622-1.314-1.812a1.5 1.5 0 0 0-.419-.393l.003 2.767 1.624 2.24q.107-.261.108-.566zm-5.038 2.746-1.762-.537 1.11-1.471 1.762.537zm-2.961-1.41 1.056-1.51-1.056-1.51-1.056 1.51zM9.368 3.509c.145-.122.316-.219.51-.282l2.12-.686 2.13.69c.191.062.36.157.503.276l-2.634.853zm1.433 7.053L9.691 9.09l-1.762.537 1.11 1.47 1.762-.537zm-6.696.584L5.733 8.9l.003-2.763c-.16.1-.305.232-.425.398L4.003 8.339l-.002 2.25q.002.299.104.557m7.149.824-1.741.601-.034 1.843 1.742-.601zM9.75 18.513l-1.628-2.237-2.629-.857q.068.276.247.525l1.313 1.804 2.126.693c.192.062.385.085.571.072"/></svg>

50  frontend/scripts/update-registry.js  (Normal file)

@@ -0,0 +1,50 @@
/* eslint-disable @typescript-eslint/no-var-requires, import/no-dynamic-require, simple-import-sort/imports, simple-import-sort/exports */
const fs = require('fs');
const path = require('path');

// 1. Define paths
const packageJsonPath = path.resolve(__dirname, '../package.json');
const registryPath = path.resolve(
  __dirname,
  '../src/auto-import-registry.d.ts',
);

// 2. Read package.json
const packageJson = require(packageJsonPath);

// 3. Combine dependencies and devDependencies
const allDeps = {
  ...packageJson.dependencies,
  ...packageJson.devDependencies,
};

// 4. Filter for @signozhq packages
const signozPackages = Object.keys(allDeps).filter((dep) =>
  dep.startsWith('@signozhq/'),
);

// 5. Generate file content
const fileContent = `// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------

${signozPackages.map((pkg) => `import '${pkg}';`).join('\n')}
`;

// 6. Write the file
try {
  fs.writeFileSync(registryPath, fileContent);
  console.log(
    `✅ Auto-import registry updated with ${signozPackages.length} @signozhq packages.`,
  );
} catch (err) {
  console.error('❌ Failed to update auto-import registry:', err);
}

23  frontend/src/auto-import-registry.d.ts  (vendored, Normal file)

@@ -0,0 +1,23 @@
// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------

import '@signozhq/badge';
import '@signozhq/button';
import '@signozhq/calendar';
import '@signozhq/callout';
import '@signozhq/design-tokens';
import '@signozhq/input';
import '@signozhq/popover';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/table';
import '@signozhq/tooltip';

@@ -1,5 +1,6 @@
 /* eslint-disable sonarjs/no-duplicate-string */
-import { getYAxisFormattedValue, PrecisionOptionsEnum } from '../yAxisConfig';
+import { PrecisionOptionsEnum } from '../types';
+import { getYAxisFormattedValue } from '../yAxisConfig';

 const testFullPrecisionGetYAxisFormattedValue = (
   value: string,

@@ -78,3 +78,18 @@ export interface ITimeRange {
 	minTime: number | null;
 	maxTime: number | null;
 }
+
+export const DEFAULT_SIGNIFICANT_DIGITS = 15;
+
+// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
+export const MAX_DECIMALS = 15;
+
+export enum PrecisionOptionsEnum {
+	ZERO = 0,
+	ONE = 1,
+	TWO = 2,
+	THREE = 3,
+	FOUR = 4,
+	FULL = 'full',
+}
+export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;

@@ -16,8 +16,12 @@ import {
 } from './Plugin/IntersectionCursor';
 import {
   CustomChartOptions,
   DEFAULT_SIGNIFICANT_DIGITS,
   GraphOnClickHandler,
   IAxisTimeConfig,
   MAX_DECIMALS,
   PrecisionOption,
   PrecisionOptionsEnum,
   StaticLineProps,
 } from './types';
 import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';

@@ -242,3 +246,68 @@ declare module 'chart.js' {
 		custom: TooltipPositionerFunction<ChartType>;
 	}
 }
+
+/**
+ * Formats a number for display, preserving leading zeros after the decimal point
+ * and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
+ * It avoids scientific notation and removes unnecessary trailing zeros.
+ *
+ * @example
+ * formatDecimalWithLeadingZeros(1.2345); // "1.2345"
+ * formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
+ * formatDecimalWithLeadingZeros(5.0); // "5"
+ *
+ * @param value The number to format.
+ * @returns The formatted string.
+ */
+export const formatDecimalWithLeadingZeros = (
+	value: number,
+	precision: PrecisionOption,
+): string => {
+	if (value === 0) {
+		return '0';
+	}
+
+	// Use toLocaleString to get a full decimal representation without scientific notation.
+	const numStr = value.toLocaleString('en-US', {
+		useGrouping: false,
+		maximumFractionDigits: 20,
+	});
+
+	const [integerPart, decimalPart = ''] = numStr.split('.');
+
+	// If there's no decimal part, the integer part is the result.
+	if (!decimalPart) {
+		return integerPart;
+	}
+
+	// Find the index of the first non-zero digit in the decimal part.
+	const firstNonZeroIndex = decimalPart.search(/[^0]/);
+
+	// If the decimal part consists only of zeros, return just the integer part.
+	if (firstNonZeroIndex === -1) {
+		return integerPart;
+	}
+
+	// Determine the number of decimals to keep: leading zeros + up to N significant digits.
+	const significantDigits =
+		precision === PrecisionOptionsEnum.FULL
+			? DEFAULT_SIGNIFICANT_DIGITS
+			: precision;
+	const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
+
+	// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
+	const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
+	const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
+
+	// If precision is 0, we drop the decimal part entirely.
+	if (precision === 0) {
+		return integerPart;
+	}
+
+	// Remove any trailing zeros from the result to keep it clean.
+	const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
+
+	// Return the integer part, or the integer and decimal parts combined.
+	return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
+};
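
The helper added above is easiest to read through a couple of concrete calls. A minimal sketch, not part of this compare; the expected strings are worked out by hand from the function body, so treat them as illustrative rather than authoritative:

```ts
import { PrecisionOptionsEnum } from './types';
import { formatDecimalWithLeadingZeros } from './utils';

// Leading zeros after the decimal point are kept; `precision` counts
// significant digits after the first non-zero decimal digit.
formatDecimalWithLeadingZeros(0.000123456789, 2); // '0.00012'
formatDecimalWithLeadingZeros(0.000123456789, PrecisionOptionsEnum.FULL); // '0.000123456789'
formatDecimalWithLeadingZeros(5.0, 4); // '5' (no decimal part, nothing to trim)
formatDecimalWithLeadingZeros(1.2345, 0); // '1' (precision 0 drops the decimal part)
```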

@@ -1,86 +1,17 @@
 /* eslint-disable sonarjs/cognitive-complexity */
 import { formattedValueToString, getValueFormat } from '@grafana/data';
 import * as Sentry from '@sentry/react';
 import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
 import { isUniversalUnit } from 'components/YAxisUnitSelector/utils';
 import { isNaN } from 'lodash-es';

-const DEFAULT_SIGNIFICANT_DIGITS = 15;
-// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
-const MAX_DECIMALS = 15;
-
-export enum PrecisionOptionsEnum {
-	ZERO = 0,
-	ONE = 1,
-	TWO = 2,
-	THREE = 3,
-	FOUR = 4,
-	FULL = 'full',
-}
-export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;
-
-/**
- * Formats a number for display, preserving leading zeros after the decimal point
- * and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
- * It avoids scientific notation and removes unnecessary trailing zeros.
- *
- * @example
- * formatDecimalWithLeadingZeros(1.2345); // "1.2345"
- * formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
- * formatDecimalWithLeadingZeros(5.0); // "5"
- *
- * @param value The number to format.
- * @returns The formatted string.
- */
-const formatDecimalWithLeadingZeros = (
-	value: number,
-	precision: PrecisionOption,
-): string => {
-	if (value === 0) {
-		return '0';
-	}
-
-	// Use toLocaleString to get a full decimal representation without scientific notation.
-	const numStr = value.toLocaleString('en-US', {
-		useGrouping: false,
-		maximumFractionDigits: 20,
-	});
-
-	const [integerPart, decimalPart = ''] = numStr.split('.');
-
-	// If there's no decimal part, the integer part is the result.
-	if (!decimalPart) {
-		return integerPart;
-	}
-
-	// Find the index of the first non-zero digit in the decimal part.
-	const firstNonZeroIndex = decimalPart.search(/[^0]/);
-
-	// If the decimal part consists only of zeros, return just the integer part.
-	if (firstNonZeroIndex === -1) {
-		return integerPart;
-	}
-
-	// Determine the number of decimals to keep: leading zeros + up to N significant digits.
-	const significantDigits =
-		precision === PrecisionOptionsEnum.FULL
-			? DEFAULT_SIGNIFICANT_DIGITS
-			: precision;
-	const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
-
-	// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
-	const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
-	const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
-
-	// If precision is 0, we drop the decimal part entirely.
-	if (precision === 0) {
-		return integerPart;
-	}
-
-	// Remove any trailing zeros from the result to keep it clean.
-	const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
-
-	// Return the integer part, or the integer and decimal parts combined.
-	return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
-};
+import { formatUniversalUnit } from '../YAxisUnitSelector/formatter';
+import {
+	DEFAULT_SIGNIFICANT_DIGITS,
+	PrecisionOption,
+	PrecisionOptionsEnum,
+} from './types';
+import { formatDecimalWithLeadingZeros } from './utils';

 /**
  * Formats a Y-axis value based on a given format string.
@@ -126,6 +57,17 @@ export const getYAxisFormattedValue = (
 		return formatDecimalWithLeadingZeros(numValue, precision);
 	}

+	// Separate logic for universal units
+	if (format && isUniversalUnit(format)) {
+		const decimals = computeDecimals();
+		return formatUniversalUnit(
+			numValue,
+			format as UniversalYAxisUnit,
+			precision,
+			decimals,
+		);
+	}
+
 	const formatter = getValueFormat(format);
 	const formattedValue = formatter(numValue, computeDecimals(), undefined);
 	if (formattedValue.text && formattedValue.text.includes('.')) {
@@ -134,6 +76,7 @@
 			precision,
 		);
 	}

 		return formattedValueToString(formattedValue);
 	} catch (error) {
 		Sentry.captureEvent({
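
With the change above, getYAxisFormattedValue routes universal units through formatUniversalUnit instead of Grafana's getValueFormat. A minimal sketch of calling the formatter directly; the import path is assumed, and the expected string is taken from the formatter tests added further down in this compare:

```ts
import { formatUniversalUnit } from 'components/YAxisUnitSelector/formatter';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';

// Binary byte units scale at 1024, so 1024 KiB rolls over to the next unit.
formatUniversalUnit(1024, UniversalYAxisUnit.KIBIBYTES); // '1 MiB'
```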

@@ -3,9 +3,9 @@ import './styles.scss';
 import { Select } from 'antd';
 import { DefaultOptionType } from 'antd/es/select';

-import { UniversalYAxisUnitMappings, Y_AXIS_CATEGORIES } from './constants';
+import { UniversalYAxisUnitMappings } from './constants';
 import { UniversalYAxisUnit, YAxisUnitSelectorProps } from './types';
-import { mapMetricUnitToUniversalUnit } from './utils';
+import { getYAxisCategories, mapMetricUnitToUniversalUnit } from './utils';

 function YAxisUnitSelector({
 	value,
@@ -13,6 +13,7 @@ function YAxisUnitSelector({
 	placeholder = 'Please select a unit',
 	loading = false,
 	'data-testid': dataTestId,
+	source,
 }: YAxisUnitSelectorProps): JSX.Element {
 	const universalUnit = mapMetricUnitToUniversalUnit(value);
@@ -37,6 +38,8 @@ function YAxisUnitSelector({
 		return aliases.some((alias) => alias.toLowerCase().includes(search));
 	};

+	const categories = getYAxisCategories(source);
+
 	return (
 		<div className="y-axis-unit-selector-component">
 			<Select
@@ -48,7 +51,7 @@ function YAxisUnitSelector({
 				loading={loading}
 				data-testid={dataTestId}
 			>
-				{Y_AXIS_CATEGORIES.map((category) => (
+				{categories.map((category) => (
 					<Select.OptGroup key={category.name} label={category.name}>
 						{category.units.map((unit) => (
 							<Select.Option key={unit.id} value={unit.id}>
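
The component now takes a source prop so the option list comes from getYAxisCategories(source) rather than the static Y_AXIS_CATEGORIES. A minimal usage sketch, with import paths assumed; the pattern mirrors the test updates that follow:

```tsx
import YAxisUnitSelector from 'components/YAxisUnitSelector/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';

// The selector scopes its unit categories to the calling surface (here, alerts).
function AlertsUnitPicker(): JSX.Element {
	return (
		<YAxisUnitSelector
			value=""
			onChange={(unit): void => console.log(unit)}
			source={YAxisSource.ALERTS}
		/>
	);
}
```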

@@ -1,5 +1,6 @@
 import { fireEvent, render, screen } from '@testing-library/react';

+import { YAxisSource } from '../types';
 import YAxisUnitSelector from '../YAxisUnitSelector';

 describe('YAxisUnitSelector', () => {
@@ -10,7 +11,13 @@ describe('YAxisUnitSelector', () => {
 	});

 	it('renders with default placeholder', () => {
-		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
+		render(
+			<YAxisUnitSelector
+				value=""
+				onChange={mockOnChange}
+				source={YAxisSource.ALERTS}
+			/>,
+		);
 		expect(screen.getByText('Please select a unit')).toBeInTheDocument();
 	});

@@ -20,13 +27,20 @@ describe('YAxisUnitSelector', () => {
 				value=""
 				onChange={mockOnChange}
 				placeholder="Custom placeholder"
+				source={YAxisSource.ALERTS}
 			/>,
 		);
 		expect(screen.queryByText('Custom placeholder')).toBeInTheDocument();
 	});

 	it('calls onChange when a value is selected', () => {
-		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
+		render(
+			<YAxisUnitSelector
+				value=""
+				onChange={mockOnChange}
+				source={YAxisSource.ALERTS}
+			/>,
+		);
 		const select = screen.getByRole('combobox');

 		fireEvent.mouseDown(select);
@@ -41,18 +55,30 @@ describe('YAxisUnitSelector', () => {
 	});

 	it('filters options based on search input', () => {
-		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
+		render(
+			<YAxisUnitSelector
+				value=""
+				onChange={mockOnChange}
+				source={YAxisSource.ALERTS}
+			/>,
+		);
 		const select = screen.getByRole('combobox');

 		fireEvent.mouseDown(select);
 		const input = screen.getByRole('combobox');
-		fireEvent.change(input, { target: { value: 'byte' } });
+		fireEvent.change(input, { target: { value: 'bytes/sec' } });

 		expect(screen.getByText('Bytes/sec')).toBeInTheDocument();
 	});

 	it('shows all categories and their units', () => {
-		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
+		render(
+			<YAxisUnitSelector
+				value=""
+				onChange={mockOnChange}
+				source={YAxisSource.ALERTS}
+			/>,
+		);
 		const select = screen.getByRole('combobox');

 		fireEvent.mouseDown(select);
@@ -0,0 +1,951 @@
|
||||
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
|
||||
|
||||
import {
|
||||
AdditionalLabelsMappingForGrafanaUnits,
|
||||
UniversalUnitToGrafanaUnit,
|
||||
} from '../constants';
|
||||
import { formatUniversalUnit } from '../formatter';
|
||||
|
||||
describe('formatUniversalUnit', () => {
|
||||
describe('Time', () => {
|
||||
test.each([
|
||||
// Days
|
||||
[31, UniversalYAxisUnit.DAYS, '4.43 weeks'],
|
||||
[7, UniversalYAxisUnit.DAYS, '1 week'],
|
||||
[6, UniversalYAxisUnit.DAYS, '6 days'],
|
||||
[1, UniversalYAxisUnit.DAYS, '1 day'],
|
||||
// Hours
|
||||
[25, UniversalYAxisUnit.HOURS, '1.04 days'],
|
||||
[23, UniversalYAxisUnit.HOURS, '23 hour'],
|
||||
[1, UniversalYAxisUnit.HOURS, '1 hour'],
|
||||
// Minutes
|
||||
[61, UniversalYAxisUnit.MINUTES, '1.02 hours'],
|
||||
[60, UniversalYAxisUnit.MINUTES, '1 hour'],
|
||||
[45, UniversalYAxisUnit.MINUTES, '45 min'],
|
||||
[1, UniversalYAxisUnit.MINUTES, '1 min'],
|
||||
// Seconds
|
||||
[100000, UniversalYAxisUnit.SECONDS, '1.16 days'],
|
||||
[10065, UniversalYAxisUnit.SECONDS, '2.8 hours'],
|
||||
[61, UniversalYAxisUnit.SECONDS, '1.02 mins'],
|
||||
[60, UniversalYAxisUnit.SECONDS, '1 min'],
|
||||
[12, UniversalYAxisUnit.SECONDS, '12 s'],
|
||||
[1, UniversalYAxisUnit.SECONDS, '1 s'],
|
||||
// Milliseconds
|
||||
[1006, UniversalYAxisUnit.MILLISECONDS, '1.01 s'],
|
||||
[10000000, UniversalYAxisUnit.MILLISECONDS, '2.78 hours'],
|
||||
[100006, UniversalYAxisUnit.MICROSECONDS, '100 ms'],
|
||||
[1, UniversalYAxisUnit.MICROSECONDS, '1 µs'],
|
||||
[12, UniversalYAxisUnit.MICROSECONDS, '12 µs'],
|
||||
// Nanoseconds
|
||||
[10000000000, UniversalYAxisUnit.NANOSECONDS, '10 s'],
|
||||
[10000006, UniversalYAxisUnit.NANOSECONDS, '10 ms'],
|
||||
[1006, UniversalYAxisUnit.NANOSECONDS, '1.01 µs'],
|
||||
[1, UniversalYAxisUnit.NANOSECONDS, '1 ns'],
|
||||
[12, UniversalYAxisUnit.NANOSECONDS, '12 ns'],
|
||||
])('formats time value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Data', () => {
|
||||
test.each([
|
||||
// Bytes
|
||||
[864, UniversalYAxisUnit.BYTES, '864 B'],
|
||||
[1000, UniversalYAxisUnit.BYTES, '1 kB'],
|
||||
[1020, UniversalYAxisUnit.BYTES, '1.02 kB'],
|
||||
// Kilobytes
|
||||
[512, UniversalYAxisUnit.KILOBYTES, '512 kB'],
|
||||
[1000, UniversalYAxisUnit.KILOBYTES, '1 MB'],
|
||||
[1023, UniversalYAxisUnit.KILOBYTES, '1.02 MB'],
|
||||
// Megabytes
|
||||
[777, UniversalYAxisUnit.MEGABYTES, '777 MB'],
|
||||
[1000, UniversalYAxisUnit.MEGABYTES, '1 GB'],
|
||||
[1023, UniversalYAxisUnit.MEGABYTES, '1.02 GB'],
|
||||
// Gigabytes
|
||||
[432, UniversalYAxisUnit.GIGABYTES, '432 GB'],
|
||||
[1000, UniversalYAxisUnit.GIGABYTES, '1 TB'],
|
||||
[1023, UniversalYAxisUnit.GIGABYTES, '1.02 TB'],
|
||||
// Terabytes
|
||||
[678, UniversalYAxisUnit.TERABYTES, '678 TB'],
|
||||
[1000, UniversalYAxisUnit.TERABYTES, '1 PB'],
|
||||
[1023, UniversalYAxisUnit.TERABYTES, '1.02 PB'],
|
||||
// Petabytes
|
||||
[845, UniversalYAxisUnit.PETABYTES, '845 PB'],
|
||||
[1000, UniversalYAxisUnit.PETABYTES, '1 EB'],
|
||||
[1023, UniversalYAxisUnit.PETABYTES, '1.02 EB'],
|
||||
// Exabytes
|
||||
[921, UniversalYAxisUnit.EXABYTES, '921 EB'],
|
||||
[1000, UniversalYAxisUnit.EXABYTES, '1 ZB'],
|
||||
[1023, UniversalYAxisUnit.EXABYTES, '1.02 ZB'],
|
||||
// Zettabytes
|
||||
[921, UniversalYAxisUnit.ZETTABYTES, '921 ZB'],
|
||||
[1000, UniversalYAxisUnit.ZETTABYTES, '1 YB'],
|
||||
[1023, UniversalYAxisUnit.ZETTABYTES, '1.02 YB'],
|
||||
// Yottabytes
|
||||
[921, UniversalYAxisUnit.YOTTABYTES, '921 YB'],
|
||||
[1000, UniversalYAxisUnit.YOTTABYTES, '1000 YB'],
|
||||
[1023, UniversalYAxisUnit.YOTTABYTES, '1023 YB'],
|
||||
])('formats data value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Data rate', () => {
|
||||
test.each([
|
||||
// Bytes/second
|
||||
[864, UniversalYAxisUnit.BYTES_SECOND, '864 B/s'],
|
||||
[1000, UniversalYAxisUnit.BYTES_SECOND, '1 kB/s'],
|
||||
[1020, UniversalYAxisUnit.BYTES_SECOND, '1.02 kB/s'],
|
||||
// Kilobytes/second
|
||||
[512, UniversalYAxisUnit.KILOBYTES_SECOND, '512 kB/s'],
|
||||
[1000, UniversalYAxisUnit.KILOBYTES_SECOND, '1 MB/s'],
|
||||
[1023, UniversalYAxisUnit.KILOBYTES_SECOND, '1.02 MB/s'],
|
||||
// Megabytes/second
|
||||
[777, UniversalYAxisUnit.MEGABYTES_SECOND, '777 MB/s'],
|
||||
[1000, UniversalYAxisUnit.MEGABYTES_SECOND, '1 GB/s'],
|
||||
[1023, UniversalYAxisUnit.MEGABYTES_SECOND, '1.02 GB/s'],
|
||||
// Gigabytes/second
|
||||
[432, UniversalYAxisUnit.GIGABYTES_SECOND, '432 GB/s'],
|
||||
[1000, UniversalYAxisUnit.GIGABYTES_SECOND, '1 TB/s'],
|
||||
[1023, UniversalYAxisUnit.GIGABYTES_SECOND, '1.02 TB/s'],
|
||||
// Terabytes/second
|
||||
[678, UniversalYAxisUnit.TERABYTES_SECOND, '678 TB/s'],
|
||||
[1000, UniversalYAxisUnit.TERABYTES_SECOND, '1 PB/s'],
|
||||
[1023, UniversalYAxisUnit.TERABYTES_SECOND, '1.02 PB/s'],
|
||||
// Petabytes/second
|
||||
[845, UniversalYAxisUnit.PETABYTES_SECOND, '845 PB/s'],
|
||||
[1000, UniversalYAxisUnit.PETABYTES_SECOND, '1 EB/s'],
|
||||
[1023, UniversalYAxisUnit.PETABYTES_SECOND, '1.02 EB/s'],
|
||||
// Exabytes/second
|
||||
[921, UniversalYAxisUnit.EXABYTES_SECOND, '921 EB/s'],
|
||||
[1000, UniversalYAxisUnit.EXABYTES_SECOND, '1 ZB/s'],
|
||||
[1023, UniversalYAxisUnit.EXABYTES_SECOND, '1.02 ZB/s'],
|
||||
// Zettabytes/second
|
||||
[921, UniversalYAxisUnit.ZETTABYTES_SECOND, '921 ZB/s'],
|
||||
[1000, UniversalYAxisUnit.ZETTABYTES_SECOND, '1 YB/s'],
|
||||
[1023, UniversalYAxisUnit.ZETTABYTES_SECOND, '1.02 YB/s'],
|
||||
// Yottabytes/second
|
||||
[921, UniversalYAxisUnit.YOTTABYTES_SECOND, '921 YB/s'],
|
||||
[1000, UniversalYAxisUnit.YOTTABYTES_SECOND, '1000 YB/s'],
|
||||
[1023, UniversalYAxisUnit.YOTTABYTES_SECOND, '1023 YB/s'],
|
||||
])('formats data value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Bit', () => {
|
||||
test.each([
|
||||
// Bits
|
||||
[1, UniversalYAxisUnit.BITS, '1 b'],
|
||||
[250, UniversalYAxisUnit.BITS, '250 b'],
|
||||
[1000, UniversalYAxisUnit.BITS, '1 kb'],
|
||||
[1023, UniversalYAxisUnit.BITS, '1.02 kb'],
|
||||
// Kilobits
|
||||
[0.5, UniversalYAxisUnit.KILOBITS, '500 b'],
|
||||
[375, UniversalYAxisUnit.KILOBITS, '375 kb'],
|
||||
[1000, UniversalYAxisUnit.KILOBITS, '1 Mb'],
|
||||
[1023, UniversalYAxisUnit.KILOBITS, '1.02 Mb'],
|
||||
// Megabits
|
||||
[0.5, UniversalYAxisUnit.MEGABITS, '500 kb'],
|
||||
[640, UniversalYAxisUnit.MEGABITS, '640 Mb'],
|
||||
[1000, UniversalYAxisUnit.MEGABITS, '1 Gb'],
|
||||
[1023, UniversalYAxisUnit.MEGABITS, '1.02 Gb'],
|
||||
// Gigabits
|
||||
[0.5, UniversalYAxisUnit.GIGABITS, '500 Mb'],
|
||||
[875, UniversalYAxisUnit.GIGABITS, '875 Gb'],
|
||||
[1000, UniversalYAxisUnit.GIGABITS, '1 Tb'],
|
||||
[1023, UniversalYAxisUnit.GIGABITS, '1.02 Tb'],
|
||||
// Terabits
|
||||
[0.5, UniversalYAxisUnit.TERABITS, '500 Gb'],
|
||||
[430, UniversalYAxisUnit.TERABITS, '430 Tb'],
|
||||
[1000, UniversalYAxisUnit.TERABITS, '1 Pb'],
|
||||
[1023, UniversalYAxisUnit.TERABITS, '1.02 Pb'],
|
||||
// Petabits
|
||||
[0.5, UniversalYAxisUnit.PETABITS, '500 Tb'],
|
||||
[590, UniversalYAxisUnit.PETABITS, '590 Pb'],
|
||||
[1000, UniversalYAxisUnit.PETABITS, '1 Eb'],
|
||||
[1023, UniversalYAxisUnit.PETABITS, '1.02 Eb'],
|
||||
// Exabits
|
||||
[0.5, UniversalYAxisUnit.EXABITS, '500 Pb'],
|
||||
[715, UniversalYAxisUnit.EXABITS, '715 Eb'],
|
||||
[1000, UniversalYAxisUnit.EXABITS, '1 Zb'],
|
||||
[1023, UniversalYAxisUnit.EXABITS, '1.02 Zb'],
|
||||
// Zettabits
|
||||
[0.5, UniversalYAxisUnit.ZETTABITS, '500 Eb'],
|
||||
[840, UniversalYAxisUnit.ZETTABITS, '840 Zb'],
|
||||
[1000, UniversalYAxisUnit.ZETTABITS, '1 Yb'],
|
||||
[1023, UniversalYAxisUnit.ZETTABITS, '1.02 Yb'],
|
||||
// Yottabits
|
||||
[0.5, UniversalYAxisUnit.YOTTABITS, '500 Zb'],
|
||||
[965, UniversalYAxisUnit.YOTTABITS, '965 Yb'],
|
||||
[1000, UniversalYAxisUnit.YOTTABITS, '1000 Yb'],
|
||||
[1023, UniversalYAxisUnit.YOTTABITS, '1023 Yb'],
|
||||
])('formats bit value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Bit rate', () => {
|
||||
test.each([
|
||||
// Bits/second
|
||||
[512, UniversalYAxisUnit.BITS_SECOND, '512 b/s'],
|
||||
[1000, UniversalYAxisUnit.BITS_SECOND, '1 kb/s'],
|
||||
[1023, UniversalYAxisUnit.BITS_SECOND, '1.02 kb/s'],
|
||||
// Kilobits/second
|
||||
[0.5, UniversalYAxisUnit.KILOBITS_SECOND, '500 b/s'],
|
||||
[512, UniversalYAxisUnit.KILOBITS_SECOND, '512 kb/s'],
|
||||
[1000, UniversalYAxisUnit.KILOBITS_SECOND, '1 Mb/s'],
|
||||
[1023, UniversalYAxisUnit.KILOBITS_SECOND, '1.02 Mb/s'],
|
||||
// Megabits/second
|
||||
[0.5, UniversalYAxisUnit.MEGABITS_SECOND, '500 kb/s'],
|
||||
[512, UniversalYAxisUnit.MEGABITS_SECOND, '512 Mb/s'],
|
||||
[1000, UniversalYAxisUnit.MEGABITS_SECOND, '1 Gb/s'],
|
||||
[1023, UniversalYAxisUnit.MEGABITS_SECOND, '1.02 Gb/s'],
|
||||
// Gigabits/second
|
||||
[0.5, UniversalYAxisUnit.GIGABITS_SECOND, '500 Mb/s'],
|
||||
[512, UniversalYAxisUnit.GIGABITS_SECOND, '512 Gb/s'],
|
||||
[1000, UniversalYAxisUnit.GIGABITS_SECOND, '1 Tb/s'],
|
||||
[1023, UniversalYAxisUnit.GIGABITS_SECOND, '1.02 Tb/s'],
|
||||
// Terabits/second
|
||||
[0.5, UniversalYAxisUnit.TERABITS_SECOND, '500 Gb/s'],
|
||||
[512, UniversalYAxisUnit.TERABITS_SECOND, '512 Tb/s'],
|
||||
[1000, UniversalYAxisUnit.TERABITS_SECOND, '1 Pb/s'],
|
||||
[1023, UniversalYAxisUnit.TERABITS_SECOND, '1.02 Pb/s'],
|
||||
// Petabits/second
|
||||
[0.5, UniversalYAxisUnit.PETABITS_SECOND, '500 Tb/s'],
|
||||
[512, UniversalYAxisUnit.PETABITS_SECOND, '512 Pb/s'],
|
||||
[1000, UniversalYAxisUnit.PETABITS_SECOND, '1 Eb/s'],
|
||||
[1023, UniversalYAxisUnit.PETABITS_SECOND, '1.02 Eb/s'],
|
||||
// Exabits/second
|
||||
[512, UniversalYAxisUnit.EXABITS_SECOND, '512 Eb/s'],
|
||||
[1000, UniversalYAxisUnit.EXABITS_SECOND, '1 Zb/s'],
|
||||
[1023, UniversalYAxisUnit.EXABITS_SECOND, '1.02 Zb/s'],
|
||||
// Zettabits/second
|
||||
[0.5, UniversalYAxisUnit.ZETTABITS_SECOND, '500 Eb/s'],
|
||||
[512, UniversalYAxisUnit.ZETTABITS_SECOND, '512 Zb/s'],
|
||||
[1000, UniversalYAxisUnit.ZETTABITS_SECOND, '1 Yb/s'],
|
||||
[1023, UniversalYAxisUnit.ZETTABITS_SECOND, '1.02 Yb/s'],
|
||||
// Yottabits/second
|
||||
[0.5, UniversalYAxisUnit.YOTTABITS_SECOND, '500 Zb/s'],
|
||||
[512, UniversalYAxisUnit.YOTTABITS_SECOND, '512 Yb/s'],
|
||||
[1000, UniversalYAxisUnit.YOTTABITS_SECOND, '1000 Yb/s'],
|
||||
[1023, UniversalYAxisUnit.YOTTABITS_SECOND, '1023 Yb/s'],
|
||||
])('formats bit rate value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Count', () => {
|
||||
test.each([
|
||||
[100, UniversalYAxisUnit.COUNT, '100'],
|
||||
[875, UniversalYAxisUnit.COUNT, '875'],
|
||||
[1000, UniversalYAxisUnit.COUNT, '1 K'],
|
||||
[2500, UniversalYAxisUnit.COUNT, '2.5 K'],
|
||||
[10000, UniversalYAxisUnit.COUNT, '10 K'],
|
||||
[25000, UniversalYAxisUnit.COUNT, '25 K'],
|
||||
[100000, UniversalYAxisUnit.COUNT, '100 K'],
|
||||
[1000000, UniversalYAxisUnit.COUNT, '1 Mil'],
|
||||
[10000000, UniversalYAxisUnit.COUNT, '10 Mil'],
|
||||
[100000000, UniversalYAxisUnit.COUNT, '100 Mil'],
|
||||
[1000000000, UniversalYAxisUnit.COUNT, '1 Bil'],
|
||||
[10000000000, UniversalYAxisUnit.COUNT, '10 Bil'],
|
||||
[100000000000, UniversalYAxisUnit.COUNT, '100 Bil'],
|
||||
[1000000000000, UniversalYAxisUnit.COUNT, '1 Tri'],
|
||||
[10000000000000, UniversalYAxisUnit.COUNT, '10 Tri'],
|
||||
])('formats count value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
|
||||
test.each([
|
||||
[100, UniversalYAxisUnit.COUNT_SECOND, '100 c/s'],
|
||||
[875, UniversalYAxisUnit.COUNT_SECOND, '875 c/s'],
|
||||
[1000, UniversalYAxisUnit.COUNT_SECOND, '1K c/s'],
|
||||
[2500, UniversalYAxisUnit.COUNT_SECOND, '2.5K c/s'],
|
||||
[10000, UniversalYAxisUnit.COUNT_SECOND, '10K c/s'],
|
||||
[25000, UniversalYAxisUnit.COUNT_SECOND, '25K c/s'],
|
||||
])('formats count per time value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
|
||||
test.each([
|
||||
[100, UniversalYAxisUnit.COUNT_MINUTE, '100 c/m'],
|
||||
[875, UniversalYAxisUnit.COUNT_MINUTE, '875 c/m'],
|
||||
[1000, UniversalYAxisUnit.COUNT_MINUTE, '1K c/m'],
|
||||
[2500, UniversalYAxisUnit.COUNT_MINUTE, '2.5K c/m'],
|
||||
[10000, UniversalYAxisUnit.COUNT_MINUTE, '10K c/m'],
|
||||
[25000, UniversalYAxisUnit.COUNT_MINUTE, '25K c/m'],
|
||||
])('formats count per time value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Operations units', () => {
|
||||
test.each([
|
||||
[780, UniversalYAxisUnit.OPS_SECOND, '780 ops/s'],
|
||||
[1000, UniversalYAxisUnit.OPS_SECOND, '1K ops/s'],
|
||||
[520, UniversalYAxisUnit.OPS_MINUTE, '520 ops/m'],
|
||||
[1000, UniversalYAxisUnit.OPS_MINUTE, '1K ops/m'],
|
||||
[2500, UniversalYAxisUnit.OPS_MINUTE, '2.5K ops/m'],
|
||||
[10000, UniversalYAxisUnit.OPS_MINUTE, '10K ops/m'],
|
||||
[25000, UniversalYAxisUnit.OPS_MINUTE, '25K ops/m'],
|
||||
])(
|
||||
'formats operations per time value %s %s as %s',
|
||||
(value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
describe('Request units', () => {
|
||||
test.each([
|
||||
[615, UniversalYAxisUnit.REQUESTS_SECOND, '615 req/s'],
|
||||
[1000, UniversalYAxisUnit.REQUESTS_SECOND, '1K req/s'],
|
||||
[480, UniversalYAxisUnit.REQUESTS_MINUTE, '480 req/m'],
|
||||
[1000, UniversalYAxisUnit.REQUESTS_MINUTE, '1K req/m'],
|
||||
[2500, UniversalYAxisUnit.REQUESTS_MINUTE, '2.5K req/m'],
|
||||
[10000, UniversalYAxisUnit.REQUESTS_MINUTE, '10K req/m'],
|
||||
[25000, UniversalYAxisUnit.REQUESTS_MINUTE, '25K req/m'],
|
||||
])('formats requests per time value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Read/Write units', () => {
|
||||
test.each([
|
||||
[505, UniversalYAxisUnit.READS_SECOND, '505 rd/s'],
|
||||
[1000, UniversalYAxisUnit.READS_SECOND, '1K rd/s'],
|
||||
[610, UniversalYAxisUnit.WRITES_SECOND, '610 wr/s'],
|
||||
[1000, UniversalYAxisUnit.WRITES_SECOND, '1K wr/s'],
|
||||
[715, UniversalYAxisUnit.READS_MINUTE, '715 rd/m'],
|
||||
[1000, UniversalYAxisUnit.READS_MINUTE, '1K rd/m'],
|
||||
[2500, UniversalYAxisUnit.READS_MINUTE, '2.5K rd/m'],
|
||||
[10000, UniversalYAxisUnit.READS_MINUTE, '10K rd/m'],
|
||||
[25000, UniversalYAxisUnit.READS_MINUTE, '25K rd/m'],
|
||||
[830, UniversalYAxisUnit.WRITES_MINUTE, '830 wr/m'],
|
||||
[1000, UniversalYAxisUnit.WRITES_MINUTE, '1K wr/m'],
|
||||
[2500, UniversalYAxisUnit.WRITES_MINUTE, '2.5K wr/m'],
|
||||
[10000, UniversalYAxisUnit.WRITES_MINUTE, '10K wr/m'],
|
||||
[25000, UniversalYAxisUnit.WRITES_MINUTE, '25K wr/m'],
|
||||
])(
|
||||
'formats reads and writes per time value %s %s as %s',
|
||||
(value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
describe('IO Operations units', () => {
|
||||
test.each([
|
||||
[777, UniversalYAxisUnit.IOOPS_SECOND, '777 io/s'],
|
||||
[1000, UniversalYAxisUnit.IOOPS_SECOND, '1K io/s'],
|
||||
[2500, UniversalYAxisUnit.IOOPS_SECOND, '2.5K io/s'],
|
||||
[10000, UniversalYAxisUnit.IOOPS_SECOND, '10K io/s'],
|
||||
[25000, UniversalYAxisUnit.IOOPS_SECOND, '25K io/s'],
|
||||
])('formats IOPS value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Percent units', () => {
|
||||
it('formats percent as-is', () => {
|
||||
expect(formatUniversalUnit(456, UniversalYAxisUnit.PERCENT)).toBe('456%');
|
||||
});
|
||||
|
||||
it('multiplies percent_unit by 100', () => {
|
||||
expect(formatUniversalUnit(9, UniversalYAxisUnit.PERCENT_UNIT)).toBe('900%');
|
||||
});
|
||||
});
|
||||
|
||||
describe('None unit', () => {
|
||||
it('formats as plain number', () => {
|
||||
expect(formatUniversalUnit(742, UniversalYAxisUnit.NONE)).toBe('742');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Time (additional)', () => {
|
||||
test.each([
|
||||
[900, UniversalYAxisUnit.DURATION_MS, '900 milliseconds'],
|
||||
[1000, UniversalYAxisUnit.DURATION_MS, '1 second'],
|
||||
[1, UniversalYAxisUnit.DURATION_MS, '1 millisecond'],
|
||||
[900, UniversalYAxisUnit.DURATION_S, '15 minutes'],
|
||||
[1, UniversalYAxisUnit.DURATION_HMS, '00:00:01'],
|
||||
[90005, UniversalYAxisUnit.DURATION_HMS, '25:00:05'],
|
||||
[90005, UniversalYAxisUnit.DURATION_DHMS, '1 d 01:00:05'],
|
||||
[900, UniversalYAxisUnit.TIMETICKS, '9 s'],
|
||||
[1, UniversalYAxisUnit.TIMETICKS, '10 ms'],
|
||||
[900, UniversalYAxisUnit.CLOCK_MS, '900ms'],
|
||||
[1, UniversalYAxisUnit.CLOCK_MS, '001ms'],
|
||||
[1, UniversalYAxisUnit.CLOCK_S, '01s:000ms'],
|
||||
[900, UniversalYAxisUnit.CLOCK_S, '15m:00s:000ms'],
|
||||
[900, UniversalYAxisUnit.TIME_HERTZ, '900 Hz'],
|
||||
[1000, UniversalYAxisUnit.TIME_HERTZ, '1 kHz'],
|
||||
[1000000, UniversalYAxisUnit.TIME_HERTZ, '1 MHz'],
|
||||
[1000000000, UniversalYAxisUnit.TIME_HERTZ, '1 GHz'],
|
||||
[1008, UniversalYAxisUnit.TIME_HERTZ, '1.01 kHz'],
|
||||
])('formats duration value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Data (IEC/Binary)', () => {
|
||||
test.each([
|
||||
// Bytes
|
||||
[900, UniversalYAxisUnit.BYTES_IEC, '900 B'],
|
||||
[1024, UniversalYAxisUnit.BYTES_IEC, '1 KiB'],
|
||||
[1080, UniversalYAxisUnit.BYTES_IEC, '1.05 KiB'],
|
||||
// Kibibytes
|
||||
[900, UniversalYAxisUnit.KIBIBYTES, '900 KiB'],
|
||||
[1024, UniversalYAxisUnit.KIBIBYTES, '1 MiB'],
|
||||
[1080, UniversalYAxisUnit.KIBIBYTES, '1.05 MiB'],
|
||||
// Mebibytes
|
||||
[900, UniversalYAxisUnit.MEBIBYTES, '900 MiB'],
|
||||
[1024, UniversalYAxisUnit.MEBIBYTES, '1 GiB'],
|
||||
[1080, UniversalYAxisUnit.MEBIBYTES, '1.05 GiB'],
|
||||
// Gibibytes
|
||||
[900, UniversalYAxisUnit.GIBIBYTES, '900 GiB'],
|
||||
[1024, UniversalYAxisUnit.GIBIBYTES, '1 TiB'],
|
||||
[1080, UniversalYAxisUnit.GIBIBYTES, '1.05 TiB'],
|
||||
// Tebibytes
|
||||
[900, UniversalYAxisUnit.TEBIBYTES, '900 TiB'],
|
||||
[1024, UniversalYAxisUnit.TEBIBYTES, '1 PiB'],
|
||||
[1080, UniversalYAxisUnit.TEBIBYTES, '1.05 PiB'],
|
||||
// Pebibytes
|
||||
[900, UniversalYAxisUnit.PEBIBYTES, '900 PiB'],
|
||||
[1024, UniversalYAxisUnit.PEBIBYTES, '1 EiB'],
|
||||
[1080, UniversalYAxisUnit.PEBIBYTES, '1.05 EiB'],
|
||||
// Exbibytes
|
||||
[900, UniversalYAxisUnit.EXBIBYTES, '900 EiB'],
|
||||
[1024, UniversalYAxisUnit.EXBIBYTES, '1 ZiB'],
|
||||
[1080, UniversalYAxisUnit.EXBIBYTES, '1.05 ZiB'],
|
||||
// Zebibytes
|
||||
[900, UniversalYAxisUnit.ZEBIBYTES, '900 ZiB'],
|
||||
[1024, UniversalYAxisUnit.ZEBIBYTES, '1 YiB'],
|
||||
[1080, UniversalYAxisUnit.ZEBIBYTES, '1.05 YiB'],
|
||||
// Yobibytes
|
||||
[900, UniversalYAxisUnit.YOBIBYTES, '900 YiB'],
|
||||
[1024, UniversalYAxisUnit.YOBIBYTES, '1024 YiB'],
|
||||
])('formats IEC bytes value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Data Rate (IEC/Binary)', () => {
|
||||
test.each([
|
||||
// Kibibytes/second
|
||||
[900, UniversalYAxisUnit.KIBIBYTES_SECOND, '900 KiB/s'],
|
||||
[1024, UniversalYAxisUnit.KIBIBYTES_SECOND, '1 MiB/s'],
|
||||
[1080, UniversalYAxisUnit.KIBIBYTES_SECOND, '1.05 MiB/s'],
|
||||
// Mebibytes/second
|
||||
[900, UniversalYAxisUnit.MEBIBYTES_SECOND, '900 MiB/s'],
|
||||
[1024, UniversalYAxisUnit.MEBIBYTES_SECOND, '1 GiB/s'],
|
||||
[1080, UniversalYAxisUnit.MEBIBYTES_SECOND, '1.05 GiB/s'],
|
||||
// Gibibytes/second
|
||||
[900, UniversalYAxisUnit.GIBIBYTES_SECOND, '900 GiB/s'],
|
||||
[1024, UniversalYAxisUnit.GIBIBYTES_SECOND, '1 TiB/s'],
|
||||
[1080, UniversalYAxisUnit.GIBIBYTES_SECOND, '1.05 TiB/s'],
|
||||
// Tebibytes/second
|
||||
[900, UniversalYAxisUnit.TEBIBYTES_SECOND, '900 TiB/s'],
|
||||
[1024, UniversalYAxisUnit.TEBIBYTES_SECOND, '1 PiB/s'],
|
||||
[1080, UniversalYAxisUnit.TEBIBYTES_SECOND, '1.05 PiB/s'],
|
||||
// Pebibytes/second
|
||||
[900, UniversalYAxisUnit.PEBIBYTES_SECOND, '900 PiB/s'],
|
||||
[1024, UniversalYAxisUnit.PEBIBYTES_SECOND, '1 EiB/s'],
|
||||
[1080, UniversalYAxisUnit.PEBIBYTES_SECOND, '1.05 EiB/s'],
|
||||
// Exbibytes/second
|
||||
[900, UniversalYAxisUnit.EXBIBYTES_SECOND, '900 EiB/s'],
|
||||
[1024, UniversalYAxisUnit.EXBIBYTES_SECOND, '1 ZiB/s'],
|
||||
[1080, UniversalYAxisUnit.EXBIBYTES_SECOND, '1.05 ZiB/s'],
|
||||
// Zebibytes/second
|
||||
[900, UniversalYAxisUnit.ZEBIBYTES_SECOND, '900 ZiB/s'],
|
||||
[1024, UniversalYAxisUnit.ZEBIBYTES_SECOND, '1 YiB/s'],
|
||||
[1080, UniversalYAxisUnit.ZEBIBYTES_SECOND, '1.05 YiB/s'],
|
||||
// Yobibytes/second
|
||||
[900, UniversalYAxisUnit.YOBIBYTES_SECOND, '900 YiB/s'],
|
||||
[1024, UniversalYAxisUnit.YOBIBYTES_SECOND, '1024 YiB/s'],
|
||||
[1080, UniversalYAxisUnit.YOBIBYTES_SECOND, '1080 YiB/s'],
|
||||
// Packets/second
|
||||
[900, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '900 p/s'],
|
||||
[1000, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '1 kp/s'],
|
||||
[1080, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '1.08 kp/s'],
|
||||
])('formats IEC byte rates value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Bits (IEC)', () => {
|
||||
test.each([
|
||||
[900, UniversalYAxisUnit.BITS_IEC, '900 b'],
|
||||
[1024, UniversalYAxisUnit.BITS_IEC, '1 Kib'],
|
||||
[1080, UniversalYAxisUnit.BITS_IEC, '1.05 Kib'],
|
||||
])('formats IEC bits value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Hash Rate', () => {
|
||||
test.each([
|
||||
// Hashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '412 H/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '1 kH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '1.02 kH/s'],
|
||||
// Kilohashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '412 kH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '1 MH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '1.02 MH/s'],
|
||||
// Megahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '412 MH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '1 GH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '1.02 GH/s'],
|
||||
// Gigahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '412 GH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '1 TH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '1.02 TH/s'],
|
||||
// Terahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '412 TH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '1 PH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '1.02 PH/s'],
|
||||
// Petahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '412 PH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '1 EH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '1.02 EH/s'],
|
||||
// Exahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '412 EH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '1 ZH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '1.02 ZH/s'],
|
||||
])('formats hash rate value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Miscellaneous', () => {
|
||||
test.each([
|
||||
[742, UniversalYAxisUnit.MISC_STRING, '742'],
|
||||
[688, UniversalYAxisUnit.MISC_SHORT, '688'],
|
||||
[555, UniversalYAxisUnit.MISC_HUMIDITY, '555 %H'],
|
||||
[812, UniversalYAxisUnit.MISC_DECIBEL, '812 dB'],
|
||||
[1024, UniversalYAxisUnit.MISC_HEXADECIMAL, '400'],
|
||||
[1024, UniversalYAxisUnit.MISC_HEXADECIMAL_0X, '0x400'],
|
||||
[900, UniversalYAxisUnit.MISC_SCIENTIFIC_NOTATION, '9e+2'],
|
||||
[678, UniversalYAxisUnit.MISC_LOCALE_FORMAT, '678'],
|
||||
[444, UniversalYAxisUnit.MISC_PIXELS, '444 px'],
|
||||
])('formats miscellaneous value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Acceleration', () => {
|
||||
test.each([
|
||||
[
|
||||
875,
|
||||
UniversalYAxisUnit.ACCELERATION_METERS_PER_SECOND_SQUARED,
|
||||
'875 m/sec²',
|
||||
],
|
||||
[640, UniversalYAxisUnit.ACCELERATION_FEET_PER_SECOND_SQUARED, '640 f/sec²'],
|
||||
[512, UniversalYAxisUnit.ACCELERATION_G_UNIT, '512 g'],
|
||||
[
|
||||
2500,
|
||||
UniversalYAxisUnit.ACCELERATION_METERS_PER_SECOND_SQUARED,
|
||||
'2500 m/sec²',
|
||||
],
|
||||
])('formats acceleration value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Angular', () => {
|
||||
test.each([
|
||||
[415, UniversalYAxisUnit.ANGULAR_DEGREE, '415 °'],
|
||||
[732, UniversalYAxisUnit.ANGULAR_RADIAN, '732 rad'],
|
||||
[128, UniversalYAxisUnit.ANGULAR_GRADIAN, '128 grad'],
|
||||
[560, UniversalYAxisUnit.ANGULAR_ARC_MINUTE, '560 arcmin'],
|
||||
[945, UniversalYAxisUnit.ANGULAR_ARC_SECOND, '945 arcsec'],
|
||||
])('formats angular value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Area', () => {
|
||||
test.each([
|
||||
[210, UniversalYAxisUnit.AREA_SQUARE_METERS, '210 m²'],
|
||||
[152, UniversalYAxisUnit.AREA_SQUARE_FEET, '152 ft²'],
|
||||
[64, UniversalYAxisUnit.AREA_SQUARE_MILES, '64 mi²'],
|
||||
])('formats area value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('FLOPs', () => {
|
||||
test.each([
|
||||
// FLOPS
|
||||
[150, UniversalYAxisUnit.FLOPS_FLOPS, '150 FLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_FLOPS, '1 kFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_FLOPS, '1.08 kFLOPS'],
|
||||
// MFLOPS
|
||||
[275, UniversalYAxisUnit.FLOPS_MFLOPS, '275 MFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_MFLOPS, '1 GFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_MFLOPS, '1.08 GFLOPS'],
|
||||
// GFLOPS
|
||||
[640, UniversalYAxisUnit.FLOPS_GFLOPS, '640 GFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_GFLOPS, '1 TFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_GFLOPS, '1.08 TFLOPS'],
|
||||
// TFLOPS
|
||||
[875, UniversalYAxisUnit.FLOPS_TFLOPS, '875 TFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_TFLOPS, '1 PFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_TFLOPS, '1.08 PFLOPS'],
|
||||
// PFLOPS
|
||||
[430, UniversalYAxisUnit.FLOPS_PFLOPS, '430 PFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_PFLOPS, '1 EFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_PFLOPS, '1.08 EFLOPS'],
|
||||
// EFLOPS
|
||||
[590, UniversalYAxisUnit.FLOPS_EFLOPS, '590 EFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_EFLOPS, '1 ZFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_EFLOPS, '1.08 ZFLOPS'],
|
||||
// ZFLOPS
|
||||
[715, UniversalYAxisUnit.FLOPS_ZFLOPS, '715 ZFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_ZFLOPS, '1 YFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_ZFLOPS, '1.08 YFLOPS'],
|
||||
// YFLOPS
|
||||
[840, UniversalYAxisUnit.FLOPS_YFLOPS, '840 YFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_YFLOPS, '1000 YFLOPS'],
|
||||
])('formats FLOPs value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Concentration', () => {
|
||||
test.each([
|
||||
[415, UniversalYAxisUnit.CONCENTRATION_PPM, '415 ppm'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_PPM, '1000 ppm'],
|
||||
[732, UniversalYAxisUnit.CONCENTRATION_PPB, '732 ppb'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_PPB, '1000 ppb'],
|
||||
[128, UniversalYAxisUnit.CONCENTRATION_NG_M3, '128 ng/m³'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_NG_M3, '1000 ng/m³'],
|
||||
[560, UniversalYAxisUnit.CONCENTRATION_NG_NORMAL_CUBIC_METER, '560 ng/Nm³'],
|
||||
[
|
||||
1000,
|
||||
UniversalYAxisUnit.CONCENTRATION_NG_NORMAL_CUBIC_METER,
|
||||
'1000 ng/Nm³',
|
||||
],
|
||||
[945, UniversalYAxisUnit.CONCENTRATION_UG_M3, '945 μg/m³'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_UG_M3, '1000 μg/m³'],
|
||||
[210, UniversalYAxisUnit.CONCENTRATION_UG_NORMAL_CUBIC_METER, '210 μg/Nm³'],
|
||||
[
|
||||
1000,
|
||||
UniversalYAxisUnit.CONCENTRATION_UG_NORMAL_CUBIC_METER,
|
||||
'1000 μg/Nm³',
|
||||
],
|
||||
[152, UniversalYAxisUnit.CONCENTRATION_MG_M3, '152 mg/m³'],
|
||||
[64, UniversalYAxisUnit.CONCENTRATION_MG_NORMAL_CUBIC_METER, '64 mg/Nm³'],
|
||||
[508, UniversalYAxisUnit.CONCENTRATION_G_M3, '508 g/m³'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_G_M3, '1000 g/m³'],
|
||||
[377, UniversalYAxisUnit.CONCENTRATION_G_NORMAL_CUBIC_METER, '377 g/Nm³'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_G_NORMAL_CUBIC_METER, '1000 g/Nm³'],
|
||||
[286, UniversalYAxisUnit.CONCENTRATION_MG_PER_DL, '286 mg/dL'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_MG_PER_DL, '1000 mg/dL'],
|
||||
[675, UniversalYAxisUnit.CONCENTRATION_MMOL_PER_L, '675 mmol/L'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_MMOL_PER_L, '1000 mmol/L'],
|
||||
])('formats concentration value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Currency', () => {
|
||||
test.each([
|
||||
[812, UniversalYAxisUnit.CURRENCY_USD, '$812'],
|
||||
[645, UniversalYAxisUnit.CURRENCY_GBP, '£645'],
|
||||
[731, UniversalYAxisUnit.CURRENCY_EUR, '€731'],
|
||||
[508, UniversalYAxisUnit.CURRENCY_JPY, '¥508'],
|
||||
[963, UniversalYAxisUnit.CURRENCY_RUB, '₽963'],
|
||||
[447, UniversalYAxisUnit.CURRENCY_UAH, '₴447'],
|
||||
[592, UniversalYAxisUnit.CURRENCY_BRL, 'R$592'],
|
||||
[375, UniversalYAxisUnit.CURRENCY_DKK, '375kr'],
|
||||
[418, UniversalYAxisUnit.CURRENCY_ISK, '418kr'],
|
||||
[536, UniversalYAxisUnit.CURRENCY_NOK, '536kr'],
|
||||
[689, UniversalYAxisUnit.CURRENCY_SEK, '689kr'],
|
||||
[724, UniversalYAxisUnit.CURRENCY_CZK, 'czk724'],
|
||||
[381, UniversalYAxisUnit.CURRENCY_CHF, 'CHF381'],
|
||||
[267, UniversalYAxisUnit.CURRENCY_PLN, 'PLN267'],
|
||||
[154, UniversalYAxisUnit.CURRENCY_BTC, '฿154'],
|
||||
[999, UniversalYAxisUnit.CURRENCY_MBTC, 'mBTC999'],
|
||||
[423, UniversalYAxisUnit.CURRENCY_UBTC, 'μBTC423'],
|
||||
[611, UniversalYAxisUnit.CURRENCY_ZAR, 'R611'],
|
||||
[782, UniversalYAxisUnit.CURRENCY_INR, '₹782'],
|
||||
[834, UniversalYAxisUnit.CURRENCY_KRW, '₩834'],
|
||||
[455, UniversalYAxisUnit.CURRENCY_IDR, 'Rp455'],
|
||||
[978, UniversalYAxisUnit.CURRENCY_PHP, 'PHP978'],
|
||||
[366, UniversalYAxisUnit.CURRENCY_VND, '366đ'],
|
||||
])('formats currency value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Datetime', () => {
|
||||
it('formats datetime units', () => {
|
||||
expect(formatUniversalUnit(900, UniversalYAxisUnit.DATETIME_FROM_NOW)).toBe(
|
||||
'56 years ago',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Power/Electrical', () => {
|
||||
test.each([
|
||||
[715, UniversalYAxisUnit.POWER_WATT, '715 W'],
|
||||
[1000, UniversalYAxisUnit.POWER_WATT, '1 kW'],
|
||||
[1080, UniversalYAxisUnit.POWER_WATT, '1.08 kW'],
|
||||
[438, UniversalYAxisUnit.POWER_KILOWATT, '438 kW'],
|
||||
[1000, UniversalYAxisUnit.POWER_KILOWATT, '1 MW'],
|
||||
[1080, UniversalYAxisUnit.POWER_KILOWATT, '1.08 MW'],
|
||||
[582, UniversalYAxisUnit.POWER_MEGAWATT, '582 MW'],
|
||||
[1000, UniversalYAxisUnit.POWER_MEGAWATT, '1 GW'],
|
||||
[1080, UniversalYAxisUnit.POWER_MEGAWATT, '1.08 GW'],
|
||||
[267, UniversalYAxisUnit.POWER_GIGAWATT, '267 GW'],
|
||||
[853, UniversalYAxisUnit.POWER_MILLIWATT, '853 mW'],
|
||||
[693, UniversalYAxisUnit.POWER_WATT_PER_SQUARE_METER, '693 W/m²'],
|
||||
[544, UniversalYAxisUnit.POWER_VOLT_AMPERE, '544 VA'],
|
||||
[812, UniversalYAxisUnit.POWER_KILOVOLT_AMPERE, '812 kVA'],
|
||||
[478, UniversalYAxisUnit.POWER_VOLT_AMPERE_REACTIVE, '478 VAr'],
|
||||
[365, UniversalYAxisUnit.POWER_KILOVOLT_AMPERE_REACTIVE, '365 kVAr'],
|
||||
[629, UniversalYAxisUnit.POWER_WATT_HOUR, '629 Wh'],
|
||||
[471, UniversalYAxisUnit.POWER_WATT_HOUR_PER_KG, '471 Wh/kg'],
|
||||
[557, UniversalYAxisUnit.POWER_KILOWATT_HOUR, '557 kWh'],
|
||||
[389, UniversalYAxisUnit.POWER_KILOWATT_MINUTE, '389 kW-Min'],
|
||||
[642, UniversalYAxisUnit.POWER_AMPERE_HOUR, '642 Ah'],
|
||||
[731, UniversalYAxisUnit.POWER_KILOAMPERE_HOUR, '731 kAh'],
|
||||
[815, UniversalYAxisUnit.POWER_MILLIAMPERE_HOUR, '815 mAh'],
|
||||
[963, UniversalYAxisUnit.POWER_JOULE, '963 J'],
|
||||
[506, UniversalYAxisUnit.POWER_ELECTRON_VOLT, '506 eV'],
|
||||
[298, UniversalYAxisUnit.POWER_AMPERE, '298 A'],
|
||||
[654, UniversalYAxisUnit.POWER_KILOAMPERE, '654 kA'],
|
||||
[187, UniversalYAxisUnit.POWER_MILLIAMPERE, '187 mA'],
|
||||
[472, UniversalYAxisUnit.POWER_VOLT, '472 V'],
|
||||
[538, UniversalYAxisUnit.POWER_KILOVOLT, '538 kV'],
|
||||
[226, UniversalYAxisUnit.POWER_MILLIVOLT, '226 mV'],
|
||||
[592, UniversalYAxisUnit.POWER_DECIBEL_MILLIWATT, '592 dBm'],
|
||||
[333, UniversalYAxisUnit.POWER_OHM, '333 Ω'],
|
||||
[447, UniversalYAxisUnit.POWER_KILOOHM, '447 kΩ'],
|
||||
[781, UniversalYAxisUnit.POWER_MEGAOHM, '781 MΩ'],
|
||||
[650, UniversalYAxisUnit.POWER_FARAD, '650 F'],
|
||||
[512, UniversalYAxisUnit.POWER_MICROFARAD, '512 µF'],
|
||||
[478, UniversalYAxisUnit.POWER_NANOFARAD, '478 nF'],
|
||||
[341, UniversalYAxisUnit.POWER_PICOFARAD, '341 pF'],
|
||||
[129, UniversalYAxisUnit.POWER_FEMTOFARAD, '129 fF'],
|
||||
[904, UniversalYAxisUnit.POWER_HENRY, '904 H'],
|
||||
[1000, UniversalYAxisUnit.POWER_HENRY, '1 kH'],
|
||||
[275, UniversalYAxisUnit.POWER_MILLIHENRY, '275 mH'],
|
||||
[618, UniversalYAxisUnit.POWER_MICROHENRY, '618 µH'],
|
||||
[1000, UniversalYAxisUnit.POWER_MICROHENRY, '1 mH'],
|
||||
[1080, UniversalYAxisUnit.POWER_MICROHENRY, '1.08 mH'],
|
||||
[459, UniversalYAxisUnit.POWER_LUMENS, '459 Lm'],
|
||||
[1000, UniversalYAxisUnit.POWER_LUMENS, '1 kLm'],
|
||||
[1080, UniversalYAxisUnit.POWER_LUMENS, '1.08 kLm'],
|
||||
])('formats power value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Flow', () => {
|
||||
test.each([
|
||||
[512, UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE, '512 gpm'],
|
||||
[1000, UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE, '1000 gpm'],
|
||||
[678, UniversalYAxisUnit.FLOW_CUBIC_METERS_PER_SECOND, '678 cms'],
|
||||
[1000, UniversalYAxisUnit.FLOW_CUBIC_METERS_PER_SECOND, '1000 cms'],
|
||||
[245, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_SECOND, '245 cfs'],
|
||||
[389, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_MINUTE, '389 cfm'],
|
||||
[1000, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_MINUTE, '1000 cfm'],
|
||||
[731, UniversalYAxisUnit.FLOW_LITERS_PER_HOUR, '731 L/h'],
|
||||
[1000, UniversalYAxisUnit.FLOW_LITERS_PER_HOUR, '1000 L/h'],
|
||||
[864, UniversalYAxisUnit.FLOW_LITERS_PER_MINUTE, '864 L/min'],
|
||||
[1000, UniversalYAxisUnit.FLOW_LITERS_PER_MINUTE, '1000 L/min'],
|
||||
[150, UniversalYAxisUnit.FLOW_MILLILITERS_PER_MINUTE, '150 mL/min'],
|
||||
[1000, UniversalYAxisUnit.FLOW_MILLILITERS_PER_MINUTE, '1000 mL/min'],
|
||||
[947, UniversalYAxisUnit.FLOW_LUX, '947 lux'],
|
||||
[1000, UniversalYAxisUnit.FLOW_LUX, '1000 lux'],
|
||||
])('formats flow value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Force', () => {
|
||||
test.each([
|
||||
[845, UniversalYAxisUnit.FORCE_NEWTON_METERS, '845 Nm'],
|
||||
[1000, UniversalYAxisUnit.FORCE_NEWTON_METERS, '1 kNm'],
|
||||
[1080, UniversalYAxisUnit.FORCE_NEWTON_METERS, '1.08 kNm'],
|
||||
[268, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '268 kNm'],
|
||||
[1000, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '1 MNm'],
|
||||
[1080, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '1.08 MNm'],
|
||||
[593, UniversalYAxisUnit.FORCE_NEWTONS, '593 N'],
|
||||
[1000, UniversalYAxisUnit.FORCE_KILONEWTONS, '1 MN'],
|
||||
[1080, UniversalYAxisUnit.FORCE_KILONEWTONS, '1.08 MN'],
|
||||
])('formats force value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Mass', () => {
|
||||
test.each([
|
||||
[120, UniversalYAxisUnit.MASS_MILLIGRAM, '120 mg'],
|
||||
[120000, UniversalYAxisUnit.MASS_MILLIGRAM, '120 g'],
|
||||
[987, UniversalYAxisUnit.MASS_GRAM, '987 g'],
|
||||
[1020, UniversalYAxisUnit.MASS_GRAM, '1.02 kg'],
|
||||
[456, UniversalYAxisUnit.MASS_POUND, '456 lb'],
|
||||
[321, UniversalYAxisUnit.MASS_KILOGRAM, '321 kg'],
|
||||
[654, UniversalYAxisUnit.MASS_METRIC_TON, '654 t'],
|
||||
])('formats mass value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Length', () => {
|
||||
test.each([
|
||||
[88, UniversalYAxisUnit.LENGTH_MILLIMETER, '88 mm'],
|
||||
[100, UniversalYAxisUnit.LENGTH_MILLIMETER, '100 mm'],
|
||||
[1000, UniversalYAxisUnit.LENGTH_MILLIMETER, '1 m'],
|
||||
[177, UniversalYAxisUnit.LENGTH_INCH, '177 in'],
|
||||
[266, UniversalYAxisUnit.LENGTH_FOOT, '266 ft'],
|
||||
[355, UniversalYAxisUnit.LENGTH_METER, '355 m'],
|
||||
[355000, UniversalYAxisUnit.LENGTH_METER, '355 km'],
|
||||
[444, UniversalYAxisUnit.LENGTH_KILOMETER, '444 km'],
|
||||
[533, UniversalYAxisUnit.LENGTH_MILE, '533 mi'],
|
||||
])('formats length value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pressure', () => {
|
||||
test.each([
|
||||
[45, UniversalYAxisUnit.PRESSURE_MILLIBAR, '45 mbar'],
|
||||
[1013, UniversalYAxisUnit.PRESSURE_MILLIBAR, '1.01 bar'],
|
||||
[27, UniversalYAxisUnit.PRESSURE_BAR, '27 bar'],
|
||||
[62, UniversalYAxisUnit.PRESSURE_KILOBAR, '62 kbar'],
|
||||
[845, UniversalYAxisUnit.PRESSURE_PASCAL, '845 Pa'],
|
||||
[540, UniversalYAxisUnit.PRESSURE_HECTOPASCAL, '540 hPa'],
|
||||
[378, UniversalYAxisUnit.PRESSURE_KILOPASCAL, '378 kPa'],
|
||||
[29, UniversalYAxisUnit.PRESSURE_INCHES_HG, '29 "Hg'],
|
||||
[65, UniversalYAxisUnit.PRESSURE_PSI, '65psi'],
|
||||
])('formats pressure value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Radiation', () => {
|
||||
test.each([
|
||||
[452, UniversalYAxisUnit.RADIATION_BECQUEREL, '452 Bq'],
|
||||
[37, UniversalYAxisUnit.RADIATION_CURIE, '37 Ci'],
|
||||
[128, UniversalYAxisUnit.RADIATION_GRAY, '128 Gy'],
|
||||
[512, UniversalYAxisUnit.RADIATION_RAD, '512 rad'],
|
||||
[256, UniversalYAxisUnit.RADIATION_SIEVERT, '256 Sv'],
|
||||
[640, UniversalYAxisUnit.RADIATION_MILLISIEVERT, '640 mSv'],
|
||||
[875, UniversalYAxisUnit.RADIATION_MICROSIEVERT, '875 µSv'],
|
||||
[875000, UniversalYAxisUnit.RADIATION_MICROSIEVERT, '875 mSv'],
|
||||
[92, UniversalYAxisUnit.RADIATION_REM, '92 rem'],
|
||||
[715, UniversalYAxisUnit.RADIATION_EXPOSURE_C_PER_KG, '715 C/kg'],
|
||||
[833, UniversalYAxisUnit.RADIATION_ROENTGEN, '833 R'],
|
||||
[468, UniversalYAxisUnit.RADIATION_SIEVERT_PER_HOUR, '468 Sv/h'],
|
||||
[590, UniversalYAxisUnit.RADIATION_MILLISIEVERT_PER_HOUR, '590 mSv/h'],
|
||||
[712, UniversalYAxisUnit.RADIATION_MICROSIEVERT_PER_HOUR, '712 µSv/h'],
|
||||
])('formats radiation value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rotation Speed', () => {
|
||||
test.each([
|
||||
[345, UniversalYAxisUnit.ROTATION_SPEED_REVOLUTIONS_PER_MINUTE, '345 rpm'],
|
||||
[789, UniversalYAxisUnit.ROTATION_SPEED_HERTZ, '789 Hz'],
|
||||
[789000, UniversalYAxisUnit.ROTATION_SPEED_HERTZ, '789 kHz'],
|
||||
[213, UniversalYAxisUnit.ROTATION_SPEED_RADIANS_PER_SECOND, '213 rad/s'],
|
||||
[654, UniversalYAxisUnit.ROTATION_SPEED_DEGREES_PER_SECOND, '654 °/s'],
|
||||
])('formats rotation speed value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Temperature', () => {
|
||||
test.each([
|
||||
[37, UniversalYAxisUnit.TEMPERATURE_CELSIUS, '37 °C'],
|
||||
[451, UniversalYAxisUnit.TEMPERATURE_FAHRENHEIT, '451 °F'],
|
||||
[310, UniversalYAxisUnit.TEMPERATURE_KELVIN, '310 K'],
|
||||
])('formats temperature value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Velocity', () => {
|
||||
test.each([
|
||||
[900, UniversalYAxisUnit.VELOCITY_METERS_PER_SECOND, '900 m/s'],
|
||||
[456, UniversalYAxisUnit.VELOCITY_KILOMETERS_PER_HOUR, '456 km/h'],
|
||||
[789, UniversalYAxisUnit.VELOCITY_MILES_PER_HOUR, '789 mph'],
|
||||
[222, UniversalYAxisUnit.VELOCITY_KNOT, '222 kn'],
|
||||
])('formats velocity value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Volume', () => {
|
||||
test.each([
|
||||
[1200, UniversalYAxisUnit.VOLUME_MILLILITER, '1.2 L'],
|
||||
[9000000, UniversalYAxisUnit.VOLUME_MILLILITER, '9 kL'],
|
||||
[9, UniversalYAxisUnit.VOLUME_LITER, '9 L'],
|
||||
[9000, UniversalYAxisUnit.VOLUME_LITER, '9 kL'],
|
||||
[9000000, UniversalYAxisUnit.VOLUME_LITER, '9 ML'],
|
||||
[9000000000, UniversalYAxisUnit.VOLUME_LITER, '9 GL'],
|
||||
[9000000000000, UniversalYAxisUnit.VOLUME_LITER, '9 TL'],
|
||||
[9000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9 PL'],
|
||||
[9010000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.01 EL'],
|
||||
[9020000000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.02 ZL'],
|
||||
[9030000000000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.03 YL'],
|
||||
[900, UniversalYAxisUnit.VOLUME_CUBIC_METER, '900 m³'],
|
||||
[
|
||||
9000000000000000000000000000000,
|
||||
UniversalYAxisUnit.VOLUME_CUBIC_METER,
|
||||
'9e+30 m³',
|
||||
],
|
||||
[900, UniversalYAxisUnit.VOLUME_NORMAL_CUBIC_METER, '900 Nm³'],
|
||||
[
|
||||
9000000000000000000000000000000,
|
||||
UniversalYAxisUnit.VOLUME_NORMAL_CUBIC_METER,
|
||||
'9e+30 Nm³',
|
||||
],
|
||||
[900, UniversalYAxisUnit.VOLUME_CUBIC_DECIMETER, '900 dm³'],
|
||||
[
|
||||
9000000000000000000000000000000,
|
||||
UniversalYAxisUnit.VOLUME_CUBIC_DECIMETER,
|
||||
'9e+30 dm³',
|
||||
],
|
||||
[900, UniversalYAxisUnit.VOLUME_GALLON, '900 gal'],
|
||||
[
|
||||
9000000000000000000000000000000,
|
||||
UniversalYAxisUnit.VOLUME_GALLON,
|
||||
'9e+30 gal',
|
||||
],
|
||||
])('formats volume value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Boolean', () => {
|
||||
it('formats boolean units', () => {
|
||||
expect(formatUniversalUnit(1, UniversalYAxisUnit.TRUE_FALSE)).toBe('True');
|
||||
expect(formatUniversalUnit(1, UniversalYAxisUnit.YES_NO)).toBe('Yes');
|
||||
expect(formatUniversalUnit(1, UniversalYAxisUnit.ON_OFF)).toBe('On');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Mapping Validator', () => {
	it('validates that all units have a mapping', () => {
		// Each universal unit should have a mapping to a 1:1 Grafana unit in UniversalUnitToGrafanaUnit or an additional mapping in AdditionalLabelsMappingForGrafanaUnits
		const units = Object.values(UniversalYAxisUnit);
		expect(
			units.every((unit) => {
				const hasBaseMapping = unit in UniversalUnitToGrafanaUnit;
				const hasAdditionalMapping = unit in AdditionalLabelsMappingForGrafanaUnits;
				const hasMapping = hasBaseMapping || hasAdditionalMapping;
				if (!hasMapping) {
					throw new Error(`Unit ${unit} does not have a mapping`);
				}
				return hasMapping;
			}),
		).toBe(true);
	});
});

@@ -1,6 +1,8 @@
import { UniversalYAxisUnit } from '../types';
import {
	getUniversalNameFromMetricUnit,
	mapMetricUnitToUniversalUnit,
	mergeCategories,
} from '../utils';

describe('YAxisUnitSelector utils', () => {
@@ -36,4 +38,43 @@ describe('YAxisUnitSelector utils', () => {
			expect(getUniversalNameFromMetricUnit('s')).toBe('Seconds (s)');
		});
	});

	describe('mergeCategories', () => {
		it('merges categories correctly', () => {
			const categories1 = [
				{
					name: 'Data',
					units: [
						{ name: 'bytes', id: UniversalYAxisUnit.BYTES },
						{ name: 'kilobytes', id: UniversalYAxisUnit.KILOBYTES },
					],
				},
			];
			const categories2 = [
				{
					name: 'Data',
					units: [{ name: 'bits', id: UniversalYAxisUnit.BITS }],
				},
				{
					name: 'Time',
					units: [{ name: 'seconds', id: UniversalYAxisUnit.SECONDS }],
				},
			];
			const mergedCategories = mergeCategories(categories1, categories2);
			expect(mergedCategories).toEqual([
				{
					name: 'Data',
					units: [
						{ name: 'bytes', id: UniversalYAxisUnit.BYTES },
						{ name: 'kilobytes', id: UniversalYAxisUnit.KILOBYTES },
						{ name: 'bits', id: UniversalYAxisUnit.BITS },
					],
				},
				{
					name: 'Time',
					units: [{ name: 'seconds', id: UniversalYAxisUnit.SECONDS }],
				},
			]);
		});
	});
});

1269	frontend/src/components/YAxisUnitSelector/data.ts	Normal file — file diff suppressed because it is too large (Load Diff)
90	frontend/src/components/YAxisUnitSelector/formatter.ts	Normal file
@@ -0,0 +1,90 @@
import { formattedValueToString, getValueFormat } from '@grafana/data';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import { formatDecimalWithLeadingZeros } from 'components/Graph/utils';
import {
	AdditionalLabelsMappingForGrafanaUnits,
	CUSTOM_SCALING_FAMILIES,
	UniversalUnitToGrafanaUnit,
} from 'components/YAxisUnitSelector/constants';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';

function scaleValue(
	value: number,
	unit: UniversalYAxisUnit,
	family: UniversalYAxisUnit[],
	factor: number,
): { value: number; label: string } {
	let idx = family.indexOf(unit);
	// If the unit is not in the family, return the unit with the additional label
	if (idx === -1) {
		return { value, label: AdditionalLabelsMappingForGrafanaUnits[unit] || '' };
	}

	// Scale the value up or down to the nearest unit in the family
	let scaled = value;
	// Scale up
	while (scaled >= factor && idx < family.length - 1) {
		scaled /= factor;
		idx += 1;
	}
	// Scale down
	while (scaled < 1 && idx > 0) {
		scaled *= factor;
		idx -= 1;
	}

	// Return the scaled value and the label of the nearest unit in the family
	return {
		value: scaled,
		label: AdditionalLabelsMappingForGrafanaUnits[family[idx]] || '',
	};
}

export function formatUniversalUnit(
	value: number,
	unit: UniversalYAxisUnit,
	precision: PrecisionOption = PrecisionOptionsEnum.FULL,
	decimals: number | undefined = undefined,
): string {
	// Check if this unit belongs to a family that needs custom scaling
	const family = CUSTOM_SCALING_FAMILIES.find((family) =>
		family.units.includes(unit),
	);
	if (family) {
		const scaled = scaleValue(value, unit, family.units, family.scaleFactor);
		const formatter = getValueFormat(scaled.label);
		const formatted = formatter(scaled.value, decimals);
		if (formatted.text && formatted.text.includes('.')) {
			formatted.text = formatDecimalWithLeadingZeros(
				parseFloat(formatted.text),
				precision,
			);
		}
		return `${formatted.text} ${scaled.label}`;
	}

	// Use Grafana formatting with custom label mappings
	const grafanaFormat = UniversalUnitToGrafanaUnit[unit];
	if (grafanaFormat) {
		const formatter = getValueFormat(grafanaFormat);
		const formatted = formatter(value, decimals);
		if (formatted.text && formatted.text.includes('.')) {
			formatted.text = formatDecimalWithLeadingZeros(
				parseFloat(formatted.text),
				precision,
			);
		}
		return formattedValueToString(formatted);
	}

	// Fallback to short format for other units
	const formatter = getValueFormat('short');
	const formatted = formatter(value, decimals);
	if (formatted.text && formatted.text.includes('.')) {
		formatted.text = formatDecimalWithLeadingZeros(
			parseFloat(formatted.text),
			precision,
		);
	}
	return `${formatted.text} ${unit}`;
}
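A short usage sketch of the new formatter. It assumes KIBIBYTES_SECOND is registered in a CUSTOM_SCALING_FAMILIES entry with scaleFactor 1024 (constants.ts and data.ts are suppressed above, so that detail is inferred from the tests rather than shown); the expected strings mirror the test cases earlier in this diff. The branch order matters: family scaling first, then the direct Grafana mapping, then the 'short' fallback that appends the raw unit string.

// Sketch only — assumes the IEC byte-rate family exists in CUSTOM_SCALING_FAMILIES with scaleFactor 1024.
import { formatUniversalUnit } from 'components/YAxisUnitSelector/formatter';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';

formatUniversalUnit(900, UniversalYAxisUnit.KIBIBYTES_SECOND); // '900 KiB/s' — stays below the 1024 threshold
formatUniversalUnit(1024, UniversalYAxisUnit.KIBIBYTES_SECOND); // '1 MiB/s' — scaled up one step within the family
formatUniversalUnit(37, UniversalYAxisUnit.TEMPERATURE_CELSIUS); // '37 °C' — no family match, falls through to the Grafana formatter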
@@ -5,11 +5,11 @@ export interface YAxisUnitSelectorProps {
|
||||
loading?: boolean;
|
||||
disabled?: boolean;
|
||||
'data-testid'?: string;
|
||||
source: YAxisSource;
|
||||
}
|
||||
|
||||
export enum UniversalYAxisUnit {
|
||||
// Time
|
||||
WEEKS = 'wk',
|
||||
DAYS = 'd',
|
||||
HOURS = 'h',
|
||||
MINUTES = 'min',
|
||||
@@ -17,6 +17,14 @@ export enum UniversalYAxisUnit {
|
||||
MICROSECONDS = 'us',
|
||||
MILLISECONDS = 'ms',
|
||||
NANOSECONDS = 'ns',
|
||||
DURATION_MS = 'dtdurationms',
|
||||
DURATION_S = 'dtdurations',
|
||||
DURATION_HMS = 'dthms',
|
||||
DURATION_DHMS = 'dtdhms',
|
||||
TIMETICKS = 'timeticks',
|
||||
CLOCK_MS = 'clockms',
|
||||
CLOCK_S = 'clocks',
|
||||
TIME_HERTZ = 'hertz',
|
||||
|
||||
// Data
|
||||
BYTES = 'By',
|
||||
@@ -29,6 +37,17 @@ export enum UniversalYAxisUnit {
|
||||
ZETTABYTES = 'ZBy',
|
||||
YOTTABYTES = 'YBy',
|
||||
|
||||
// Binary (IEC) Data
|
||||
BYTES_IEC = 'bytes',
|
||||
KIBIBYTES = 'KiBy',
|
||||
MEBIBYTES = 'MiBy',
|
||||
GIBIBYTES = 'GiBy',
|
||||
TEBIBYTES = 'TiBy',
|
||||
PEBIBYTES = 'PiBy',
|
||||
EXBIBYTES = 'EiBy',
|
||||
ZEBIBYTES = 'ZiBy',
|
||||
YOBIBYTES = 'YiBy',
|
||||
|
||||
// Data Rate
|
||||
BYTES_SECOND = 'By/s',
|
||||
KILOBYTES_SECOND = 'kBy/s',
|
||||
@@ -39,9 +58,21 @@ export enum UniversalYAxisUnit {
|
||||
EXABYTES_SECOND = 'EBy/s',
|
||||
ZETTABYTES_SECOND = 'ZBy/s',
|
||||
YOTTABYTES_SECOND = 'YBy/s',
|
||||
DATA_RATE_PACKETS_PER_SECOND = 'pps',
|
||||
|
||||
// Binary (IEC) Data Rate
|
||||
KIBIBYTES_SECOND = 'KiBy/s',
|
||||
MEBIBYTES_SECOND = 'MiBy/s',
|
||||
GIBIBYTES_SECOND = 'GiBy/s',
|
||||
TEBIBYTES_SECOND = 'TiBy/s',
|
||||
PEBIBYTES_SECOND = 'PiBy/s',
|
||||
EXBIBYTES_SECOND = 'EiBy/s',
|
||||
ZEBIBYTES_SECOND = 'ZiBy/s',
|
||||
YOBIBYTES_SECOND = 'YiBy/s',
|
||||
|
||||
// Bits
|
||||
BITS = 'bit',
|
||||
BITS_IEC = 'bits',
|
||||
KILOBITS = 'kbit',
|
||||
MEGABITS = 'Mbit',
|
||||
GIGABITS = 'Gbit',
|
||||
@@ -62,6 +93,16 @@ export enum UniversalYAxisUnit {
|
||||
ZETTABITS_SECOND = 'Zbit/s',
|
||||
YOTTABITS_SECOND = 'Ybit/s',
|
||||
|
||||
// Binary (IEC) Bit Rate
|
||||
KIBIBITS_SECOND = 'Kibit/s',
|
||||
MEBIBITS_SECOND = 'Mibit/s',
|
||||
GIBIBITS_SECOND = 'Gibit/s',
|
||||
TEBIBITS_SECOND = 'Tibit/s',
|
||||
PEBIBITS_SECOND = 'Pibit/s',
|
||||
EXBIBITS_SECOND = 'Eibit/s',
|
||||
ZEBIBITS_SECOND = 'Zibit/s',
|
||||
YOBIBITS_SECOND = 'Yibit/s',
|
||||
|
||||
// Count
|
||||
COUNT = '{count}',
|
||||
COUNT_SECOND = '{count}/s',
|
||||
@@ -87,7 +128,231 @@ export enum UniversalYAxisUnit {
|
||||
// Percent
|
||||
PERCENT = '%',
|
||||
PERCENT_UNIT = 'percentunit',
|
||||
|
||||
// Boolean
|
||||
TRUE_FALSE = '{bool}',
|
||||
YES_NO = '{bool_yn}',
|
||||
ON_OFF = 'bool_on_off',
|
||||
|
||||
// None
|
||||
NONE = '1',
|
||||
|
||||
// Hash rate
|
||||
HASH_RATE_HASHES_PER_SECOND = 'Hs',
|
||||
HASH_RATE_KILOHASHES_PER_SECOND = 'KHs',
|
||||
HASH_RATE_MEGAHASHES_PER_SECOND = 'MHs',
|
||||
HASH_RATE_GIGAHASHES_PER_SECOND = 'GHs',
|
||||
HASH_RATE_TERAHASHES_PER_SECOND = 'THs',
|
||||
HASH_RATE_PETAHASHES_PER_SECOND = 'PHs',
|
||||
HASH_RATE_EXAHASHES_PER_SECOND = 'EHs',
|
||||
|
||||
// Miscellaneous
|
||||
MISC_STRING = 'string',
|
||||
MISC_SHORT = 'short',
|
||||
MISC_HUMIDITY = 'humidity',
|
||||
MISC_DECIBEL = 'dB',
|
||||
MISC_HEXADECIMAL = 'hex',
|
||||
MISC_HEXADECIMAL_0X = 'hex0x',
|
||||
MISC_SCIENTIFIC_NOTATION = 'sci',
|
||||
MISC_LOCALE_FORMAT = 'locale',
|
||||
MISC_PIXELS = 'pixel',
|
||||
|
||||
// Acceleration
|
||||
ACCELERATION_METERS_PER_SECOND_SQUARED = 'accMS2',
|
||||
ACCELERATION_FEET_PER_SECOND_SQUARED = 'accFS2',
|
||||
ACCELERATION_G_UNIT = 'accG',
|
||||
|
||||
// Angular
|
||||
ANGULAR_DEGREE = 'degree',
|
||||
ANGULAR_RADIAN = 'radian',
|
||||
ANGULAR_GRADIAN = 'grad',
|
||||
ANGULAR_ARC_MINUTE = 'arcmin',
|
||||
ANGULAR_ARC_SECOND = 'arcsec',
|
||||
|
||||
// Area
|
||||
AREA_SQUARE_METERS = 'areaM2',
|
||||
AREA_SQUARE_FEET = 'areaF2',
|
||||
AREA_SQUARE_MILES = 'areaMI2',
|
||||
|
||||
// FLOPs
|
||||
FLOPS_FLOPS = 'flops',
|
||||
FLOPS_MFLOPS = 'mflops',
|
||||
FLOPS_GFLOPS = 'gflops',
|
||||
FLOPS_TFLOPS = 'tflops',
|
||||
FLOPS_PFLOPS = 'pflops',
|
||||
FLOPS_EFLOPS = 'eflops',
|
||||
FLOPS_ZFLOPS = 'zflops',
|
||||
FLOPS_YFLOPS = 'yflops',
|
||||
|
||||
// Concentration
|
||||
CONCENTRATION_PPM = 'ppm',
|
||||
CONCENTRATION_PPB = 'conppb',
|
||||
CONCENTRATION_NG_M3 = 'conngm3',
|
||||
CONCENTRATION_NG_NORMAL_CUBIC_METER = 'conngNm3',
|
||||
CONCENTRATION_UG_M3 = 'conμgm3',
|
||||
CONCENTRATION_UG_NORMAL_CUBIC_METER = 'conμgNm3',
|
||||
CONCENTRATION_MG_M3 = 'conmgm3',
|
||||
CONCENTRATION_MG_NORMAL_CUBIC_METER = 'conmgNm3',
|
||||
CONCENTRATION_G_M3 = 'congm3',
|
||||
CONCENTRATION_G_NORMAL_CUBIC_METER = 'congNm3',
|
||||
CONCENTRATION_MG_PER_DL = 'conmgdL',
|
||||
CONCENTRATION_MMOL_PER_L = 'conmmolL',
|
||||
|
||||
// Currency
|
||||
CURRENCY_USD = 'currencyUSD',
|
||||
CURRENCY_GBP = 'currencyGBP',
|
||||
CURRENCY_EUR = 'currencyEUR',
|
||||
CURRENCY_JPY = 'currencyJPY',
|
||||
CURRENCY_RUB = 'currencyRUB',
|
||||
CURRENCY_UAH = 'currencyUAH',
|
||||
CURRENCY_BRL = 'currencyBRL',
|
||||
CURRENCY_DKK = 'currencyDKK',
|
||||
CURRENCY_ISK = 'currencyISK',
|
||||
CURRENCY_NOK = 'currencyNOK',
|
||||
CURRENCY_SEK = 'currencySEK',
|
||||
CURRENCY_CZK = 'currencyCZK',
|
||||
CURRENCY_CHF = 'currencyCHF',
|
||||
CURRENCY_PLN = 'currencyPLN',
|
||||
CURRENCY_BTC = 'currencyBTC',
|
||||
CURRENCY_MBTC = 'currencymBTC',
|
||||
CURRENCY_UBTC = 'currencyμBTC',
|
||||
CURRENCY_ZAR = 'currencyZAR',
|
||||
CURRENCY_INR = 'currencyINR',
|
||||
CURRENCY_KRW = 'currencyKRW',
|
||||
CURRENCY_IDR = 'currencyIDR',
|
||||
CURRENCY_PHP = 'currencyPHP',
|
||||
CURRENCY_VND = 'currencyVND',
|
||||
|
||||
// Datetime
|
||||
DATETIME_ISO = 'dateTimeAsIso',
|
||||
DATETIME_ISO_NO_DATE_IF_TODAY = 'dateTimeAsIsoNoDateIfToday',
|
||||
DATETIME_US = 'dateTimeAsUS',
|
||||
DATETIME_US_NO_DATE_IF_TODAY = 'dateTimeAsUSNoDateIfToday',
|
||||
DATETIME_LOCAL = 'dateTimeAsLocal',
|
||||
DATETIME_LOCAL_NO_DATE_IF_TODAY = 'dateTimeAsLocalNoDateIfToday',
|
||||
DATETIME_SYSTEM = 'dateTimeAsSystem',
|
||||
DATETIME_FROM_NOW = 'dateTimeFromNow',
|
||||
|
||||
// Power/Electrical
|
||||
POWER_WATT = 'watt',
|
||||
POWER_KILOWATT = 'kwatt',
|
||||
POWER_MEGAWATT = 'megwatt',
|
||||
POWER_GIGAWATT = 'gwatt',
|
||||
POWER_MILLIWATT = 'mwatt',
|
||||
POWER_WATT_PER_SQUARE_METER = 'Wm2',
|
||||
POWER_VOLT_AMPERE = 'voltamp',
|
||||
POWER_KILOVOLT_AMPERE = 'kvoltamp',
|
||||
POWER_VOLT_AMPERE_REACTIVE = 'voltampreact',
|
||||
POWER_KILOVOLT_AMPERE_REACTIVE = 'kvoltampreact',
|
||||
POWER_WATT_HOUR = 'watth',
|
||||
POWER_WATT_HOUR_PER_KG = 'watthperkg',
|
||||
POWER_KILOWATT_HOUR = 'kwatth',
|
||||
POWER_KILOWATT_MINUTE = 'kwattm',
|
||||
POWER_AMPERE_HOUR = 'amph',
|
||||
POWER_KILOAMPERE_HOUR = 'kamph',
|
||||
POWER_MILLIAMPERE_HOUR = 'mamph',
|
||||
POWER_JOULE = 'joule',
|
||||
POWER_ELECTRON_VOLT = 'ev',
|
||||
POWER_AMPERE = 'amp',
|
||||
POWER_KILOAMPERE = 'kamp',
|
||||
POWER_MILLIAMPERE = 'mamp',
|
||||
POWER_VOLT = 'volt',
|
||||
POWER_KILOVOLT = 'kvolt',
|
||||
POWER_MILLIVOLT = 'mvolt',
|
||||
POWER_DECIBEL_MILLIWATT = 'dBm',
|
||||
POWER_OHM = 'ohm',
|
||||
POWER_KILOOHM = 'kohm',
|
||||
POWER_MEGAOHM = 'Mohm',
|
||||
POWER_FARAD = 'farad',
|
||||
POWER_MICROFARAD = 'µfarad',
|
||||
POWER_NANOFARAD = 'nfarad',
|
||||
POWER_PICOFARAD = 'pfarad',
|
||||
POWER_FEMTOFARAD = 'ffarad',
|
||||
POWER_HENRY = 'henry',
|
||||
POWER_MILLIHENRY = 'mhenry',
|
||||
POWER_MICROHENRY = 'µhenry',
|
||||
POWER_LUMENS = 'lumens',
|
||||
|
||||
// Flow
|
||||
FLOW_GALLONS_PER_MINUTE = 'flowgpm',
|
||||
FLOW_CUBIC_METERS_PER_SECOND = 'flowcms',
|
||||
FLOW_CUBIC_FEET_PER_SECOND = 'flowcfs',
|
||||
FLOW_CUBIC_FEET_PER_MINUTE = 'flowcfm',
|
||||
FLOW_LITERS_PER_HOUR = 'litreh',
|
||||
FLOW_LITERS_PER_MINUTE = 'flowlpm',
|
||||
FLOW_MILLILITERS_PER_MINUTE = 'flowmlpm',
|
||||
FLOW_LUX = 'lux',
|
||||
|
||||
// Force
|
||||
FORCE_NEWTON_METERS = 'forceNm',
|
||||
FORCE_KILONEWTON_METERS = 'forcekNm',
|
||||
FORCE_NEWTONS = 'forceN',
|
||||
FORCE_KILONEWTONS = 'forcekN',
|
||||
|
||||
// Mass
|
||||
MASS_MILLIGRAM = 'massmg',
|
||||
MASS_GRAM = 'massg',
|
||||
MASS_POUND = 'masslb',
|
||||
MASS_KILOGRAM = 'masskg',
|
||||
MASS_METRIC_TON = 'masst',
|
||||
|
||||
// Length
|
||||
LENGTH_MILLIMETER = 'lengthmm',
|
||||
LENGTH_INCH = 'lengthin',
|
||||
LENGTH_FOOT = 'lengthft',
|
||||
LENGTH_METER = 'lengthm',
|
||||
LENGTH_KILOMETER = 'lengthkm',
|
||||
LENGTH_MILE = 'lengthmi',
|
||||
|
||||
// Pressure
|
||||
PRESSURE_MILLIBAR = 'pressurembar',
|
||||
PRESSURE_BAR = 'pressurebar',
|
||||
PRESSURE_KILOBAR = 'pressurekbar',
|
||||
PRESSURE_PASCAL = 'pressurepa',
|
||||
PRESSURE_HECTOPASCAL = 'pressurehpa',
|
||||
PRESSURE_KILOPASCAL = 'pressurekpa',
|
||||
PRESSURE_INCHES_HG = 'pressurehg',
|
||||
PRESSURE_PSI = 'pressurepsi',
|
||||
|
||||
// Radiation
|
||||
RADIATION_BECQUEREL = 'radbq',
|
||||
RADIATION_CURIE = 'radci',
|
||||
RADIATION_GRAY = 'radgy',
|
||||
RADIATION_RAD = 'radrad',
|
||||
RADIATION_SIEVERT = 'radsv',
|
||||
RADIATION_MILLISIEVERT = 'radmsv',
|
||||
RADIATION_MICROSIEVERT = 'radusv',
|
||||
RADIATION_REM = 'radrem',
|
||||
RADIATION_EXPOSURE_C_PER_KG = 'radexpckg',
|
||||
RADIATION_ROENTGEN = 'radr',
|
||||
RADIATION_SIEVERT_PER_HOUR = 'radsvh',
|
||||
RADIATION_MILLISIEVERT_PER_HOUR = 'radmsvh',
|
||||
RADIATION_MICROSIEVERT_PER_HOUR = 'radusvh',
|
||||
|
||||
// Rotation speed
|
||||
ROTATION_SPEED_REVOLUTIONS_PER_MINUTE = 'rotrpm',
|
||||
ROTATION_SPEED_HERTZ = 'rothz',
|
||||
ROTATION_SPEED_RADIANS_PER_SECOND = 'rotrads',
|
||||
ROTATION_SPEED_DEGREES_PER_SECOND = 'rotdegs',
|
||||
|
||||
// Temperature
|
||||
TEMPERATURE_CELSIUS = 'celsius',
|
||||
TEMPERATURE_FAHRENHEIT = 'fahrenheit',
|
||||
TEMPERATURE_KELVIN = 'kelvin',
|
||||
|
||||
// Velocity
|
||||
VELOCITY_METERS_PER_SECOND = 'velocityms',
|
||||
VELOCITY_KILOMETERS_PER_HOUR = 'velocitykmh',
|
||||
VELOCITY_MILES_PER_HOUR = 'velocitymph',
|
||||
VELOCITY_KNOT = 'velocityknot',
|
||||
|
||||
// Volume
|
||||
VOLUME_MILLILITER = 'mlitre',
|
||||
VOLUME_LITER = 'litre',
|
||||
VOLUME_CUBIC_METER = 'm3',
|
||||
VOLUME_NORMAL_CUBIC_METER = 'Nm3',
|
||||
VOLUME_CUBIC_DECIMETER = 'dm3',
|
||||
VOLUME_GALLON = 'gallons',
|
||||
}
|
||||
|
||||
export enum YAxisUnit {
|
||||
@@ -293,6 +558,15 @@ export enum YAxisUnit {
|
||||
UCUM_PEBIBYTES = 'PiBy',
|
||||
OPEN_METRICS_PEBIBYTES = 'pebibytes',
|
||||
|
||||
UCUM_EXBIBYTES = 'EiBy',
|
||||
OPEN_METRICS_EXBIBYTES = 'exbibytes',
|
||||
|
||||
UCUM_ZEBIBYTES = 'ZiBy',
|
||||
OPEN_METRICS_ZEBIBYTES = 'zebibytes',
|
||||
|
||||
UCUM_YOBIBYTES = 'YiBy',
|
||||
OPEN_METRICS_YOBIBYTES = 'yobibytes',
|
||||
|
||||
UCUM_KIBIBYTES_SECOND = 'KiBy/s',
|
||||
OPEN_METRICS_KIBIBYTES_SECOND = 'kibibytes_per_second',
|
||||
|
||||
@@ -323,6 +597,24 @@ export enum YAxisUnit {
|
||||
UCUM_PEBIBITS_SECOND = 'Pibit/s',
|
||||
OPEN_METRICS_PEBIBITS_SECOND = 'pebibits_per_second',
|
||||
|
||||
UCUM_EXBIBYTES_SECOND = 'EiBy/s',
|
||||
OPEN_METRICS_EXBIBYTES_SECOND = 'exbibytes_per_second',
|
||||
|
||||
UCUM_EXBIBITS_SECOND = 'Eibit/s',
|
||||
OPEN_METRICS_EXBIBITS_SECOND = 'exbibits_per_second',
|
||||
|
||||
UCUM_ZEBIBYTES_SECOND = 'ZiBy/s',
|
||||
OPEN_METRICS_ZEBIBYTES_SECOND = 'zebibytes_per_second',
|
||||
|
||||
UCUM_ZEBIBITS_SECOND = 'Zibit/s',
|
||||
OPEN_METRICS_ZEBIBITS_SECOND = 'zebibits_per_second',
|
||||
|
||||
UCUM_YOBIBYTES_SECOND = 'YiBy/s',
|
||||
OPEN_METRICS_YOBIBYTES_SECOND = 'yobibytes_per_second',
|
||||
|
||||
UCUM_YOBIBITS_SECOND = 'Yibit/s',
|
||||
OPEN_METRICS_YOBIBITS_SECOND = 'yobibits_per_second',
|
||||
|
||||
UCUM_TRUE_FALSE = '{bool}',
|
||||
OPEN_METRICS_TRUE_FALSE = 'boolean_true_false',
|
||||
|
||||
@@ -364,3 +656,27 @@ export enum YAxisUnit {
|
||||
|
||||
OPEN_METRICS_PERCENT_UNIT = 'percentunit',
|
||||
}
|
||||
|
||||
export interface ScaledValue {
|
||||
value: number;
|
||||
label: string;
|
||||
}
|
||||
|
||||
export interface UnitFamilyConfig {
|
||||
units: UniversalYAxisUnit[];
|
||||
scaleFactor: number;
|
||||
}
|
||||
|
||||
export interface YAxisCategory {
|
||||
name: string;
|
||||
units: {
|
||||
name: string;
|
||||
id: UniversalYAxisUnit;
|
||||
}[];
|
||||
}
|
||||
|
||||
export enum YAxisSource {
|
||||
ALERTS = 'alerts',
|
||||
DASHBOARDS = 'dashboards',
|
||||
EXPLORER = 'explorer',
|
||||
}
|
||||
|
||||
@@ -1,5 +1,11 @@
import { UniversalYAxisUnitMappings, Y_AXIS_UNIT_NAMES } from './constants';
import { UniversalYAxisUnit, YAxisUnit } from './types';
import { ADDITIONAL_Y_AXIS_CATEGORIES, BASE_Y_AXIS_CATEGORIES } from './data';
import {
	UniversalYAxisUnit,
	YAxisCategory,
	YAxisSource,
	YAxisUnit,
} from './types';

export const mapMetricUnitToUniversalUnit = (
	unit: string | undefined,
@@ -9,7 +15,7 @@ export const mapMetricUnitToUniversalUnit = (
	}

	const universalUnit = Object.values(UniversalYAxisUnit).find(
		(u) => UniversalYAxisUnitMappings[u].has(unit as YAxisUnit) || unit === u,
		(u) => UniversalYAxisUnitMappings[u]?.has(unit as YAxisUnit) || unit === u,
	);

	return universalUnit || (unit as UniversalYAxisUnit) || null;
@@ -31,3 +37,44 @@ export const getUniversalNameFromMetricUnit = (

	return universalName || unit || '-';
};

export function isUniversalUnit(format: string): boolean {
	return Object.values(UniversalYAxisUnit).includes(
		format as UniversalYAxisUnit,
	);
}

export function mergeCategories(
	categories1: YAxisCategory[],
	categories2: YAxisCategory[],
): YAxisCategory[] {
	const mapOfCategories = new Map<string, YAxisCategory>();

	categories1.forEach((category) => {
		mapOfCategories.set(category.name, category);
	});

	categories2.forEach((category) => {
		if (mapOfCategories.has(category.name)) {
			mapOfCategories.set(category.name, {
				name: category.name,
				units: [
					...(mapOfCategories.get(category.name)?.units ?? []),
					...category.units,
				],
			});
		} else {
			mapOfCategories.set(category.name, category);
		}
	});

	return Array.from(mapOfCategories.values());
}

export function getYAxisCategories(source: YAxisSource): YAxisCategory[] {
	if (source !== YAxisSource.DASHBOARDS) {
		return BASE_Y_AXIS_CATEGORIES;
	}

	return mergeCategories(BASE_Y_AXIS_CATEGORIES, ADDITIONAL_Y_AXIS_CATEGORIES);
}

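A hedged sketch of how these helpers compose, based on the tests earlier in this diff; the concrete category data lives in data.ts, which is suppressed above, so the exact contents are assumptions.

// Illustrative only; return values follow the tests earlier in this diff.
import { YAxisSource } from './types';
import {
	getUniversalNameFromMetricUnit,
	getYAxisCategories,
	isUniversalUnit,
	mapMetricUnitToUniversalUnit,
} from './utils';

mapMetricUnitToUniversalUnit('s'); // resolves a raw metric unit to a UniversalYAxisUnit, or passes the string through
getUniversalNameFromMetricUnit('s'); // 'Seconds (s)' — display name for the resolved unit
isUniversalUnit('By'); // true, since 'By' is the UniversalYAxisUnit.BYTES value
getYAxisCategories(YAxisSource.DASHBOARDS); // BASE categories merged with ADDITIONAL ones; other sources get BASE only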
@@ -1,7 +1,8 @@
import { Button, Flex, Switch, Typography } from 'antd';
import { BaseOptionType, DefaultOptionType, SelectProps } from 'antd/es/select';
import { getInvolvedQueriesInTraceOperator } from 'components/QueryBuilderV2/QueryV2/TraceOperator/utils/utils';
import { Y_AXIS_CATEGORIES } from 'components/YAxisUnitSelector/constants';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { getYAxisCategories } from 'components/YAxisUnitSelector/utils';
import ROUTES from 'constants/routes';
import {
	AlertThresholdMatchType,
@@ -39,7 +40,8 @@ export function getQueryNames(currentQuery: Query): BaseOptionType[] {
}

export function getCategoryByOptionId(id: string): string | undefined {
	return Y_AXIS_CATEGORIES.find((category) =>
	const categories = getYAxisCategories(YAxisSource.ALERTS);
	return categories.find((category) =>
		category.units.some((unit) => unit.id === id),
	)?.name;
}
@@ -47,14 +49,15 @@ export function getCategoryByOptionId(id: string): string | undefined {
export function getCategorySelectOptionByName(
	name: string,
): DefaultOptionType[] {
	const categories = getYAxisCategories(YAxisSource.ALERTS);
	return (
		Y_AXIS_CATEGORIES.find((category) => category.name === name)?.units.map(
			(unit) => ({
		categories
			.find((category) => category.name === name)
			?.units.map((unit) => ({
				label: unit.name,
				value: unit.id,
				'data-testid': `threshold-unit-select-option-${unit.id}`,
			}),
		) || []
			})) || []
	);
}

@@ -1,4 +1,5 @@
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useCreateAlertState } from 'container/CreateAlertV2/context';
import ChartPreviewComponent from 'container/FormAlertRules/ChartPreview';
@@ -37,6 +38,7 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
				onChange={(value): void => {
					setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
				}}
				source={YAxisSource.ALERTS}
			/>
		</div>
	);

@@ -1,5 +1,5 @@
|
||||
import { TableProps } from 'antd';
|
||||
import { PrecisionOption } from 'components/Graph/yAxisConfig';
|
||||
import { PrecisionOption } from 'components/Graph/types';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { LogsExplorerTableProps } from 'container/LogsExplorerTable/LogsExplorerTable.interfaces';
|
||||
import {
|
||||
|
||||
@@ -175,7 +175,18 @@ function LiveLogsContainer(): JSX.Element {
|
||||
if (isConnectionError && reconnectDueToError) {
|
||||
// Small delay to prevent immediate reconnection attempts
|
||||
const reconnectTimer = setTimeout(() => {
|
||||
handleStartNewConnection();
|
||||
const fallbackFilterExpression =
|
||||
prevFilterExpressionRef.current ||
|
||||
currentQuery?.builder.queryData[0]?.filter?.expression?.trim() ||
|
||||
null;
|
||||
|
||||
const validationResult = validateQuery(fallbackFilterExpression || '');
|
||||
|
||||
if (validationResult.isValid) {
|
||||
handleStartNewConnection(fallbackFilterExpression);
|
||||
} else {
|
||||
handleStartNewConnection(null);
|
||||
}
|
||||
}, 1000);
|
||||
|
||||
return (): void => clearTimeout(reconnectTimer);
|
||||
@@ -186,6 +197,7 @@ function LiveLogsContainer(): JSX.Element {
|
||||
reconnectDueToError,
|
||||
compositeQuery,
|
||||
handleStartNewConnection,
|
||||
currentQuery,
|
||||
]);
|
||||
|
||||
// clean up the connection when the component unmounts
|
||||
|
||||
@@ -124,7 +124,7 @@
|
||||
|
||||
.builder-units-filter-label {
|
||||
margin-bottom: 0px !important;
|
||||
font-size: 13px;
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { UpdateMetricMetadataProps } from 'api/metricsExplorer/updateMetricMetadata';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import YAxisUnitSelector from 'components/YAxisUnitSelector';
|
||||
import { YAxisSource } from 'components/YAxisUnitSelector/types';
|
||||
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
|
||||
import FieldRenderer from 'container/LogDetailedView/FieldRenderer';
|
||||
import { DataType } from 'container/LogDetailedView/TableView';
|
||||
@@ -120,6 +121,7 @@ function Metadata({
|
||||
setMetricMetadata((prev) => ({ ...prev, unit: value }));
|
||||
}}
|
||||
data-testid="unit-select"
|
||||
source={YAxisSource.EXPLORER}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -12,10 +12,7 @@ import {
|
||||
Switch,
|
||||
Typography,
|
||||
} from 'antd';
|
||||
import {
|
||||
PrecisionOption,
|
||||
PrecisionOptionsEnum,
|
||||
} from 'components/Graph/yAxisConfig';
|
||||
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
|
||||
import TimePreference from 'components/TimePreferenceDropDown';
|
||||
import { PANEL_TYPES, PanelDisplay } from 'constants/queryBuilder';
|
||||
import GraphTypes, {
|
||||
|
||||
@@ -4,10 +4,7 @@ import './NewWidget.styles.scss';
|
||||
import { WarningOutlined } from '@ant-design/icons';
|
||||
import { Button, Flex, Modal, Space, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import {
|
||||
PrecisionOption,
|
||||
PrecisionOptionsEnum,
|
||||
} from 'components/Graph/yAxisConfig';
|
||||
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
|
||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import { adjustQueryForV5 } from 'components/QueryBuilderV2/utils';
|
||||
import { QueryParams } from 'constants/query';
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { DefaultOptionType } from 'antd/es/select';
|
||||
import { omitIdFromQuery } from 'components/ExplorerCard/utils';
|
||||
import { PrecisionOptionsEnum } from 'components/Graph/yAxisConfig';
|
||||
import { PrecisionOptionsEnum } from 'components/Graph/types';
|
||||
import {
|
||||
initialQueryBuilderFormValuesMap,
|
||||
PANEL_TYPES,
|
||||
|
||||
@@ -1237,9 +1237,9 @@
|
||||
},
|
||||
{
|
||||
"dataSource": "opentelemetry-cloudflare",
|
||||
"label": "Cloudflare",
|
||||
"label": "Cloudflare - Tracing",
|
||||
"imgUrl": "/Logos/cloudflare.svg",
|
||||
"tags": ["apm/traces", "logs"],
|
||||
"tags": ["apm/traces"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"apm",
|
||||
@@ -1260,6 +1260,30 @@
|
||||
"id": "opentelemetry-cloudflare",
|
||||
"link": "https://signoz.io/docs/instrumentation/opentelemetry-cloudflare/"
|
||||
},
|
||||
{
|
||||
"dataSource": "opentelemetry-cloudflare-logs",
|
||||
"label": "Cloudflare Logs",
|
||||
"imgUrl": "/Logos/cloudflare.svg",
|
||||
"tags": ["logs"],
|
||||
"module": "logs",
|
||||
"relatedSearchKeywords": [
|
||||
"logs",
|
||||
"cloudflare",
|
||||
"cloudflare workers",
|
||||
"cloudflare monitoring",
|
||||
"cloudflare logging",
|
||||
"cloudflare observability",
|
||||
"opentelemetry cloudflare",
|
||||
"otel cloudflare",
|
||||
"cloudflare instrumentation",
|
||||
"monitor cloudflare workers",
|
||||
"cloudflare logs",
|
||||
"edge computing monitoring",
|
||||
"cloudflare to signoz"
|
||||
],
|
||||
"id": "opentelemetry-cloudflare-logs",
|
||||
"link": "https://signoz.io/docs/logs-management/send-logs/cloudflare-logs/"
|
||||
},
|
||||
{
|
||||
"dataSource": "kubernetes-pod-logs",
|
||||
"label": "Kubernetes Pod Logs",
|
||||
@@ -2821,6 +2845,133 @@
|
||||
],
|
||||
"link": "https://signoz.io/docs/vercel-ai-sdk-monitoring/"
|
||||
},
|
||||
{
|
||||
"dataSource": "amazon-bedrock",
|
||||
"label": "Amazon Bedrock",
|
||||
"imgUrl": "/Logos/amazon-bedrock.svg",
|
||||
"tags": ["LLM Monitoring"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"amazon bedrock monitoring",
|
||||
"amazon bedrock observability",
|
||||
"amazon bedrock performance tracking",
|
||||
"amazon bedrock latency tracing",
|
||||
"amazon bedrock metrics",
|
||||
"otel amazon bedrock integration",
|
||||
"amazon bedrock response time",
|
||||
"amazon bedrock logs",
|
||||
"amazon bedrock error tracking",
|
||||
"amazon bedrock debugging",
|
||||
"traces"
|
||||
],
|
||||
"link": "https://signoz.io/docs/amazon-bedrock-monitoring/"
|
||||
},
|
||||
{
|
||||
"dataSource": "autogen",
|
||||
"label": "AutoGen",
|
||||
"imgUrl": "/Logos/autogen.svg",
|
||||
"tags": ["LLM Monitoring"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"autogen monitoring",
|
||||
"autogen observability",
|
||||
"autogen performance tracking",
|
||||
"autogen latency tracing",
|
||||
"autogen metrics",
|
||||
"otel autogen integration",
|
||||
"autogen response time",
|
||||
"autogen logs",
|
||||
"autogen error tracking",
|
||||
"autogen debugging",
|
||||
"traces"
|
||||
],
|
||||
"link": "https://signoz.io/docs/autogen-observability/"
|
||||
},
|
||||
{
|
||||
"dataSource": "azure-openai",
|
||||
"label": "Azure OpenAI",
|
||||
"imgUrl": "/Logos/azure-openai.svg",
|
||||
"tags": ["LLM Monitoring"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"azure open ai monitoring",
|
||||
"azure open ai observability",
|
||||
"azure open ai performance tracking",
|
||||
"azure open ai latency tracing",
|
||||
"azure open ai metrics",
|
||||
"otel azure open ai integration",
|
||||
"azure open ai response time",
|
||||
"azure open ai logs",
|
||||
"azure open ai error tracking",
|
||||
"azure open ai debugging",
|
||||
"traces"
|
||||
],
|
||||
"link": "https://signoz.io/docs/azure-openai-monitoring/"
|
||||
},
|
||||
{
|
||||
"dataSource": "crew-ai",
|
||||
"label": "Crew AI",
|
||||
"imgUrl": "/Logos/crew-ai.svg",
|
||||
"tags": ["LLM Monitoring"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"crew ai monitoring",
|
||||
"crew ai observability",
|
||||
"crew ai performance tracking",
|
||||
"crew ai latency tracing",
|
||||
"crew ai metrics",
|
||||
"otel crew ai integration",
|
||||
"crew ai response time",
|
||||
"crew ai logs",
|
||||
"crew ai error tracking",
|
||||
"crew ai debugging",
|
||||
"traces"
|
||||
],
|
||||
"link": "https://signoz.io/docs/crewai-observability/"
|
||||
},
|
||||
{
|
||||
"dataSource": "litellm",
|
||||
"label": "LiteLLM",
|
||||
"imgUrl": "/Logos/litellm.svg",
|
||||
"tags": ["LLM Monitoring"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"litellm monitoring",
|
||||
"litellm observability",
|
||||
"litellm performance tracking",
|
||||
"litellm latency tracing",
|
||||
"litellm metrics",
|
||||
"otel litellm integration",
|
||||
"litellm response time",
|
||||
"litellm logs",
|
||||
"litellm error tracking",
|
||||
"litellm debugging",
|
||||
"traces"
|
||||
],
|
||||
"link": "https://signoz.io/docs/litellm-observability/"
|
||||
},
|
||||
{
|
||||
"dataSource": "pydantic-ai",
|
||||
"label": "Pydantic AI",
|
||||
"imgUrl": "/Logos/pydantic-ai.svg",
|
||||
"tags": ["LLM Monitoring"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"pydantic ai monitoring",
|
||||
"pydantic ai observability",
|
||||
"pydantic ai performance tracking",
|
||||
"pydantic ai latency tracing",
|
||||
"pydantic ai metrics",
|
||||
"otel pydantic ai integration",
|
||||
"pydantic ai response time",
|
||||
"pydantic ai logs",
|
||||
"pydantic ai error tracking",
|
||||
"pydantic ai debugging",
|
||||
"traces"
|
||||
],
|
||||
"link": "https://signoz.io/docs/pydantic-ai-observability/"
|
||||
},
|
||||
|
||||
{
|
||||
"dataSource": "mastra-monitoring",
|
||||
"label": "Mastra",
|
||||
|
||||
@@ -6,6 +6,7 @@ import { ColumnsType } from 'antd/lib/table';
|
||||
import deleteDomain from 'api/v1/domains/id/delete';
|
||||
import listAllDomain from 'api/v1/domains/list';
|
||||
import ErrorContent from 'components/ErrorModal/components/ErrorContent';
|
||||
import CopyToClipboard from 'periscope/components/CopyToClipboard';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
import { useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
@@ -32,6 +33,23 @@ const columns: ColumnsType<GettableAuthDomain> = [
|
||||
<Toggle isDefaultChecked={value} record={record} />
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'IDP Initiated SSO URL',
|
||||
dataIndex: 'relayState',
|
||||
key: 'relayState',
|
||||
width: 80,
|
||||
render: (_, record: GettableAuthDomain): JSX.Element => {
|
||||
const relayPath = record.authNProviderInfo.relayStatePath;
|
||||
if (!relayPath) {
|
||||
return (
|
||||
<Typography.Text style={{ paddingLeft: '6px' }}>N/A</Typography.Text>
|
||||
);
|
||||
}
|
||||
|
||||
const href = `${window.location.origin}/${relayPath}`;
|
||||
return <CopyToClipboard textToCopy={href} />;
|
||||
},
|
||||
},
|
||||
{
|
||||
title: 'Action',
|
||||
dataIndex: 'action',
|
||||
|
||||
@@ -116,12 +116,11 @@
|
||||
flex: 1 0 0;
|
||||
border-radius: 2px;
|
||||
background: var(--bg-cherry-500);
|
||||
border-color: none;
|
||||
border: none;
|
||||
}
|
||||
.cancel-run:hover {
|
||||
background-color: #ff7875 !important;
|
||||
color: var(--bg-vanilla-100) !important;
|
||||
border: none;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
import { Select, SelectProps, Space } from 'antd';
import { Select, SelectProps, Space, Typography } from 'antd';
import { getCategorySelectOptionByName } from 'container/NewWidget/RightContainer/alertFomatCategories';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { popupContainer } from 'utils/selectPopupContainer';

import { categoryToSupport } from './config';
import { DefaultLabel, selectStyles } from './styles';
import { selectStyles } from './styles';
import { IBuilderUnitsFilterProps } from './types';
import { filterOption } from './utils';

@@ -31,9 +31,9 @@ function BuilderUnitsFilter({

	return (
		<Space className="builder-units-filter">
			<DefaultLabel className="builder-units-filter-label">
			<Typography.Text className="builder-units-filter-label">
				Y-axis unit
			</DefaultLabel>
			</Typography.Text>
			<Select
				getPopupContainer={popupContainer}
				style={selectStyles}

@@ -8,4 +8,13 @@
		min-height: 350px;
		padding: 0px 12px;
	}

	.time-series-view-container {
		.time-series-view-container-header {
			display: flex;
			justify-content: flex-start;
			align-items: center;
			padding: 12px 0;
		}
	}
}

@@ -11,6 +11,7 @@ import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import EmptyMetricsSearch from 'container/MetricsExplorer/Explorer/EmptyMetricsSearch';
import { MetricsLoading } from 'container/MetricsExplorer/MetricsLoading/MetricsLoading';
import NoLogs from 'container/NoLogs/NoLogs';
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config';
import { TracesLoading } from 'container/TracesExplorer/TraceLoading/TraceLoading';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
@@ -81,6 +82,14 @@ function TimeSeriesView({
	const [minTimeScale, setMinTimeScale] = useState<number>();
	const [maxTimeScale, setMaxTimeScale] = useState<number>();
	const [graphVisibility, setGraphVisibility] = useState<boolean[]>([]);
	const [yAxisUnitInternal, setYAxisUnitInternal] = useState<string>(
		yAxisUnit || '',
	);

	const onUnitChangeHandler = (value: string): void => {
		setYAxisUnitInternal(value);
	};

	const legendScrollPositionRef = useRef<{
		scrollTop: number;
		scrollLeft: number;
@@ -189,7 +198,7 @@ function TimeSeriesView({
	const chartOptions = getUPlotChartOptions({
		id: 'time-series-explorer',
		onDragSelect,
		yAxisUnit: yAxisUnit || '',
		yAxisUnit: yAxisUnitInternal || '',
		apiResponse: data?.payload,
		dimensions: {
			width: containerDimensions.width,
@@ -261,7 +270,17 @@ function TimeSeriesView({
				!isError &&
				chartData &&
				!isEmpty(chartData?.[0]) &&
				chartOptions && <Uplot data={chartData} options={chartOptions} />}
				chartOptions && (
					<div className="time-series-view-container">
						<div className="time-series-view-container-header">
							<BuilderUnitsFilter
								onChange={onUnitChangeHandler}
								yAxisUnit={yAxisUnitInternal}
							/>
						</div>
						<Uplot data={chartData} options={chartOptions} />
					</div>
				)}
			</div>
		</div>
	);

@@ -1,4 +1,5 @@
|
||||
import { getToolTipValue, PrecisionOption } from 'components/Graph/yAxisConfig';
|
||||
import { PrecisionOption } from 'components/Graph/types';
|
||||
import { getToolTipValue } from 'components/Graph/yAxisConfig';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import dayjs from 'dayjs';
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
/* eslint-disable @typescript-eslint/ban-ts-comment */
|
||||
// @ts-nocheck
|
||||
import { getToolTipValue, PrecisionOption } from 'components/Graph/yAxisConfig';
|
||||
import { PrecisionOption } from 'components/Graph/types';
|
||||
import { getToolTipValue } from 'components/Graph/yAxisConfig';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
|
||||
import { uPlotXAxisValuesFormat } from './constants';
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { PrecisionOption } from 'components/Graph/yAxisConfig';
|
||||
import { PrecisionOption } from 'components/Graph/types';
|
||||
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
|
||||
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
|
||||
|
||||
@@ -11,6 +11,7 @@ export interface GettableAuthDomain {
|
||||
orgId: string;
|
||||
ssoEnabled: boolean;
|
||||
ssoType: string;
|
||||
authNProviderInfo: AuthNProviderInfo;
|
||||
samlConfig?: SAMLConfig;
|
||||
googleAuthConfig?: GoogleAuthConfig;
|
||||
oidcConfig?: OIDCConfig;
|
||||
@@ -42,3 +43,7 @@ export interface OIDCConfig {
|
||||
export interface ClaimMapping {
|
||||
email: string;
|
||||
}
|
||||
|
||||
export interface AuthNProviderInfo {
|
||||
relayStatePath: string;
|
||||
}
|
||||
|
||||
12
go.mod
@@ -8,10 +8,9 @@ require (
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.40.1
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
||||
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.7
|
||||
github.com/SigNoz/signoz-otel-collector v0.129.4
|
||||
github.com/antlr4-go/antlr/v4 v4.13.1
|
||||
github.com/antonmedv/expr v1.15.3
|
||||
github.com/bytedance/sonic v1.14.1
|
||||
github.com/cespare/xxhash/v2 v2.3.0
|
||||
github.com/coreos/go-oidc/v3 v3.14.1
|
||||
github.com/dgraph-io/ristretto/v2 v2.3.0
|
||||
@@ -87,18 +86,12 @@ require (
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/bytedance/gopkg v0.1.3 // indirect
|
||||
github.com/bytedance/sonic/loader v0.3.0 // indirect
|
||||
github.com/cloudwego/base64x v0.1.6 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
|
||||
go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.2 // indirect
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
|
||||
modernc.org/libc v1.66.10 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
@@ -121,7 +114,6 @@ require (
|
||||
github.com/armon/go-metrics v0.4.1 // indirect
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
github.com/aws/aws-sdk-go v1.55.7 // indirect
|
||||
github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 // indirect
|
||||
github.com/beevik/etree v1.1.0 // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
|
||||
@@ -165,7 +157,6 @@ require (
|
||||
github.com/golang/snappy v1.0.0 // indirect
|
||||
github.com/google/btree v1.1.3 // indirect
|
||||
github.com/google/cel-go v0.26.1 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
@@ -331,7 +322,6 @@ require (
|
||||
go.opentelemetry.io/otel/sdk/metric v1.38.0
|
||||
go.opentelemetry.io/proto/otlp v1.8.0 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.uber.org/goleak v1.3.0 // indirect
|
||||
go.uber.org/mock v0.6.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/mod v0.27.0 // indirect
|
||||
|
||||
16
go.sum
@@ -106,8 +106,8 @@ github.com/SigNoz/expr v1.17.7-beta h1:FyZkleM5dTQ0O6muQfwGpoH5A2ohmN/XTasRCO72g
|
||||
github.com/SigNoz/expr v1.17.7-beta/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4=
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
|
||||
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.7 h1:r8/+t3ARWek9+X5aH05qavdA9ATbkssfssHh/zjzsEM=
|
||||
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.7/go.mod h1:4eJCRUd/P4OiCHXvGYZK8q6oyBVGJFVj/G6qKSoN/TQ=
|
||||
github.com/SigNoz/signoz-otel-collector v0.129.4 h1:DGDu9y1I1FU+HX4eECPGmfhnXE4ys4yr7LL6znbf6to=
|
||||
github.com/SigNoz/signoz-otel-collector v0.129.4/go.mod h1:xyR+coBzzO04p6Eu+ql2RVYUl/jFD+8hD9lArcc9U7g=
|
||||
github.com/Yiling-J/theine-go v0.6.2 h1:1GeoXeQ0O0AUkiwj2S9Jc0Mzx+hpqzmqsJ4kIC4M9AY=
|
||||
github.com/Yiling-J/theine-go v0.6.2/go.mod h1:08QpMa5JZ2pKN+UJCRrCasWYO1IKCdl54Xa836rpmDU=
|
||||
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
|
||||
@@ -162,12 +162,6 @@ github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||
github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M=
|
||||
github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM=
|
||||
github.com/bytedance/sonic v1.14.1 h1:FBMC0zVz5XUmE4z9wF4Jey0An5FueFvOsTKKKtwIl7w=
|
||||
github.com/bytedance/sonic v1.14.1/go.mod h1:gi6uhQLMbTdeP0muCnrjHLeCUPyb70ujhnNlhOylAFc=
|
||||
github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
|
||||
github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
|
||||
github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||
@@ -184,8 +178,6 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
|
||||
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
|
||||
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M=
|
||||
github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU=
|
||||
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
||||
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
|
||||
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
|
||||
@@ -999,8 +991,6 @@ github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GH
|
||||
github.com/trivago/tgo v1.0.7 h1:uaWH/XIy9aWYWpjm2CU3RpcqZXmX2ysQ9/Go+d9gyrM=
|
||||
github.com/trivago/tgo v1.0.7/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkFnhc=
|
||||
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/uptrace/bun v1.2.9 h1:OOt2DlIcRUMSZPr6iXDFg/LaQd59kOxbAjpIVHddKRs=
|
||||
github.com/uptrace/bun v1.2.9/go.mod h1:r2ZaaGs9Ru5bpGTr8GQfp8jp+TlCav9grYCPOu2CJSg=
|
||||
github.com/uptrace/bun/dialect/pgdialect v1.2.9 h1:caf5uFbOGiXvadV6pA5gn87k0awFFxL1kuuY3SpxnWk=
|
||||
@@ -1245,8 +1235,6 @@ go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
|
||||
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
|
||||
@@ -22,4 +22,7 @@ type CallbackAuthN interface {

// Handle the callback from the provider.
HandleCallback(context.Context, url.Values) (*authtypes.CallbackIdentity, error)

// Get provider info such as `relay state`
ProviderInfo(context.Context, *authtypes.AuthDomain) *authtypes.AuthNProviderInfo
}
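The new ProviderInfo hook lets each authN implementation report whether it exposes an IdP-initiated relay-state path. As a minimal sketch (not part of this change set), a SAML-style provider might fill the field like this, assuming RelayStatePath is a *string as implied by the nil default in the Google implementation below; the type name and path are illustrative only:

// Hypothetical example only: SAMLAuthN and the literal path are assumptions.
func (a *SAMLAuthN) ProviderInfo(_ context.Context, _ *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
	relayStatePath := "login/sso/saml" // the UI prefixes this with window.location.origin
	return &authtypes.AuthNProviderInfo{RelayStatePath: &relayStatePath}
}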
@@ -117,6 +117,12 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype

}

func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
return &authtypes.AuthNProviderInfo{
RelayStatePath: nil,
}
}

func (a *AuthN) oauth2Config(siteURL *url.URL, authDomain *authtypes.AuthDomain, provider *oidc.Provider) *oauth2.Config {
return &oauth2.Config{
ClientID: authDomain.AuthDomainConfig().Google.ClientID,
@@ -208,8 +208,3 @@ func WrapUnexpectedf(cause error, code Code, format string, args ...any) *base {
func NewUnexpectedf(code Code, format string, args ...any) *base {
return Newf(TypeInvalidInput, code, format, args...)
}

// NewMethodNotAllowedf is a wrapper around Newf with TypeMethodNotAllowed.
func NewMethodNotAllowedf(code Code, format string, args ...any) *base {
return Newf(TypeMethodNotAllowed, code, format, args...)
}
@@ -18,18 +18,13 @@ type responseerroradditional struct {

func AsJSON(cause error) *JSON {
// See if this is an instance of the base error or not
_, c, m, cause, u, a := Unwrapb(cause)
_, c, m, _, u, a := Unwrapb(cause)

rea := make([]responseerroradditional, len(a))
for k, v := range a {
rea[k] = responseerroradditional{v}
}

// add the underlying error message
if cause != nil {
rea = append(rea, responseerroradditional{Message: cause.Error()})
}

return &JSON{
Code: c.String(),
Message: m,
@@ -29,6 +29,9 @@ type Module interface {

// Delete an existing auth domain by id.
Delete(context.Context, valuer.UUID, valuer.UUID) error

// Get the IDP info of the domain provided.
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) (*authtypes.AuthNProviderInfo)
}

type Handler interface {
@@ -95,7 +95,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {

authDomains := make([]*authtypes.GettableAuthDomain, len(domains))
for i, domain := range domains {
authDomains[i] = authtypes.NewGettableAuthDomainFromAuthDomain(domain)
authDomains[i] = authtypes.NewGettableAuthDomainFromAuthDomain(domain, handler.module.GetAuthNProviderInfo(ctx, domain))
}

render.Success(rw, http.StatusOK, authDomains)
@@ -3,17 +3,19 @@ package implauthdomain
import (
"context"

"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

type module struct {
store authtypes.AuthDomainStore
store authtypes.AuthDomainStore
authNs map[authtypes.AuthNProvider]authn.AuthN
}

func NewModule(store authtypes.AuthDomainStore) authdomain.Module {
return &module{store: store}
func NewModule(store authtypes.AuthDomainStore, authNs map[authtypes.AuthNProvider]authn.AuthN) authdomain.Module {
return &module{store: store, authNs: authNs}
}

func (module *module) Create(ctx context.Context, domain *authtypes.AuthDomain) error {
@@ -24,6 +26,13 @@ func (module *module) Get(ctx context.Context, id valuer.UUID) (*authtypes.AuthD
return module.store.Get(ctx, id)
}

func (module *module) GetAuthNProviderInfo(ctx context.Context, domain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
if callbackAuthN, ok := module.authNs[domain.AuthDomainConfig().AuthNProvider].(authn.CallbackAuthN); ok {
return callbackAuthN.ProviderInfo(ctx, domain)
}
return &authtypes.AuthNProviderInfo{}
}

func (module *module) GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.AuthDomain, error) {
return module.store.GetByOrgIDAndID(ctx, orgID, id)
}
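A rough usage sketch of the new dependency (illustrative only; the store and the concrete authN implementations are assumed to be built by the existing bootstrap code):

authNs := map[authtypes.AuthNProvider]authn.AuthN{
	// keyed by the same provider values stored in each AuthDomainConfig; the
	// concrete entries (Google OIDC, SAML, ...) come from the existing setup.
}
domains := implauthdomain.NewModule(store, authNs)

info := domains.GetAuthNProviderInfo(ctx, domain)
// info.RelayStatePath stays nil for providers that do not implement CallbackAuthN
// or, like the Google OIDC provider above, do not expose an IdP-initiated URL.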
139
pkg/modules/metricsexplorer/implmetricsexplorer/handler.go
Normal file
@@ -0,0 +1,139 @@
|
||||
package implmetricsexplorer
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
module metricsexplorer.Module
|
||||
}
|
||||
|
||||
// NewHandler returns a metricsexplorer.Handler implementation.
|
||||
func NewHandler(m metricsexplorer.Module) metricsexplorer.Handler {
|
||||
return &handler{
|
||||
module: m,
|
||||
}
|
||||
}
|
||||
|
||||
func (h *handler) GetStats(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
var in metricsexplorertypes.StatsRequest
|
||||
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
out, err := h.module.GetStats(req.Context(), orgID, &in)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, out)
|
||||
}
|
||||
|
||||
func (h *handler) GetTreemap(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
var in metricsexplorertypes.TreemapRequest
|
||||
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
out, err := h.module.GetTreemap(req.Context(), orgID, &in)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, out)
|
||||
}
|
||||
|
||||
func (h *handler) UpdateMetricMetadata(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Extract metric_name from URL path
|
||||
vars := mux.Vars(req)
|
||||
metricName := vars["metric_name"]
|
||||
if metricName == "" {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metric_name is required in URL path"))
|
||||
return
|
||||
}
|
||||
|
||||
var in metricsexplorertypes.UpdateMetricMetadataRequest
|
||||
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Set metric name from URL path
|
||||
in.MetricName = metricName
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
err = h.module.UpdateMetricMetadata(req.Context(), orgID, &in)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, nil)
|
||||
}
|
||||
|
||||
func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
metricName := strings.TrimSpace(req.URL.Query().Get("metricName"))
|
||||
if metricName == "" {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
metadataMap, err := h.module.GetMetricMetadataMulti(req.Context(), orgID, []string{metricName})
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
metadata, ok := metadataMap[metricName]
|
||||
if !ok || metadata == nil {
|
||||
render.Error(rw, errors.NewNotFoundf(errors.CodeNotFound, "metadata not found for metric %q", metricName))
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, metadata)
|
||||
}
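The handler above relies on gorilla/mux path variables (metric_name) and on claims already present in the request context. The concrete routes are registered elsewhere in the API server; a plausible wiring, with the paths themselves being assumptions, could look like:

// Hypothetical route registration; real paths and middleware live in the server setup.
r := mux.NewRouter()
r.HandleFunc("/api/v1/metrics/stats", h.GetStats).Methods(http.MethodPost)
r.HandleFunc("/api/v1/metrics/treemap", h.GetTreemap).Methods(http.MethodPost)
r.HandleFunc("/api/v1/metrics/metadata", h.GetMetricMetadata).Methods(http.MethodGet)
r.HandleFunc("/api/v1/metrics/{metric_name}/metadata", h.UpdateMetricMetadata).Methods(http.MethodPost)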
|
||||
73
pkg/modules/metricsexplorer/implmetricsexplorer/helpers.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package implmetricsexplorer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
// used for mapping the sqlColumns via orderBy
|
||||
const (
|
||||
sqlColumnTimeSeries = "timeseries"
|
||||
sqlColumnSamples = "samples"
|
||||
)
|
||||
|
||||
func generateMetricMetadataCacheKey(metricName string) string {
|
||||
return fmt.Sprintf("metrics::metadata::%s", metricName)
|
||||
}
|
||||
|
||||
func getStatsOrderByColumn(order *qbtypes.OrderBy) (string, string, error) {
|
||||
if order == nil {
|
||||
return sqlColumnTimeSeries, qbtypes.OrderDirectionDesc.StringValue(), nil
|
||||
}
|
||||
|
||||
var columnName string
|
||||
switch strings.ToLower(order.Key.Name) {
|
||||
case metricsexplorertypes.OrderByTimeSeries.StringValue():
|
||||
columnName = sqlColumnTimeSeries
|
||||
case metricsexplorertypes.OrderBySamples.StringValue():
|
||||
columnName = sqlColumnSamples
|
||||
default:
|
||||
return "", "", errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"unsupported order column %q: supported columns are %q or %q",
|
||||
order.Key.Name,
|
||||
metricsexplorertypes.OrderByTimeSeries,
|
||||
metricsexplorertypes.OrderBySamples,
|
||||
)
|
||||
}
|
||||
|
||||
// Extract direction from OrderDirection and convert to SQL format (uppercase)
|
||||
var direction qbtypes.OrderDirection
|
||||
var ok bool
|
||||
// Validate direction using OrderDirectionMap
|
||||
if direction, ok = qbtypes.OrderDirectionMap[strings.ToLower(order.Direction.StringValue())]; !ok {
|
||||
return "", "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported order direction %q, should be one of %s, %s", direction, qbtypes.OrderDirectionAsc, qbtypes.OrderDirectionDesc)
|
||||
}
|
||||
|
||||
return columnName, direction.StringValue(), nil
|
||||
}
|
||||
|
||||
func extractMissingMetricNamesInMap(metricNames []string, metricMetadataMap map[string]*metricsexplorertypes.MetricMetadata) []string {
|
||||
misses := make([]string, 0)
|
||||
for _, name := range metricNames {
|
||||
if _, ok := metricMetadataMap[name]; !ok {
|
||||
misses = append(misses, name)
|
||||
}
|
||||
}
|
||||
return misses
|
||||
}
|
||||
|
||||
// enrichStatsWithMetadata enriches metric stats with metadata from the provided metadata map.
|
||||
func enrichStatsWithMetadata(metricStats []metricsexplorertypes.Stat, metadata map[string]*metricsexplorertypes.MetricMetadata) {
|
||||
for i := range metricStats {
|
||||
if meta, ok := metadata[metricStats[i].MetricName]; ok {
|
||||
metricStats[i].Description = meta.Description
|
||||
metricStats[i].MetricType = meta.MetricType
|
||||
metricStats[i].MetricUnit = meta.MetricUnit
|
||||
}
|
||||
}
|
||||
}
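A small test sketch (not included in this change) that pins the default ordering of getStatsOrderByColumn, using only identifiers visible in the file above:

package implmetricsexplorer

import (
	"testing"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func TestGetStatsOrderByColumnDefault(t *testing.T) {
	// A nil orderBy falls back to ordering by time series count, descending.
	column, direction, err := getStatsOrderByColumn(nil)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if column != sqlColumnTimeSeries || direction != qbtypes.OrderDirectionDesc.StringValue() {
		t.Fatalf("unexpected default ordering: %s %s", column, direction)
	}
}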
|
||||
773
pkg/modules/metricsexplorer/implmetricsexplorer/module.go
Normal file
@@ -0,0 +1,773 @@
|
||||
package implmetricsexplorer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
sqlbuilder "github.com/huandu/go-sqlbuilder"
|
||||
)
|
||||
|
||||
type module struct {
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
telemetryMetadataStore telemetrytypes.MetadataStore
|
||||
fieldMapper qbtypes.FieldMapper
|
||||
condBuilder qbtypes.ConditionBuilder
|
||||
logger *slog.Logger
|
||||
cache cache.Cache
|
||||
}
|
||||
|
||||
// NewModule constructs the metrics module with the provided dependencies.
|
||||
func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, providerSettings factory.ProviderSettings) metricsexplorer.Module {
|
||||
fieldMapper := telemetrymetrics.NewFieldMapper()
|
||||
condBuilder := telemetrymetrics.NewConditionBuilder(fieldMapper)
|
||||
return &module{
|
||||
telemetryStore: ts,
|
||||
fieldMapper: fieldMapper,
|
||||
condBuilder: condBuilder,
|
||||
logger: providerSettings.Logger,
|
||||
telemetryMetadataStore: telemetryMetadataStore,
|
||||
cache: cache,
|
||||
}
|
||||
}
|
||||
|
||||
func (m *module) GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error) {
|
||||
if err := req.Validate(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
filterWhereClause, err := m.buildFilterClause(ctx, req.Filter, req.Start, req.End)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Single query to get stats with samples, timeseries counts in required sorting order
|
||||
metricStats, total, err := m.fetchMetricsStatsWithSamples(
|
||||
ctx,
|
||||
req,
|
||||
filterWhereClause,
|
||||
false,
|
||||
req.OrderBy,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(metricStats) == 0 {
|
||||
return &metricsexplorertypes.StatsResponse{
|
||||
Metrics: []metricsexplorertypes.Stat{},
|
||||
Total: 0,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Get metadata for all metrics
|
||||
metricNames := make([]string, len(metricStats))
|
||||
for i := range metricStats {
|
||||
metricNames[i] = metricStats[i].MetricName
|
||||
}
|
||||
|
||||
metadata, err := m.GetMetricMetadataMulti(ctx, orgID, metricNames)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Enrich stats with metadata
|
||||
enrichStatsWithMetadata(metricStats, metadata)
|
||||
|
||||
return &metricsexplorertypes.StatsResponse{
|
||||
Metrics: metricStats,
|
||||
Total: total,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// GetTreemap will return metrics treemap information once implemented.
|
||||
func (m *module) GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error) {
|
||||
if err := req.Validate(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
filterWhereClause, err := m.buildFilterClause(ctx, req.Filter, req.Start, req.End)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp := &metricsexplorertypes.TreemapResponse{}
|
||||
switch req.Treemap {
|
||||
case metricsexplorertypes.TreemapModeSamples:
|
||||
entries, err := m.computeSamplesTreemap(ctx, req, filterWhereClause)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.Samples = entries
|
||||
default: // TreemapModeTimeSeries
|
||||
entries, err := m.computeTimeseriesTreemap(ctx, req, filterWhereClause)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.TimeSeries = entries
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (m *module) GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error) {
|
||||
if len(metricNames) == 0 {
|
||||
return map[string]*metricsexplorertypes.MetricMetadata{}, nil
|
||||
}
|
||||
|
||||
metadata := make(map[string]*metricsexplorertypes.MetricMetadata)
|
||||
cacheHits, cacheMisses := m.fetchMetadataFromCache(ctx, orgID, metricNames)
|
||||
for name, meta := range cacheHits {
|
||||
metadata[name] = meta
|
||||
}
|
||||
|
||||
if len(cacheMisses) == 0 {
|
||||
return metadata, nil
|
||||
}
|
||||
|
||||
updatedMetadata, err := m.fetchUpdatedMetadata(ctx, orgID, cacheMisses)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for name, meta := range updatedMetadata {
|
||||
metadata[name] = meta
|
||||
}
|
||||
|
||||
remainingMisses := extractMissingMetricNamesInMap(cacheMisses, updatedMetadata)
|
||||
if len(remainingMisses) == 0 {
|
||||
return metadata, nil
|
||||
}
|
||||
|
||||
timeseriesMetadata, err := m.fetchTimeseriesMetadata(ctx, orgID, remainingMisses)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for name, meta := range timeseriesMetadata {
|
||||
metadata[name] = meta
|
||||
}
|
||||
|
||||
return metadata, nil
|
||||
}
|
||||
|
||||
func (m *module) UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error {
|
||||
if req == nil {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
|
||||
}
|
||||
|
||||
if req.MetricName == "" {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric name is required")
|
||||
}
|
||||
|
||||
// Validate and normalize metric type and temporality
|
||||
if err := m.validateAndNormalizeMetricType(req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Validate labels for histogram and summary types
|
||||
if err := m.validateMetricLabels(ctx, req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Insert new metadata (keeping history of all updates)
|
||||
if err := m.insertMetricsMetadata(ctx, orgID, req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *module) fetchMetadataFromCache(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, []string) {
|
||||
hits := make(map[string]*metricsexplorertypes.MetricMetadata)
|
||||
misses := make([]string, 0)
|
||||
for _, metricName := range metricNames {
|
||||
cacheKey := generateMetricMetadataCacheKey(metricName)
|
||||
var cachedMetadata metricsexplorertypes.MetricMetadata
|
||||
if err := m.cache.Get(ctx, orgID, cacheKey, &cachedMetadata); err == nil {
|
||||
hits[metricName] = &cachedMetadata
|
||||
} else {
|
||||
m.logger.WarnContext(ctx, "cache miss for metric metadata", "metric_name", metricName, "error", err)
|
||||
misses = append(misses, metricName)
|
||||
}
|
||||
}
|
||||
return hits, misses
|
||||
}
|
||||
|
||||
func (m *module) fetchUpdatedMetadata(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error) {
|
||||
if len(metricNames) == 0 {
|
||||
return map[string]*metricsexplorertypes.MetricMetadata{}, nil
|
||||
}
|
||||
|
||||
args := make([]any, len(metricNames))
|
||||
for i := range metricNames {
|
||||
args[i] = metricNames[i]
|
||||
}
|
||||
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
sb.Select(
|
||||
"metric_name",
|
||||
"argMax(description, created_at) AS description",
|
||||
"argMax(type, created_at) AS type",
|
||||
"argMax(unit, created_at) AS unit",
|
||||
"argMax(temporality, created_at) AS temporality",
|
||||
"argMax(is_monotonic, created_at) AS is_monotonic",
|
||||
)
|
||||
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.UpdatedMetadataTableName))
|
||||
sb.Where(sb.In("metric_name", args...))
|
||||
sb.GroupBy("metric_name")
|
||||
|
||||
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
|
||||
db := m.telemetryStore.ClickhouseDB()
|
||||
rows, err := db.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch updated metrics metadata")
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
result := make(map[string]*metricsexplorertypes.MetricMetadata)
|
||||
for rows.Next() {
|
||||
var (
|
||||
metricMetadata metricsexplorertypes.MetricMetadata
|
||||
metricName string
|
||||
)
|
||||
|
||||
if err := rows.Scan(&metricName, &metricMetadata.Description, &metricMetadata.MetricType, &metricMetadata.MetricUnit, &metricMetadata.Temporality, &metricMetadata.IsMonotonic); err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan updated metrics metadata")
|
||||
}
|
||||
result[metricName] = &metricMetadata
|
||||
|
||||
cacheKey := generateMetricMetadataCacheKey(metricName)
|
||||
if err := m.cache.Set(ctx, orgID, cacheKey, &metricMetadata, 0); err != nil {
|
||||
m.logger.WarnContext(ctx, "failed to set metric metadata in cache", "metric_name", metricName, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating updated metrics metadata rows")
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (m *module) fetchTimeseriesMetadata(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error) {
|
||||
if len(metricNames) == 0 {
|
||||
return map[string]*metricsexplorertypes.MetricMetadata{}, nil
|
||||
}
|
||||
|
||||
args := make([]any, len(metricNames))
|
||||
for i := range metricNames {
|
||||
args[i] = metricNames[i]
|
||||
}
|
||||
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
sb.Select(
|
||||
"metric_name",
|
||||
"ANY_VALUE(description) AS description",
|
||||
"ANY_VALUE(type) AS metric_type",
|
||||
"ANY_VALUE(unit) AS metric_unit",
|
||||
"ANY_VALUE(temporality) AS temporality",
|
||||
"ANY_VALUE(is_monotonic) AS is_monotonic",
|
||||
)
|
||||
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.TimeseriesV4TableName))
|
||||
sb.Where(sb.In("metric_name", args...))
|
||||
sb.GroupBy("metric_name")
|
||||
|
||||
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
|
||||
db := m.telemetryStore.ClickhouseDB()
|
||||
rows, err := db.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch metrics metadata from timeseries table")
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
result := make(map[string]*metricsexplorertypes.MetricMetadata)
|
||||
for rows.Next() {
|
||||
var (
|
||||
metricMetadata metricsexplorertypes.MetricMetadata
|
||||
metricName string
|
||||
)
|
||||
|
||||
if err := rows.Scan(&metricName, &metricMetadata.Description, &metricMetadata.MetricType, &metricMetadata.MetricUnit, &metricMetadata.Temporality, &metricMetadata.IsMonotonic); err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan timeseries metadata")
|
||||
}
|
||||
result[metricName] = &metricMetadata
|
||||
|
||||
cacheKey := generateMetricMetadataCacheKey(metricName)
|
||||
if err := m.cache.Set(ctx, orgID, cacheKey, &metricMetadata, 0); err != nil {
|
||||
m.logger.WarnContext(ctx, "failed to set metric metadata in cache", "metric_name", metricName, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating timeseries metadata rows")
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (m *module) validateAndNormalizeMetricType(req *metricsexplorertypes.UpdateMetricMetadataRequest) error {
|
||||
switch req.Type {
|
||||
case metrictypes.SumType:
|
||||
if req.Temporality.IsZero() {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "temporality is required when metric type is Sum")
|
||||
}
|
||||
if req.Temporality != metrictypes.Delta && req.Temporality != metrictypes.Cumulative {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid value for temporality")
|
||||
}
|
||||
// Special case: if Sum is not monotonic and cumulative, convert to Gauge
|
||||
if !req.IsMonotonic && req.Temporality == metrictypes.Cumulative {
|
||||
req.Type = metrictypes.GaugeType
|
||||
req.Temporality = metrictypes.Unspecified
|
||||
}
|
||||
|
||||
case metrictypes.HistogramType:
|
||||
if req.Temporality.IsZero() {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "temporality is required when metric type is Histogram")
|
||||
}
|
||||
if req.Temporality != metrictypes.Delta && req.Temporality != metrictypes.Cumulative {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid value for temporality")
|
||||
}
|
||||
|
||||
case metrictypes.ExpHistogramType:
|
||||
if req.Temporality.IsZero() {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "temporality is required when metric type is exponential histogram")
|
||||
}
|
||||
if req.Temporality != metrictypes.Delta && req.Temporality != metrictypes.Cumulative {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid value for temporality")
|
||||
}
|
||||
|
||||
case metrictypes.GaugeType:
|
||||
// Gauge always has unspecified temporality
|
||||
req.Temporality = metrictypes.Unspecified
|
||||
|
||||
case metrictypes.SummaryType:
|
||||
// Summary always has cumulative temporality
|
||||
req.Temporality = metrictypes.Cumulative
|
||||
|
||||
default:
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid metric type")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
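Worked example of the normalization above (a sketch using the request fields and constants from this file; the metric name is illustrative): a non-monotonic cumulative Sum is stored as a Gauge with unspecified temporality.

req := &metricsexplorertypes.UpdateMetricMetadataRequest{
	MetricName:  "queue_depth", // illustrative
	Type:        metrictypes.SumType,
	Temporality: metrictypes.Cumulative,
	IsMonotonic: false,
}
_ = m.validateAndNormalizeMetricType(req)
// req.Type is now metrictypes.GaugeType and req.Temporality is metrictypes.Unspecified.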
|
||||
|
||||
func (m *module) validateMetricLabels(ctx context.Context, req *metricsexplorertypes.UpdateMetricMetadataRequest) error {
|
||||
if req.Type == metrictypes.HistogramType {
|
||||
hasLabel, err := m.checkForLabelInMetric(ctx, req.MetricName, "le")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !hasLabel {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric '%s' cannot be set as histogram type", req.MetricName)
|
||||
}
|
||||
}
|
||||
|
||||
if req.Type == metrictypes.SummaryType {
|
||||
hasLabel, err := m.checkForLabelInMetric(ctx, req.MetricName, "quantile")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !hasLabel {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric '%s' cannot be set as summary type", req.MetricName)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *module) checkForLabelInMetric(ctx context.Context, metricName string, label string) (bool, error) {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
sb.Select("count(*) > 0 AS has_label")
|
||||
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.AttributesMetadataTableName))
|
||||
sb.Where(sb.E("metric_name", metricName))
|
||||
sb.Where(sb.E("attr_name", label))
|
||||
sb.Limit(1)
|
||||
|
||||
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
|
||||
var hasLabel bool
|
||||
db := m.telemetryStore.ClickhouseDB()
|
||||
err := db.QueryRow(ctx, query, args...).Scan(&hasLabel)
|
||||
if err != nil {
|
||||
return false, errors.WrapInternalf(err, errors.CodeInternal, "error checking metric label %q", label)
|
||||
}
|
||||
|
||||
return hasLabel, nil
|
||||
}
|
||||
|
||||
func (m *module) insertMetricsMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error {
|
||||
createdAt := time.Now().UnixMilli()
|
||||
|
||||
ib := sqlbuilder.NewInsertBuilder()
|
||||
ib.InsertInto(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.UpdatedMetadataTableName))
|
||||
ib.Cols("metric_name", "temporality", "is_monotonic", "type", "description", "unit", "created_at")
|
||||
ib.Values(
|
||||
req.MetricName,
|
||||
req.Temporality,
|
||||
req.IsMonotonic,
|
||||
req.Type,
|
||||
req.Description,
|
||||
req.Unit,
|
||||
createdAt,
|
||||
)
|
||||
|
||||
query, args := ib.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
|
||||
db := m.telemetryStore.ClickhouseDB()
|
||||
if err := db.Exec(ctx, query, args...); err != nil {
|
||||
return errors.WrapInternalf(err, errors.CodeInternal, "failed to insert metrics metadata")
|
||||
}
|
||||
|
||||
// Set in cache after successful DB insert
|
||||
metricMetadata := &metricsexplorertypes.MetricMetadata{
|
||||
Description: req.Description,
|
||||
MetricType: req.Type,
|
||||
MetricUnit: req.Unit,
|
||||
Temporality: req.Temporality,
|
||||
IsMonotonic: req.IsMonotonic,
|
||||
}
|
||||
cacheKey := generateMetricMetadataCacheKey(req.MetricName)
|
||||
if err := m.cache.Set(ctx, orgID, cacheKey, metricMetadata, 0); err != nil {
|
||||
m.logger.WarnContext(ctx, "failed to set metric metadata in cache after insert", "metric_name", req.MetricName, "error", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *module) buildFilterClause(ctx context.Context, filter *qbtypes.Filter, startMillis, endMillis int64) (*sqlbuilder.WhereClause, error) {
|
||||
expression := ""
|
||||
if filter != nil {
|
||||
expression = strings.TrimSpace(filter.Expression)
|
||||
}
|
||||
if expression == "" {
|
||||
return sqlbuilder.NewWhereClause(), nil
|
||||
}
|
||||
|
||||
// TODO(nikhilmantri0902, srikanthccv): if this is the right way of dealing with whereClauseSelectors
|
||||
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(expression)
|
||||
for idx := range whereClauseSelectors {
|
||||
whereClauseSelectors[idx].Signal = telemetrytypes.SignalMetrics
|
||||
whereClauseSelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
|
||||
// whereClauseSelectors[idx].MetricContext = &telemetrytypes.MetricContext{
|
||||
// MetricName: query.Aggregations[0].MetricName,
|
||||
// }
|
||||
// whereClauseSelectors[idx].Source = query.Source
|
||||
}
|
||||
|
||||
keys, _, err := m.telemetryMetadataStore.GetKeysMulti(ctx, whereClauseSelectors)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
opts := querybuilder.FilterExprVisitorOpts{
|
||||
Logger: m.logger,
|
||||
FieldMapper: m.fieldMapper,
|
||||
ConditionBuilder: m.condBuilder,
|
||||
FullTextColumn: &telemetrytypes.TelemetryFieldKey{
|
||||
Name: "labels"},
|
||||
FieldKeys: keys,
|
||||
}
|
||||
|
||||
startNs := uint64(startMillis * 1_000_000)
|
||||
endNs := uint64(endMillis * 1_000_000)
|
||||
|
||||
whereClause, err := querybuilder.PrepareWhereClause(expression, opts, startNs, endNs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if whereClause == nil || whereClause.WhereClause == nil {
|
||||
return sqlbuilder.NewWhereClause(), nil
|
||||
}
|
||||
|
||||
return whereClause.WhereClause, nil
|
||||
}
|
||||
|
||||
func (m *module) fetchMetricsStatsWithSamples(
|
||||
ctx context.Context,
|
||||
req *metricsexplorertypes.StatsRequest,
|
||||
filterWhereClause *sqlbuilder.WhereClause,
|
||||
normalized bool,
|
||||
orderBy *qbtypes.OrderBy,
|
||||
) ([]metricsexplorertypes.Stat, uint64, error) {
|
||||
|
||||
start, end, distributedTsTable, localTsTable := telemetrymetrics.WhichTSTableToUse(uint64(req.Start), uint64(req.End), nil)
|
||||
samplesTable := telemetrymetrics.WhichSamplesTableToUse(uint64(req.Start), uint64(req.End), metrictypes.UnspecifiedType, metrictypes.TimeAggregationUnspecified, nil)
|
||||
countExp := telemetrymetrics.CountExpressionForSamplesTable(samplesTable)
|
||||
|
||||
// Timeseries counts per metric
|
||||
tsSB := sqlbuilder.NewSelectBuilder()
|
||||
tsSB.Select(
|
||||
"metric_name",
|
||||
"uniq(fingerprint) AS timeseries",
|
||||
)
|
||||
tsSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
|
||||
tsSB.Where(tsSB.Between("unix_milli", start, end))
|
||||
tsSB.Where("NOT startsWith(metric_name, 'signoz')")
|
||||
tsSB.Where(tsSB.E("__normalized", normalized))
|
||||
if filterWhereClause != nil {
|
||||
tsSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
|
||||
}
|
||||
tsSB.GroupBy("metric_name")
|
||||
|
||||
// Samples counts per metric
|
||||
samplesSB := sqlbuilder.NewSelectBuilder()
|
||||
samplesSB.Select(
|
||||
"metric_name",
|
||||
fmt.Sprintf("%s AS samples", countExp),
|
||||
)
|
||||
samplesSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, samplesTable))
|
||||
samplesSB.Where(samplesSB.Between("unix_milli", req.Start, req.End))
|
||||
samplesSB.Where("NOT startsWith(metric_name, 'signoz')")
|
||||
|
||||
ctes := []*sqlbuilder.CTEQueryBuilder{
|
||||
sqlbuilder.CTEQuery("__time_series_counts").As(tsSB),
|
||||
}
|
||||
|
||||
if filterWhereClause != nil {
|
||||
fingerprintSB := sqlbuilder.NewSelectBuilder()
|
||||
fingerprintSB.Select("fingerprint")
|
||||
fingerprintSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, localTsTable))
|
||||
fingerprintSB.Where(fingerprintSB.Between("unix_milli", start, end))
|
||||
fingerprintSB.Where("NOT startsWith(metric_name, 'signoz')")
|
||||
fingerprintSB.Where(fingerprintSB.E("__normalized", normalized))
|
||||
fingerprintSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
|
||||
fingerprintSB.GroupBy("fingerprint")
|
||||
|
||||
ctes = append(ctes, sqlbuilder.CTEQuery("__filtered_fingerprints").As(fingerprintSB))
|
||||
samplesSB.Where("fingerprint IN (SELECT fingerprint FROM __filtered_fingerprints)")
|
||||
}
|
||||
samplesSB.GroupBy("metric_name")
|
||||
|
||||
ctes = append(ctes, sqlbuilder.CTEQuery("__sample_counts").As(samplesSB))
|
||||
cteBuilder := sqlbuilder.With(ctes...)
|
||||
|
||||
finalSB := cteBuilder.Select(
|
||||
"COALESCE(ts.metric_name, s.metric_name) AS metric_name",
|
||||
"COALESCE(ts.timeseries, 0) AS timeseries",
|
||||
"COALESCE(s.samples, 0) AS samples",
|
||||
"COUNT(*) OVER() AS total",
|
||||
)
|
||||
finalSB.From("__time_series_counts ts")
|
||||
finalSB.JoinWithOption(sqlbuilder.FullOuterJoin, "__sample_counts s", "ts.metric_name = s.metric_name")
|
||||
finalSB.Where("(COALESCE(ts.timeseries, 0) > 0 OR COALESCE(s.samples, 0) > 0)")
|
||||
|
||||
orderByColumn, orderDirection, err := getStatsOrderByColumn(orderBy)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
finalSB.OrderBy(
|
||||
fmt.Sprintf("%s %s", orderByColumn, strings.ToUpper(orderDirection)),
|
||||
"metric_name ASC",
|
||||
)
|
||||
finalSB.Limit(req.Limit)
|
||||
finalSB.Offset(req.Offset)
|
||||
|
||||
query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
|
||||
db := m.telemetryStore.ClickhouseDB()
|
||||
rows, err := db.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute metrics stats with samples query")
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
metricStats := make([]metricsexplorertypes.Stat, 0)
|
||||
var total uint64
|
||||
|
||||
for rows.Next() {
|
||||
var (
|
||||
metricStat metricsexplorertypes.Stat
|
||||
rowTotal uint64
|
||||
)
|
||||
if err := rows.Scan(&metricStat.MetricName, &metricStat.TimeSeries, &metricStat.Samples, &rowTotal); err != nil {
|
||||
return nil, 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metrics stats row")
|
||||
}
|
||||
metricStats = append(metricStats, metricStat)
|
||||
total = rowTotal
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, 0, errors.WrapInternalf(err, errors.CodeInternal, "error iterating metrics stats rows")
|
||||
}
|
||||
|
||||
return metricStats, total, nil
|
||||
}
|
||||
|
||||
func (m *module) computeTimeseriesTreemap(ctx context.Context, req *metricsexplorertypes.TreemapRequest, filterWhereClause *sqlbuilder.WhereClause) ([]metricsexplorertypes.TreemapEntry, error) {
|
||||
start, end, distributedTsTable, _ := telemetrymetrics.WhichTSTableToUse(uint64(req.Start), uint64(req.End), nil)
|
||||
|
||||
totalTSBuilder := sqlbuilder.NewSelectBuilder()
|
||||
totalTSBuilder.Select("uniq(fingerprint) AS total_time_series")
|
||||
totalTSBuilder.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
|
||||
totalTSBuilder.Where(totalTSBuilder.Between("unix_milli", start, end))
|
||||
totalTSBuilder.Where(totalTSBuilder.E("__normalized", false))
|
||||
|
||||
metricsSB := sqlbuilder.NewSelectBuilder()
|
||||
metricsSB.Select(
|
||||
"metric_name",
|
||||
"uniq(fingerprint) AS total_value",
|
||||
)
|
||||
metricsSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
|
||||
metricsSB.Where(metricsSB.Between("unix_milli", start, end))
|
||||
metricsSB.Where("NOT startsWith(metric_name, 'signoz')")
|
||||
metricsSB.Where(metricsSB.E("__normalized", false))
|
||||
if filterWhereClause != nil {
|
||||
metricsSB.WhereClause.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
|
||||
}
|
||||
metricsSB.GroupBy("metric_name")
|
||||
|
||||
cteBuilder := sqlbuilder.With(
|
||||
sqlbuilder.CTEQuery("__total_time_series").As(totalTSBuilder),
|
||||
sqlbuilder.CTEQuery("__metric_totals").As(metricsSB),
|
||||
)
|
||||
|
||||
finalSB := cteBuilder.Select(
|
||||
"mt.metric_name",
|
||||
"mt.total_value",
|
||||
"CASE WHEN tts.total_time_series = 0 THEN 0 ELSE (mt.total_value * 100.0 / tts.total_time_series) END AS percentage",
|
||||
)
|
||||
finalSB.From("__metric_totals mt")
|
||||
finalSB.Join("__total_time_series tts", "1=1")
|
||||
finalSB.OrderBy("percentage").Desc()
|
||||
finalSB.Limit(req.Limit)
|
||||
|
||||
query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
|
||||
db := m.telemetryStore.ClickhouseDB()
|
||||
rows, err := db.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute timeseries treemap query")
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
entries := make([]metricsexplorertypes.TreemapEntry, 0)
|
||||
for rows.Next() {
|
||||
var treemapEntry metricsexplorertypes.TreemapEntry
|
||||
if err := rows.Scan(&treemapEntry.MetricName, &treemapEntry.TotalValue, &treemapEntry.Percentage); err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan timeseries treemap row")
|
||||
}
|
||||
entries = append(entries, treemapEntry)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating timeseries treemap rows")
|
||||
}
|
||||
|
||||
return entries, nil
|
||||
}
|
||||
|
||||
func (m *module) computeSamplesTreemap(ctx context.Context, req *metricsexplorertypes.TreemapRequest, filterWhereClause *sqlbuilder.WhereClause) ([]metricsexplorertypes.TreemapEntry, error) {
|
||||
start, end, distributedTsTable, localTsTable := telemetrymetrics.WhichTSTableToUse(uint64(req.Start), uint64(req.End), nil)
|
||||
samplesTable := telemetrymetrics.WhichSamplesTableToUse(uint64(req.Start), uint64(req.End), metrictypes.UnspecifiedType, metrictypes.TimeAggregationUnspecified, nil)
|
||||
countExp := telemetrymetrics.CountExpressionForSamplesTable(samplesTable)
|
||||
|
||||
candidateLimit := req.Limit + 50
|
||||
|
||||
metricCandidatesSB := sqlbuilder.NewSelectBuilder()
|
||||
metricCandidatesSB.Select("metric_name")
|
||||
metricCandidatesSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
|
||||
metricCandidatesSB.Where("NOT startsWith(metric_name, 'signoz')")
|
||||
metricCandidatesSB.Where(metricCandidatesSB.E("__normalized", false))
|
||||
metricCandidatesSB.Where(metricCandidatesSB.Between("unix_milli", start, end))
|
||||
if filterWhereClause != nil {
|
||||
metricCandidatesSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
|
||||
}
|
||||
metricCandidatesSB.GroupBy("metric_name")
|
||||
metricCandidatesSB.OrderBy("uniq(fingerprint) DESC")
|
||||
metricCandidatesSB.Limit(candidateLimit)
|
||||
|
||||
cteQueries := []*sqlbuilder.CTEQueryBuilder{
|
||||
sqlbuilder.CTEQuery("__metric_candidates").As(metricCandidatesSB),
|
||||
}
|
||||
|
||||
totalSamplesSB := sqlbuilder.NewSelectBuilder()
|
||||
totalSamplesSB.Select(fmt.Sprintf("%s AS total_samples", countExp))
|
||||
totalSamplesSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, samplesTable))
|
||||
totalSamplesSB.Where(totalSamplesSB.Between("unix_milli", req.Start, req.End))
|
||||
|
||||
sampleCountsSB := sqlbuilder.NewSelectBuilder()
|
||||
sampleCountsSB.Select(
|
||||
"metric_name",
|
||||
fmt.Sprintf("%s AS samples", countExp),
|
||||
)
|
||||
sampleCountsSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, samplesTable))
|
||||
sampleCountsSB.Where(sampleCountsSB.Between("unix_milli", req.Start, req.End))
|
||||
sampleCountsSB.Where("metric_name IN (SELECT metric_name FROM __metric_candidates)")
|
||||
|
||||
if filterWhereClause != nil {
|
||||
fingerprintSB := sqlbuilder.NewSelectBuilder()
|
||||
fingerprintSB.Select("fingerprint")
|
||||
fingerprintSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, localTsTable))
|
||||
fingerprintSB.Where(fingerprintSB.Between("unix_milli", start, end))
|
||||
fingerprintSB.Where("NOT startsWith(metric_name, 'signoz')")
|
||||
fingerprintSB.Where(fingerprintSB.E("__normalized", false))
|
||||
fingerprintSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
|
||||
fingerprintSB.Where("metric_name IN (SELECT metric_name FROM __metric_candidates)")
|
||||
fingerprintSB.GroupBy("fingerprint")
|
||||
|
||||
sampleCountsSB.Where("fingerprint IN (SELECT fingerprint FROM __filtered_fingerprints)")
|
||||
|
||||
cteQueries = append(cteQueries, sqlbuilder.CTEQuery("__filtered_fingerprints").As(fingerprintSB))
|
||||
}
|
||||
|
||||
sampleCountsSB.GroupBy("metric_name")
|
||||
|
||||
cteQueries = append(cteQueries,
|
||||
sqlbuilder.CTEQuery("__sample_counts").As(sampleCountsSB),
|
||||
sqlbuilder.CTEQuery("__total_samples").As(totalSamplesSB),
|
||||
)
|
||||
|
||||
cteBuilder := sqlbuilder.With(cteQueries...)
|
||||
|
||||
finalSB := cteBuilder.Select(
|
||||
"mc.metric_name",
|
||||
"COALESCE(sc.samples, 0) AS samples",
|
||||
"CASE WHEN ts.total_samples = 0 THEN 0 ELSE (COALESCE(sc.samples, 0) * 100.0 / ts.total_samples) END AS percentage",
|
||||
)
|
||||
finalSB.From("__metric_candidates mc")
|
||||
finalSB.JoinWithOption(sqlbuilder.LeftJoin, "__sample_counts sc", "mc.metric_name = sc.metric_name")
|
||||
finalSB.Join("__total_samples ts", "1=1")
|
||||
finalSB.OrderBy("percentage DESC")
|
||||
finalSB.Limit(req.Limit)
|
||||
|
||||
query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
|
||||
db := m.telemetryStore.ClickhouseDB()
|
||||
rows, err := db.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute samples treemap query")
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
entries := make([]metricsexplorertypes.TreemapEntry, 0)
|
||||
for rows.Next() {
|
||||
var treemapEntry metricsexplorertypes.TreemapEntry
|
||||
if err := rows.Scan(&treemapEntry.MetricName, &treemapEntry.TotalValue, &treemapEntry.Percentage); err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan samples treemap row")
|
||||
}
|
||||
entries = append(entries, treemapEntry)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating samples treemap rows")
|
||||
}
|
||||
|
||||
return entries, nil
|
||||
}
|
||||
25
pkg/modules/metricsexplorer/metricsexplorer.go
Normal file
@@ -0,0 +1,25 @@
package metricsexplorer

import (
	"context"
	"net/http"

	"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// Handler exposes HTTP handlers for the metrics module.
type Handler interface {
	GetStats(http.ResponseWriter, *http.Request)
	GetTreemap(http.ResponseWriter, *http.Request)
	GetMetricMetadata(http.ResponseWriter, *http.Request)
	UpdateMetricMetadata(http.ResponseWriter, *http.Request)
}

// Module represents the metrics module interface.
type Module interface {
	GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error)
	GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
	GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)
	UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error
}
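A Handler implementation is expected to decode the HTTP request, resolve the org ID, and delegate to the Module. A minimal sketch of that wiring follows; the struct, the use of encoding/json, and the plain http.Error responses are illustrative assumptions, not the actual handler in this change (which lives elsewhere and uses the project's render helpers).

// Hypothetical sketch: a Handler method delegating GetTreemap to the Module.
type treemapHandler struct {
	module Module
}

func (h *treemapHandler) GetTreemap(w http.ResponseWriter, r *http.Request) {
	var req metricsexplorertypes.TreemapRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil { // assumes encoding/json is imported
		http.Error(w, "invalid request body", http.StatusBadRequest)
		return
	}
	orgID := valuer.UUID{} // in the real handler this comes from the request claims
	resp, err := h.module.GetTreemap(r.Context(), orgID, &req)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	_ = json.NewEncoder(w).Encode(resp)
}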
@@ -1,142 +0,0 @@
package implpromote

import (
	"encoding/json"
	"net/http"
	"strings"

	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/modules/promote"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type handler struct {
	module promote.Module
}

func NewHandler(module promote.Module) promote.Handler {
	return &handler{module: module}
}

func (h *handler) HandlePromote(w http.ResponseWriter, r *http.Request) {
	_, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
		return
	}

	switch r.Method {
	case http.MethodGet:
		h.GetPromotedAndIndexedPaths(w, r)
		return
	case http.MethodPost:
		h.PromotePaths(w, r)
		return
	case http.MethodDelete:
		h.DropIndex(w, r)
		return
	default:
		render.Error(w, errors.NewMethodNotAllowedf(errors.CodeMethodNotAllowed, "method not allowed"))
		return
	}
}

func (h *handler) DropIndex(w http.ResponseWriter, r *http.Request) {
	var req promotetypes.PromotePath
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		render.Error(w, errors.NewInvalidInputf(errors.CodeInvalidInput, "Invalid data"))
		return
	}

	err := h.module.DropIndex(r.Context(), req)
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusOK, nil)
}

func (h *handler) PromotePaths(w http.ResponseWriter, r *http.Request) {
	var req []promotetypes.PromotePath
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		render.Error(w, errors.NewInvalidInputf(errors.CodeInvalidInput, "Invalid data"))
		return
	}

	// Delegate all processing to the reader
	err := h.module.PromoteAndIndexPaths(r.Context(), req...)
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusOK, nil)
}

func (h *handler) GetPromotedAndIndexedPaths(w http.ResponseWriter, r *http.Request) {
	response, err := func() ([]promotetypes.PromotePath, error) {
		indexes, err := h.module.ListBodySkipIndexes(r.Context())
		if err != nil {
			return nil, err
		}
		aggr := map[string][]promotetypes.WrappedIndex{}
		for _, index := range indexes {
			path, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(index.Expression)
			if err != nil {
				return nil, err
			}

			// clean backticks from the path
			path = strings.ReplaceAll(path, "`", "")

			aggr[path] = append(aggr[path], promotetypes.WrappedIndex{
				ColumnType:  columnType,
				Type:        index.Type,
				Granularity: index.Granularity,
			})
		}
		promotedPaths, err := h.module.ListPromotedPaths(r.Context())
		if err != nil {
			return nil, err
		}

		response := []promotetypes.PromotePath{}
		for _, path := range promotedPaths {
			fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
			path = telemetrytypes.BodyJSONStringSearchPrefix + path
			item := promotetypes.PromotePath{
				Path:    path,
				Promote: true,
			}
			indexes, ok := aggr[fullPath]
			if ok {
				item.Indexes = indexes
				delete(aggr, fullPath)
			}
			response = append(response, item)
		}

		// add the paths that are not promoted but have indexes
		for path, indexes := range aggr {
			path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
			path = telemetrytypes.BodyJSONStringSearchPrefix + path
			response = append(response, promotetypes.PromotePath{
				Path:    path,
				Indexes: indexes,
			})
		}
		return response, nil
	}()
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusOK, response)
}
@@ -1,238 +0,0 @@
package implpromote

import (
	"context"
	"fmt"
	"maps"
	"slices"
	"strings"
	"time"

	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/modules/promote"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/telemetrymetadata"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
	"github.com/huandu/go-sqlbuilder"
)

var (
	CodeFailedToPrepareBatch       = errors.MustNewCode("failed_to_prepare_batch_promoted_paths")
	CodeFailedToSendBatch          = errors.MustNewCode("failed_to_send_batch_promoted_paths")
	CodeFailedToAppendPath         = errors.MustNewCode("failed_to_append_path_promoted_paths")
	CodeFailedToCreateIndex        = errors.MustNewCode("failed_to_create_index_promoted_paths")
	CodeFailedToDropIndex          = errors.MustNewCode("failed_to_drop_index_promoted_paths")
	CodeFailedToQueryPromotedPaths = errors.MustNewCode("failed_to_query_promoted_paths")
)

type module struct {
	store telemetrystore.TelemetryStore
}

func NewModule(store telemetrystore.TelemetryStore) promote.Module {
	return &module{store: store}
}

func (m *module) ListBodySkipIndexes(ctx context.Context) ([]schemamigrator.Index, error) {
	return telemetrymetadata.ListLogsJSONIndexes(ctx, m.store.Cluster(), m.store.ClickhouseDB())
}

func (m *module) ListPromotedPaths(ctx context.Context) ([]string, error) {
	paths, err := telemetrymetadata.ListPromotedPaths(ctx, m.store.ClickhouseDB())
	if err != nil {
		return nil, err
	}
	return slices.Collect(maps.Keys(paths)), nil
}

// PromotePaths inserts provided JSON paths into the promoted paths table for logs queries.
func (m *module) PromotePaths(ctx context.Context, paths []string) error {
	if len(paths) == 0 {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
	}

	batch, err := m.store.ClickhouseDB().PrepareBatch(ctx,
		fmt.Sprintf("INSERT INTO %s.%s (path, created_at) VALUES", telemetrymetadata.DBName,
			telemetrymetadata.PromotedPathsTableName))
	if err != nil {
		return errors.WrapInternalf(err, CodeFailedToPrepareBatch, "failed to prepare batch")
	}

	nowMs := uint64(time.Now().UnixMilli())
	for _, p := range paths {
		trimmed := strings.TrimSpace(p)
		if trimmed == "" {
			continue
		}
		if err := batch.Append(trimmed, nowMs); err != nil {
			_ = batch.Abort()
			return errors.WrapInternalf(err, CodeFailedToAppendPath, "failed to append path")
		}
	}

	if err := batch.Send(); err != nil {
		return errors.WrapInternalf(err, CodeFailedToSendBatch, "failed to send batch")
	}
	return nil
}

// createIndexes creates string ngram + token filter indexes on JSON path subcolumns for LIKE queries.
func (m *module) createIndexes(ctx context.Context, indexes []schemamigrator.Index) error {
	if len(indexes) == 0 {
		return nil
	}

	for _, index := range indexes {
		alterStmt := schemamigrator.AlterTableAddIndex{
			Database: telemetrylogs.DBName,
			Table:    telemetrylogs.LogsV2LocalTableName,
			Index:    index,
		}
		op := alterStmt.OnCluster(m.store.Cluster())
		if err := m.store.ClickhouseDB().Exec(ctx, op.ToSQL()); err != nil {
			return errors.WrapInternalf(err, CodeFailedToCreateIndex, "failed to create index")
		}
	}

	return nil
}

func (m *module) DropIndex(ctx context.Context, path promotetypes.PromotePath) error {
	// validate the paths
	if err := path.Validate(); err != nil {
		return err
	}

	promoted, err := telemetrymetadata.IsPathPromoted(ctx, m.store.ClickhouseDB(), path.Path)
	if err != nil {
		return err
	}
	parentColumn := telemetrylogs.LogsV2BodyJSONColumn
	if promoted {
		parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
	}

	for _, index := range path.Indexes {
		typeIndex := schemamigrator.IndexTypeTokenBF
		switch {
		case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeNGramBF)):
			typeIndex = schemamigrator.IndexTypeNGramBF
		case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeTokenBF)):
			typeIndex = schemamigrator.IndexTypeTokenBF
		case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeMinMax)):
			typeIndex = schemamigrator.IndexTypeMinMax
		default:
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid index type: %s", index.Type)
		}

		alterStmt := schemamigrator.AlterTableDropIndex{
			Database: telemetrylogs.DBName,
			Table:    telemetrylogs.LogsV2LocalTableName,
			Index: schemamigrator.Index{
				Name:        schemamigrator.JSONSubColumnIndexName(parentColumn, path.Path, index.JSONDataType.StringValue(), typeIndex),
				Expression:  schemamigrator.JSONSubColumnIndexExpr(parentColumn, path.Path, index.JSONDataType.StringValue()),
				Type:        index.Type,
				Granularity: index.Granularity,
			},
		}
		op := alterStmt.OnCluster(m.store.Cluster())
		if err := m.store.ClickhouseDB().Exec(ctx, op.ToSQL()); err != nil {
			return errors.WrapInternalf(err, CodeFailedToDropIndex, "failed to drop index")
		}
	}

	return nil
}

// PromoteAndIndexPaths handles promoting paths and creating indexes in one call.
func (m *module) PromoteAndIndexPaths(
	ctx context.Context,
	paths ...promotetypes.PromotePath,
) error {
	if len(paths) == 0 {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
	}

	// validate the paths
	for _, path := range paths {
		if err := path.Validate(); err != nil {
			return err
		}
	}

	sb := sqlbuilder.NewSelectBuilder().From(fmt.Sprintf("%s.%s", telemetrymetadata.DBName, telemetrymetadata.PromotedPathsTableName)).Select("path")
	cond := []string{}
	for _, path := range paths {
		cond = append(cond, sb.Equal("path", path.Path))
	}
	sb.Where(sb.Or(cond...))
	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	rows, err := m.store.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return errors.WrapInternalf(err, CodeFailedToQueryPromotedPaths, "failed to query promoted paths")
	}
	defer rows.Close()

	// Load existing promoted paths once
	existingPromotedPaths := make(map[string]struct{})
	for rows.Next() {
		var p string
		if err := rows.Scan(&p); err == nil {
			existingPromotedPaths[p] = struct{}{}
		}
	}

	var toInsert []string
	indexes := []schemamigrator.Index{}
	for _, it := range paths {
		if it.Promote {
			if _, promoted := existingPromotedPaths[it.Path]; !promoted {
				toInsert = append(toInsert, it.Path)
			}
		}
		if len(it.Indexes) > 0 {
			parentColumn := telemetrylogs.LogsV2BodyJSONColumn
			// if the path is already promoted or is being promoted, add it to the promoted column
			if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
				parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
			}

			for _, index := range it.Indexes {
				typeIndex := schemamigrator.IndexTypeTokenBF
				switch {
				case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeNGramBF)):
					typeIndex = schemamigrator.IndexTypeNGramBF
				case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeTokenBF)):
					typeIndex = schemamigrator.IndexTypeTokenBF
				case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeMinMax)):
					typeIndex = schemamigrator.IndexTypeMinMax
				default:
					return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid index type: %s", index.Type)
				}
				indexes = append(indexes, schemamigrator.Index{
					Name:        schemamigrator.JSONSubColumnIndexName(parentColumn, it.Path, index.JSONDataType.StringValue(), typeIndex),
					Expression:  schemamigrator.JSONSubColumnIndexExpr(parentColumn, it.Path, index.JSONDataType.StringValue()),
					Type:        index.Type,
					Granularity: index.Granularity,
				})
			}
		}
	}

	if len(toInsert) > 0 {
		err := m.PromotePaths(ctx, toInsert)
		if err != nil {
			return err
		}
	}

	if len(indexes) > 0 {
		if err := m.createIndexes(ctx, indexes); err != nil {
			return err
		}
	}

	return nil
}
@@ -1,20 +0,0 @@
package promote

import (
	"context"
	"net/http"

	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
)

type Module interface {
	ListBodySkipIndexes(ctx context.Context) ([]schemamigrator.Index, error)
	ListPromotedPaths(ctx context.Context) ([]string, error)
	PromoteAndIndexPaths(ctx context.Context, paths ...promotetypes.PromotePath) error
	DropIndex(ctx context.Context, path promotetypes.PromotePath) error
}

type Handler interface {
	HandlePromote(w http.ResponseWriter, r *http.Request)
}
@@ -1,55 +1,42 @@
package prometheustest

import (
	"log/slog"
	"os"
	"time"
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/prometheus/clickhouseprometheus"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/tsdb"
	"github.com/prometheus/prometheus/storage/remote"
)

var _ prometheus.Prometheus = (*Provider)(nil)

type Provider struct {
	db *tsdb.DB
	dir string
	engine *prometheus.Engine
	queryable storage.SampleAndChunkQueryable
	engine *prometheus.Engine
}

func New(logger *slog.Logger, cfg prometheus.Config, outOfOrderTimeWindow ...int64) *Provider {
	dir, err := os.MkdirTemp("", "test_storage")
	if err != nil {
		panic(err)
	}
	var stCallback = func() (int64, error) {
		return int64(model.Latest), nil
	}

	// Tests just load data for a series sequentially. Thus we
	// need a long appendable window.
	opts := tsdb.DefaultOptions()
	opts.MinBlockDuration = int64(24 * time.Hour / time.Millisecond)
	opts.MaxBlockDuration = int64(24 * time.Hour / time.Millisecond)
	opts.RetentionDuration = 0
	opts.EnableNativeHistograms = true
func New(ctx context.Context, providerSettings factory.ProviderSettings, config prometheus.Config, telemetryStore telemetrystore.TelemetryStore) *Provider {

	// Set OutOfOrderTimeWindow if provided, otherwise use default (0)
	if len(outOfOrderTimeWindow) > 0 {
		opts.OutOfOrderTimeWindow = outOfOrderTimeWindow[0]
	} else {
		opts.OutOfOrderTimeWindow = 0 // Default value is zero
	}
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/prometheus/prometheustest")

	db, err := tsdb.Open(dir, nil, nil, opts, tsdb.NewDBStats())
	if err != nil {
		panic(err)
	}
	engine := prometheus.NewEngine(settings.Logger(), config)

	engine := prometheus.NewEngine(logger, cfg)
	readClient := clickhouseprometheus.NewReadClient(settings, telemetryStore)

	queryable := remote.NewSampleAndChunkQueryableClient(readClient, labels.EmptyLabels(), []*labels.Matcher{}, false, stCallback)

	return &Provider{
		db: db,
		dir: dir,
		engine: engine,
		engine: engine,
		queryable: queryable,
	}
}

@@ -58,12 +45,12 @@ func (provider *Provider) Engine() *prometheus.Engine {
}

func (provider *Provider) Storage() storage.Queryable {
	return provider.db
	return provider.queryable
}

func (provider *Provider) Close() error {
	if err := provider.db.Close(); err != nil {
		return err
	if provider.engine != nil {
		provider.engine.Close()
	}
	return os.RemoveAll(provider.dir)
	return nil
}

@@ -10,11 +10,9 @@ import (

	"github.com/ClickHouse/clickhouse-go/v2"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/bytedance/sonic"
)

type builderQuery[T any] struct {

@@ -250,40 +248,6 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
		return nil, err
	}

	// merge body_json and promoted into body
	if q.spec.Signal == telemetrytypes.SignalLogs {
		switch typedPayload := payload.(type) {
		case *qbtypes.RawData:
			for _, rr := range typedPayload.Rows {
				seeder := func() error {
					body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
					if !ok {
						return nil
					}
					promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
					if !ok {
						return nil
					}
					seed(promoted, body)
					str, err := sonic.MarshalString(body)
					if err != nil {
						return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
					}
					rr.Data["body"] = str
					return nil
				}
				err := seeder()
				if err != nil {
					return nil, err
				}

				delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
				delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
			}
			payload = typedPayload
		}
	}

	return &qbtypes.Result{
		Type:  q.kind,
		Value: payload,

@@ -411,18 +375,3 @@ func decodeCursor(cur string) (int64, error) {
	}
	return strconv.ParseInt(string(b), 10, 64)
}

func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

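The removed seed helper merged the promoted sub-column map into the body map: keys missing from body were copied over, existing scalar values in body were kept, and nested maps were merged recursively. A small illustration of that behaviour, with hypothetical inputs:

// Hypothetical inputs illustrating the removed seed() merge semantics.
promoted := map[string]any{"user": map[string]any{"id": 1}, "region": "eu"}
body := map[string]any{"user": map[string]any{"name": "a"}, "region": "us"}
seed(promoted, body)
// body is now {"user": {"name": "a", "id": 1}, "region": "us"}:
// the missing "id" is copied from promoted, the existing "region" keeps the
// body value, and the nested "user" maps are merged recursively.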
@@ -14,7 +14,6 @@ import (
	"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/bytedance/sonic"
)

var (

@@ -52,6 +51,7 @@ func consume(rows driver.Rows, kind qbtypes.RequestType, queryWindow *qbtypes.Ti
}

func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbtypes.Step, queryName string) (*qbtypes.TimeSeriesData, error) {

	colTypes := rows.ColumnTypes()
	colNames := rows.Columns()

@@ -354,22 +354,10 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
	colTypes := rows.ColumnTypes()
	colCnt := len(colNames)

	// Helper that decides scan target per column based on DB type
	makeScanTarget := func(i int) any {
		dbt := strings.ToUpper(colTypes[i].DatabaseTypeName())
		if strings.HasPrefix(dbt, "JSON") {
			// Since the driver fails to decode JSON/Dynamic into native Go values, we read it as raw bytes
			// TODO: check in future if fixed in the driver
			var v []byte
			return &v
		}
		return reflect.New(colTypes[i].ScanType()).Interface()
	}

	// Build a template slice of correctly-typed pointers once
	scanTpl := make([]any, colCnt)
	for i := range colTypes {
		scanTpl[i] = makeScanTarget(i)
	for i, ct := range colTypes {
		scanTpl[i] = reflect.New(ct.ScanType()).Interface()
	}

	var outRows []*qbtypes.RawRow

@@ -378,7 +366,7 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
		// fresh copy of the scan slice (otherwise the driver reuses pointers)
		scan := make([]any, colCnt)
		for i := range scanTpl {
			scan[i] = makeScanTarget(i)
			scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
		}

		if err := rows.Scan(scan...); err != nil {

@@ -395,28 +383,6 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
			// de-reference the typed pointer to any
			val := reflect.ValueOf(cellPtr).Elem().Interface()

			// Post-process JSON columns: normalize into structured values
			if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
				switch x := val.(type) {
				case []byte:
					if len(x) > 0 {
						var v any
						if err := sonic.Unmarshal(x, &v); err == nil {
							val = v
						}
					}
				case string:
					if x != "" {
						var v any
						if err := sonic.Unmarshal([]byte(x), &v); err == nil {
							val = v
						}
					}
				default:
					// already a structured type (map[string]any, []any, etc.)
				}
			}

			// special-case: timestamp column
			if name == "timestamp" || name == "timestamp_datetime" {
				switch t := val.(type) {

@@ -36,7 +36,7 @@ func NewFactory(
}

func newProvider(
	ctx context.Context,
	_ context.Context,
	settings factory.ProviderSettings,
	cfg querier.Config,
	telemetryStore telemetrystore.TelemetryStore,

@@ -78,7 +78,7 @@ func newProvider(
		telemetryMetadataStore,
	)

	traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
	traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
	traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
		settings,
		telemetryMetadataStore,

@@ -100,15 +100,16 @@ func newProvider(
		traceAggExprRewriter,
	)

	// Create field mapper and condition builder for body JSON queries
	// Create log statement builder
	logFieldMapper := telemetrylogs.NewFieldMapper()
	logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, telemetryMetadataStore)
	logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
	logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
		settings,
		resourceFilterFieldMapper,
		resourceFilterConditionBuilder,
		telemetryMetadataStore,
		telemetrylogs.DefaultFullTextColumn,
		telemetrylogs.BodyJSONStringSearchPrefix,
		telemetrylogs.GetBodyJSONKey,
	)
	logAggExprRewriter := querybuilder.NewAggExprRewriter(

@@ -116,6 +117,7 @@ func newProvider(
		telemetrylogs.DefaultFullTextColumn,
		logFieldMapper,
		logConditionBuilder,
		telemetrylogs.BodyJSONStringSearchPrefix,
		telemetrylogs.GetBodyJSONKey,
	)
	logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder(

@@ -126,6 +128,7 @@ func newProvider(
		logResourceFilterStmtBuilder,
		logAggExprRewriter,
		telemetrylogs.DefaultFullTextColumn,
		telemetrylogs.BodyJSONStringSearchPrefix,
		telemetrylogs.GetBodyJSONKey,
	)

@@ -19,7 +19,7 @@ type inMemoryQueryProgressTracker struct {

func (tracker *inMemoryQueryProgressTracker) ReportQueryStarted(
	queryId string,
) (postQueryCleanup func(), err *model.ApiError) {
) (postQueryCleanup func(), apiErr *model.ApiError) {
	tracker.lock.Lock()
	defer tracker.lock.Unlock()

@@ -9,7 +9,7 @@ type QueryProgressTracker interface {
	// Tells the tracker that query with id `queryId` has started.
	// Progress can only be reported for and tracked for a query that is in progress.
	// Returns a cleanup function that must be called after the query finishes.
	ReportQueryStarted(queryId string) (postQueryCleanup func(), err *model.ApiError)
	ReportQueryStarted(queryId string) (postQueryCleanup func(), apiErr *model.ApiError)

	// Report progress stats received from clickhouse for `queryId`
	ReportQueryProgress(queryId string, chProgress *clickhouse.Progress) *model.ApiError

@@ -18,7 +18,7 @@ type QueryProgressTracker interface {
	// The returned channel will produce `QueryProgress` instances representing
	// the latest state of query progress stats. Also returns a function that
	// can be called to unsubscribe before the query finishes, if needed.
	SubscribeToQueryProgress(queryId string) (ch <-chan model.QueryProgress, unsubscribe func(), err *model.ApiError)
	SubscribeToQueryProgress(queryId string) (ch <-chan model.QueryProgress, unsubscribe func(), apiErr *model.ApiError)
}

func NewQueryProgressTracker() QueryProgressTracker {

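Taken together, the tracker interface implies a start → subscribe → cleanup pattern for callers. A rough sketch of that flow, assuming the constructor shown above and hypothetical surrounding variables (error handling and the actual package name are simplified):

// Hypothetical caller of QueryProgressTracker (sketch only).
tracker := NewQueryProgressTracker()
cleanup, apiErr := tracker.ReportQueryStarted(queryID)
if apiErr != nil {
	return apiErr
}
defer cleanup() // must run after the query finishes

ch, unsubscribe, apiErr := tracker.SubscribeToQueryProgress(queryID)
if apiErr != nil {
	return apiErr
}
defer unsubscribe()

go func() {
	for progress := range ch {
		// stream the latest progress stats (e.g. over SSE) to the client
		_ = progress
	}
}()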
@@ -1297,8 +1297,8 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params

	// check if there is existing things to be done
	for _, tableName := range tableNameArray {
		statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
		if err != nil {
		statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
		if apiErr != nil {
			return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
		}
		if statusItem.Status == constants.StatusPending {
@@ -1378,8 +1378,8 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
		err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume)
		if err != nil {
			zap.L().Error("error in setting cold storage", zap.Error(err))
			statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
			if err == nil {
			statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
			if apiErr == nil {
				_, dbErr := r.
					sqlDB.
					BunDB().
@@ -1455,8 +1455,8 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param

	// check if there is existing things to be done
	for _, tableName := range tableNames {
		statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
		if err != nil {
		statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
		if apiErr != nil {
			return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
		}
		if statusItem.Status == constants.StatusPending {
@@ -1523,8 +1523,8 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
		err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume)
		if err != nil {
			zap.L().Error("Error in setting cold storage", zap.Error(err))
			statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
			if err == nil {
			statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
			if apiErr == nil {
				_, dbErr := r.
					sqlDB.
					BunDB().
@@ -1669,8 +1669,8 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
	}

	for _, tableName := range tableNames {
		statusItem, err := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
		if err != nil {
		statusItem, apiErr := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
		if apiErr != nil {
			return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "error in processing custom_retention_ttl_status check sql query")
		}
		if statusItem.Status == constants.StatusPending {
@@ -1974,8 +1974,8 @@ func (r *ClickHouseReader) checkCustomRetentionTTLStatusItem(ctx context.Context
}

func (r *ClickHouseReader) updateCustomRetentionTTLStatus(ctx context.Context, orgID, tableName, status string) {
	statusItem, err := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
	if err == nil && statusItem != nil {
	statusItem, apiErr := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
	if apiErr == nil && statusItem != nil {
		_, dbErr := r.sqlDB.BunDB().NewUpdate().
			Model(new(types.TTLSetting)).
			Set("updated_at = ?", time.Now()).
@@ -2126,8 +2126,8 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
		signozMetricDBName + "." + signozTSLocalTableNameV41Week,
	}
	for _, tableName := range tableNames {
		statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
		if err != nil {
		statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
		if apiErr != nil {
			return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
		}
		if statusItem.Status == constants.StatusPending {
@@ -2176,8 +2176,8 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
		err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume)
		if err != nil {
			zap.L().Error("Error in setting cold storage", zap.Error(err))
			statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
			if err == nil {
			statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
			if apiErr == nil {
				_, dbErr := r.
					sqlDB.
					BunDB().
@@ -2285,17 +2285,17 @@ func (r *ClickHouseReader) checkTTLStatusItem(ctx context.Context, orgID string,
	return ttl, nil
}

// setTTLQueryStatus fetches ttl_status table status from DB
func (r *ClickHouseReader) setTTLQueryStatus(ctx context.Context, orgID string, tableNameArray []string) (string, *model.ApiError) {
// getTTLQueryStatus fetches ttl_status table status from DB
func (r *ClickHouseReader) getTTLQueryStatus(ctx context.Context, orgID string, tableNameArray []string) (string, *model.ApiError) {
	failFlag := false
	status := constants.StatusSuccess
	for _, tableName := range tableNameArray {
		statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
		statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
		emptyStatusStruct := new(types.TTLSetting)
		if statusItem == emptyStatusStruct {
			return "", nil
		}
		if err != nil {
		if apiErr != nil {
			return "", &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
		}
		if statusItem.Status == constants.StatusPending && statusItem.UpdatedAt.Unix()-time.Now().Unix() < 3600 {
@@ -2439,20 +2439,26 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *

	switch ttlParams.Type {
	case constants.TraceTTL:
		tableNameArray := []string{signozTraceDBName + "." + signozTraceTableName, signozTraceDBName + "." + signozDurationMVTable, signozTraceDBName + "." + signozSpansTable, signozTraceDBName + "." + signozErrorIndexTable, signozTraceDBName + "." + signozUsageExplorerTable, signozTraceDBName + "." + defaultDependencyGraphTable}

		tableNameArray := []string{
			r.TraceDB + "." + r.traceTableName,
			r.TraceDB + "." + r.traceResourceTableV3,
			r.TraceDB + "." + signozErrorIndexTable,
			r.TraceDB + "." + signozUsageExplorerTable,
			r.TraceDB + "." + defaultDependencyGraphTable,
			r.TraceDB + "." + r.traceSummaryTable,
		}
		tableNameArray = getLocalTableNameArray(tableNameArray)
		status, err := r.setTTLQueryStatus(ctx, orgID, tableNameArray)
		if err != nil {
			return nil, err
		status, apiErr := r.getTTLQueryStatus(ctx, orgID, tableNameArray)
		if apiErr != nil {
			return nil, apiErr
		}
		dbResp, err := getTracesTTL()
		if err != nil {
			return nil, err
		dbResp, apiErr := getTracesTTL()
		if apiErr != nil {
			return nil, apiErr
		}
		ttlQuery, err := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
		if err != nil {
			return nil, err
		ttlQuery, apiErr := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
		if apiErr != nil {
			return nil, apiErr
		}
		ttlQuery.TTL = ttlQuery.TTL / 3600 // convert to hours
		if ttlQuery.ColdStorageTTL != -1 {
@@ -2465,17 +2471,17 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
	case constants.MetricsTTL:
		tableNameArray := []string{signozMetricDBName + "." + signozSampleTableName}
		tableNameArray = getLocalTableNameArray(tableNameArray)
		status, err := r.setTTLQueryStatus(ctx, orgID, tableNameArray)
		if err != nil {
			return nil, err
		status, apiErr := r.getTTLQueryStatus(ctx, orgID, tableNameArray)
		if apiErr != nil {
			return nil, apiErr
		}
		dbResp, err := getMetricsTTL()
		if err != nil {
			return nil, err
		dbResp, apiErr := getMetricsTTL()
		if apiErr != nil {
			return nil, apiErr
		}
		ttlQuery, err := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
		if err != nil {
			return nil, err
		ttlQuery, apiErr := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
		if apiErr != nil {
			return nil, apiErr
		}
		ttlQuery.TTL = ttlQuery.TTL / 3600 // convert to hours
		if ttlQuery.ColdStorageTTL != -1 {
@@ -2488,17 +2494,17 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
	case constants.LogsTTL:
		tableNameArray := []string{r.logsDB + "." + r.logsTableName}
		tableNameArray = getLocalTableNameArray(tableNameArray)
		status, err := r.setTTLQueryStatus(ctx, orgID, tableNameArray)
		if err != nil {
			return nil, err
		status, apiErr := r.getTTLQueryStatus(ctx, orgID, tableNameArray)
		if apiErr != nil {
			return nil, apiErr
		}
		dbResp, err := getLogsTTL()
		if err != nil {
			return nil, err
		dbResp, apiErr := getLogsTTL()
		if apiErr != nil {
			return nil, apiErr
		}
		ttlQuery, err := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
		if err != nil {
			return nil, err
		ttlQuery, apiErr := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
		if apiErr != nil {
			return nil, apiErr
		}
		ttlQuery.TTL = ttlQuery.TTL / 3600 // convert to hours
		if ttlQuery.ColdStorageTTL != -1 {
@@ -2681,19 +2687,19 @@ func (r *ClickHouseReader) GetNextPrevErrorIDs(ctx context.Context, queryParams
		zap.L().Error("errorId missing from params")
		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("ErrorID missing from params")}
	}
	var err *model.ApiError
	var apiErr *model.ApiError
	getNextPrevErrorIDsResponse := model.NextPrevErrorIDs{
		GroupID: queryParams.GroupID,
	}
	getNextPrevErrorIDsResponse.NextErrorID, getNextPrevErrorIDsResponse.NextTimestamp, err = r.getNextErrorID(ctx, queryParams)
	if err != nil {
		zap.L().Error("Unable to get next error ID due to err: ", zap.Error(err))
		return nil, err
	getNextPrevErrorIDsResponse.NextErrorID, getNextPrevErrorIDsResponse.NextTimestamp, apiErr = r.getNextErrorID(ctx, queryParams)
	if apiErr != nil {
		zap.L().Error("Unable to get next error ID due to err: ", zap.Error(apiErr))
		return nil, apiErr
	}
	getNextPrevErrorIDsResponse.PrevErrorID, getNextPrevErrorIDsResponse.PrevTimestamp, err = r.getPrevErrorID(ctx, queryParams)
	if err != nil {
		zap.L().Error("Unable to get prev error ID due to err: ", zap.Error(err))
		return nil, err
	getNextPrevErrorIDsResponse.PrevErrorID, getNextPrevErrorIDsResponse.PrevTimestamp, apiErr = r.getPrevErrorID(ctx, queryParams)
	if apiErr != nil {
		zap.L().Error("Unable to get prev error ID due to err: ", zap.Error(apiErr))
		return nil, apiErr
	}
	return &getNextPrevErrorIDsResponse, nil

|
||||
@@ -549,7 +549,6 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
router.HandleFunc("/api/v1/settings/ttl", am.ViewAccess(aH.getTTL)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/settings/ttl", am.AdminAccess(aH.setCustomRetentionTTL)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v2/settings/ttl", am.ViewAccess(aH.getCustomRetentionTTL)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/settings/apdex", am.AdminAccess(aH.Signoz.Handlers.Apdex.Set)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/settings/apdex", am.ViewAccess(aH.Signoz.Handlers.Apdex.Get)).Methods(http.MethodGet)
|
||||
|
||||
@@ -660,6 +659,11 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
|
||||
router.HandleFunc("/api/v1/metrics/{metric_name}/metadata",
|
||||
am.ViewAccess(ah.UpdateMetricsMetadata)).
|
||||
Methods(http.MethodPost)
|
||||
// v2 endpoints
|
||||
router.HandleFunc("/api/v2/metrics/stats", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetStats)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v2/metrics/treemap", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetTreemap)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetMetricMetadata)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.ViewAccess(ah.Signoz.Handlers.Metrics.UpdateMetricMetadata)).Methods(http.MethodPost)
|
||||
}
|
||||
|
||||
func Intersection(a, b []int) (c []int) {
|
||||
@@ -981,14 +985,14 @@ func (aH *APIHandler) metaForLinks(ctx context.Context, rule *ruletypes.Gettable
|
||||
keys := make(map[string]v3.AttributeKey)
|
||||
|
||||
if rule.AlertType == ruletypes.AlertTypeLogs {
|
||||
logFields, err := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(rule.PostableRule.RuleCondition.CompositeQuery))
|
||||
if err == nil {
|
||||
logFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(rule.PostableRule.RuleCondition.CompositeQuery))
|
||||
if apiErr == nil {
|
||||
params := &v3.QueryRangeParamsV3{
|
||||
CompositeQuery: rule.RuleCondition.CompositeQuery,
|
||||
}
|
||||
keys = model.GetLogFieldsV3(ctx, params, logFields)
|
||||
} else {
|
||||
zap.L().Error("failed to get log fields using empty keys; the link might not work as expected", zap.Error(err))
|
||||
zap.L().Error("failed to get log fields using empty keys; the link might not work as expected", zap.Error(apiErr))
|
||||
}
|
||||
} else if rule.AlertType == ruletypes.AlertTypeTraces {
|
||||
traceFields, err := aH.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(rule.PostableRule.RuleCondition.CompositeQuery))
|
||||
@@ -4021,9 +4025,6 @@ func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router, am *middleware.Auth
|
||||
subRouter.HandleFunc("/pipelines/preview", am.ViewAccess(aH.PreviewLogsPipelinesHandler)).Methods(http.MethodPost)
|
||||
subRouter.HandleFunc("/pipelines/{version}", am.ViewAccess(aH.ListLogsPipelinesHandler)).Methods(http.MethodGet)
|
||||
subRouter.HandleFunc("/pipelines", am.EditAccess(aH.CreateLogsPipeline)).Methods(http.MethodPost)
|
||||
|
||||
// Promote and index JSON paths used in logs
|
||||
subRouter.HandleFunc("/promote_paths", am.AdminAccess(aH.Signoz.Handlers.Promote.HandlePromote)).Methods(http.MethodGet, http.MethodPost, http.MethodDelete)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) logFields(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -4294,9 +4295,9 @@ func (aH *APIHandler) getQueryBuilderSuggestions(w http.ResponseWriter, r *http.
|
||||
return
|
||||
}
|
||||
|
||||
response, err := aH.reader.GetQBFilterSuggestionsForLogs(r.Context(), req)
|
||||
if err != nil {
|
||||
RespondError(w, err, nil)
|
||||
response, apiErr := aH.reader.GetQBFilterSuggestionsForLogs(r.Context(), req)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -4452,10 +4453,9 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
}
|
||||
// check if any enrichment is required for logs if yes then enrich them
|
||||
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
|
||||
logsFields, err := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, errQuriesByName)
|
||||
return
|
||||
}
|
||||
// get the fields if any logs query is present
|
||||
@@ -4492,12 +4492,12 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
// Hook up query progress tracking if requested
|
||||
queryIdHeader := r.Header.Get("X-SIGNOZ-QUERY-ID")
|
||||
if len(queryIdHeader) > 0 {
|
||||
onQueryFinished, err := aH.reader.ReportQueryStartForProgressTracking(queryIdHeader)
|
||||
onQueryFinished, apiErr := aH.reader.ReportQueryStartForProgressTracking(queryIdHeader)
|
||||
|
||||
if err != nil {
|
||||
if apiErr != nil {
|
||||
zap.L().Error(
|
||||
"couldn't report query start for progress tracking",
|
||||
zap.String("queryId", queryIdHeader), zap.Error(err),
|
||||
zap.String("queryId", queryIdHeader), zap.Error(apiErr),
|
||||
)
|
||||
|
||||
} else {
|
||||
@@ -4808,10 +4808,9 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
|
||||
// check if any enrichment is required for logs if yes then enrich them
|
||||
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
|
||||
// get the fields if any logs query is present
|
||||
logsFields, err := aH.reader.GetLogFieldsFromNames(r.Context(), logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
RespondError(w, apiErrObj, nil)
|
||||
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(r.Context(), logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
fields := model.GetLogFieldsV3(r.Context(), queryRangeParams, logsFields)
|
||||
|
||||
@@ -293,7 +293,7 @@ func (m *Manager) dashboardUuid(integrationId string, dashboardId string) string
|
||||
}
|
||||
|
||||
func (m *Manager) parseDashboardUuid(dashboardUuid string) (
|
||||
integrationId string, dashboardId string, err *model.ApiError,
|
||||
integrationId string, dashboardId string, apiErr *model.ApiError,
|
||||
) {
|
||||
parts := strings.SplitN(dashboardUuid, "--", 3)
|
||||
if len(parts) != 3 || parts[0] != "integration" {
|
||||
|
||||
@@ -11,7 +11,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
@@ -19,7 +18,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/google/uuid"
|
||||
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
@@ -130,40 +128,6 @@ func (ic *LogParsingPipelineController) ValidatePipelines(ctx context.Context,
|
||||
return err
|
||||
}
|
||||
|
||||
func (ic *LogParsingPipelineController) getDefaultPipelines() ([]pipelinetypes.GettablePipeline, error) {
|
||||
defaultPipelines := []pipelinetypes.GettablePipeline{}
|
||||
if constants.BodyJSONQueryEnabled {
|
||||
preprocessingPipeline := pipelinetypes.GettablePipeline{
|
||||
StoreablePipeline: pipelinetypes.StoreablePipeline{
|
||||
Name: "Default Pipeline - PreProcessing Body",
|
||||
Alias: "NormalizeBodyDefault",
|
||||
Enabled: true,
|
||||
},
|
||||
Filter: &v3.FilterSet{
|
||||
Items: []v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "body",
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
},
|
||||
},
|
||||
},
|
||||
Config: []pipelinetypes.PipelineOperator{
|
||||
{
|
||||
ID: uuid.NewString(),
|
||||
Type: "normalize",
|
||||
Enabled: true,
|
||||
If: "body != nil",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
defaultPipelines = append(defaultPipelines, preprocessingPipeline)
|
||||
}
|
||||
return defaultPipelines, nil
|
||||
}
|
||||
|
||||
// Returns effective list of pipelines including user created
|
||||
// pipelines and pipelines for installed integrations
|
||||
func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
|
||||
@@ -294,13 +258,6 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
// recommend default pipelines along with user created pipelines
|
||||
defaultPipelines, err := pc.getDefaultPipelines()
|
||||
if err != nil {
|
||||
return nil, "", model.InternalError(fmt.Errorf("failed to get default pipelines: %w", err))
|
||||
}
|
||||
pipelinesResp.Pipelines = append(pipelinesResp.Pipelines, defaultPipelines...)
|
||||
|
||||
updatedConf, err := GenerateCollectorConfigWithPipelines(currentConfYaml, pipelinesResp.Pipelines)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
|
||||
@@ -132,7 +132,7 @@ func SignozLogsToPLogs(logs []model.SignozLog) []plog.Logs {
|
||||
slRecord.SetSeverityText(log.SeverityText)
|
||||
slRecord.SetSeverityNumber(plog.SeverityNumber(log.SeverityNumber))
|
||||
|
||||
slRecord.Body().FromRaw(log.Body)
|
||||
slRecord.Body().SetStr(log.Body)
|
||||
|
||||
slAttribs := slRecord.Attributes()
|
||||
for k, v := range log.Attributes_int64 {
|
||||
|
||||
@@ -404,7 +404,7 @@ func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.Build
|
||||
// if noop create the query and return
|
||||
if mq.AggregateOperator == v3.AggregateOperatorNoOp {
|
||||
// with noop any filter or different order by other than ts will use new table
|
||||
sqlSelect := constants.LogsSQLSelectV2()
|
||||
sqlSelect := constants.LogsSQLSelectV2
|
||||
queryTmpl := sqlSelect + "from signoz_logs.%s where %s%s order by %s"
|
||||
query := fmt.Sprintf(queryTmpl, DISTRIBUTED_LOGS_V2, timeFilter, filterSubQuery, orderBy)
|
||||
return query, nil
|
||||
@@ -488,7 +488,7 @@ func buildLogsLiveTailQuery(mq *v3.BuilderQuery) (string, error) {
|
||||
// the reader will add the timestamp and id filters
|
||||
switch mq.AggregateOperator {
|
||||
case v3.AggregateOperatorNoOp:
|
||||
query := constants.LogsSQLSelectV2() + "from signoz_logs." + DISTRIBUTED_LOGS_V2 + " where "
|
||||
query := constants.LogsSQLSelectV2 + "from signoz_logs." + DISTRIBUTED_LOGS_V2 + " where "
|
||||
if len(filterSubQuery) > 0 {
|
||||
query = query + filterSubQuery + " AND "
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_getClickhouseKey(t *testing.T) {
|
||||
@@ -1211,8 +1210,9 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
t.Errorf("PrepareLogsQuery() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
|
||||
assert.Equal(t, tt.want, got)
|
||||
if got != tt.want {
|
||||
t.Errorf("PrepareLogsQuery() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -62,23 +62,23 @@ func (receiver *SummaryService) FilterValues(ctx context.Context, orgID valuer.U
|
||||
response.FilterValues = filterValues
|
||||
return &response, nil
|
||||
case "metric_unit":
|
||||
attributes, err := receiver.reader.GetAllMetricFilterUnits(ctx, params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
attributes, apiErr := receiver.reader.GetAllMetricFilterUnits(ctx, params)
|
||||
if apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
response.FilterValues = attributes
|
||||
return &response, nil
|
||||
case "metric_type":
|
||||
attributes, err := receiver.reader.GetAllMetricFilterTypes(ctx, params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
attributes, apiErr := receiver.reader.GetAllMetricFilterTypes(ctx, params)
|
||||
if apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
response.FilterValues = attributes
|
||||
return &response, nil
|
||||
default:
|
||||
attributes, err := receiver.reader.GetAllMetricFilterAttributeValues(ctx, params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
attributes, apiErr := receiver.reader.GetAllMetricFilterAttributeValues(ctx, params)
|
||||
if apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
response.FilterValues = attributes
|
||||
return &response, nil
|
||||
@@ -108,45 +108,45 @@ func (receiver *SummaryService) GetMetricsSummary(ctx context.Context, orgID val
|
||||
})
|
||||
|
||||
g.Go(func() error {
|
||||
dataPoints, err := receiver.reader.GetMetricsDataPoints(ctx, metricName)
|
||||
if err != nil {
|
||||
return err
|
||||
dataPoints, apiErr := receiver.reader.GetMetricsDataPoints(ctx, metricName)
|
||||
if apiErr != nil {
|
||||
return apiErr.ToError()
|
||||
}
|
||||
metricDetailsDTO.Samples = dataPoints
|
||||
return nil
|
||||
})
|
||||
|
||||
g.Go(func() error {
|
||||
lastReceived, err := receiver.reader.GetMetricsLastReceived(ctx, metricName)
|
||||
if err != nil {
|
||||
return err
|
||||
lastReceived, apiErr := receiver.reader.GetMetricsLastReceived(ctx, metricName)
|
||||
if apiErr != nil {
|
||||
return apiErr.ToError()
|
||||
}
|
||||
metricDetailsDTO.LastReceived = lastReceived
|
||||
return nil
|
||||
})
|
||||
|
||||
g.Go(func() error {
|
||||
totalSeries, err := receiver.reader.GetTotalTimeSeriesForMetricName(ctx, metricName)
|
||||
if err != nil {
|
||||
return err
|
||||
totalSeries, apiErr := receiver.reader.GetTotalTimeSeriesForMetricName(ctx, metricName)
|
||||
if apiErr != nil {
|
||||
return apiErr.ToError()
|
||||
}
|
||||
metricDetailsDTO.TimeSeriesTotal = totalSeries
|
||||
return nil
|
||||
})
|
||||
|
||||
g.Go(func() error {
|
||||
activeSeries, err := receiver.reader.GetActiveTimeSeriesForMetricName(ctx, metricName, 120*time.Minute)
|
||||
if err != nil {
|
||||
return err
|
||||
activeSeries, apiErr := receiver.reader.GetActiveTimeSeriesForMetricName(ctx, metricName, 120*time.Minute)
|
||||
if apiErr != nil {
|
||||
return apiErr.ToError()
|
||||
}
|
||||
metricDetailsDTO.TimeSeriesActive = activeSeries
|
||||
return nil
|
||||
})
|
||||
|
||||
g.Go(func() error {
|
||||
attributes, err := receiver.reader.GetAttributesForMetricName(ctx, metricName, nil, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
attributes, apiErr := receiver.reader.GetAttributesForMetricName(ctx, metricName, nil, nil, nil)
|
||||
if apiErr != nil {
|
||||
return apiErr.ToError()
|
||||
}
|
||||
if attributes != nil {
|
||||
metricDetailsDTO.Attributes = *attributes
|
||||
|
||||
@@ -1405,7 +1405,7 @@ func Test_querier_Traces_runWindowBasedListQueryDesc(t *testing.T) {
|
||||
reader := clickhouseReader.NewReader(
|
||||
nil,
|
||||
telemetryStore,
|
||||
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
||||
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
|
||||
"",
|
||||
time.Duration(time.Second),
|
||||
nil,
|
||||
@@ -1630,7 +1630,7 @@ func Test_querier_Traces_runWindowBasedListQueryAsc(t *testing.T) {
|
||||
reader := clickhouseReader.NewReader(
|
||||
nil,
|
||||
telemetryStore,
|
||||
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
||||
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
|
||||
"",
|
||||
time.Duration(time.Second),
|
||||
nil,
|
||||
@@ -1930,7 +1930,7 @@ func Test_querier_Logs_runWindowBasedListQueryDesc(t *testing.T) {
|
||||
reader := clickhouseReader.NewReader(
|
||||
nil,
|
||||
telemetryStore,
|
||||
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
||||
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
|
||||
"",
|
||||
time.Duration(time.Second),
|
||||
nil,
|
||||
@@ -2157,7 +2157,7 @@ func Test_querier_Logs_runWindowBasedListQueryAsc(t *testing.T) {
|
||||
reader := clickhouseReader.NewReader(
|
||||
nil,
|
||||
telemetryStore,
|
||||
prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
|
||||
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
|
||||
"",
|
||||
time.Duration(time.Second),
|
||||
nil,
|
||||
|
||||
@@ -1457,7 +1457,7 @@ func Test_querier_Traces_runWindowBasedListQueryDesc(t *testing.T) {
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
		"",
		time.Duration(time.Second),
		nil,
@@ -1682,7 +1682,7 @@ func Test_querier_Traces_runWindowBasedListQueryAsc(t *testing.T) {
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
		"",
		time.Duration(time.Second),
		nil,
@@ -1981,7 +1981,7 @@ func Test_querier_Logs_runWindowBasedListQueryDesc(t *testing.T) {
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
		"",
		time.Duration(time.Second),
		nil,
@@ -2208,7 +2208,7 @@ func Test_querier_Logs_runWindowBasedListQueryAsc(t *testing.T) {
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
		"",
		time.Duration(time.Second),
		nil,

@@ -53,9 +53,14 @@ func getClickHouseTracesColumnDataType(columnDataType v3.AttributeKeyDataType) s

func getColumnName(key v3.AttributeKey, replaceAlias bool) string {
	if replaceAlias {
		// It should be in DeprecatedStaticFieldsTraces
		if _, ok := constants.DeprecatedStaticFieldsTraces[key.Key]; ok {
			// It should not be in NewStaticFieldsTraces
			if _, ok := constants.NewStaticFieldsTraces[key.Key]; !ok {
				key = constants.NewStaticFieldsTraces[constants.OldToNewTraceFieldsMap[key.Key]]
				// It should have a mapping in OldToNewTraceFieldsMap
				if _, ok := constants.OldToNewTraceFieldsMap[key.Key]; ok {
					key = constants.NewStaticFieldsTraces[constants.OldToNewTraceFieldsMap[key.Key]]
				}
			}
		}
	}

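For context, the remapping above only swaps a deprecated trace column when the old name actually has a mapping registered. A hedged illustration of resolving one such key (the concrete map entries are assumptions drawn from the test expectations further down, e.g. traceID resolving to trace_id):

// Sketch: resolving a deprecated trace column through the old→new mapping.
key := v3.AttributeKey{Key: "traceID", DataType: v3.AttributeKeyDataTypeString}
if _, deprecated := constants.DeprecatedStaticFieldsTraces[key.Key]; deprecated {
	if newName, hasMapping := constants.OldToNewTraceFieldsMap[key.Key]; hasMapping {
		key = constants.NewStaticFieldsTraces[newName] // e.g. "traceID" resolves to the "trace_id" column
	}
}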
@@ -200,7 +200,7 @@ func Test_getSelectLabels(t *testing.T) {
			want: " name as `name`, `resource_string_service_name` as `service_name`",
		},
		{
			name: "depricated Columns",
			name: "deprecated Columns",
			args: args{
				groupBy: []v3.AttributeKey{
					{Key: "spanKind", DataType: v3.AttributeKeyDataTypeString},
@@ -209,19 +209,20 @@ func Test_getSelectLabels(t *testing.T) {
					{Key: "spanID", DataType: v3.AttributeKeyDataTypeString},
					{Key: "serviceName", DataType: v3.AttributeKeyDataTypeString},
					{Key: "httpRoute", DataType: v3.AttributeKeyDataTypeString},
					{Key: "kind", DataType: v3.AttributeKeyDataTypeInt64},
				},
			},
			want: " kind_string as `spanKind`, status_message as `statusMessage`, trace_id as `traceID`, span_id as `spanID`, resource_string_service$$name as `serviceName`, attribute_string_http$$route as `httpRoute`",
			want: " kind_string as `spanKind`, status_message as `statusMessage`, trace_id as `traceID`, span_id as `spanID`, resource_string_service$$name as `serviceName`, attribute_string_http$$route as `httpRoute`, kind as `kind`",
		},
		{
			name: "non depricated Columns",
			args: args{
				groupBy: []v3.AttributeKey{
					{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
					{Key: "kind", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
					{Key: "kind_string", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
				},
			},
			want: " name as `name`, kind as `kind`",
			want: " name as `name`, kind_string as `kind_string`",
		},
	}
	for _, tt := range tests {

@@ -9,7 +9,6 @@ import (

	"github.com/SigNoz/signoz/pkg/query-service/model"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/huandu/go-sqlbuilder"
)

const (
@@ -217,6 +216,13 @@ const (
		"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
		"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
		"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope "
	LogsSQLSelectV2 = "SELECT " +
		"timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, " +
		"attributes_string, " +
		"attributes_number, " +
		"attributes_bool, " +
		"resources_string, " +
		"scope_string "
	TracesExplorerViewSQLSelectWithSubQuery = "(SELECT traceID, durationNano, " +
		"serviceName, name FROM %s.%s WHERE parentSpanID = '' AND %s ORDER BY durationNano DESC LIMIT 1 BY traceID"
	TracesExplorerViewSQLSelectBeforeSubQuery = "SELECT subQuery.serviceName as `subQuery.serviceName`, subQuery.name as `subQuery.name`, count() AS " +
@@ -356,11 +362,6 @@ var NewStaticFieldsTraces = map[string]v3.AttributeKey{
		DataType: v3.AttributeKeyDataTypeString,
		IsColumn: true,
	},
	"kind": {
		Key:      "kind",
		DataType: v3.AttributeKeyDataTypeString,
		IsColumn: true,
	},
	"kind_string": {
		Key:      "kind_string",
		DataType: v3.AttributeKeyDataTypeString,
@@ -515,7 +516,7 @@ var DeprecatedStaticFieldsTraces = map[string]v3.AttributeKey{
	},
	"kind": {
		Key:      "kind",
		DataType: v3.AttributeKeyDataTypeString,
		DataType: v3.AttributeKeyDataTypeInt64,
		IsColumn: true,
	},
	"spanKind": {
@@ -686,7 +687,6 @@ var StaticFieldsTraces = map[string]v3.AttributeKey{}
var IsDotMetricsEnabled = false
var PreferSpanMetrics = false
var MaxJSONFlatteningDepth = 1
var BodyJSONQueryEnabled = GetOrDefaultEnv("BODY_JSON_QUERY_ENABLED", "false") == "true"

func init() {
	StaticFieldsTraces = maps.Clone(NewStaticFieldsTraces)
@@ -727,15 +727,3 @@ const InspectMetricsMaxTimeDiff = 1800000

const DotMetricsEnabled = "DOT_METRICS_ENABLED"
const maxJSONFlatteningDepth = "MAX_JSON_FLATTENING_DEPTH"

func LogsSQLSelectV2() string {
	sb := sqlbuilder.NewSelectBuilder()
	columns := []string{"timestamp", "id", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number", "scope_name", "scope_version", "body"}
	if BodyJSONQueryEnabled {
		columns = append(columns, "body_json", "body_json_promoted")
	}
	columns = append(columns, "attributes_string", "attributes_number", "attributes_bool", "resources_string", "scope_string")
	sb.Select(columns...)
	query, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return query + " " // add space to avoid concatenation issues
}

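The constants diff above trades the fixed LogsSQLSelectV2 string for a function, so the selected column list can react to the BODY_JSON_QUERY_ENABLED flag. A rough usage sketch; the table name and filter fragment below are placeholders, not values taken from this PR:

// Sketch: composing a logs query from the generated SELECT clause.
selectClause := constants.LogsSQLSelectV2() // "SELECT timestamp, id, ... scope_string "
query := selectClause + "FROM some_logs_table WHERE " + filterClause
// With BODY_JSON_QUERY_ENABLED=true the clause additionally carries
// body_json and body_json_promoted, so row-scan targets must match.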
@@ -95,7 +95,7 @@ type Reader interface {
	GetMinAndMaxTimestampForTraceID(ctx context.Context, traceID []string) (int64, int64, error)

	// Query Progress tracking helpers.
	ReportQueryStartForProgressTracking(queryId string) (reportQueryFinished func(), err *model.ApiError)
	ReportQueryStartForProgressTracking(queryId string) (reportQueryFinished func(), apiErr *model.ApiError)
	SubscribeToQueryProgress(queryId string) (<-chan model.QueryProgress, func(), *model.ApiError)

	GetCountOfThings(ctx context.Context, query string) (uint64, error)

@@ -598,16 +598,13 @@ type SignozLog struct {
	TraceFlags         uint32             `json:"trace_flags" ch:"trace_flags"`
	SeverityText       string             `json:"severity_text" ch:"severity_text"`
	SeverityNumber     uint8              `json:"severity_number" ch:"severity_number"`
	Body               any                `json:"body" ch:"body"`
	BodyJSON           map[string]any     `json:"-" ch:"body_json"`
	BodyPromoted       map[string]any     `json:"-" ch:"body_json_promoted"`
	Body               string             `json:"body" ch:"body"`
	Resources_string   map[string]string  `json:"resources_string" ch:"resources_string"`
	Attributes_string  map[string]string  `json:"attributes_string" ch:"attributes_string"`
	Attributes_int64   map[string]int64   `json:"attributes_int" ch:"attributes_int64"`
	Attributes_float64 map[string]float64 `json:"attributes_float" ch:"attributes_float64"`
	Attributes_bool    map[string]bool    `json:"attributes_bool" ch:"attributes_bool"`
}

type GetLogsAggregatesResponse struct {
	Items map[int64]LogsAggregatesResponseItem `json:"items"`
}

@@ -119,15 +119,10 @@ func (r *PromRule) getPqlQuery() (string, error) {
	return "", fmt.Errorf("invalid promql rule query")
}

func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) {

	prevState := r.State()

func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
	start, end := r.Timestamps(ts)
	interval := 60 * time.Second // TODO(srikanthccv): this should be configurable

	valueFormatter := formatter.FromUnit(r.Unit())

	q, err := r.getPqlQuery()
	if err != nil {
		return nil, err
@@ -140,12 +135,35 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
		return nil, err
	}

	var resultVector ruletypes.Vector
	for _, series := range res {
		resultSeries, err := r.Threshold.Eval(toCommonSeries(series), r.Unit(), ruletypes.EvalData{
			ActiveAlerts: r.ActiveAlertsLabelFP(),
		})
		if err != nil {
			return nil, err
		}
		resultVector = append(resultVector, resultSeries...)
	}
	return resultVector, nil
}

func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) {
	prevState := r.State()
	valueFormatter := formatter.FromUnit(r.Unit())

	// prepare query, run query get data and filter the data based on the threshold
	results, err := r.buildAndRunQuery(ctx, ts)
	if err != nil {
		return nil, err
	}

	r.mtx.Lock()
	defer r.mtx.Unlock()

	resultFPs := map[uint64]struct{}{}

	var alerts = make(map[uint64]*ruletypes.Alert, len(res))
	var alerts = make(map[uint64]*ruletypes.Alert, len(results))

	ruleReceivers := r.Threshold.GetRuleReceivers()
	ruleReceiverMap := make(map[string][]string)
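The refactor above splits PromRule.Eval into a query phase (buildAndRunQuery returns a ruletypes.Vector already filtered by the configured thresholds) and an alerting phase that builds labels, annotations and Alert entries. Judging from the tests added later in this diff, a caller drives it roughly like this; the int type assertion on the return value is taken from those tests, the rest of the wiring is assumed:

// Sketch: one evaluation tick of a PromRule.
retVal, err := rule.Eval(ctx, time.Now())
if err != nil {
	// health and lastError are already recorded inside Eval; just surface the failure
	log.Printf("prom rule eval failed: %v", err)
	return
}
firing := retVal.(int) // the tests assert the returned value as the number of alerts produced
log.Printf("rule %q produced %d alert(s)", rule.Name(), firing)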
@@ -153,90 +171,76 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
|
||||
ruleReceiverMap[value.Name] = value.Channels
|
||||
}
|
||||
|
||||
for _, series := range res {
|
||||
for _, result := range results {
|
||||
l := make(map[string]string, len(result.Metric))
|
||||
for _, lbl := range result.Metric {
|
||||
l[lbl.Name] = lbl.Value
|
||||
}
|
||||
r.logger.DebugContext(ctx, "alerting for series", "rule_name", r.Name(), "series", result)
|
||||
|
||||
if len(series.Floats) == 0 {
|
||||
continue
|
||||
threshold := valueFormatter.Format(result.Target, result.TargetUnit)
|
||||
|
||||
tmplData := ruletypes.AlertTemplateData(l, valueFormatter.Format(result.V, r.Unit()), threshold)
|
||||
// Inject some convenience variables that are easier to remember for users
|
||||
// who are not used to Go's templating system.
|
||||
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
|
||||
|
||||
expand := func(text string) string {
|
||||
|
||||
tmpl := ruletypes.NewTemplateExpander(
|
||||
ctx,
|
||||
defs+text,
|
||||
"__alert_"+r.Name(),
|
||||
tmplData,
|
||||
times.Time(timestamp.FromTime(ts)),
|
||||
nil,
|
||||
)
|
||||
result, err := tmpl.Expand()
|
||||
if err != nil {
|
||||
result = fmt.Sprintf("<error expanding template: %s>", err)
|
||||
r.logger.WarnContext(ctx, "Expanding alert template failed", "rule_name", r.Name(), "error", err, "data", tmplData)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
results, err := r.Threshold.Eval(toCommonSeries(series), r.Unit(), ruletypes.EvalData{
|
||||
ActiveAlerts: r.ActiveAlertsLabelFP(),
|
||||
})
|
||||
if err != nil {
|
||||
lb := qslabels.NewBuilder(result.Metric).Del(qslabels.MetricNameLabel)
|
||||
resultLabels := qslabels.NewBuilder(result.Metric).Del(qslabels.MetricNameLabel).Labels()
|
||||
|
||||
for name, value := range r.labels.Map() {
|
||||
lb.Set(name, expand(value))
|
||||
}
|
||||
|
||||
lb.Set(qslabels.AlertNameLabel, r.Name())
|
||||
lb.Set(qslabels.AlertRuleIdLabel, r.ID())
|
||||
lb.Set(qslabels.RuleSourceLabel, r.GeneratorURL())
|
||||
|
||||
annotations := make(qslabels.Labels, 0, len(r.annotations.Map()))
|
||||
for name, value := range r.annotations.Map() {
|
||||
annotations = append(annotations, qslabels.Label{Name: name, Value: expand(value)})
|
||||
}
|
||||
|
||||
lbs := lb.Labels()
|
||||
h := lbs.Hash()
|
||||
resultFPs[h] = struct{}{}
|
||||
|
||||
if _, ok := alerts[h]; ok {
|
||||
err = fmt.Errorf("vector contains metrics with the same labelset after applying alert labels")
|
||||
// We have already acquired the lock above hence using SetHealth and
|
||||
// SetLastError will deadlock.
|
||||
r.health = ruletypes.HealthBad
|
||||
r.lastError = err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, result := range results {
|
||||
l := make(map[string]string, len(series.Metric))
|
||||
for _, lbl := range series.Metric {
|
||||
l[lbl.Name] = lbl.Value
|
||||
}
|
||||
r.logger.DebugContext(ctx, "alerting for series", "rule_name", r.Name(), "series", series)
|
||||
|
||||
threshold := valueFormatter.Format(result.Target, result.TargetUnit)
|
||||
|
||||
tmplData := ruletypes.AlertTemplateData(l, valueFormatter.Format(result.V, r.Unit()), threshold)
|
||||
// Inject some convenience variables that are easier to remember for users
|
||||
// who are not used to Go's templating system.
|
||||
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
|
||||
|
||||
expand := func(text string) string {
|
||||
|
||||
tmpl := ruletypes.NewTemplateExpander(
|
||||
ctx,
|
||||
defs+text,
|
||||
"__alert_"+r.Name(),
|
||||
tmplData,
|
||||
times.Time(timestamp.FromTime(ts)),
|
||||
nil,
|
||||
)
|
||||
result, err := tmpl.Expand()
|
||||
if err != nil {
|
||||
result = fmt.Sprintf("<error expanding template: %s>", err)
|
||||
r.logger.WarnContext(ctx, "Expanding alert template failed", "rule_name", r.Name(), "error", err, "data", tmplData)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
lb := qslabels.NewBuilder(result.Metric).Del(qslabels.MetricNameLabel)
|
||||
resultLabels := qslabels.NewBuilder(result.Metric).Del(qslabels.MetricNameLabel).Labels()
|
||||
|
||||
for name, value := range r.labels.Map() {
|
||||
lb.Set(name, expand(value))
|
||||
}
|
||||
|
||||
lb.Set(qslabels.AlertNameLabel, r.Name())
|
||||
lb.Set(qslabels.AlertRuleIdLabel, r.ID())
|
||||
lb.Set(qslabels.RuleSourceLabel, r.GeneratorURL())
|
||||
|
||||
annotations := make(qslabels.Labels, 0, len(r.annotations.Map()))
|
||||
for name, value := range r.annotations.Map() {
|
||||
annotations = append(annotations, qslabels.Label{Name: name, Value: expand(value)})
|
||||
}
|
||||
|
||||
lbs := lb.Labels()
|
||||
h := lbs.Hash()
|
||||
resultFPs[h] = struct{}{}
|
||||
|
||||
if _, ok := alerts[h]; ok {
|
||||
err = fmt.Errorf("vector contains metrics with the same labelset after applying alert labels")
|
||||
// We have already acquired the lock above hence using SetHealth and
|
||||
// SetLastError will deadlock.
|
||||
r.health = ruletypes.HealthBad
|
||||
r.lastError = err
|
||||
return nil, err
|
||||
}
|
||||
alerts[h] = &ruletypes.Alert{
|
||||
Labels: lbs,
|
||||
QueryResultLables: resultLabels,
|
||||
Annotations: annotations,
|
||||
ActiveAt: ts,
|
||||
State: model.StatePending,
|
||||
Value: result.V,
|
||||
GeneratorURL: r.GeneratorURL(),
|
||||
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
|
||||
IsRecovering: result.IsRecovering,
|
||||
}
|
||||
alerts[h] = &ruletypes.Alert{
|
||||
Labels: lbs,
|
||||
QueryResultLables: resultLabels,
|
||||
Annotations: annotations,
|
||||
ActiveAt: ts,
|
||||
State: model.StatePending,
|
||||
Value: result.V,
|
||||
GeneratorURL: r.GeneratorURL(),
|
||||
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
|
||||
IsRecovering: result.IsRecovering,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,14 +1,23 @@
package rules

import (
	"context"
	"strings"
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/prometheus/prometheustest"
	"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	pql "github.com/prometheus/prometheus/promql"
	cmock "github.com/srikanthccv/ClickHouse-go-mock"
	"github.com/stretchr/testify/assert"
)

@@ -723,6 +732,612 @@ func TestPromRuleEval(t *testing.T) {
|
||||
assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func TestPromRuleUnitCombinations(t *testing.T) {
|
||||
// fixed base time for deterministic tests
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
evalTime := baseTime.Add(5 * time.Minute)
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Units test",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeProm,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
PromQueries: map[string]*v3.PromQuery{
|
||||
"A": {
|
||||
Query: "test_metric",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// time_series_v4 cols of interest
|
||||
fingerprintCols := []cmock.ColumnType{
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "any(labels)", Type: "String"},
|
||||
}
|
||||
|
||||
// samples_v4 columns
|
||||
samplesCols := []cmock.ColumnType{
|
||||
{Name: "metric_name", Type: "String"},
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "unix_milli", Type: "Int64"},
|
||||
{Name: "value", Type: "Float64"},
|
||||
{Name: "flags", Type: "UInt32"},
|
||||
}
|
||||
|
||||
// see Timestamps on base_rule
|
||||
evalWindowMs := int64(5 * 60 * 1000) // 5 minutes in ms
|
||||
evalTimeMs := evalTime.UnixMilli()
|
||||
queryStart := ((evalTimeMs-2*evalWindowMs)/60000)*60000 + 1 // truncate to minute + 1ms
|
||||
queryEnd := (evalTimeMs / 60000) * 60000 // truncate to minute
|
||||
|
||||
cases := []struct {
|
||||
targetUnit string
|
||||
yAxisUnit string
|
||||
values []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}
|
||||
expectAlerts int
|
||||
compareOp string
|
||||
matchType string
|
||||
target float64
|
||||
summaryAny []string
|
||||
}{
|
||||
{
|
||||
targetUnit: "s",
|
||||
yAxisUnit: "ns",
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{
|
||||
{baseTime, 572588400}, // 0.57 seconds
|
||||
{baseTime.Add(1 * time.Minute), 572386400}, // 0.57 seconds
|
||||
{baseTime.Add(2 * time.Minute), 300947400}, // 0.3 seconds
|
||||
{baseTime.Add(3 * time.Minute), 299316000}, // 0.3 seconds
|
||||
{baseTime.Add(4 * time.Minute), 66640400.00000001}, // 0.06 seconds
|
||||
},
|
||||
expectAlerts: 0,
|
||||
compareOp: "1", // Above
|
||||
matchType: "1", // Once
|
||||
target: 1, // 1 second
|
||||
},
|
||||
{
|
||||
targetUnit: "ms",
|
||||
yAxisUnit: "ns",
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{
|
||||
{baseTime, 572588400}, // 572.58 ms
|
||||
{baseTime.Add(1 * time.Minute), 572386400}, // 572.38 ms
|
||||
{baseTime.Add(2 * time.Minute), 300947400}, // 300.94 ms
|
||||
{baseTime.Add(3 * time.Minute), 299316000}, // 299.31 ms
|
||||
{baseTime.Add(4 * time.Minute), 66640400.00000001}, // 66.64 ms
|
||||
},
|
||||
expectAlerts: 1,
|
||||
compareOp: "1", // Above
|
||||
matchType: "1", // Once
|
||||
target: 200, // 200 ms
|
||||
summaryAny: []string{
|
||||
"observed metric value is 299 ms",
|
||||
"the observed metric value is 573 ms",
|
||||
"the observed metric value is 572 ms",
|
||||
"the observed metric value is 301 ms",
|
||||
},
|
||||
},
|
||||
{
|
||||
targetUnit: "decgbytes",
|
||||
yAxisUnit: "bytes",
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{
|
||||
{baseTime, 2863284053}, // 2.86 GB
|
||||
{baseTime.Add(1 * time.Minute), 2863388842}, // 2.86 GB
|
||||
{baseTime.Add(2 * time.Minute), 300947400}, // 0.3 GB
|
||||
{baseTime.Add(3 * time.Minute), 299316000}, // 0.3 GB
|
||||
{baseTime.Add(4 * time.Minute), 66640400.00000001}, // 66.64 MB
|
||||
},
|
||||
expectAlerts: 0,
|
||||
compareOp: "1", // Above
|
||||
matchType: "1", // Once
|
||||
target: 200, // 200 GB
|
||||
},
|
||||
{
|
||||
targetUnit: "decgbytes",
|
||||
yAxisUnit: "By",
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{
|
||||
{baseTime, 2863284053}, // 2.86 GB
|
||||
{baseTime.Add(1 * time.Minute), 2863388842}, // 2.86 GB
|
||||
{baseTime.Add(2 * time.Minute), 300947400}, // 0.3 GB
|
||||
{baseTime.Add(3 * time.Minute), 299316000}, // 0.3 GB
|
||||
{baseTime.Add(4 * time.Minute), 66640400.00000001}, // 66.64 MB
|
||||
},
|
||||
expectAlerts: 0,
|
||||
compareOp: "1", // Above
|
||||
matchType: "1", // Once
|
||||
target: 200, // 200 GB
|
||||
},
|
||||
{
|
||||
targetUnit: "h",
|
||||
yAxisUnit: "min",
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{
|
||||
{baseTime, 55}, // 55 minutes
|
||||
{baseTime.Add(1 * time.Minute), 57}, // 57 minutes
|
||||
{baseTime.Add(2 * time.Minute), 30}, // 30 minutes
|
||||
{baseTime.Add(3 * time.Minute), 29}, // 29 minutes
|
||||
},
|
||||
expectAlerts: 0,
|
||||
compareOp: "1", // Above
|
||||
matchType: "1", // Once
|
||||
target: 1, // 1 hour
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for idx, c := range cases {
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
|
||||
|
||||
// single fingerprint with labels JSON
|
||||
fingerprint := uint64(12345)
|
||||
labelsJSON := `{"__name__":"test_metric"}`
|
||||
fingerprintData := [][]interface{}{
|
||||
{fingerprint, labelsJSON},
|
||||
}
|
||||
fingerprintRows := cmock.NewRows(fingerprintCols, fingerprintData)
|
||||
|
||||
// create samples data from test case values
|
||||
samplesData := make([][]interface{}, len(c.values))
|
||||
for i, v := range c.values {
|
||||
samplesData[i] = []interface{}{
|
||||
"test_metric",
|
||||
fingerprint,
|
||||
v.timestamp.UnixMilli(),
|
||||
v.value,
|
||||
uint32(0), // flags - 0 means normal value, 1 means stale, we are not doing staleness tests
|
||||
}
|
||||
}
|
||||
samplesRows := cmock.NewRows(samplesCols, samplesData)
|
||||
|
||||
// args: $1=metric_name, $2=label_name, $3=label_value
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT fingerprint, any").
|
||||
WithArgs("test_metric", "__name__", "test_metric").
|
||||
WillReturnRows(fingerprintRows)
|
||||
|
||||
// args: $1=metric_name (outer), $2=metric_name (subquery), $3=label_name, $4=label_value, $5=start, $6=end
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT metric_name, fingerprint, unix_milli").
|
||||
WithArgs(
|
||||
"test_metric",
|
||||
"test_metric",
|
||||
"__name__",
|
||||
"test_metric",
|
||||
queryStart,
|
||||
queryEnd,
|
||||
).
|
||||
WillReturnRows(samplesRows)
|
||||
|
||||
promProvider := prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore)
|
||||
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit
|
||||
postableRule.RuleCondition.TargetUnit = c.targetUnit
|
||||
postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{
|
||||
{
|
||||
Name: postableRule.AlertName,
|
||||
TargetValue: &c.target,
|
||||
TargetUnit: c.targetUnit,
|
||||
MatchType: ruletypes.MatchType(c.matchType),
|
||||
CompareOp: ruletypes.CompareOp(c.compareOp),
|
||||
},
|
||||
},
|
||||
}
|
||||
postableRule.Annotations = map[string]string{
|
||||
"description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})",
|
||||
"summary": "The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}",
|
||||
}
|
||||
|
||||
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, promProvider, "", time.Duration(time.Second), nil, nil, options)
|
||||
rule, err := NewPromRule("69", valuer.GenerateUUID(), &postableRule, logger, reader, promProvider)
|
||||
if err != nil {
|
||||
assert.NoError(t, err)
|
||||
promProvider.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
retVal, err := rule.Eval(context.Background(), evalTime)
|
||||
if err != nil {
|
||||
assert.NoError(t, err)
|
||||
promProvider.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
assert.Equal(t, c.expectAlerts, retVal.(int), "case %d", idx)
|
||||
if c.expectAlerts != 0 {
|
||||
foundCount := 0
|
||||
for _, item := range rule.Active {
|
||||
for _, summary := range c.summaryAny {
|
||||
if strings.Contains(item.Annotations.Get("summary"), summary) {
|
||||
foundCount++
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
assert.Equal(t, c.expectAlerts, foundCount, "case %d", idx)
|
||||
}
|
||||
|
||||
promProvider.Close()
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(abhishekhugetech): enable this
|
||||
func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T) {
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
evalTime := baseTime.Add(5 * time.Minute)
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "No data test",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeProm,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
PromQueries: map[string]*v3.PromQuery{
|
||||
"A": {
|
||||
Query: "test_metric",
|
||||
},
|
||||
},
|
||||
},
|
||||
AlertOnAbsent: true,
|
||||
},
|
||||
}
|
||||
|
||||
// time_series_v4 cols of interest
|
||||
fingerprintCols := []cmock.ColumnType{
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "any(labels)", Type: "String"},
|
||||
}
|
||||
|
||||
cases := []struct {
|
||||
values []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}
|
||||
expectNoData bool
|
||||
}{
|
||||
{
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{},
|
||||
expectNoData: true,
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for idx, c := range cases {
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
|
||||
|
||||
// no data
|
||||
fingerprintData := [][]interface{}{}
|
||||
fingerprintRows := cmock.NewRows(fingerprintCols, fingerprintData)
|
||||
|
||||
// no rows == no data
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT fingerprint, any").
|
||||
WithArgs("test_metric", "__name__", "test_metric").
|
||||
WillReturnRows(fingerprintRows)
|
||||
|
||||
promProvider := prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore)
|
||||
|
||||
var target float64 = 0
|
||||
postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{
|
||||
{
|
||||
Name: postableRule.AlertName,
|
||||
TargetValue: &target,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
CompareOp: ruletypes.ValueIsEq,
|
||||
},
|
||||
},
|
||||
}
|
||||
postableRule.Annotations = map[string]string{
|
||||
"description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})",
|
||||
"summary": "The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}",
|
||||
}
|
||||
|
||||
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, promProvider, "", time.Duration(time.Second), nil, nil, options)
|
||||
rule, err := NewPromRule("69", valuer.GenerateUUID(), &postableRule, logger, reader, promProvider)
|
||||
if err != nil {
|
||||
assert.NoError(t, err)
|
||||
promProvider.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
retVal, err := rule.Eval(context.Background(), evalTime)
|
||||
if err != nil {
|
||||
assert.NoError(t, err)
|
||||
promProvider.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
assert.Equal(t, 1, retVal.(int), "case %d", idx)
|
||||
for _, item := range rule.Active {
|
||||
if c.expectNoData {
|
||||
assert.True(t, strings.Contains(item.Labels.Get(qslabels.AlertNameLabel), "[No data]"), "case %d", idx)
|
||||
} else {
|
||||
assert.False(t, strings.Contains(item.Labels.Get(qslabels.AlertNameLabel), "[No data]"), "case %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
promProvider.Close()
|
||||
}
|
||||
}
|
||||
|
||||
func TestMultipleThresholdPromRule(t *testing.T) {
|
||||
// fixed base time for deterministic tests
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
evalTime := baseTime.Add(5 * time.Minute)
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Multiple threshold test",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeProm,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
PromQueries: map[string]*v3.PromQuery{
|
||||
"A": {
|
||||
Query: "test_metric",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
fingerprintCols := []cmock.ColumnType{
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "any(labels)", Type: "String"},
|
||||
}
|
||||
|
||||
samplesCols := []cmock.ColumnType{
|
||||
{Name: "metric_name", Type: "String"},
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "unix_milli", Type: "Int64"},
|
||||
{Name: "value", Type: "Float64"},
|
||||
{Name: "flags", Type: "UInt32"},
|
||||
}
|
||||
|
||||
// see .Timestamps of base rule
|
||||
evalWindowMs := int64(5 * 60 * 1000)
|
||||
evalTimeMs := evalTime.UnixMilli()
|
||||
queryStart := ((evalTimeMs-2*evalWindowMs)/60000)*60000 + 1
|
||||
queryEnd := (evalTimeMs / 60000) * 60000
|
||||
|
||||
cases := []struct {
|
||||
targetUnit string
|
||||
yAxisUnit string
|
||||
values []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}
|
||||
expectAlerts int
|
||||
compareOp string
|
||||
matchType string
|
||||
target float64
|
||||
secondTarget float64
|
||||
summaryAny []string
|
||||
}{
|
||||
{
|
||||
targetUnit: "s",
|
||||
yAxisUnit: "ns",
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{
|
||||
{baseTime, 572588400}, // 0.57 seconds
|
||||
{baseTime.Add(1 * time.Minute), 572386400}, // 0.57 seconds
|
||||
{baseTime.Add(2 * time.Minute), 300947400}, // 0.3 seconds
|
||||
{baseTime.Add(3 * time.Minute), 299316000}, // 0.3 seconds
|
||||
{baseTime.Add(4 * time.Minute), 66640400.00000001}, // 0.06 seconds
|
||||
},
|
||||
expectAlerts: 1,
|
||||
compareOp: "1", // Above
|
||||
matchType: "1", // Once
|
||||
target: 1, // 1 second
|
||||
secondTarget: .5,
|
||||
summaryAny: []string{
|
||||
"observed metric value is 573 ms",
|
||||
"observed metric value is 572 ms",
|
||||
},
|
||||
},
|
||||
{
|
||||
targetUnit: "ms",
|
||||
yAxisUnit: "ns",
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{
|
||||
{baseTime, 572588400}, // 572.58 ms
|
||||
{baseTime.Add(1 * time.Minute), 572386400}, // 572.38 ms
|
||||
{baseTime.Add(2 * time.Minute), 300947400}, // 300.94 ms
|
||||
{baseTime.Add(3 * time.Minute), 299316000}, // 299.31 ms
|
||||
{baseTime.Add(4 * time.Minute), 66640400.00000001}, // 66.64 ms
|
||||
},
|
||||
expectAlerts: 2, // One alert per threshold that fires
|
||||
compareOp: "1", // Above
|
||||
matchType: "1", // Once
|
||||
target: 200, // 200 ms
|
||||
secondTarget: 500,
|
||||
summaryAny: []string{
|
||||
"observed metric value is 299 ms",
|
||||
"the observed metric value is 573 ms",
|
||||
"the observed metric value is 572 ms",
|
||||
"the observed metric value is 301 ms",
|
||||
},
|
||||
},
|
||||
{
|
||||
targetUnit: "decgbytes",
|
||||
yAxisUnit: "bytes",
|
||||
values: []struct {
|
||||
timestamp time.Time
|
||||
value float64
|
||||
}{
|
||||
{baseTime, 2863284053}, // 2.86 GB
|
||||
{baseTime.Add(1 * time.Minute), 2863388842}, // 2.86 GB
|
||||
{baseTime.Add(2 * time.Minute), 300947400}, // 0.3 GB
|
||||
{baseTime.Add(3 * time.Minute), 299316000}, // 0.3 GB
|
||||
{baseTime.Add(4 * time.Minute), 66640400.00000001}, // 66.64 MB
|
||||
},
|
||||
expectAlerts: 1,
|
||||
compareOp: "1", // Above
|
||||
matchType: "1", // Once
|
||||
target: 200, // 200 GB
|
||||
secondTarget: 2, // 2GB
|
||||
summaryAny: []string{
|
||||
"observed metric value is 2.7 GiB",
|
||||
"the observed metric value is 0.3 GB",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for idx, c := range cases {
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
|
||||
|
||||
fingerprint := uint64(12345)
|
||||
labelsJSON := `{"__name__":"test_metric"}`
|
||||
fingerprintData := [][]interface{}{
|
||||
{fingerprint, labelsJSON},
|
||||
}
|
||||
fingerprintRows := cmock.NewRows(fingerprintCols, fingerprintData)
|
||||
|
||||
samplesData := make([][]interface{}, len(c.values))
|
||||
for i, v := range c.values {
|
||||
samplesData[i] = []interface{}{
|
||||
"test_metric",
|
||||
fingerprint,
|
||||
v.timestamp.UnixMilli(),
|
||||
v.value,
|
||||
uint32(0),
|
||||
}
|
||||
}
|
||||
samplesRows := cmock.NewRows(samplesCols, samplesData)
|
||||
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT fingerprint, any").
|
||||
WithArgs("test_metric", "__name__", "test_metric").
|
||||
WillReturnRows(fingerprintRows)
|
||||
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT metric_name, fingerprint, unix_milli").
|
||||
WithArgs(
|
||||
"test_metric",
|
||||
"test_metric",
|
||||
"__name__",
|
||||
"test_metric",
|
||||
queryStart,
|
||||
queryEnd,
|
||||
).
|
||||
WillReturnRows(samplesRows)
|
||||
|
||||
promProvider := prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore)
|
||||
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit
|
||||
postableRule.RuleCondition.TargetUnit = c.targetUnit
|
||||
postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{
|
||||
{
|
||||
Name: "first_threshold",
|
||||
TargetValue: &c.target,
|
||||
TargetUnit: c.targetUnit,
|
||||
MatchType: ruletypes.MatchType(c.matchType),
|
||||
CompareOp: ruletypes.CompareOp(c.compareOp),
|
||||
},
|
||||
{
|
||||
Name: "second_threshold",
|
||||
TargetValue: &c.secondTarget,
|
||||
TargetUnit: c.targetUnit,
|
||||
MatchType: ruletypes.MatchType(c.matchType),
|
||||
CompareOp: ruletypes.CompareOp(c.compareOp),
|
||||
},
|
||||
},
|
||||
}
|
||||
postableRule.Annotations = map[string]string{
|
||||
"description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})",
|
||||
"summary": "The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}",
|
||||
}
|
||||
|
||||
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, promProvider, "", time.Duration(time.Second), nil, nil, options)
|
||||
rule, err := NewPromRule("69", valuer.GenerateUUID(), &postableRule, logger, reader, promProvider)
|
||||
if err != nil {
|
||||
assert.NoError(t, err)
|
||||
promProvider.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
retVal, err := rule.Eval(context.Background(), evalTime)
|
||||
if err != nil {
|
||||
assert.NoError(t, err)
|
||||
promProvider.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
assert.Equal(t, c.expectAlerts, retVal.(int), "case %d", idx)
|
||||
if c.expectAlerts != 0 {
|
||||
foundCount := 0
|
||||
for _, item := range rule.Active {
|
||||
for _, summary := range c.summaryAny {
|
||||
if strings.Contains(item.Annotations.Get("summary"), summary) {
|
||||
foundCount++
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
assert.Equal(t, c.expectAlerts, foundCount, "case %d", idx)
|
||||
}
|
||||
|
||||
promProvider.Close()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -404,9 +404,9 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
	if hasLogsQuery {
		// check if any enrichment is required for logs if yes then enrich them
		if logsv3.EnrichmentRequired(params) {
			logsFields, err := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
			if err != nil {
				return nil, err
			logsFields, apiErr := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
			if apiErr != nil {
				return nil, apiErr.ToError()
			}
			logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
			r.logsKeys = logsKeys

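The same reader-error convention recurs throughout this PR: reader methods now return *model.ApiError, and each call site converts it at the boundary. A hedged sketch of the pattern; the names around the call are placeholders, not code from this diff:

// Sketch: keeping the typed error inside the reader, a plain error outside.
fields, apiErr := reader.GetLogFieldsFromNames(ctx, fieldNames)
if apiErr != nil {
	return nil, apiErr.ToError() // converts *model.ApiError into error for upstream callers
}
useFields(fields)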
@@ -488,7 +488,7 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
|
||||
AlertName: "Test Eval Delay",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
@@ -551,7 +551,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
|
||||
AlertName: "Tricky Condition Tests",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
@@ -620,7 +620,7 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
|
||||
AlertName: "Units test",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
@@ -784,7 +784,7 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
|
||||
},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), nil, readerCache, options)
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore), "", time.Duration(time.Second), nil, readerCache, options)
|
||||
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, reader, nil, logger)
|
||||
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
|
||||
"signoz_calls_total": {
|
||||
@@ -821,7 +821,7 @@ func TestThresholdRuleNoData(t *testing.T) {
|
||||
AlertName: "No data test",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
@@ -899,7 +899,7 @@ func TestThresholdRuleNoData(t *testing.T) {
|
||||
)
|
||||
assert.NoError(t, err)
|
||||
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), nil, readerCache, options)
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore), "", time.Duration(time.Second), nil, readerCache, options)
|
||||
|
||||
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, reader, nil, logger)
|
||||
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
|
||||
@@ -932,7 +932,7 @@ func TestThresholdRuleTracesLink(t *testing.T) {
|
||||
AlertName: "Traces link test",
|
||||
AlertType: ruletypes.AlertTypeTraces,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
@@ -1019,7 +1019,7 @@ func TestThresholdRuleTracesLink(t *testing.T) {
|
||||
}
|
||||
|
||||
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), nil, nil, options)
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore), "", time.Duration(time.Second), nil, nil, options)
|
||||
|
||||
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, reader, nil, logger)
|
||||
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
|
||||
@@ -1057,7 +1057,7 @@ func TestThresholdRuleLogsLink(t *testing.T) {
|
||||
AlertName: "Logs link test",
|
||||
AlertType: ruletypes.AlertTypeLogs,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
@@ -1156,7 +1156,7 @@ func TestThresholdRuleLogsLink(t *testing.T) {
|
||||
}
|
||||
|
||||
options := clickhouseReader.NewOptions("", "", "archiveNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), nil, nil, options)
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore), "", time.Duration(time.Second), nil, nil, options)
|
||||
|
||||
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, reader, nil, logger)
|
||||
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
|
||||
@@ -1195,7 +1195,7 @@ func TestThresholdRuleShiftBy(t *testing.T) {
|
||||
AlertName: "Logs link test",
|
||||
AlertType: ruletypes.AlertTypeLogs,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
@@ -1269,7 +1269,7 @@ func TestMultipleThresholdRule(t *testing.T) {
|
||||
AlertName: "Mulitple threshold test",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
@@ -1423,7 +1423,7 @@ func TestMultipleThresholdRule(t *testing.T) {
|
||||
},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), nil, readerCache, options)
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore), "", time.Duration(time.Second), nil, readerCache, options)
|
||||
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, reader, nil, logger)
|
||||
rule.TemporalityMap = map[string]map[v3.Temporality]bool{
|
||||
"signoz_calls_total": {
|
||||
|
||||
@@ -20,6 +20,7 @@ type aggExprRewriter struct {
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey
|
||||
fieldMapper qbtypes.FieldMapper
|
||||
conditionBuilder qbtypes.ConditionBuilder
|
||||
jsonBodyPrefix string
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
||||
}
|
||||
|
||||
@@ -30,6 +31,7 @@ func NewAggExprRewriter(
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||
fieldMapper qbtypes.FieldMapper,
|
||||
conditionBuilder qbtypes.ConditionBuilder,
|
||||
jsonBodyPrefix string,
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) *aggExprRewriter {
|
||||
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
|
||||
@@ -39,6 +41,7 @@ func NewAggExprRewriter(
|
||||
fullTextColumn: fullTextColumn,
|
||||
fieldMapper: fieldMapper,
|
||||
conditionBuilder: conditionBuilder,
|
||||
jsonBodyPrefix: jsonBodyPrefix,
|
||||
jsonKeyToKey: jsonKeyToKey,
|
||||
}
|
||||
}
|
||||
@@ -78,6 +81,7 @@ func (r *aggExprRewriter) Rewrite(
|
||||
r.fullTextColumn,
|
||||
r.fieldMapper,
|
||||
r.conditionBuilder,
|
||||
r.jsonBodyPrefix,
|
||||
r.jsonKeyToKey,
|
||||
)
|
||||
// Rewrite the first select item (our expression)
|
||||
@@ -125,6 +129,7 @@ type exprVisitor struct {
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey
|
||||
fieldMapper qbtypes.FieldMapper
|
||||
conditionBuilder qbtypes.ConditionBuilder
|
||||
jsonBodyPrefix string
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
||||
Modified bool
|
||||
chArgs []any
|
||||
@@ -137,6 +142,7 @@ func newExprVisitor(
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||
fieldMapper qbtypes.FieldMapper,
|
||||
conditionBuilder qbtypes.ConditionBuilder,
|
||||
jsonBodyPrefix string,
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) *exprVisitor {
|
||||
return &exprVisitor{
|
||||
@@ -145,6 +151,7 @@ func newExprVisitor(
|
||||
fullTextColumn: fullTextColumn,
|
||||
fieldMapper: fieldMapper,
|
||||
conditionBuilder: conditionBuilder,
|
||||
jsonBodyPrefix: jsonBodyPrefix,
|
||||
jsonKeyToKey: jsonKeyToKey,
|
||||
}
|
||||
}
|
||||
@@ -183,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
if aggFunc.FuncCombinator {
|
||||
// Map the predicate (last argument)
|
||||
origPred := args[len(args)-1].String()
|
||||
whereClause, err := PrepareWhereClause(
|
||||
whereClause, err := PrepareWhereClause(
|
||||
origPred,
|
||||
FilterExprVisitorOpts{
|
||||
Logger: v.logger,
|
||||
@@ -191,8 +198,9 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
FieldMapper: v.fieldMapper,
|
||||
ConditionBuilder: v.conditionBuilder,
|
||||
FullTextColumn: v.fullTextColumn,
|
||||
JsonBodyPrefix: v.jsonBodyPrefix,
|
||||
JsonKeyToKey: v.jsonKeyToKey,
|
||||
}, 0, 0,
|
||||
}, 0, 0,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -212,7 +220,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
for i := 0; i < len(args)-1; i++ {
|
||||
origVal := args[i].String()
|
||||
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
|
||||
}
|
||||
@@ -230,7 +238,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
for i, arg := range args {
|
||||
orig := arg.String()
|
||||
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ func CollisionHandledFinalExpr(
|
||||
cb qbtypes.ConditionBuilder,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
requiredDataType telemetrytypes.FieldDataType,
|
||||
jsonBodyPrefix string,
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) (string, []any, error) {
|
||||
|
||||
@@ -44,7 +45,7 @@ func CollisionHandledFinalExpr(
|
||||
|
||||
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
|
||||
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -57,8 +58,8 @@ func CollisionHandledFinalExpr(
|
||||
return nil
|
||||
}
|
||||
|
||||
colName, fieldForErr := fm.FieldFor(ctx, field)
|
||||
if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
|
||||
colName, err := fm.FieldFor(ctx, field)
|
||||
if errors.Is(err, qbtypes.ErrColumnNotFound) {
|
||||
// the key didn't have the right context to be added to the query
|
||||
// we try to use the context we know of
|
||||
keysForField := keys[field.Name]
|
||||
@@ -81,10 +82,10 @@ func CollisionHandledFinalExpr(
|
||||
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
|
||||
if found {
|
||||
// we found a close match, in the error message send the suggestion
|
||||
return "", nil, errors.WithAdditionalf(fieldForErr, "%s", correction)
|
||||
return "", nil, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
|
||||
} else {
|
||||
// not even a close match, return an error
|
||||
return "", nil, errors.WithAdditionalf(fieldForErr, "field `%s` not found", field.Name)
|
||||
return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
|
||||
}
|
||||
} else {
|
||||
for _, key := range keysForField {
|
||||
@@ -103,8 +104,10 @@ func CollisionHandledFinalExpr(
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
if field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil {
|
||||
return "", nil, fieldForErr
|
||||
if strings.HasPrefix(field.Name, jsonBodyPrefix) && jsonBodyPrefix != "" && jsonKeyToKey != nil {
|
||||
// TODO(nitya): enable group by on body column?
|
||||
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
|
||||
// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
|
||||
} else {
|
||||
colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
|
||||
}
|
||||
@@ -201,7 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
|
||||
// While we expect user not to send the mixed data types, it inevitably happens
|
||||
// So we handle the data type collisions here
|
||||
switch key.FieldDataType {
|
||||
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
|
||||
case telemetrytypes.FieldDataTypeString:
|
||||
switch v := value.(type) {
|
||||
case float64:
|
||||
// try to convert the string value to to number
|
||||
@@ -216,40 +219,8 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
			// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
			value = fmt.Sprintf("%t", v)
		}
	case telemetrytypes.FieldDataTypeFloat64,
		telemetrytypes.FieldDataTypeArrayFloat64:
		switch v := value.(type) {
		case string:
			// check if it's a number inside a string
			isNumber := false
			if _, err := strconv.ParseFloat(v, 64); err == nil {
				isNumber = true
			}

			if !operator.IsComparisonOperator() || !isNumber {
				// try to convert the number attribute to string
				tblFieldName = castString(tblFieldName) // numeric col vs string literal
			} else {
				tblFieldName = castFloatHack(tblFieldName)
			}
		case []any:
			if allFloats(v) {
				tblFieldName = castFloatHack(tblFieldName)
			} else if hasString(v) {
				tblFieldName, value = castString(tblFieldName), toStrings(v)
			}
		}

	case telemetrytypes.FieldDataTypeInt64,
		telemetrytypes.FieldDataTypeArrayInt64,
		telemetrytypes.FieldDataTypeNumber,
		telemetrytypes.FieldDataTypeArrayNumber:
	// case telemetrytypes.FieldDataTypeFloat64,
	// 	telemetrytypes.FieldDataTypeArrayFloat64,
	// 	telemetrytypes.FieldDataTypeInt64,
	// 	telemetrytypes.FieldDataTypeArrayInt64,
	// 	telemetrytypes.FieldDataTypeNumber,
	// 	telemetrytypes.FieldDataTypeArrayNumber:
	case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
		switch v := value.(type) {
		// why? ; CH returns an error for a simple check
		// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
@@ -287,8 +258,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
		}
	}

	case telemetrytypes.FieldDataTypeBool,
		telemetrytypes.FieldDataTypeArrayBool:
	case telemetrytypes.FieldDataTypeBool:
		switch v := value.(type) {
		case string:
			tblFieldName = castString(tblFieldName)

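The collapsed switch above keeps the collision handling while dropping the array-specific cases. A rough illustration of what the function guards against; the field key, column expression and operator constant below are assumptions for the sake of the example, only the function itself and the cast helpers are named in this diff:

// Sketch: a string-typed attribute filtered with a numeric literal.
// Comparing a string map column against a number would error in ClickHouse,
// so the column expression is rewritten (and sometimes the value too).
key := &telemetrytypes.TelemetryFieldKey{Name: "http.status_code", FieldDataType: telemetrytypes.FieldDataTypeString}
col, val := DataTypeCollisionHandledFieldName(key, float64(200), "attributes_string['http.status_code']", qbtypes.FilterOperatorUnknown)
_ = col // column expression adjusted for the mismatched data type
_ = val // the value may also be rewritten, e.g. bools become "true"/"false" strings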
@@ -7,6 +7,7 @@ import (
)

func TestQueryToKeys(t *testing.T) {

	testCases := []struct {
		query        string
		expectedKeys []telemetrytypes.FieldKeySelector
@@ -65,9 +66,9 @@ func TestQueryToKeys(t *testing.T) {
			query: `body.user_ids[*] = 123`,
			expectedKeys: []telemetrytypes.FieldKeySelector{
				{
					Name:          "user_ids[*]",
					Name:          "body.user_ids[*]",
					Signal:        telemetrytypes.SignalUnspecified,
					FieldContext:  telemetrytypes.FieldContextBody,
					FieldContext:  telemetrytypes.FieldContextUnspecified,
					FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
				},
			},

@@ -43,6 +43,7 @@ type resourceFilterStatementBuilder[T any] struct {
signal telemetrytypes.Signal

fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}

@@ -75,6 +76,7 @@ func NewLogResourceFilterStatementBuilder(
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
@@ -85,6 +87,7 @@ func NewLogResourceFilterStatementBuilder(
metadataStore: metadataStore,
signal: telemetrytypes.SignalLogs,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -97,18 +100,12 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
keySelectors = append(keySelectors, whereClauseSelectors...)
}

// exclude out the body related key selectors
filteredKeySelectors := []*telemetrytypes.FieldKeySelector{}
for idx := range keySelectors {
if keySelectors[idx].FieldContext == telemetrytypes.FieldContextBody {
continue
}
keySelectors[idx].Signal = b.signal
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
}

return filteredKeySelectors
return keySelectors
}

// Build builds a SQL query based on the given parameters
@@ -165,13 +162,14 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
ConditionBuilder: b.conditionBuilder,
FieldKeys: keys,
FullTextColumn: b.fullTextColumn,
JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey,
SkipFullTextFilter: true,
SkipFunctionCalls: true,
// there is no need for "key" not found error for resource filtering
IgnoreNotFoundKeys: true,
Variables: variables,
}, start, end)
}, start, end)

if err != nil {
return err
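Reading the -18/+12 getKeySelectors hunk above, body-context selectors appear to no longer be filtered out before the metadata lookup; each selector is tagged with the builder's signal and an exact match type and the full slice is returned. A self-contained toy sketch of that shape (the selector struct and the values used here are local stand-ins for illustration, not the real telemetrytypes definitions):

package main

import "fmt"

// Local stand-in for illustration only; the real type lives in pkg/types/telemetrytypes.
type fieldKeySelector struct {
	Name              string
	Signal            string
	SelectorMatchType string
}

// tagSelectors mirrors the post-change behaviour suggested by the hunk: tag every
// selector, including body.* ones, instead of dropping the body-context entries.
func tagSelectors(signal string, selectors []*fieldKeySelector) []*fieldKeySelector {
	for idx := range selectors {
		selectors[idx].Signal = signal
		selectors[idx].SelectorMatchType = "exact"
	}
	return selectors
}

func main() {
	out := tagSelectors("logs", []*fieldKeySelector{
		{Name: "service.name"},
		{Name: "body.status"}, // previously excluded from resource-filter key selectors, now kept
	})
	for _, s := range out {
		fmt.Println(s.Name, s.Signal, s.SelectorMatchType)
	}
}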
@@ -9,7 +9,6 @@ import (

"github.com/SigNoz/signoz/pkg/errors"
grammar "github.com/SigNoz/signoz/pkg/parser/grammar"
"github.com/SigNoz/signoz/pkg/query-service/constants"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
@@ -34,6 +33,7 @@ type filterExpressionVisitor struct {
mainErrorURL string
builder *sqlbuilder.SelectBuilder
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
skipResourceFilter bool
skipFullTextFilter bool
@@ -53,6 +53,7 @@ type FilterExprVisitorOpts struct {
FieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
Builder *sqlbuilder.SelectBuilder
FullTextColumn *telemetrytypes.TelemetryFieldKey
JsonBodyPrefix string
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
SkipResourceFilter bool
SkipFullTextFilter bool
@@ -72,6 +73,7 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
fieldKeys: opts.FieldKeys,
builder: opts.Builder,
fullTextColumn: opts.FullTextColumn,
jsonBodyPrefix: opts.JsonBodyPrefix,
jsonKeyToKey: opts.JsonKeyToKey,
skipResourceFilter: opts.SkipResourceFilter,
skipFullTextFilter: opts.SkipFullTextFilter,
@@ -92,6 +94,7 @@ type PreparedWhereClause struct {

// PrepareWhereClause generates a ClickHouse compatible WHERE clause from the filter query
func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64, endNs uint64) (*PreparedWhereClause, error) {

// Setup the ANTLR parsing pipeline
input := antlr.NewInputStream(query)
lexer := grammar.NewFilterQueryLexer(input)
@@ -170,7 +173,7 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64

whereClause := sqlbuilder.NewWhereClause().AddWhereExpr(visitor.builder.Args, cond)

return &PreparedWhereClause{WhereClause: whereClause, Warnings: visitor.warnings, WarningsDocURL: visitor.mainWarnURL}, nil
return &PreparedWhereClause{whereClause, visitor.warnings, visitor.mainWarnURL}, nil
}

// Visit dispatches to the specific visit method based on node type
@@ -715,7 +718,7 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon
conds = append(conds, fmt.Sprintf("hasToken(LOWER(%s), LOWER(%s))", key.Name, v.builder.Var(value[0])))
} else {
// this is that all other functions only support array fields
if key.FieldContext == telemetrytypes.FieldContextBody {
if strings.HasPrefix(key.Name, v.jsonBodyPrefix) {
fieldName, _ = v.jsonKeyToKey(context.Background(), key, qbtypes.FilterOperatorUnknown, value)
} else {
// TODO(add docs for json body search)
@@ -806,8 +809,10 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {

// VisitKey handles field/column references
func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {

fieldKey := telemetrytypes.GetFieldKeyFromKeyText(ctx.GetText())
keyName := fieldKey.Name

keyName := strings.TrimPrefix(fieldKey.Name, v.jsonBodyPrefix)

fieldKeysForName := v.fieldKeys[keyName]

@@ -841,11 +846,10 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
// if there is a field with the same name as attribute/resource attribute
// Since it will ORed with the fieldKeysForName, it will not result empty
// when either of them have values
// Note: Skip this logic if body json query is enabled so we can look up the key inside fields
//
// TODO(Piyush): After entire migration this is supposed to be removed.
if !constants.BodyJSONQueryEnabled && fieldKey.FieldContext == telemetrytypes.FieldContextBody {
fieldKeysForName = append(fieldKeysForName, &fieldKey)
if strings.HasPrefix(fieldKey.Name, v.jsonBodyPrefix) && v.jsonBodyPrefix != "" {
if keyName != "" {
fieldKeysForName = append(fieldKeysForName, &fieldKey)
}
}

if len(fieldKeysForName) == 0 {
@@ -856,7 +860,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
return v.fieldKeys[keyWithContext]
}

if fieldKey.FieldContext == telemetrytypes.FieldContextBody && keyName == "" {
if strings.HasPrefix(fieldKey.Name, v.jsonBodyPrefix) && v.jsonBodyPrefix != "" && keyName == "" {
v.errors = append(v.errors, "missing key for body json search - expected key of the form `body.key` (ex: `body.status`)")
} else if !v.ignoreNotFoundKeys {
// TODO(srikanthccv): do we want to return an error here?
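VisitKey above resolves a body.* reference by trimming the configured prefix before the fieldKeys lookup, and records an error when nothing is left after the prefix. A small runnable sketch of just that trimming step (the "body." prefix value is an assumption for illustration; the real value comes from v.jsonBodyPrefix):

package main

import (
	"fmt"
	"strings"
)

// trimBodyPrefix mirrors the lookup-key derivation in VisitKey: the prefix is stripped
// for the fieldKeys lookup, and an empty remainder (a bare "body.") indicates a malformed
// body JSON search with no key after the prefix.
func trimBodyPrefix(name, jsonBodyPrefix string) (keyName string, ok bool) {
	keyName = strings.TrimPrefix(name, jsonBodyPrefix)
	if strings.HasPrefix(name, jsonBodyPrefix) && jsonBodyPrefix != "" && keyName == "" {
		return "", false // missing key for body json search
	}
	return keyName, true
}

func main() {
	fmt.Println(trimBodyPrefix("body.status", "body."))  // "status" true
	fmt.Println(trimBodyPrefix("body.", "body."))        // "" false
	fmt.Println(trimBodyPrefix("service.name", "body.")) // "service.name" true (not a body key)
}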
@@ -9,12 +9,12 @@ import (
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/promote/implpromote"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
@@ -48,7 +48,7 @@ type Handlers struct {
Session session.Handler
SpanPercentile spanpercentile.Handler
Services services.Handler
Promote promote.Handler
Metrics metricsexplorer.Handler
}

func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing) Handlers {
@@ -65,7 +65,7 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
AuthDomain: implauthdomain.NewHandler(modules.AuthDomain),
Session: implsession.NewHandler(modules.Session),
Services: implservices.NewHandler(modules.Services),
Metrics: implmetricsexplorer.NewHandler(modules.Metrics),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
Promote: implpromote.NewHandler(modules.Promote),
}
}

@@ -35,7 +35,8 @@ func TestNewHandlers(t *testing.T) {
require.NoError(t, err)
tokenizer := tokenizertest.New()
emailing := emailingtest.New()
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil)
require.NoError(t, err)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil)

handlers := NewHandlers(modules, providerSettings, nil, nil)
Some files were not shown because too many files have changed in this diff.